/*
 * Copyright (c) 2016 Intel Corporation.
 *
 * SPDX-License-Identifier: Apache-2.0
 */

/**
 * @file
 * @brief Crypto Cipher APIs
 *
 * This file contains the Crypto Abstraction layer APIs.
 *
 * [Experimental] Users should note that the APIs can change
 * as a part of ongoing development.
 */

/**
 * @brief Crypto APIs
 * @defgroup crypto Crypto
 * @{
 * @}
 */

/**
 * @brief Crypto Cipher APIs
 * @defgroup crypto_cipher Cipher
 * @ingroup crypto
 * @{
 */
  27. #ifndef ZEPHYR_INCLUDE_CRYPTO_CIPHER_H_
  28. #define ZEPHYR_INCLUDE_CRYPTO_CIPHER_H_
  29. #include <device.h>
  30. #include <errno.h>
  31. #include <sys/util.h>
  32. #include <sys/__assert.h>
  33. #include "cipher_structs.h"
/* The API a crypto driver should implement */
__subsystem struct crypto_driver_api {
	/* Return a bitmask of the CAP_* capabilities the device supports;
	 * apps use this to pick session options honored by the driver.
	 */
	int (*query_hw_caps)(const struct device *dev);

	/* Setup a crypto session */
	int (*begin_session)(const struct device *dev, struct cipher_ctx *ctx,
			     enum cipher_algo algo, enum cipher_mode mode,
			     enum cipher_op op_type);

	/* Tear down an established session */
	int (*free_session)(const struct device *dev, struct cipher_ctx *ctx);

	/* Register async crypto op completion callback with the driver */
	int (*crypto_async_callback_set)(const struct device *dev,
					 crypto_completion_cb cb);
};
/* Following are the public API a user app may call.
 * The first two relate to crypto "session" setup / teardown. Further we
 * have four cipher mode specific (CTR, CCM, CBC ...) calls to perform the
 * actual crypto operation in the context of a session. Also we have an
 * API to provide the callback for async operations.
 */
  53. /**
  54. * @brief Query the crypto hardware capabilities
  55. *
  56. * This API is used by the app to query the capabilities supported by the
  57. * crypto device. Based on this the app can specify a subset of the supported
  58. * options to be honored for a session during cipher_begin_session().
  59. *
  60. * @param dev Pointer to the device structure for the driver instance.
  61. *
  62. * @return bitmask of supported options.
  63. */
  64. static inline int cipher_query_hwcaps(const struct device *dev)
  65. {
  66. struct crypto_driver_api *api;
  67. int tmp;
  68. api = (struct crypto_driver_api *) dev->api;
  69. tmp = api->query_hw_caps(dev);
  70. __ASSERT((tmp & (CAP_OPAQUE_KEY_HNDL | CAP_RAW_KEY)) != 0,
  71. "Driver should support at least one key type: RAW/Opaque");
  72. __ASSERT((tmp & (CAP_INPLACE_OPS | CAP_SEPARATE_IO_BUFS)) != 0,
  73. "Driver should support at least one IO buf type: Inplace/separate");
  74. __ASSERT((tmp & (CAP_SYNC_OPS | CAP_ASYNC_OPS)) != 0,
  75. "Driver should support at least one op-type: sync/async");
  76. return tmp;
  77. }
  78. /**
  79. * @brief Setup a crypto session
  80. *
  81. * Initializes one time parameters, like the session key, algorithm and cipher
  82. * mode which may remain constant for all operations in the session. The state
  83. * may be cached in hardware and/or driver data state variables.
  84. *
  85. * @param dev Pointer to the device structure for the driver instance.
  86. * @param ctx Pointer to the context structure. Various one time
  87. * parameters like key, keylength, etc. are supplied via
  88. * this structure. The structure documentation specifies
  89. * which fields are to be populated by the app before
  90. * making this call.
  91. * @param algo The crypto algorithm to be used in this session. e.g AES
  92. * @param mode The cipher mode to be used in this session. e.g CBC, CTR
  93. * @param optype Whether we should encrypt or decrypt in this session
  94. *
  95. * @return 0 on success, negative errno code on fail.
  96. */
  97. static inline int cipher_begin_session(const struct device *dev,
  98. struct cipher_ctx *ctx,
  99. enum cipher_algo algo,
  100. enum cipher_mode mode,
  101. enum cipher_op optype)
  102. {
  103. struct crypto_driver_api *api;
  104. uint32_t flags;
  105. api = (struct crypto_driver_api *) dev->api;
  106. ctx->device = dev;
  107. ctx->ops.cipher_mode = mode;
  108. flags = (ctx->flags & (CAP_OPAQUE_KEY_HNDL | CAP_RAW_KEY));
  109. __ASSERT(flags != 0U, "Keytype missing: RAW Key or OPAQUE handle");
  110. __ASSERT(flags != (CAP_OPAQUE_KEY_HNDL | CAP_RAW_KEY),
  111. "conflicting options for keytype");
  112. flags = (ctx->flags & (CAP_INPLACE_OPS | CAP_SEPARATE_IO_BUFS));
  113. __ASSERT(flags != 0U, "IO buffer type missing");
  114. __ASSERT(flags != (CAP_INPLACE_OPS | CAP_SEPARATE_IO_BUFS),
  115. "conflicting options for IO buffer type");
  116. flags = (ctx->flags & (CAP_SYNC_OPS | CAP_ASYNC_OPS));
  117. __ASSERT(flags != 0U, "sync/async type missing");
  118. __ASSERT(flags != (CAP_SYNC_OPS | CAP_ASYNC_OPS),
  119. "conflicting options for sync/async");
  120. return api->begin_session(dev, ctx, algo, mode, optype);
  121. }
  122. /**
  123. * @brief Cleanup a crypto session
  124. *
  125. * Clears the hardware and/or driver state of a previous session.
  126. *
  127. * @param dev Pointer to the device structure for the driver instance.
  128. * @param ctx Pointer to the crypto context structure of the session
  129. * to be freed.
  130. *
  131. * @return 0 on success, negative errno code on fail.
  132. */
  133. static inline int cipher_free_session(const struct device *dev,
  134. struct cipher_ctx *ctx)
  135. {
  136. struct crypto_driver_api *api;
  137. api = (struct crypto_driver_api *) dev->api;
  138. return api->free_session(dev, ctx);
  139. }
  140. /**
  141. * @brief Registers an async crypto op completion callback with the driver
  142. *
  143. * The application can register an async crypto op completion callback handler
  144. * to be invoked by the driver, on completion of a prior request submitted via
  145. * crypto_do_op(). Based on crypto device hardware semantics, this is likely to
  146. * be invoked from an ISR context.
  147. *
  148. * @param dev Pointer to the device structure for the driver instance.
  149. * @param cb Pointer to application callback to be called by the driver.
  150. *
  151. * @return 0 on success, -ENOTSUP if the driver does not support async op,
  152. * negative errno code on other error.
  153. */
  154. static inline int cipher_callback_set(const struct device *dev,
  155. crypto_completion_cb cb)
  156. {
  157. struct crypto_driver_api *api;
  158. api = (struct crypto_driver_api *) dev->api;
  159. if (api->crypto_async_callback_set) {
  160. return api->crypto_async_callback_set(dev, cb);
  161. }
  162. return -ENOTSUP;
  163. }
  164. /**
  165. * @brief Perform single-block crypto operation (ECB cipher mode). This
  166. * should not be overloaded to operate on multiple blocks for security reasons.
  167. *
  168. * @param ctx Pointer to the crypto context of this op.
  169. * @param pkt Structure holding the input/output buffer pointers.
  170. *
  171. * @return 0 on success, negative errno code on fail.
  172. */
  173. static inline int cipher_block_op(struct cipher_ctx *ctx,
  174. struct cipher_pkt *pkt)
  175. {
  176. __ASSERT(ctx->ops.cipher_mode == CRYPTO_CIPHER_MODE_ECB, "ECB mode "
  177. "session invoking a different mode handler");
  178. pkt->ctx = ctx;
  179. return ctx->ops.block_crypt_hndlr(ctx, pkt);
  180. }
  181. /**
  182. * @brief Perform Cipher Block Chaining (CBC) crypto operation.
  183. *
  184. * @param ctx Pointer to the crypto context of this op.
  185. * @param pkt Structure holding the input/output buffer pointers.
  186. * @param iv Initialization Vector (IV) for the operation. Same
  187. * IV value should not be reused across multiple
  188. * operations (within a session context) for security.
  189. *
  190. * @return 0 on success, negative errno code on fail.
  191. */
  192. static inline int cipher_cbc_op(struct cipher_ctx *ctx,
  193. struct cipher_pkt *pkt, uint8_t *iv)
  194. {
  195. __ASSERT(ctx->ops.cipher_mode == CRYPTO_CIPHER_MODE_CBC, "CBC mode "
  196. "session invoking a different mode handler");
  197. pkt->ctx = ctx;
  198. return ctx->ops.cbc_crypt_hndlr(ctx, pkt, iv);
  199. }
  200. /**
  201. * @brief Perform Counter (CTR) mode crypto operation.
  202. *
  203. * @param ctx Pointer to the crypto context of this op.
  204. * @param pkt Structure holding the input/output buffer pointers.
  205. * @param iv Initialization Vector (IV) for the operation. We use a
  206. * split counter formed by appending IV and ctr.
  207. * Consequently ivlen = keylen - ctrlen. 'ctrlen' is
  208. * specified during session setup through the
  209. * 'ctx.mode_params.ctr_params.ctr_len' parameter. IV
  210. * should not be reused across multiple operations
  211. * (within a session context) for security. The non-IV
  212. * part of the split counter is transparent to the caller
  213. * and is fully managed by the crypto provider.
  214. *
  215. * @return 0 on success, negative errno code on fail.
  216. */
  217. static inline int cipher_ctr_op(struct cipher_ctx *ctx,
  218. struct cipher_pkt *pkt, uint8_t *iv)
  219. {
  220. __ASSERT(ctx->ops.cipher_mode == CRYPTO_CIPHER_MODE_CTR, "CTR mode "
  221. "session invoking a different mode handler");
  222. pkt->ctx = ctx;
  223. return ctx->ops.ctr_crypt_hndlr(ctx, pkt, iv);
  224. }
  225. /**
  226. * @brief Perform Counter with CBC-MAC (CCM) mode crypto operation
  227. *
  228. * @param ctx Pointer to the crypto context of this op.
  229. * @param pkt Structure holding the input/output, Assosciated
  230. * Data (AD) and auth tag buffer pointers.
  231. * @param nonce Nonce for the operation. Same nonce value should not
  232. * be reused across multiple operations (within a
  233. * session context) for security.
  234. *
  235. * @return 0 on success, negative errno code on fail.
  236. */
  237. static inline int cipher_ccm_op(struct cipher_ctx *ctx,
  238. struct cipher_aead_pkt *pkt, uint8_t *nonce)
  239. {
  240. __ASSERT(ctx->ops.cipher_mode == CRYPTO_CIPHER_MODE_CCM, "CCM mode "
  241. "session invoking a different mode handler");
  242. pkt->pkt->ctx = ctx;
  243. return ctx->ops.ccm_crypt_hndlr(ctx, pkt, nonce);
  244. }
  245. /**
  246. * @brief Perform Galois/Counter Mode (GCM) crypto operation
  247. *
  248. * @param ctx Pointer to the crypto context of this op.
  249. * @param pkt Structure holding the input/output, Associated
  250. * Data (AD) and auth tag buffer pointers.
  251. * @param nonce Nonce for the operation. Same nonce value should not
  252. * be reused across multiple operations (within a
  253. * session context) for security.
  254. *
  255. * @return 0 on success, negative errno code on fail.
  256. */
  257. static inline int cipher_gcm_op(struct cipher_ctx *ctx,
  258. struct cipher_aead_pkt *pkt, uint8_t *nonce)
  259. {
  260. __ASSERT(ctx->ops.cipher_mode == CRYPTO_CIPHER_MODE_GCM, "GCM mode "
  261. "session invoking a different mode handler");
  262. pkt->pkt->ctx = ctx;
  263. return ctx->ops.gcm_crypt_hndlr(ctx, pkt, nonce);
  264. }
/**
 * @}
 */
  268. #endif /* ZEPHYR_INCLUDE_CRYPTO_CIPHER_H_ */