+/* Query whether the cipher context operates via the kernel crypto API only
+ * (presumably no userspace fallback) — NOTE(review): inferred from name,
+ * confirm against the implementation. */
+bool crypt_cipher_kernel_only(struct crypt_cipher *ctx);
+
+/* Benchmark of kernel cipher performance; throughput results (MiB/s) are
+ * written to *encryption_mbs and *decryption_mbs. Returns 0 or negative errno
+ * — assumed from module convention; verify in implementation. */
+int crypt_cipher_perf_kernel(const char *name, const char *mode, char *buffer, size_t buffer_size,
+ const char *key, size_t key_size, const char *iv, size_t iv_size,
+ double *encryption_mbs, double *decryption_mbs);
+
+/* Check availability of a cipher (in kernel only) */
+int crypt_cipher_check_kernel(const char *name, const char *mode,
+ const char *integrity, size_t key_length);
+
+/* Storage encryption wrappers: sector-based encrypt/decrypt of a buffer,
+ * with the IV derived from iv_offset (sector number). */
+int crypt_storage_init(struct crypt_storage **ctx, size_t sector_size,
+ const char *cipher, const char *cipher_mode,
+ const void *key, size_t key_length);
+void crypt_storage_destroy(struct crypt_storage *ctx);
+int crypt_storage_decrypt(struct crypt_storage *ctx, uint64_t iv_offset,
+ uint64_t length, char *buffer);
+int crypt_storage_encrypt(struct crypt_storage *ctx, uint64_t iv_offset,
+ uint64_t length, char *buffer);
+
+/* True when the storage context uses the kernel backend exclusively —
+ * NOTE(review): inferred from name; confirm in implementation. */
+bool crypt_storage_kernel_only(struct crypt_storage *ctx);
+
+/* Temporary Bitlk helper: authenticated decryption (IV + tag suggest an
+ * AEAD mode, likely AES-CCM for BITLOCKER — TODO confirm) of `in` into `out`. */
+int crypt_bitlk_decrypt_key(const void *key, size_t key_length,
+ const char *in, char *out, size_t length,
+ const char *iv, size_t iv_length,
+ const char *tag, size_t tag_length);
+/* Memzero helper (memset on stack can be optimized out) */
+static inline void crypt_backend_memzero(void *s, size_t n)
+{
+#ifdef HAVE_EXPLICIT_BZERO
+ explicit_bzero(s, n);
+#else
+ volatile uint8_t *p = (volatile uint8_t *)s;
+ while(n--) *p++ = 0;
+#endif
+}