Amlogic Crypto Tests — Linux kernel test module exercising the Amlogic GXL
crypto driver ("*-gxl" implementations) against the generic software ones.
Saved from gist mRrvz/3fb8943a7487ab7b943ec140706995e7 (February 5, 2024).
#define pr_fmt(fmt) "CRYPTO: " fmt | |
#include <linux/gfp.h> | |
#include <linux/module.h> | |
#include <linux/scatterlist.h> | |
#include <linux/highmem.h> | |
#include <linux/of.h> | |
#include <linux/of_device.h> | |
#include <linux/platform_device.h> | |
#include <linux/of.h> | |
#include <linux/of_device.h> | |
#include <linux/dma-map-ops.h> | |
#include <linux/cma.h> | |
#include <linux/of_reserved_mem.h> | |
#include <crypto/skcipher.h> | |
#include <crypto/hash.h> | |
#include <crypto/aes.h> | |
#define ASSERT(x) BUG_ON(!(x)) | |
static char *checksum(struct scatterlist **sgs, unsigned int *sgs_len, | |
unsigned int count) | |
{ | |
struct crypto_ahash *tfm = NULL; | |
struct ahash_request *req = NULL; | |
struct crypto_wait wait; | |
u8 *digest = NULL; | |
char *digest_hex = NULL; | |
unsigned int i; | |
int r = 0; | |
tfm = crypto_alloc_ahash("sha256-generic", 0, 0); | |
if (IS_ERR(tfm)) | |
goto out_err; | |
req = ahash_request_alloc(tfm, GFP_KERNEL); | |
if (!req) { | |
r = -ENOMEM; | |
goto out_err; | |
} | |
digest = kzalloc(crypto_ahash_digestsize(tfm), GFP_KERNEL); | |
if (!digest) { | |
r = -ENOMEM; | |
goto out_err; | |
} | |
digest_hex = kzalloc(crypto_ahash_digestsize(tfm) + 1, GFP_KERNEL); | |
if (!digest_hex) { | |
r = -ENOMEM; | |
goto out_err; | |
} | |
crypto_init_wait(&wait); | |
ahash_request_set_callback(req, CRYPTO_TFM_REQ_MAY_SLEEP | | |
CRYPTO_TFM_REQ_MAY_BACKLOG, | |
crypto_req_done, &wait); | |
r = crypto_wait_req(crypto_ahash_init(req), &wait); | |
if (r) | |
goto out_err; | |
for (i = 0; i < count; i++) { | |
ahash_request_set_crypt(req, sgs[i], NULL, sgs_len[i]); | |
r = crypto_wait_req(crypto_ahash_update(req), &wait); | |
if (r) | |
goto out_err; | |
} | |
ahash_request_set_crypt(req, NULL, digest, 0); | |
r = crypto_wait_req(crypto_ahash_final(req), &wait); | |
if (r) | |
goto out_err; | |
*bin2hex(digest_hex, digest, crypto_ahash_digestsize(tfm) / 2) = '\0'; | |
goto out; | |
out_err: | |
kfree(digest_hex); | |
digest_hex = NULL; | |
out: | |
kfree(digest); | |
ahash_request_free(req); | |
crypto_free_ahash(tfm); | |
return digest_hex; | |
} | |
/* Describes one scatterlist layout: @count buffers, the i-th @len[i] bytes. */
struct mem_spec {
	unsigned int *len;	/* array of per-buffer byte lengths */
	unsigned int count;	/* number of entries in @len */
};
/*
 * One queued src/dst buffer pair for a test run, linked into
 * crypto_test::mem_head. The scatterlists are built from the specs by
 * mem_add_sg() and freed together with the specs in mem_cleanup().
 */
struct mem_entry {
	struct list_head list;
	struct mem_spec *src_spec;	/* layout the src sg was built from */
	struct scatterlist *src;
	unsigned int src_total_len;	/* sum of src_spec lengths */
	struct scatterlist *dst;
	struct mem_spec *dst_spec;	/* layout the dst sg was built from */
	unsigned int dst_total_len;	/* sum of dst_spec lengths */
	unsigned int total_len;		/* bytes processed: min of the totals */
};
/* Per-device test state; one instance, allocated in crypto_test_probe(). */
struct crypto_test {
	u8 iv[AES_BLOCK_SIZE];		/* random IV shared by all runs */
	u8 key[AES_MAX_KEY_SIZE];	/* random key shared by all runs */
	unsigned long memsize;		/* size of each of the 3 regions below */
	void *ref_mem;			/* random reference plaintext */
	void *src_mem;			/* working source region */
	void *dst_mem;			/* working destination region */
	unsigned int src_offset;	/* next free byte in src_mem */
	unsigned int dst_offset;	/* next free byte in dst_mem */
	struct list_head mem_head;	/* queued struct mem_entry items */
	unsigned int mem_count;		/* number of queued entries */
	unsigned int mem_len;		/* sum of entry total_len values */
};
static struct mem_spec *mem_spec_alloc(unsigned int len[], unsigned int count) | |
{ | |
struct mem_spec *spec; | |
ASSERT(spec = kzalloc(sizeof(struct mem_spec), GFP_KERNEL)); | |
ASSERT(spec->len = kmemdup(len, count * sizeof(unsigned int), GFP_KERNEL)); | |
spec->count = count; | |
return spec; | |
} | |
static void mem_spec_free(struct mem_spec *spec) | |
{ | |
if (spec) { | |
kfree(spec->len); | |
kfree(spec); | |
} | |
} | |
/*
 * Build a mem_spec from a brace list of buffer lengths, e.g. MEM_SPEC(64, 4096)
 * or, with GNU range initializers, MEM_SPEC([0 ... 3] = 4096).
 */
#define MEM_SPEC(...) ({ \
	unsigned int __spec[] = { __VA_ARGS__ }; \
	\
	mem_spec_alloc(__spec, ARRAY_SIZE(__spec)); \
})
static struct scatterlist *mem_add_sg(struct crypto_test *ct, | |
struct mem_spec *spec, unsigned int *total_len, void *base, | |
unsigned int *offset) | |
{ | |
struct scatterlist *sg; | |
unsigned int i; | |
ASSERT(sg = kmalloc_array(spec->count, sizeof(struct scatterlist), | |
GFP_KERNEL)); | |
sg_init_table(sg, spec->count); | |
*total_len = 0; | |
for (i = 0; i < spec->count; i++) { | |
unsigned long addr = roundup((unsigned long)base + *offset, | |
sizeof(u32)); | |
*offset += spec->len[i]; | |
ASSERT(*offset <= ct->memsize); | |
sg_set_buf(&sg[i], (void *)addr, spec->len[i]); | |
*total_len += spec->len[i]; | |
} | |
return sg; | |
} | |
static void mem_add(struct crypto_test *ct, struct mem_spec *src_spec, | |
struct mem_spec *dst_spec) | |
{ | |
struct mem_entry *entry; | |
ASSERT(entry = kzalloc(sizeof(struct mem_entry), GFP_KERNEL)); | |
entry->total_len = UINT_MAX; | |
if (src_spec) { | |
entry->src_spec = src_spec; | |
entry->src = mem_add_sg(ct, src_spec, &entry->src_total_len, | |
ct->src_mem, &ct->src_offset); | |
entry->total_len = min(entry->total_len, entry->src_total_len); | |
} | |
if (dst_spec) { | |
entry->dst_spec = dst_spec; | |
entry->dst = mem_add_sg(ct, src_spec, &entry->dst_total_len, | |
ct->dst_mem, &ct->dst_offset); | |
entry->total_len = min(entry->total_len, entry->dst_total_len); | |
} | |
if (!src_spec && !dst_spec) | |
entry->total_len = 0; | |
list_add_tail(&entry->list, &ct->mem_head); | |
ct->mem_count++; | |
ct->mem_len += entry->total_len; | |
} | |
static void mem_cleanup(struct crypto_test *ct) | |
{ | |
while (!list_empty(&ct->mem_head)) { | |
struct mem_entry *entry = list_first_entry(&ct->mem_head, | |
struct mem_entry, list); | |
list_del(&entry->list); | |
kfree(entry->src); | |
kfree(entry->dst); | |
mem_spec_free(entry->src_spec); | |
mem_spec_free(entry->dst_spec); | |
kfree(entry); | |
} | |
ct->src_offset = 0; | |
ct->dst_offset = 0; | |
ct->mem_count = 0; | |
ct->mem_len = 0; | |
} | |
/*
 * Encrypt then decrypt the queued scatterlists with both @algo_ref (the
 * software reference) and @algo_test (the driver under test), checksum each
 * result and compare the test implementation against the reference. Results
 * and rough throughput are logged with pr_info(). When @inplace is set the
 * src scatterlist is also used as the destination.
 */
static void test_cipher_one(struct crypto_test *ct, const char *algo_ref,
		const char *algo_test, unsigned int key_len, bool inplace)
{
	const char *algo_names[] = { algo_ref, algo_test };
	char *hash_enc[ARRAY_SIZE(algo_names)] = {};
	char *hash_dec[ARRAY_SIZE(algo_names)] = {};
	struct mem_entry *entry;
	struct scatterlist **sgs;
	unsigned int *sgs_len;
	unsigned int count;
	unsigned int algo;

	ASSERT(sgs = kmalloc_array(ct->mem_count,
			sizeof(struct scatterlist *), GFP_KERNEL));
	ASSERT(sgs_len = kmalloc_array(ct->mem_count,
			sizeof(unsigned int), GFP_KERNEL));

	for (algo = 0; algo < ARRAY_SIZE(algo_names); algo++) {
		const char *inplace_str = inplace ? " inplace" : "";
		const char *status = "";
		struct crypto_skcipher *cipher;
		struct skcipher_request *req;
		struct crypto_wait wait;
		void *iv = NULL;
		ktime_t begin, end;
		u64 time, speed;

		/* Prepare cipher */
		ASSERT(!IS_ERR(cipher = crypto_alloc_skcipher(algo_names[algo],
				0, 0)));
		ASSERT(req = skcipher_request_alloc(cipher, GFP_KERNEL));
		crypto_init_wait(&wait);
		skcipher_request_set_callback(req, CRYPTO_TFM_REQ_MAY_BACKLOG,
				crypto_req_done, &wait);
		ASSERT(!crypto_skcipher_setkey(cipher, ct->key, key_len));
		if (crypto_skcipher_ivsize(cipher))
			ASSERT(iv = kmalloc(crypto_skcipher_ivsize(cipher),
					GFP_KERNEL));

		/* Prepare data in src, clear dst */
		memcpy(ct->src_mem, ct->ref_mem, ct->memsize);
		memset(ct->dst_mem, 0xff, ct->memsize);

		/* Encrypt. ECB has ivsize 0 and iv stays NULL: skip memcpy. */
		if (iv)
			memcpy(iv, ct->iv, crypto_skcipher_ivsize(cipher));
		begin = ktime_get();
		count = 0;
		list_for_each_entry(entry, &ct->mem_head, list) {
			struct scatterlist *src = entry->src;
			struct scatterlist *dst = inplace ?
					entry->src : entry->dst;

			skcipher_request_set_crypt(req, src, dst,
					entry->total_len, iv);
			ASSERT(!crypto_wait_req(crypto_skcipher_encrypt(req),
					&wait));
			sgs[count] = dst;
			sgs_len[count] = entry->total_len;
			count++;
		}
		end = ktime_get();

		/* Hash encryption result */
		ASSERT(count == ct->mem_count);
		ASSERT(hash_enc[algo] = checksum(sgs, sgs_len, count));
		time = ktime_us_delta(end, begin);
		if (!time)	/* guard do_div() against sub-µs runs */
			time = 1;
		speed = ct->mem_len * USEC_PER_MSEC;
		do_div(speed, time);
		if (algo > 0) {
			if (!strcmp(hash_enc[0], hash_enc[algo]))
				status = " OK";
			else
				status = " FAIL";
		}
		pr_info("%16s %u enc%s: %s %llu us %llu B/ms%s\n",
			algo_names[algo], key_len * 8, inplace_str,
			hash_enc[algo], time, speed, status);

		/* clear src */
		memset(ct->src_mem, 0xff, ct->memsize);

		/* Decrypt back into src and verify the round trip */
		if (iv)
			memcpy(iv, ct->iv, crypto_skcipher_ivsize(cipher));
		begin = ktime_get();
		count = 0;
		list_for_each_entry(entry, &ct->mem_head, list) {
			struct scatterlist *src = inplace ?
					entry->src : entry->dst;
			struct scatterlist *dst = entry->src;

			skcipher_request_set_crypt(req, src, dst,
					entry->total_len, iv);
			ASSERT(!crypto_wait_req(crypto_skcipher_decrypt(req),
					&wait));
			sgs[count] = dst;
			sgs_len[count] = entry->total_len;
			count++;
		}
		end = ktime_get();

		/* Hash decryption result */
		ASSERT(count == ct->mem_count);
		ASSERT(hash_dec[algo] = checksum(sgs, sgs_len, count));
		time = ktime_us_delta(end, begin);
		if (!time)
			time = 1;
		speed = ct->mem_len * USEC_PER_MSEC;
		do_div(speed, time);
		if (algo > 0) {
			if (!strcmp(hash_dec[0], hash_dec[algo]))
				status = " OK";
			else
				status = " FAIL";
		}
		pr_info("%16s %u dec%s: %s %llu us %llu B/ms%s\n",
			algo_names[algo], key_len * 8, inplace_str,
			hash_dec[algo], time, speed, status);

		kfree(iv);
		skcipher_request_free(req);
		crypto_free_skcipher(cipher);
	}

	for (algo = 0; algo < ARRAY_SIZE(algo_names); algo++) {
		kfree(hash_enc[algo]);
		kfree(hash_dec[algo]);
	}
	kfree(sgs_len);
	kfree(sgs);
}
static void __test_cipher(struct crypto_test *ct, unsigned int line) | |
{ | |
const char *algo[][2] = { | |
{ "ecb(aes-generic)", "ecb-aes-gxl" }, | |
{ "cbc(aes-generic)", "cbc-aes-gxl" }, | |
}; | |
unsigned int algo_idx; | |
unsigned int keylen[] = { | |
AES_KEYSIZE_128, | |
AES_KEYSIZE_192, | |
AES_KEYSIZE_256, | |
}; | |
unsigned int keylen_idx; | |
pr_info("TEST CIPHER @ LINE %u. mem_count = %u, mem_len = %u\n", line, | |
ct->mem_count, ct->mem_len); | |
for (algo_idx = 0; algo_idx < ARRAY_SIZE(algo); algo_idx++) { | |
for (keylen_idx = 0; keylen_idx < ARRAY_SIZE(keylen); keylen_idx++) { | |
test_cipher_one(ct, algo[algo_idx][0], algo[algo_idx][1], | |
keylen[keylen_idx], false); | |
// test_cipher_one(ct, algo[algo_idx][0], algo[algo_idx][1], | |
// keylen[keylen_idx], true); | |
} | |
} | |
mem_cleanup(ct); | |
} | |
#define test_cipher(__ct) __test_cipher((__ct), __LINE__) | |
/*
 * Cipher test entry point (sysfs "cipher" attribute). Queues a sequence of
 * buffer layouts — single block, page-sized, many tiny chunks, multi-entry
 * descriptors, oversized descriptors — and runs the full algorithm/key-length
 * matrix over each.
 */
static void crypto_test_cipher(struct crypto_test *ct)
{
	/* Largest length one HW descriptor can carry, rounded to AES blocks. */
	const unsigned int desc_maxlen = rounddown((1 << 17) - 1,
			AES_BLOCK_SIZE);

	pr_info("TEST CIPHER\n");
	/* One AES block. */
	mem_add(ct, MEM_SPEC(16), MEM_SPEC(16));
	test_cipher(ct);
	/* One page. */
	mem_add(ct, MEM_SPEC(4096), MEM_SPEC(4096));
	test_cipher(ct);
	mem_add(ct, MEM_SPEC(16 * 1024), MEM_SPEC(16 * 1024));
	test_cipher(ct);
	/* 1024 block-sized fragments gathered into one contiguous dst. */
	mem_add(ct, MEM_SPEC([0 ... 1023] = 16), MEM_SPEC(1024 * 16));
	test_cipher(ct);
	/* Several entries queued before a single run. */
	mem_add(ct, MEM_SPEC(4096 * 8, 1024), MEM_SPEC(4096 * 8, 1024));
	mem_add(ct, MEM_SPEC([0 ... 3] = 4096), MEM_SPEC([0 ... 3] = 4096));
	mem_add(ct, MEM_SPEC(4096), MEM_SPEC(4096));
	test_cipher(ct);
	/* Asymmetric src/dst fragmentation (same total length). */
	mem_add(ct, MEM_SPEC(1024, 2048, 2048, 1024),
		MEM_SPEC(2048, 2048, 512, 1024, 512));
	test_cipher(ct);
	/* Forces the driver to split across descriptors. */
	mem_add(ct, MEM_SPEC(2 * desc_maxlen), MEM_SPEC(2 * desc_maxlen));
	test_cipher(ct);
	/* Whole region in one go. */
	mem_add(ct, MEM_SPEC(ct->memsize), MEM_SPEC(ct->memsize));
	test_cipher(ct);
}
static void test_hash_one(struct crypto_test *ct, const char *algo_ref, | |
const char *algo_test) | |
{ | |
const char *algo_names[] = { algo_ref, algo_test }; | |
unsigned int algo; | |
char *hash_hex[ARRAY_SIZE(algo_names)] = {}; | |
/* Prepare data in src */ | |
memcpy(ct->src_mem, ct->ref_mem, ct->memsize); | |
for (algo = 0; algo < ARRAY_SIZE(algo_names); algo++) { | |
struct crypto_ahash *tfm = NULL; | |
struct ahash_request *req = NULL; | |
struct crypto_wait wait; | |
u8 *hash = NULL; | |
struct mem_entry *entry; | |
ktime_t begin, end; | |
u64 time, speed; | |
const char *status = ""; | |
uint8_t *ptr; | |
ASSERT(tfm = crypto_alloc_ahash(algo_names[algo], 0, 0)); | |
ASSERT(req = ahash_request_alloc(tfm, GFP_KERNEL)); | |
ASSERT(hash = kzalloc(crypto_ahash_digestsize(tfm), | |
GFP_KERNEL)); | |
ASSERT(hash_hex[algo] = kzalloc(crypto_ahash_digestsize(tfm) * 2 + 1, | |
GFP_KERNEL)); | |
crypto_init_wait(&wait); | |
ahash_request_set_callback(req, | |
CRYPTO_TFM_REQ_MAY_SLEEP |CRYPTO_TFM_REQ_MAY_BACKLOG, | |
crypto_req_done, &wait); | |
begin = ktime_get(); | |
ASSERT(!crypto_wait_req(crypto_ahash_init(req), &wait)); | |
list_for_each_entry(entry, &ct->mem_head, list) { | |
ahash_request_set_crypt(req, entry->src, NULL, | |
entry->total_len); | |
ASSERT(!crypto_wait_req(crypto_ahash_update(req), | |
&wait)); | |
} | |
ahash_request_set_crypt(req, NULL, hash, 0); | |
ASSERT(!crypto_wait_req(crypto_ahash_final(req), &wait)); | |
end = ktime_get(); | |
time = ktime_us_delta(end, begin); | |
speed = ct->mem_len * USEC_PER_MSEC; | |
do_div(speed, time); | |
*bin2hex(hash_hex[algo], hash, crypto_ahash_digestsize(tfm)) = '\0'; | |
if (algo > 0) { | |
if (!strcmp(hash_hex[0], hash_hex[algo])) | |
status = " OK"; | |
else | |
status = " FAIL"; | |
} | |
pr_info("%16s: %s %llu us %llu B/ms%s\n", | |
algo_names[algo], hash_hex[algo], time, speed, status); | |
kfree(hash); | |
ahash_request_free(req); | |
crypto_free_ahash(tfm); | |
} | |
for (algo = 0; algo < ARRAY_SIZE(algo_names); algo++) | |
kfree(hash_hex[algo]); | |
} | |
static void __test_hash(struct crypto_test *ct, unsigned int line) | |
{ | |
pr_info("TEST HASH @ LINE %u. mem_count = %u, mem_len = %u\n", line, | |
ct->mem_count, ct->mem_len); | |
test_hash_one(ct, "sha256-generic", "sha256-gxl"); | |
test_hash_one(ct, "sha224-generic", "sha224-gxl"); | |
test_hash_one(ct, "sha1-generic", "sha1-gxl"); | |
mem_cleanup(ct); | |
} | |
/*
 * Intentionally disabled SHA-1-only variant. The original body was dead code
 * behind an unconditional return (and, unlike __test_hash(), never called
 * mem_cleanup()). To re-enable, mirror __test_hash() with:
 *   test_hash_one(ct, "sha1-generic", "sha1-gxl");
 *   mem_cleanup(ct);
 */
static void __test_hash_sha1(struct crypto_test *ct, unsigned int line)
{
}
#define test_hash(__ct) __test_hash((__ct), __LINE__)
#define test_hash_sha1(__ct) __test_hash_sha1((__ct), __LINE__)
/*
 * Hash test entry point (sysfs "hash" attribute). Queues a long sequence of
 * buffer layouts chosen to hit hash-block (64-byte) boundary conditions:
 * empty updates, page-sized chunks, whole-region digests, and many small
 * fragments straddling the block size. Order is significant — each
 * test_hash() consumes and clears the queue built since the previous one.
 */
static void crypto_test_hash(struct crypto_test *ct)
{
	pr_info("TEST HASH\n");
	/* No data at all. */
	test_hash(ct);
	/* Only zero-length entries. */
	mem_add(ct, NULL, NULL);
	mem_add(ct, NULL, NULL);
	mem_add(ct, NULL, NULL);
	mem_add(ct, NULL, NULL);
	test_hash(ct);
	/* Page-sized updates: one, several separate, one fragmented. */
	mem_add(ct, MEM_SPEC(4096), NULL);
	test_hash(ct);
	mem_add(ct, MEM_SPEC(4096), NULL);
	mem_add(ct, MEM_SPEC(4096), NULL);
	mem_add(ct, MEM_SPEC(4096), NULL);
	mem_add(ct, MEM_SPEC(4096), NULL);
	test_hash(ct);
	mem_add(ct, MEM_SPEC(4096, 4096, 4096, 4096), NULL);
	test_hash(ct);
	/* Whole region: in one go, as two entries, as one two-part entry. */
	mem_add(ct, MEM_SPEC(ct->memsize), NULL);
	test_hash(ct);
	mem_add(ct, MEM_SPEC(ct->memsize / 2), NULL);
	mem_add(ct, MEM_SPEC(ct->memsize / 2), NULL);
	test_hash(ct);
	mem_add(ct, MEM_SPEC(ct->memsize / 2, ct->memsize / 2), NULL);
	test_hash(ct);
	/* 64-byte hash-block boundary permutations. */
	mem_add(ct, MEM_SPEC(64), NULL);
	mem_add(ct, MEM_SPEC(64), NULL);
	test_hash(ct);
	mem_add(ct, MEM_SPEC(64, 63), NULL);
	test_hash(ct);
	mem_add(ct, MEM_SPEC(64), NULL);
	test_hash(ct);
	mem_add(ct, MEM_SPEC(64, 62), NULL);
	test_hash(ct);
	mem_add(ct, MEM_SPEC(64, 1), NULL);
	test_hash(ct);
	mem_add(ct, MEM_SPEC(64), NULL);
	mem_add(ct, MEM_SPEC(63), NULL);
	test_hash(ct);
	mem_add(ct, MEM_SPEC(63, 64), NULL);
	test_hash(ct);
	mem_add(ct, MEM_SPEC(63), NULL);
	mem_add(ct, MEM_SPEC(64), NULL);
	test_hash(ct);
	mem_add(ct, MEM_SPEC(64 - 1), NULL);
	mem_add(ct, MEM_SPEC(64 + 1), NULL);
	test_hash(ct);
	mem_add(ct, MEM_SPEC(64 + 1), NULL);
	mem_add(ct, MEM_SPEC(64 - 1), NULL);
	test_hash(ct);
	/* Many 1-byte updates: fragmented and as separate entries. */
	mem_add(ct, MEM_SPEC(1, 1, 1, 1, 1, 1), NULL);
	test_hash(ct);
	mem_add(ct, MEM_SPEC(1), NULL);
	mem_add(ct, MEM_SPEC(1), NULL);
	mem_add(ct, MEM_SPEC(1), NULL);
	mem_add(ct, MEM_SPEC(1), NULL);
	mem_add(ct, MEM_SPEC(1), NULL);
	mem_add(ct, MEM_SPEC(1), NULL);
	test_hash(ct);
	mem_add(ct, MEM_SPEC(1, 64, 64, 64), NULL);
	test_hash(ct);
	mem_add(ct, MEM_SPEC(1), NULL);
	mem_add(ct, MEM_SPEC(64), NULL);
	mem_add(ct, MEM_SPEC(64), NULL);
	mem_add(ct, MEM_SPEC(64), NULL);
	test_hash(ct);
	/* Small unaligned prefix followed by a large bulk update. */
	mem_add(ct, MEM_SPEC(63), NULL);
	mem_add(ct, MEM_SPEC(ct->memsize / 2), NULL);
	test_hash(ct);
	mem_add(ct, MEM_SPEC(64), NULL);
	mem_add(ct, MEM_SPEC(ct->memsize / 2), NULL);
	test_hash(ct);
	mem_add(ct, MEM_SPEC(2), NULL);
	mem_add(ct, MEM_SPEC(ct->memsize / 2), NULL);
	test_hash(ct);
	mem_add(ct, MEM_SPEC(3), NULL);
	mem_add(ct, MEM_SPEC(ct->memsize / 2), NULL);
	test_hash(ct);
	mem_add(ct, MEM_SPEC(63), NULL);
	mem_add(ct, MEM_SPEC(ct->memsize / 2), NULL);
	test_hash(ct);
	mem_add(ct, MEM_SPEC(1), NULL);
	mem_add(ct, MEM_SPEC(ct->memsize / 2), NULL);
	test_hash(ct);
	mem_add(ct, MEM_SPEC(1, ct->memsize / 2), NULL);
	test_hash(ct);
	mem_add(ct, MEM_SPEC(ct->memsize / 2), NULL);
	test_hash(ct);
	/* Large bulk followed by a 1-byte tail. */
	mem_add(ct, MEM_SPEC(ct->memsize / 2, 1), NULL);
	test_hash(ct);
	mem_add(ct, MEM_SPEC(ct->memsize / 2), NULL);
	mem_add(ct, MEM_SPEC(1), NULL);
	test_hash(ct);
	/* Tiny two-part combinations around the block size. */
	mem_add(ct, MEM_SPEC(64, 64), NULL);
	test_hash(ct);
	mem_add(ct, MEM_SPEC(63, 63), NULL);
	test_hash(ct);
	mem_add(ct, MEM_SPEC(1, 63), NULL);
	test_hash(ct);
	mem_add(ct, MEM_SPEC(63, 1), NULL);
	test_hash(ct);
	mem_add(ct, MEM_SPEC(1, 1), NULL);
	test_hash(ct);
}
/* sysfs "cipher" write handler: any write kicks off the cipher test suite. */
static ssize_t cipher_store(struct device *dev, struct device_attribute *attr,
		const char *buf, size_t len)
{
	struct crypto_test *ct = dev_get_drvdata(dev);

	crypto_test_cipher(ct);
	return len;
}
static DEVICE_ATTR_WO(cipher);
/* sysfs "hash" write handler: any write kicks off the hash test suite. */
static ssize_t hash_store(struct device *dev, struct device_attribute *attr,
		const char *buf, size_t len)
{
	struct crypto_test *ct = dev_get_drvdata(dev);

	crypto_test_hash(ct);
	return len;
}
static DEVICE_ATTR_WO(hash);
/*
 * Probe: create the "cipher"/"hash" sysfs trigger files, claim the reserved
 * memory region declared in the device tree, and split it into three equal
 * areas (reference plaintext / working src / working dst) filled with random
 * data and a random key/IV.
 */
static int crypto_test_probe(struct platform_device *pdev)
{
	struct reserved_mem *rmem;
	struct crypto_test *ct;
	int ret;

	ct = devm_kzalloc(&pdev->dev, sizeof(struct crypto_test), GFP_KERNEL);
	if (!ct)
		return -ENOMEM;
	platform_set_drvdata(pdev, ct);

	ret = device_create_file(&pdev->dev, &dev_attr_cipher);
	if (ret)
		return ret;
	ret = device_create_file(&pdev->dev, &dev_attr_hash);
	if (ret)
		goto fail_hash_file;
	ret = of_reserved_mem_device_init(&pdev->dev);
	if (ret)
		goto fail_reserved_mem;

	/* Set as platform_data by crypto_test_rmem_device_init() below. */
	rmem = pdev->dev.platform_data;
	ct->memsize = rounddown(rmem->size / 3, PAGE_SIZE);
	/*
	 * NOTE(review): phys_to_virt() assumes the reserved region lies in
	 * the kernel's linear mapping (no "no-map" property) — TODO confirm
	 * against the device tree.
	 */
	ct->ref_mem = phys_to_virt(rmem->base);
	ct->src_mem = phys_to_virt(rmem->base + ct->memsize);
	ct->dst_mem = phys_to_virt(rmem->base + 2 * ct->memsize);
	get_random_bytes(ct->ref_mem, ct->memsize);
	get_random_bytes(ct->iv, sizeof(ct->iv));
	get_random_bytes(ct->key, sizeof(ct->key));
#if 0
	/* Debug toggle: deterministic data/key/IV instead of random. */
	memset(ct->ref_mem, 0, ct->memsize);
	memset(ct->iv, 'a', sizeof(ct->iv));
	memset(ct->key, 'b', sizeof(ct->key));
#endif
	INIT_LIST_HEAD(&ct->mem_head);
	/* Resets offsets/counters; list is already empty at this point. */
	mem_cleanup(ct);
	dev_info(&pdev->dev, "ref_mem = %#lx %#lx", (unsigned long)ct->ref_mem,
		 (unsigned long)virt_to_phys(ct->ref_mem));
	dev_info(&pdev->dev, "src_mem = %#lx %#lx", (unsigned long)ct->src_mem,
		 (unsigned long)virt_to_phys(ct->src_mem));
	dev_info(&pdev->dev, "dst_mem = %#lx %#lx", (unsigned long)ct->dst_mem,
		 (unsigned long)virt_to_phys(ct->dst_mem));
	return 0;

fail_reserved_mem:
	device_remove_file(&pdev->dev, &dev_attr_hash);
fail_hash_file:
	device_remove_file(&pdev->dev, &dev_attr_cipher);
	return ret;
}
static int crypto_test_remove(struct platform_device *pdev) | |
{ | |
of_reserved_mem_device_release(&pdev->dev); | |
device_remove_file(&pdev->dev, &dev_attr_hash); | |
device_remove_file(&pdev->dev, &dev_attr_cipher); | |
return 0; | |
} | |
/* Device-tree match: binds to "amlogic,crypto-test" nodes. */
static const struct of_device_id crypto_test_of_match_table[] = {
	{
		.compatible = "amlogic,crypto-test",
	},
	{}
};
MODULE_DEVICE_TABLE(of, crypto_test_of_match_table);
/* Platform driver glue; module_platform_driver() generates init/exit. */
static struct platform_driver crypto_test_driver = {
	.probe = crypto_test_probe,
	.remove = crypto_test_remove,
	.driver = {
		.name = "amlogic-crypto-test",
		.of_match_table = crypto_test_of_match_table,
	},
};
module_platform_driver(crypto_test_driver);
/*
 * Reserved-mem hook: stash the region in platform_data so probe can read
 * it after of_reserved_mem_device_init().
 */
static int crypto_test_rmem_device_init(struct reserved_mem *rmem,
		struct device *dev)
{
	dev->platform_data = rmem;
	return 0;
}
/* Reserved-mem hook: drop the platform_data reference set at init. */
static void crypto_test_rmem_device_release(struct reserved_mem *rmem,
		struct device *dev)
{
	dev->platform_data = NULL;
}
/* Ops installed on the reserved-mem region by crypto_test_rmem_setup(). */
static const struct reserved_mem_ops crypto_test_rmem_ops = {
	.device_init = crypto_test_rmem_device_init,
	.device_release = crypto_test_rmem_device_release,
};
/* Early setup for the "amlogic,crypto-test-rmem" reserved-memory node. */
static int __init crypto_test_rmem_setup(struct reserved_mem *rmem)
{
	rmem->ops = &crypto_test_rmem_ops;
	return 0;
}
RESERVEDMEM_OF_DECLARE(dma, "amlogic,crypto-test-rmem", crypto_test_rmem_setup);
MODULE_LICENSE("GPL v2");
— end of gist —