libswarm-ng

C implementation of BMT hasher, Swarmhash and Single Owner Chunk for swarm
git clone git://git.defalsify.org/libswarm-ng.git

commit cd8aad2aa77d8bcdc5f158c746958c85b2477932
parent c749ff1ece8690bfe11b12c70db821ee7d065d4e
Author: nolash <dev@holbrook.no>
Date:   Tue, 14 Sep 2021 15:08:03 +0200

Add common test vector generator, file and bmt vector tests

Diffstat:
M Makefile.dev      | 12 ++++++++----
M src/bmt.c         |  4 ++--
M src/bmt.h         |  4 +++-
M src/file.c        |  8 ++++++--
M src/file.h        |  1 +
A src/hex.c         | 68 ++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++
A src/hex.h         |  8 ++++++++
M test/check_bmt.c  | 60 ++++++++++++++++++++++++++++++++++++++++++++++++++++++++++--
M test/check_file.c | 83 ++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++
A test/common.c     | 17 +++++++++++++++++
A test/common.h     |  9 +++++++++
11 files changed, 263 insertions(+), 11 deletions(-)

diff --git a/Makefile.dev b/Makefile.dev
@@ -1,5 +1,6 @@
 KECCAK_DIR = ./aux/keccak-tiny
 CFLAGS += -I./src -I$(KECCAK_DIR) -L./build/lib/ -L./build/
+CFLAGS_CHECK = $(CFLAGS) -L./build/test
 prefix = /usr/local
 includedir = $(prefix)/include
 libdir = $(prefix)/lib
@@ -20,8 +21,11 @@ build: prep build_keccak
 	$(CC) -c -o build/file.o $(CFLAGS) src/file.c
 
 build_check: build
-	$(CC) -I./src -o build/test/check_bmt build/bmt.o build/endian.o build/file.o test/check_bmt.c $(CFLAGS) -lcheck -lkeccak-tiny
-	$(CC) -I./src -o build/test/check_file build/bmt.o build/endian.o build/file.o test/check_file.c $(CFLAGS) -lcheck -lkeccak-tiny
+	$(CC) -c -o build/hex.o $(CFLAGS) src/hex.c
+	$(CC) -c -o build/common.o $(CFLAGS) test/common.c
+	ar -rvs build/test/libtestcommon.a build/common.o build/hex.o
+	$(CC) -I./src -o build/test/check_bmt build/bmt.o build/endian.o build/file.o test/check_bmt.c $(CFLAGS_CHECK) -lcheck -lkeccak-tiny -ltestcommon
+	$(CC) -I./src -o build/test/check_file build/bmt.o build/endian.o build/file.o test/check_file.c $(CFLAGS_CHECK) -lcheck -lkeccak-tiny -ltestcommon
 	#$(CC) -I./src -o test/check_bmt build/bmt.o build/endian.o test/check_bmt.c build/libkeccak-tiny-small.a $(CFLAGS) -lcheck
 
 build_lib: build
@@ -31,8 +35,8 @@ build_lib: build
 
 .PHONY: test clean
 
 check: build build_check
-	LD_LIBRARY_PATH=./build/ CK_FORK=no build/test/check_bmt
-	LD_LIBRARY_PATH=./build/ CK_FORK=no build/test/check_file
+	LD_LIBRARY_PATH=./build/:./build/test CK_FORK=no build/test/check_bmt
+	LD_LIBRARY_PATH=./build/:./build/test CK_FORK=no build/test/check_file
 
 test: check
diff --git a/src/bmt.c b/src/bmt.c
@@ -43,14 +43,14 @@ static int bmt_rollup(bmt_t *bmt_content) {
 	return 0;
 }
 
-void bmt_init(bmt_t *bmt_content, const char *input, const size_t input_length, const long long data_length) {
+void bmt_init(bmt_t *bmt_content, const char *input, const size_t input_length, const bmt_spansize_t data_length) {
 	bmt_content->w_ptr = (char*)bmt_content->buf+_SWARM_DATA_LENGTH_TYPESIZE;
 	bmt_content->r_ptr = bmt_content->w_ptr;
 	bmt_content->target = bmt_content->w_ptr + SWARM_BLOCK_SIZE;
 
 	memset(bmt_content->buf, 0, _SWARM_DATA_LENGTH_TYPESIZE + SWARM_BLOCK_SIZE);
-	memcpy((char*)bmt_content->buf, &data_length, sizeof(long long));
+	memcpy((char*)bmt_content->buf, &data_length, sizeof(bmt_spansize_t));
 	to_endian(CONVERT_LITTLEENDIAN, _SWARM_DATA_LENGTH_TYPESIZE, (unsigned char*)bmt_content->buf);
 
 	memcpy(bmt_content->w_ptr, input, input_length);
diff --git a/src/bmt.h b/src/bmt.h
@@ -12,7 +12,9 @@ typedef struct bmt {
 	char *target;
 } bmt_t;
 
-void bmt_init(bmt_t *bmt_context, const char *input, const size_t input_length, const long long data_length);
+typedef long long bmt_spansize_t;
+
+void bmt_init(bmt_t *bmt_context, const char *input, const size_t input_length, const bmt_spansize_t data_length);
 int bmt_sum(bmt_t *bmt_context);
 
 #endif // _LIBSWARM_BMT
diff --git a/src/file.c b/src/file.c
@@ -10,6 +10,7 @@ void filehash_reset(filehash_t *fctx) {
 		fctx->writes[i] = 0;
 	}
 	fctx->length = 0;
+	fctx->result_ptr = NULL;
 }
 
 void filehash_init(filehash_t *fctx) {
@@ -31,7 +32,6 @@ static int filehash_write_hash(filehash_t *fctx, int level, const char *data) {
 	int next_level;
 	int r;
 
-	fctx->writes[level] += 1;
 	memcpy(fctx->ptr[level], data, _SWARM_WORD_SIZE);
 
 	if (fctx->writes[level] == _SWARM_BATCH_SIZE) {
@@ -42,7 +42,11 @@ static int filehash_write_hash(filehash_t *fctx, int level, const char *data) {
 		if (r != 0) {
 			return -1;
 		}
-		filehash_write_hash(fctx, level + 1, bctx->buf);
+		r = filehash_write_hash(fctx, level + 1, bctx->buf);
+		if (r != 0) {
+			return -1;
+		}
+		fctx->ptr[level] = fctx->ptr[next_level];
 	} else {
 		fctx->ptr[level] += _SWARM_WORD_SIZE;
 	}
diff --git a/src/file.h b/src/file.h
@@ -12,6 +12,7 @@ typedef struct filehash {
 	long long writes[SWARM_LEVELS];
 	long long spans[SWARM_LEVELS];
 	long long length;
+	char *result_ptr;
 	bmt_t bmt_context;
 } filehash_t;
diff --git a/src/hex.c b/src/hex.c
@@ -0,0 +1,68 @@
+#include <string.h>
+#include <stdio.h>
+#include <stdlib.h>
+
+#include "hex.h"
+
+/**
+* \todo improve
+*/
+int toHex(const unsigned char *data, size_t l, unsigned char *zHex, size_t *z) {
+	int i;
+
+	if (*z < (l*2)+1) {
+		return 1;
+	}
+
+	for (i = 0; i < l; i++) {
+		sprintf(zHex+(i*2), "%02x", *(data+i));
+	}
+	*z = (i*2);
+	*(zHex+(*z)) = 0x0;
+	*z++;
+	return 0;
+}
+
+
+// cheekily stolen from https://nachtimwald.com/2017/09/24/hex-encode-and-decode-in-c/
+int hexchr2bin(const char hex, char *out) {
+	if (out == NULL)
+		return 0;
+
+	if (hex >= '0' && hex <= '9') {
+		*out = hex - '0';
+	} else if (hex >= 'A' && hex <= 'F') {
+		*out = hex - 'A' + 10;
+	} else if (hex >= 'a' && hex <= 'f') {
+		*out = hex - 'a' + 10;
+	} else {
+		return 0;
+	}
+
+	return 1;
+}
+
+size_t hex2bin(const char *hex, unsigned char *out) {
+	size_t len;
+	char b1;
+	char b2;
+	size_t i;
+
+	if (hex == NULL || *hex == '\0' || out == NULL)
+		return 0;
+
+	len = strlen(hex);
+	if (len % 2 != 0)
+		return 0;
+	len /= 2;
+
+	memset(out, 'A', len);
+	for (i=0; i<len; i++) {
+		if (!hexchr2bin(hex[i*2], &b1) || !hexchr2bin(hex[i*2+1], &b2)) {
+			return 0;
+		}
+		//(*out)[i] = (b1 << 4) | b2;
+		*(out+i) = (b1 << 4) | b2;
+	}
+	return len;
+}
diff --git a/src/hex.h b/src/hex.h
@@ -0,0 +1,8 @@
+#ifndef LASH_HEX_H_
+#define LASH_HEX_H_
+
+int toHex(const unsigned char *data, size_t l, unsigned char *zHex, size_t *z);
+int hexchr2bin(const char hex, char *out);
+size_t hex2bin(const char *hex, unsigned char *out);
+
+#endif // LASH_HEX_H_
diff --git a/test/check_bmt.c b/test/check_bmt.c
@@ -2,6 +2,8 @@
 #include <stdlib.h>
 
 #include "bmt.h"
+#include "hex.h"
+#include "common.h"
 
 START_TEST(check_bmt_init) {
@@ -18,16 +20,69 @@ START_TEST(check_bmt_init) {
 END_TEST
 
-START_TEST(check_bmt_sum) {
+START_TEST(check_bmt_sum_foo) {
 	bmt_t bmt_context;
 	char *input = "foo";
+	char *hash_of_foo = "2387e8e7d8a48c2a9339c97c1dc3461a9a7aa07e994c5cb8b38fd7c1b3e6ea48";
+	char v_chk[_SWARM_WORD_SIZE];
 	char input_length = 3;
 	int r;
 
 	bmt_init(&bmt_context, input, input_length, 3);
 	r = bmt_sum(&bmt_context);
 	ck_assert_int_eq(r, 0);
+
+	hex2bin(hash_of_foo, v_chk);
+	ck_assert_mem_eq(bmt_context.buf, v_chk, _SWARM_WORD_SIZE);
 }
+END_TEST
+
+START_TEST(check_bmt_sum_vector) {
+	bmt_t bmt_context;
+	int r;
+	int i;
+
+	int lengths[] = {
+		_SWARM_WORD_SIZE - 1,
+		_SWARM_WORD_SIZE,
+		_SWARM_WORD_SIZE + 1,
+		_SWARM_WORD_SIZE * 2 - 1,
+		_SWARM_WORD_SIZE * 2,
+		_SWARM_WORD_SIZE * 2 + 1,
+		//SWARM_BLOCK_SIZE - 1,
+		SWARM_BLOCK_SIZE,
+	};
+	char *vectors[] = {
+		"ece86edb20669cc60d142789d464d57bdf5e33cb789d443f608cbd81cfa5697d",
+		"0be77f0bb7abc9cd0abed640ee29849a3072ccfd1020019fe03658c38f087e02",
+		"3463b46d4f9d5bfcbf9a23224d635e51896c1daef7d225b86679db17c5fd868e",
+		"95510c2ff18276ed94be2160aed4e69c9116573b6f69faaeed1b426fea6a3db8",
+		"490072cc55b8ad381335ff882ac51303cc069cbcb8d8d3f7aa152d9c617829fe",
+		"541552bae05e9a63a6cb561f69edf36ffe073e441667dbf7a0e9a3864bb744ea",
+		//"",
+		"c10090961e7682a10890c334d759a28426647141213abda93b096b892824d2ef",
+	};
+	char v_chk[_SWARM_WORD_SIZE];
+	char buf[SWARM_BLOCK_SIZE];
+	struct block_generator bg;
+
+	bg.m = 255;
+
+	for (i = 0; i < sizeof(vectors)/sizeof(vectors[0]); i++) {
+		bg.v = 0;
+
+		r = block_generate(&bg, buf, lengths[i]);
+		ck_assert_int_eq(r, lengths[i]);
+
+		bmt_init(&bmt_context, buf, lengths[i], lengths[i]);
+		r = bmt_sum(&bmt_context);
+		ck_assert_int_eq(r, 0);
+
+		hex2bin(vectors[i], v_chk);
+		ck_assert_mem_eq(bmt_context.buf, v_chk, _SWARM_WORD_SIZE);
+	}
+}
+END_TEST
 
 Suite * common_suite(void) {
 	Suite *s;
@@ -36,7 +91,8 @@
 	s = suite_create("bmt");
 	tc = tcase_create("core");
 	tcase_add_test(tc, check_bmt_init);
-	tcase_add_test(tc, check_bmt_sum);
+	tcase_add_test(tc, check_bmt_sum_foo);
+	tcase_add_test(tc, check_bmt_sum_vector);
 	suite_add_tcase(s, tc);
 
 	return s;
diff --git a/test/check_file.c b/test/check_file.c
@@ -2,8 +2,10 @@
 #include <stdlib.h>
 #include <string.h>
 
+#include "hex.h"
 #include "file.h"
+#include "common.h"
 
 START_TEST(check_file_init) {
 	ck_assert_int_eq(_SWARM_BATCH_SIZE, 128);
@@ -34,6 +36,85 @@
 END_TEST
 
+
+START_TEST(check_file_write_batch) {
+	filehash_t fh;
+	int r;
+	int i;
+	int j;
+	int v;
+	struct block_generator bg;
+
+	bg.v = 0;
+	bg.m = 255;
+
+	char buf[SWARM_BLOCK_SIZE];
+
+	filehash_init(&fh);
+
+	for (i = 0; i < _SWARM_BATCH_SIZE; i++) {
+		r = block_generate(&bg, buf, SWARM_BLOCK_SIZE);
+		//ck_assert_int_eq(r, SWARM_BLOCK_SIZE);
+		r = filehash_write(&fh, buf, SWARM_BLOCK_SIZE);
+		//ck_assert_int_eq(r, SWARM_BLOCK_SIZE);
+	}
+	ck_assert_int_eq(fh.writes[0], _SWARM_BATCH_SIZE);
+	ck_assert_int_eq(fh.length, _SWARM_BATCH_SIZE * SWARM_BLOCK_SIZE);
+	ck_assert_ptr_eq(fh.ptr[0], fh.ptr[1]);
+	ck_assert_ptr_eq(fh.ptr[1], fh.buf + _SWARM_WORD_SIZE);
+}
+END_TEST
+
+START_TEST(check_file_vectors) {
+	filehash_t fh;
+	char v_chk[_SWARM_WORD_SIZE];
+	int r;
+	int i;
+	int whole;
+	int part;
+	int writes;
+	int l;
+	struct block_generator bg;
+	char buf[SWARM_BLOCK_SIZE];
+
+
+	char *vectors[1] = {
+		"c10090961e7682a10890c334d759a28426647141213abda93b096b892824d2ef",
+	};
+
+	int lengths[] = {
+		SWARM_BLOCK_SIZE,
+	};
+
+	filehash_init(&fh);
+
+	for (i = 0; i < 1; i++) {
+		bg.v = 0;
+		bg.m = 255;
+
+		filehash_reset(&fh);
+
+		whole = lengths[i] / SWARM_BLOCK_SIZE;
+		part = lengths[i] % SWARM_BLOCK_SIZE;
+		writes = whole + (int)((part - 1)/SWARM_BLOCK_SIZE);
+
+		for (int i = 0; i < writes; i++) {
+			if (i < whole) {
+				l = SWARM_BLOCK_SIZE;
+			} else {
+				l = part;
+			}
+			r = block_generate(&bg, buf, l);
+			ck_assert_int_eq(r, SWARM_BLOCK_SIZE);
+			r = filehash_write(&fh, buf, l);
+			ck_assert_int_eq(r, SWARM_BLOCK_SIZE);
+		}
+
+		r = hex2bin(vectors[i], v_chk);
+		ck_assert_mem_eq(v_chk, fh.buf, _SWARM_WORD_SIZE);
+	}
+}
+END_TEST
+
 Suite * common_suite(void) {
 	Suite *s;
 	TCase *tc;
@@ -42,6 +123,8 @@
 	s = suite_create("file");
 	tc = tcase_create("core");
 	tcase_add_test(tc, check_file_init);
 	tcase_add_test(tc, check_file_single_write);
+	tcase_add_test(tc, check_file_write_batch);
+	tcase_add_test(tc, check_file_vectors);
 	suite_add_tcase(s, tc);
 
 	return s;
diff --git a/test/common.c b/test/common.c
@@ -0,0 +1,17 @@
+#include <stddef.h>
+
+#include "common.h"
+
+int block_generate(struct block_generator *bg, char *buf, size_t l) {
+	int i;
+
+	for (i = 0; i < l; i++) {
+		*(buf+i) = bg->v;
+		bg->v++;
+		bg->v %= bg->m;
+	}
+
+	return i;
+}
+
+
diff --git a/test/common.h b/test/common.h
@@ -0,0 +1,9 @@
+#include <stddef.h>
+
+struct block_generator {
+	int v;
+	int m;
+};
+
+int block_generate(struct block_generator *bg, char *buf, size_t l);
+
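
A minimal usage sketch of how the helpers added in this commit fit together, assuming the prototypes and constants from bmt.h, hex.h and common.h as they appear in the diff above. The standalone main() wrapper and the reuse of the SWARM_BLOCK_SIZE vector from check_bmt.c are illustrative assumptions, not code from the repository: block_generate() fills a buffer with a deterministic byte pattern, bmt_init()/bmt_sum() compute the BMT digest of one chunk, and hex2bin() decodes a reference digest for comparison.

#include <string.h>

#include "bmt.h"
#include "hex.h"
#include "common.h"

int main(void) {
	struct block_generator bg;
	char buf[SWARM_BLOCK_SIZE];
	char expect[_SWARM_WORD_SIZE];
	bmt_t ctx;

	/* deterministic test data: bytes 0, 1, ..., 254, 0, 1, ... */
	bg.v = 0;
	bg.m = 255;
	block_generate(&bg, buf, SWARM_BLOCK_SIZE);

	/* BMT-hash one full chunk; the span equals the data length here */
	bmt_init(&ctx, buf, SWARM_BLOCK_SIZE, SWARM_BLOCK_SIZE);
	bmt_sum(&ctx);

	/* compare against the SWARM_BLOCK_SIZE vector used in check_bmt.c */
	hex2bin("c10090961e7682a10890c334d759a28426647141213abda93b096b892824d2ef", (unsigned char*)expect);
	return memcmp(ctx.buf, expect, _SWARM_WORD_SIZE) == 0 ? 0 : 1;
}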