commit 70d86f8202e8ed1ee16fddef234dc3cebf19e2e5
parent cd8aad2aa77d8bcdc5f158c746958c85b2477932
Author: nolash <dev@holbrook.no>
Date: Tue, 14 Sep 2021 16:02:06 +0200
Add file sum
Diffstat:
3 files changed, 71 insertions(+), 14 deletions(-)
diff --git a/src/file.c b/src/file.c
@@ -10,9 +10,57 @@ void filehash_reset(filehash_t *fctx) {
fctx->writes[i] = 0;
}
fctx->length = 0;
- fctx->result_ptr = NULL;
}
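+/* Fold the digests collected on each level into the level above until
+ * a single root digest remains at fctx->buf. Returns the total file
+ * length on success, or -1 on error. */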
+int filehash_sum(filehash_t *fctx) {
+ bmt_t *bctx;
+ int r;
+ int l;
+ int i;
+ int blocks;
+ long long blocks_span_length;
+ int remain_length;
+ char *target;
+
+ bctx = &fctx->bmt_context;
+
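+ /* data remaining in the last block; 0 when the length is block-aligned */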
+ remain_length = fctx->length % SWARM_BLOCK_SIZE;
+
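+ /* once a level's write pointer reaches this address, the root digest is in place at fctx->buf */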
+ target = fctx->buf + _SWARM_WORD_SIZE;
+
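+ /* fold each level into the one above until only the root remains */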
+ for (i = 0; i < SWARM_LEVELS; i++) {
+ if (fctx->ptr[i] == target) {
+ return fctx->length;
+ }
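+ /* derive the data span covered by the references gathered on this level */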
+ l = fctx->ptr[i] - fctx->ptr[i+1];
+ blocks = (l / _SWARM_WORD_SIZE);
+ blocks_span_length = blocks * fctx->spans[i];
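+ /* deduct the trailing partial-block remainder (no-op for block-aligned lengths) */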
+ blocks_span_length -= remain_length;
+
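+ /* hash this level's references as a single BMT chunk with the computed span */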
+ bmt_init(bctx, fctx->ptr[i+1], l, (long long)(blocks_span_length));
+ r = bmt_sum(bctx);
+ if (r != 0) {
+ return -1;
+ }
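+ /* append the resulting digest to the next level up */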
+ memcpy(fctx->ptr[i+1], bctx->buf, _SWARM_WORD_SIZE);
+ fctx->ptr[i+1] += _SWARM_WORD_SIZE;
+ }
+
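+ /* ran out of levels without converging on a root digest */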
+ return -1;
+}
+
+
void filehash_init(filehash_t *fctx) {
int i;
int l;
diff --git a/src/file.h b/src/file.h
@@ -12,7 +12,6 @@ typedef struct filehash {
long long writes[SWARM_LEVELS];
long long spans[SWARM_LEVELS];
long long length;
- char *result_ptr;
bmt_t bmt_context;
} filehash_t;
diff --git a/test/check_file.c b/test/check_file.c
@@ -66,30 +66,36 @@ END_TEST
START_TEST(check_file_vectors) {
filehash_t fh;
- char v_chk[_SWARM_WORD_SIZE];
int r;
int i;
+ int j;
+ int l;
int whole;
int part;
int writes;
- int l;
+ char v_chk[_SWARM_WORD_SIZE];
struct block_generator bg;
char buf[SWARM_BLOCK_SIZE];
-
- char *vectors[1] = {
- "c10090961e7682a10890c334d759a28426647141213abda93b096b892824d2ef",
- };
-
int lengths[] = {
SWARM_BLOCK_SIZE,
+ SWARM_BLOCK_SIZE * 2,
+ SWARM_BLOCK_SIZE * _SWARM_BATCH_SIZE,
+ };
+
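+ /* expected root digests, hex-encoded, one per entry in lengths[] */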
+ char *vectors[] = {
+ "c10090961e7682a10890c334d759a28426647141213abda93b096b892824d2ef",
+ "29a5fb121ce96194ba8b7b823a1f9c6af87e1791f824940a53b5a7efe3f790d9",
+ "3047d841077898c26bbe6be652a2ec590a5d9bd7cd45d290ea42511b48753c09",
};
filehash_init(&fh);
- for (i = 0; i < 1; i++) {
+ bg.m = 255;
+
+ for (i = 0; i < sizeof(vectors)/sizeof(vectors[0]); i++) {
bg.v = 0;
- bg.m = 255;
filehash_reset(&fh);
@@ -97,8 +102,8 @@ START_TEST(check_file_vectors) {
part = lengths[i] % SWARM_BLOCK_SIZE;
writes = whole + (int)((part - 1)/SWARM_BLOCK_SIZE);
- for (int i = 0; i < writes; i++) {
- if (i < whole) {
+ for (j = 0; j < writes; j++) {
+ if (j < whole) {
l = SWARM_BLOCK_SIZE;
} else {
l = part;
@@ -109,8 +114,12 @@ START_TEST(check_file_vectors) {
ck_assert_int_eq(r, SWARM_BLOCK_SIZE);
}
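+ /* finalize; filehash_sum returns the total file length on success */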
+ r = filehash_sum(&fh);
+ ck_assert_int_eq(r, lengths[i]);
+
r = hex2bin(vectors[i], v_chk);
- ck_assert_mem_eq(v_chk, fh.buf, _SWARM_WORD_SIZE);
+ ck_assert_mem_eq(fh.buf, v_chk, _SWARM_WORD_SIZE);
}
}
END_TEST