From 7dfa5e9e6e05fb2b8bb346d92f2a22229ef495c8 Mon Sep 17 00:00:00 2001
From: stoffu
Date: Mon, 5 Mar 2018 18:24:11 +0900
Subject: [PATCH] chacha: call prehashed version explicitly as generate_chacha_key_prehashed

hash: add prehashed version cn_slow_hash_prehashed

slow-hash: let cn_slow_hash take 4th parameter for deciding prehashed or not

slow-hash: add support for prehashed version for the other 3 platforms
---
 src/crypto/chacha.h          | 11 +++++++++--
 src/crypto/hash-ops.h        |  3 +--
 src/crypto/hash.h            |  6 +++++-
 src/crypto/slow-hash.c       | 30 +++++++++++++++++++-----------
 src/device/device_ledger.cpp |  4 ++--
 tests/hash/main.cpp          |  4 ++--
 6 files changed, 38 insertions(+), 20 deletions(-)

diff --git a/src/crypto/chacha.h b/src/crypto/chacha.h
index 22da53bd0..7a120931a 100644
--- a/src/crypto/chacha.h
+++ b/src/crypto/chacha.h
@@ -69,10 +69,17 @@ namespace crypto {
     chacha20(data, length, key.data(), reinterpret_cast<const uint8_t*>(&iv), cipher);
   }
 
-  inline void generate_chacha_key(const void *data, size_t size, chacha_key& key, int cn_variant = 0, bool prehashed=false) {
+  inline void generate_chacha_key(const void *data, size_t size, chacha_key& key) {
     static_assert(sizeof(chacha_key) <= sizeof(hash), "Size of hash must be at least that of chacha_key");
     tools::scrubbed_arr<char, HASH_SIZE> pwd_hash;
-    crypto::cn_slow_hash_pre(data, size, pwd_hash.data(), cn_variant, prehashed);
+    crypto::cn_slow_hash(data, size, pwd_hash.data(), 0/*variant*/, 0/*prehashed*/);
+    memcpy(&key, pwd_hash.data(), sizeof(key));
+  }
+
+  inline void generate_chacha_key_prehashed(const void *data, size_t size, chacha_key& key) {
+    static_assert(sizeof(chacha_key) <= sizeof(hash), "Size of hash must be at least that of chacha_key");
+    tools::scrubbed_arr<char, HASH_SIZE> pwd_hash;
+    crypto::cn_slow_hash(data, size, pwd_hash.data(), 0/*variant*/, 1/*prehashed*/);
     memcpy(&key, pwd_hash.data(), sizeof(key));
   }
 
diff --git a/src/crypto/hash-ops.h b/src/crypto/hash-ops.h
index 934d464de..d77d55cf3 100644
--- a/src/crypto/hash-ops.h
+++ b/src/crypto/hash-ops.h
@@ -79,8 +79,7 @@ enum {
 };
 
 void cn_fast_hash(const void *data, size_t length, char *hash);
-void cn_slow_hash(const void *data, size_t length, char *hash, int variant);
-void cn_slow_hash_pre(const void *data, size_t length, char *hash, int variant, bool pre);
+void cn_slow_hash(const void *data, size_t length, char *hash, int variant, int prehashed);
 
 void hash_extra_blake(const void *data, size_t length, char *hash);
 void hash_extra_groestl(const void *data, size_t length, char *hash);
diff --git a/src/crypto/hash.h b/src/crypto/hash.h
index bf4f4c096..995e2294e 100644
--- a/src/crypto/hash.h
+++ b/src/crypto/hash.h
@@ -72,7 +72,11 @@ namespace crypto {
   }
 
   inline void cn_slow_hash(const void *data, std::size_t length, hash &hash, int variant = 0) {
-    cn_slow_hash(data, length, reinterpret_cast<char *>(&hash), variant);
+    cn_slow_hash(data, length, reinterpret_cast<char *>(&hash), variant, 0/*prehashed*/);
+  }
+
+  inline void cn_slow_hash_prehashed(const void *data, std::size_t length, hash &hash, int variant = 0) {
+    cn_slow_hash(data, length, reinterpret_cast<char *>(&hash), variant, 1/*prehashed*/);
   }
 
   inline void tree_hash(const hash *hashes, std::size_t count, hash &root_hash) {
diff --git a/src/crypto/slow-hash.c b/src/crypto/slow-hash.c
index 8c7dad8e0..d7dcbd274 100644
--- a/src/crypto/slow-hash.c
+++ b/src/crypto/slow-hash.c
@@ -564,11 +564,7 @@ void slow_hash_free_state(void)
 * @param length the length in bytes of the data
 * @param hash a pointer to a buffer in which the final 256 bit hash will be stored
 */
-void cn_slow_hash(const void *data, size_t length, char *hash, int variant) {
-    cn_slow_hash_pre(data,length,hash,variant,false);
-}
-
-void cn_slow_hash_pre(const void *data, size_t length, char *hash, int variant, bool prehashed)
+void cn_slow_hash(const void *data, size_t length, char *hash, int variant, int prehashed)
 {
     RDATA_ALIGN16 uint8_t expandedKey[240];  /* These buffers are aligned to use later with SSE functions */
 
@@ -909,7 +905,7 @@ STATIC INLINE void aes_pseudo_round_xor(const uint8_t *in, uint8_t *out, const u
     }
 }
 
-void cn_slow_hash(const void *data, size_t length, char *hash, int variant)
+void cn_slow_hash(const void *data, size_t length, char *hash, int variant, int prehashed)
 {
     RDATA_ALIGN16 uint8_t expandedKey[240];
     RDATA_ALIGN16 uint8_t hp_state[MEMORY];
@@ -932,7 +928,11 @@ void cn_slow_hash(const void *data, size_t length, char *hash, int variant)
 
     /* CryptoNight Step 1: Use Keccak1600 to initialize the 'state' (and 'text')
        buffers from the data. */
-    hash_process(&state.hs, data, length);
+    if (prehashed) {
+        memcpy(&state.hs, data, length);
+    } else {
+        hash_process(&state.hs, data, length);
+    }
     memcpy(text, state.init, INIT_SIZE_BYTE);
 
     VARIANT1_INIT64();
@@ -1105,7 +1105,7 @@ STATIC INLINE void xor_blocks(uint8_t* a, const uint8_t* b)
     U64(a)[1] ^= U64(b)[1];
 }
 
-void cn_slow_hash(const void *data, size_t length, char *hash, int variant)
+void cn_slow_hash(const void *data, size_t length, char *hash, int variant, int prehashed)
 {
     uint8_t text[INIT_SIZE_BYTE];
     uint8_t a[AES_BLOCK_SIZE];
@@ -1131,7 +1131,11 @@ void cn_slow_hash(const void *data, size_t length, char *hash, int variant)
     long_state = (uint8_t *)malloc(MEMORY);
 #endif
 
-    hash_process(&state.hs, data, length);
+    if (prehashed) {
+        memcpy(&state.hs, data, length);
+    } else {
+        hash_process(&state.hs, data, length);
+    }
     memcpy(text, state.init, INIT_SIZE_BYTE);
 
     VARIANT1_INIT64();
@@ -1289,7 +1293,7 @@ union cn_slow_hash_state {
 };
 #pragma pack(pop)
 
-void cn_slow_hash(const void *data, size_t length, char *hash, int variant) {
+void cn_slow_hash(const void *data, size_t length, char *hash, int variant, int prehashed) {
   uint8_t long_state[MEMORY];
   union cn_slow_hash_state state;
   uint8_t text[INIT_SIZE_BYTE];
@@ -1301,7 +1305,11 @@ void cn_slow_hash(const void *data, size_t length, char *hash, int variant) {
   uint8_t aes_key[AES_KEY_SIZE];
   oaes_ctx *aes_ctx;
 
-  hash_process(&state.hs, data, length);
+  if (prehashed) {
+    memcpy(&state.hs, data, length);
+  } else {
+    hash_process(&state.hs, data, length);
+  }
   memcpy(text, state.init, INIT_SIZE_BYTE);
   memcpy(aes_key, state.hs.b, AES_KEY_SIZE);
   aes_ctx = (oaes_ctx *) oaes_alloc();
diff --git a/src/device/device_ledger.cpp b/src/device/device_ledger.cpp
index ccbc45b45..b3c0035a1 100644
--- a/src/device/device_ledger.cpp
+++ b/src/device/device_ledger.cpp
@@ -513,10 +513,10 @@ namespace hw {
 
         char prekey[200];
         memmove(prekey, &this->buffer_recv[0], 200);
-        crypto::generate_chacha_key(&prekey[0], sizeof(prekey), key, 0, true);
+        crypto::generate_chacha_key_prehashed(&prekey[0], sizeof(prekey), key);
 
         #ifdef DEBUG_HWDEVICE
-        hw::ledger::check32("generate_chacha_key", "key", (char*)key_x.data(), (char*)key.data());
+        hw::ledger::check32("generate_chacha_key_prehashed", "key", (char*)key_x.data(), (char*)key.data());
         #endif
 
         unlock_device();
diff --git a/tests/hash/main.cpp b/tests/hash/main.cpp
index 5a16284df..c7e1fe712 100644
--- a/tests/hash/main.cpp
+++ b/tests/hash/main.cpp
@@ -52,10 +52,10 @@ extern "C" {
     tree_hash((const char (*)[32]) data, length >> 5, hash);
   }
   static void cn_slow_hash_0(const void *data, size_t length, char *hash) {
-    return cn_slow_hash(data, length, hash, 0);
+    return cn_slow_hash(data, length, hash, 0/*variant*/, 0/*prehashed*/);
  }
  static void cn_slow_hash_1(const void *data, size_t length, char *hash) {
-    return cn_slow_hash(data, length, hash, 1);
+    return cn_slow_hash(data, length, hash, 1/*variant*/, 0/*prehashed*/);
  }
 }
 POP_WARNINGS
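
Note (usage sketch, not part of the patch): after this change, the two chacha
key-derivation entry points differ only in how cn_slow_hash initializes its
Keccak state. The sketch below illustrates that split; the wrapper name
derive_keys_example and the assumption that the monero src/ directory is on the
include path are illustrative only, everything else follows the post-patch
declarations in src/crypto/chacha.h.

// Illustrative sketch against the post-patch src/crypto/chacha.h declarations.
#include <cstddef>
#include "crypto/chacha.h"

void derive_keys_example(const void *password, std::size_t password_len,
                         const char (&prekey)[200])
{
  crypto::chacha_key key_from_password;
  crypto::chacha_key key_from_prekey;

  // Normal path: the input is raw data, so cn_slow_hash() runs Keccak1600 on
  // it first (prehashed == 0) before the CryptoNight memory-hard loop.
  crypto::generate_chacha_key(password, password_len, key_from_password);

  // Prehashed path (the device_ledger.cpp case): the input is already a
  // 200-byte Keccak1600 state, so cn_slow_hash() copies it straight into
  // state.hs (prehashed == 1) and skips the initial hash_process() call.
  crypto::generate_chacha_key_prehashed(&prekey[0], sizeof(prekey), key_from_prekey);
}

In both paths the remainder of cn_slow_hash (variant 0 here) is unchanged; only
the initialization of state.hs differs.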