Mirror of https://codeberg.org/anoncontributorxmr/monero.git
chacha: call prehashed version explicitly as generate_chacha_key_prehashed
hash: add prehashed version cn_slow_hash_prehashed
slow-hash: let cn_slow_hash take 4th parameter for deciding prehashed or not
slow-hash: add support for prehashed version for the other 3 platforms
This commit is contained in:
parent b2d23b189e
commit 7dfa5e9e6e
6 changed files with 38 additions and 20 deletions
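In short: the previous catch-all overload generate_chacha_key(data, size, key, cn_variant, prehashed) is split into two explicitly named functions, and cn_slow_hash itself now takes a fourth prehashed argument in all four platform implementations, replacing the separate cn_slow_hash_pre wrapper. A minimal caller-side sketch of the API after this commit (the caller function, buffer names and include path are illustrative; the two generate_chacha_key* signatures are taken from the diff below):

#include <cstddef>
#include "crypto/chacha.h"  // header path as in the Monero tree (assumption)

// Illustrative caller, not part of the commit.
static void derive_keys_sketch(const void *password, std::size_t password_len,
                               const char keccak_state[200])
{
  crypto::chacha_key regular_key, prehashed_key;

  // Regular path: the input is hashed by cn_slow_hash as before
  // (now invoked internally with variant = 0 and prehashed = 0).
  crypto::generate_chacha_key(password, password_len, regular_key);

  // Prehashed path: the input is already a 200-byte Keccak-1600 state
  // (e.g. the prekey received from the hardware device further down),
  // so cn_slow_hash copies it into its state instead of re-hashing it.
  crypto::generate_chacha_key_prehashed(keccak_state, 200, prehashed_key);
}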
@@ -69,10 +69,17 @@ namespace crypto {
     chacha20(data, length, key.data(), reinterpret_cast<const uint8_t*>(&iv), cipher);
   }
 
-  inline void generate_chacha_key(const void *data, size_t size, chacha_key& key, int cn_variant = 0, bool prehashed=false) {
+  inline void generate_chacha_key(const void *data, size_t size, chacha_key& key) {
     static_assert(sizeof(chacha_key) <= sizeof(hash), "Size of hash must be at least that of chacha_key");
     tools::scrubbed_arr<char, HASH_SIZE> pwd_hash;
-    crypto::cn_slow_hash_pre(data, size, pwd_hash.data(), cn_variant, prehashed);
+    crypto::cn_slow_hash(data, size, pwd_hash.data(), 0/*variant*/, 0/*prehashed*/);
     memcpy(&key, pwd_hash.data(), sizeof(key));
   }
 
+  inline void generate_chacha_key_prehashed(const void *data, size_t size, chacha_key& key) {
+    static_assert(sizeof(chacha_key) <= sizeof(hash), "Size of hash must be at least that of chacha_key");
+    tools::scrubbed_arr<char, HASH_SIZE> pwd_hash;
+    crypto::cn_slow_hash(data, size, pwd_hash.data(), 0/*variant*/, 1/*prehashed*/);
+    memcpy(&key, pwd_hash.data(), sizeof(key));
+  }
+
@@ -79,8 +79,7 @@ enum {
 };
 
 void cn_fast_hash(const void *data, size_t length, char *hash);
-void cn_slow_hash(const void *data, size_t length, char *hash, int variant);
-void cn_slow_hash_pre(const void *data, size_t length, char *hash, int variant, bool pre);
+void cn_slow_hash(const void *data, size_t length, char *hash, int variant, int prehashed);
 
 void hash_extra_blake(const void *data, size_t length, char *hash);
 void hash_extra_groestl(const void *data, size_t length, char *hash);
 
@@ -72,7 +72,11 @@ namespace crypto {
   }
 
   inline void cn_slow_hash(const void *data, std::size_t length, hash &hash, int variant = 0) {
-    cn_slow_hash(data, length, reinterpret_cast<char *>(&hash), variant);
+    cn_slow_hash(data, length, reinterpret_cast<char *>(&hash), variant, 0/*prehashed*/);
   }
 
+  inline void cn_slow_hash_prehashed(const void *data, std::size_t length, hash &hash, int variant = 0) {
+    cn_slow_hash(data, length, reinterpret_cast<char *>(&hash), variant, 1/*prehashed*/);
+  }
+
   inline void tree_hash(const hash *hashes, std::size_t count, hash &root_hash) {
 
@@ -564,11 +564,7 @@ void slow_hash_free_state(void)
  * @param length the length in bytes of the data
  * @param hash a pointer to a buffer in which the final 256 bit hash will be stored
  */
-void cn_slow_hash(const void *data, size_t length, char *hash, int variant) {
-  cn_slow_hash_pre(data,length,hash,variant,false);
-}
-
-void cn_slow_hash_pre(const void *data, size_t length, char *hash, int variant, bool prehashed)
+void cn_slow_hash(const void *data, size_t length, char *hash, int variant, int prehashed)
 {
     RDATA_ALIGN16 uint8_t expandedKey[240];  /* These buffers are aligned to use later with SSE functions */
 
@@ -909,7 +905,7 @@ STATIC INLINE void aes_pseudo_round_xor(const uint8_t *in, uint8_t *out, const u
     }
 }
 
-void cn_slow_hash(const void *data, size_t length, char *hash, int variant)
+void cn_slow_hash(const void *data, size_t length, char *hash, int variant, int prehashed)
 {
     RDATA_ALIGN16 uint8_t expandedKey[240];
     RDATA_ALIGN16 uint8_t hp_state[MEMORY];
 
@@ -932,7 +928,11 @@ void cn_slow_hash(const void *data, size_t length, char *hash, int variant)
 
     /* CryptoNight Step 1: Use Keccak1600 to initialize the 'state' (and 'text') buffers from the data. */
 
-    hash_process(&state.hs, data, length);
+    if (prehashed) {
+        memcpy(&state.hs, data, length);
+    } else {
+        hash_process(&state.hs, data, length);
+    }
     memcpy(text, state.init, INIT_SIZE_BYTE);
 
     VARIANT1_INIT64();
 
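The branch above (repeated in the ARM and portable implementations below) is what the new prehashed flag means in practice: when it is nonzero, data must already be a complete 200-byte Keccak-1600 state, which is copied verbatim into state.hs instead of being absorbed via hash_process. A small illustration of that contract (function and buffer names are hypothetical; the 200-byte size matches the prekey buffer in the device code further down):

/* Sketch only; assumes the cn_slow_hash declaration from hash-ops.h above. */
static void prehashed_call_sketch(const char keccak_state[200])
{
    char final_hash[32];  /* the final 256-bit hash, as documented above */
    /* keccak_state must be a full 200-byte Keccak-1600 state; it is copied
       verbatim into state.hs rather than being run through hash_process(). */
    cn_slow_hash(keccak_state, 200, final_hash, 0/*variant*/, 1/*prehashed*/);
}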
@@ -1105,7 +1105,7 @@ STATIC INLINE void xor_blocks(uint8_t* a, const uint8_t* b)
     U64(a)[1] ^= U64(b)[1];
 }
 
-void cn_slow_hash(const void *data, size_t length, char *hash, int variant)
+void cn_slow_hash(const void *data, size_t length, char *hash, int variant, int prehashed)
 {
     uint8_t text[INIT_SIZE_BYTE];
     uint8_t a[AES_BLOCK_SIZE];
 
@@ -1131,7 +1131,11 @@ void cn_slow_hash(const void *data, size_t length, char *hash, int variant)
     long_state = (uint8_t *)malloc(MEMORY);
 #endif
 
-    hash_process(&state.hs, data, length);
+    if (prehashed) {
+        memcpy(&state.hs, data, length);
+    } else {
+        hash_process(&state.hs, data, length);
+    }
     memcpy(text, state.init, INIT_SIZE_BYTE);
 
     VARIANT1_INIT64();
 
@@ -1289,7 +1293,7 @@ union cn_slow_hash_state {
 };
 #pragma pack(pop)
 
-void cn_slow_hash(const void *data, size_t length, char *hash, int variant) {
+void cn_slow_hash(const void *data, size_t length, char *hash, int variant, int prehashed) {
   uint8_t long_state[MEMORY];
   union cn_slow_hash_state state;
   uint8_t text[INIT_SIZE_BYTE];
 
@@ -1301,7 +1305,11 @@ void cn_slow_hash(const void *data, size_t length, char *hash, int variant) {
   uint8_t aes_key[AES_KEY_SIZE];
   oaes_ctx *aes_ctx;
 
-  hash_process(&state.hs, data, length);
+  if (prehashed) {
+    memcpy(&state.hs, data, length);
+  } else {
+    hash_process(&state.hs, data, length);
+  }
   memcpy(text, state.init, INIT_SIZE_BYTE);
   memcpy(aes_key, state.hs.b, AES_KEY_SIZE);
   aes_ctx = (oaes_ctx *) oaes_alloc();
 
@@ -513,10 +513,10 @@ namespace hw {
 
         char prekey[200];
         memmove(prekey, &this->buffer_recv[0], 200);
-        crypto::generate_chacha_key(&prekey[0], sizeof(prekey), key, 0, true);
+        crypto::generate_chacha_key_prehashed(&prekey[0], sizeof(prekey), key);
 
         #ifdef DEBUG_HWDEVICE
-        hw::ledger::check32("generate_chacha_key", "key", (char*)key_x.data(), (char*)key.data());
+        hw::ledger::check32("generate_chacha_key_prehashed", "key", (char*)key_x.data(), (char*)key.data());
         #endif
 
         unlock_device();
 
@@ -52,10 +52,10 @@ extern "C" {
     tree_hash((const char (*)[32]) data, length >> 5, hash);
   }
   static void cn_slow_hash_0(const void *data, size_t length, char *hash) {
-    return cn_slow_hash(data, length, hash, 0);
+    return cn_slow_hash(data, length, hash, 0/*variant*/, 0/*prehashed*/);
   }
   static void cn_slow_hash_1(const void *data, size_t length, char *hash) {
-    return cn_slow_hash(data, length, hash, 1);
+    return cn_slow_hash(data, length, hash, 1/*variant*/, 0/*prehashed*/);
   }
 }
 POP_WARNINGS