Why do these two implementations of the same algorithm produce different final values? One is written in Node.js, the other in Python.

Python code (source link):

import logging
import io
from hashlib import sha1
from struct import pack, unpack

from cryptography.hazmat.backends import default_backend
from cryptography.hazmat.primitives.ciphers import Cipher, algorithms, modes

logger = logging.getLogger(__name__)
logger.addHandler(logging.NullHandler())
# ... (rest of the ECMA376Standard class omitted) ...
@staticmethod
def makekey_from_password(password, algId, algIdHash, providerType, keySize, saltSize, salt):
    r"""
    Generate intermediate key from given password.

        >>> password = 'Password1234_'
        >>> algId = 0x660e
        >>> algIdHash = 0x8004
        >>> providerType = 0x18
        >>> keySize = 128
        >>> saltSize = 16
        >>> salt = b'\xe8\x82fI\x0c[\xd1\xee\xbd+C\x94\xe3\xf80\xef'
        >>> expected = b'@\xb1:q\xf9\x0b\x96n7T\x08\xf2\xd1\x81\xa1\xaa'
        >>> ECMA376Standard.makekey_from_password(password, algId, algIdHash, providerType, keySize, saltSize, salt) == expected
        True
    """
    logger.debug([password, hex(algId), hex(algIdHash), hex(providerType), keySize, saltSize, salt])
    xor_bytes = lambda a, b: bytearray([p ^ q for p, q in zip(bytearray(a), bytearray(b))])  # bytearray() for Python 2 compat.

    # https://msdn.microsoft.com/en-us/library/dd925430(v=office.12).aspx
    ITER_COUNT = 50000

    password = password.encode("UTF-16LE")
    h = sha1(salt + password).digest()
    for i in range(ITER_COUNT):
        ibytes = pack("<I", i)
        h = sha1(ibytes + h).digest()
    block = 0
    blockbytes = pack("<I", block)
    hfinal = sha1(h + blockbytes).digest()
    cbRequiredKeyLength = keySize // 8
    cbHash = sha1().digest_size
    buf1 = b"\x36" * 64
    buf1 = xor_bytes(hfinal, buf1[:cbHash]) + buf1[cbHash:]
    x1 = sha1(buf1).digest()
    buf2 = b"\x5c" * 64
    buf2 = xor_bytes(hfinal, buf2[:cbHash]) + buf2[cbHash:]
    x2 = sha1(buf2).digest()  # In spec but unused
    x3 = x1 + x2
    keyDerived = x3[:cbRequiredKeyLength]
    logger.debug(keyDerived)
    return keyDerived

Node.js code:

const crypto = require('crypto');

function makekey_from_password (password, algId, algIdHash, providerType, keySize, saltSize, salt) {
  const ITER_COUNT = 50000;
  const cbRequiredKeyLength = keySize / 8;

  const passwordBuf = Buffer.from(password, 'utf16le');
  //   let saltedPasswordHash = crypto.createHash('sha1').update(salt).update(passwordBuf).digest();
  let saltedPasswordHash = crypto.createHash('sha1').update(Buffer.concat([salt, passwordBuf])).digest();

  for (let i = 0; i < ITER_COUNT; i++) {
    const ibytes = Buffer.alloc(4);
    ibytes.writeUInt32LE(i, 0);
    saltedPasswordHash = crypto.createHash('sha1').update(Buffer.concat([ibytes, saltedPasswordHash])).digest();
    // saltedPasswordHash = crypto.createHash('sha1').update(ibytes).update(saltedPasswordHash).digest();
  }

  const block = Buffer.alloc(4);
  // const hfinal = crypto.createHash('sha1').update(saltedPasswordHash).update(block).digest();
  const hfinal = crypto.createHash('sha1').update(Buffer.concat([saltedPasswordHash, block])).digest();

  const cbHash = 20;

  let buf1 = Buffer.alloc(64, 0x36);
  buf1 = Buffer.concat([xor_bytes(hfinal, buf1.slice(0, cbHash)), buf1.slice(cbHash)]);
  const x1 = crypto.createHash('sha1').update(buf1).digest();

  let buf2 = Buffer.alloc(64, 0x5C);
  buf2 = Buffer.concat([xor_bytes(hfinal, buf2.slice(0, cbHash)), buf2.slice(cbHash)]);
  const x2 = crypto.createHash('sha1').update(buf2).digest();
  const x3 = Buffer.concat([x1, x2]);
  const keyDerived = x3.slice(0, cbRequiredKeyLength);

  return keyDerived;
}

function xor_bytes (a, b) {
  const result = [];
  for (let i = 0; i < a.length; i++) {
    result.push(a[i] ^ b[i]);
  }
  return Buffer.from(result);
}

// Test
const password = 'Password1234_';
const algId = 0x660e;
const algIdHash = 0x8004;
const providerType = 0x18;
const keySize = 128;
const saltSize = 16;
const salt = Buffer.from('e88266490c5bd1eebd2b43c94e3f3830ef', 'hex');
const expected = Buffer.from('40b13a71f90b966e375408f2d181a1aa', 'hex');
const key = makekey_from_password(password, algId, algIdHash, providerType, keySize, saltSize, salt);
console.log('key', key, key.toString('hex'));
console.log(makekey_from_password(password, algId, algIdHash, providerType, keySize, saltSize, salt).equals(expected));

Python input/output:

>>> salt = b'\xe8\x82fI\x0c[\xd1\xee\xbd+C\x94\xe3\xf80\xef'
>>> expected = b'@\xb1:q\xf9\x0b\x96n7T\x08\xf2\xd1\x81\xa1\xaa'
(the derived key matches expected; hex: 40b13a71f90b966e375408f2d181a1aa)

Node.js input/output:

Buffer.from('e88266490c5bd1eebd2b43c94e3f3830ef', 'hex');
Actual output: 6c1cc43d0fbe4e7d987f7ac68968111d
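
(For reference, one way to compare the two runs is to log the exact bytes each script actually feeds into the key derivation before comparing the derived keys. A minimal sketch, reusing the salt and expected buffers already defined in the Node.js test above:)

// Dump the inputs as hex so the Python and Node.js runs can be compared byte-for-byte.
console.log('salt    ', salt.length, salt.toString('hex'));
console.log('expected', expected.length, expected.toString('hex'));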

Could someone explain why the Node.js code I wrote based on the Python code produces a different final output?

2 Answers

I haven't gone through the code, but your test inputs aren't even the same on the two sides.

In your Python you have:

salt = b'\xe8\x82fI\x0c[\xd1\xee\xbd+C\x94\xe3\xf80\xef'

which corresponds to the hex:

e88266490c5bd1eebd2b4394e3f830ef

That doesn't match what you have in your JS:

e88266490c5bd1eebd2b4394e3f830ef    -- python
e88266490c5bd1eebd2b43c94e3f3830ef  -- node.js
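
The mismatch is easy to confirm programmatically: the two hex strings decode to different byte sequences, and the Node.js one is even 17 bytes long rather than the 16 bytes that saltSize declares. A quick Node.js check (the names pySalt and nodeSalt are only for illustration):

const pySalt = Buffer.from('e88266490c5bd1eebd2b4394e3f830ef', 'hex');     // salt from the Python doctest
const nodeSalt = Buffer.from('e88266490c5bd1eebd2b43c94e3f3830ef', 'hex'); // salt from the Node.js test
console.log(pySalt.length, nodeSalt.length); // 16 17
console.log(pySalt.equals(nodeSalt));        // false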

Change the Node.js side to:

const salt = Buffer.from([0xe8, 0x82, 0x66, 0x49, 0x0c, 0x5b, 0xd1, 0xee, 0xbd, 0x2b, 0x43, 0x94, 0xe3, 0xf8, 0x30, 0xef]);
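
Equivalently, the salt can stay a hex string as long as it carries the same 16 bytes as the Python literal. With that correction, the equality check at the end of the question's script should print true. A minimal sketch (new variable names are used so it doesn't clash with the const declarations already in the script):

const fixedSalt = Buffer.from('e88266490c5bd1eebd2b4394e3f830ef', 'hex'); // same bytes as the Python salt
const derivedKey = makekey_from_password(password, algId, algIdHash, providerType, keySize, saltSize, fixedSalt);
console.log(derivedKey.toString('hex')); // should be 40b13a71f90b966e375408f2d181a1aa
console.log(derivedKey.equals(expected)); // should be true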