md5一个字符串在不同平台上多次得到不同的结果
tc
#include <stdio.h>
#include <stdlib.h>
#include <string.h>
#include <openssl/md5.h>
static char* unsigned_to_signed_char(const unsigned char* in , int len) {
char* res = (char*)malloc(len * 2 + 1);
int i = 0;
memset(res , 0 , len * 2 + 1);
while(i < len) {
sprintf(res + i * 2 , "%02x" , in[i]);
i ++;
};
return res;
}
static unsigned char * md5(const unsigned char * in) {
MD5_CTX ctx;
unsigned char * result1 = (unsigned char *)malloc(MD5_DIGEST_LENGTH);
MD5_Init(&ctx);
printf("len: %lu \n", strlen(in));
MD5_Update(&ctx, in, strlen(in));
MD5_Final(result1, &ctx);
return result1;
}
int main(int argc, char *argv[])
{
const char * i = "abcdef";
unsigned char * data = (unsigned char *)malloc(strlen(i) + 1);
strncpy(data, i, strlen(i));
unsigned char * result1 = md5(data);
free(data);
printf("%s\n", unsigned_to_signed_char(result1, MD5_DIGEST_LENGTH));
unsigned char * result2 = md5(result1);
free(result1);
printf("%s\n", unsigned_to_signed_char(result2, MD5_DIGEST_LENGTH));
unsigned char * result3 = md5(result2);
free(result2);
printf("%s\n", unsigned_to_signed_char(result3, MD5_DIGEST_LENGTH));
return 0;
}
makeflle
all:
cc t.c -Wall -L/usr/local/lib -lcrypto
和 t.py
#!/usr/bin/env python
import hashlib
import binascii
src = 'abcdef'
a = hashlib.md5(src).digest()
b = hashlib.md5(a).digest()
c = hashlib.md5(b).hexdigest().upper()
print binascii.b2a_hex(a)
print binascii.b2a_hex(b)
print c
python 脚本在 Debian6 x86 和 MacOS 10.6 上的结果是相同的:
e80b5017098950fc58aad83c8c14978e
b91282813df47352f7fe2c0c1fe9e5bd
85E4FBD1BD400329009162A8023E1E4B
MacOS 上的 c 版本是:
len: 6
e80b5017098950fc58aad83c8c14978e
len: 48
eac9eaa9a4e5673c5d3773d7a3108c18
len: 64
73f83fa79e53e9415446c66802a0383f
为什么与 Debian6 不同?
Debian 环境:
gcc (Debian 4.4.5-8) 4.4.5
Python 2.6.6
Linux shuge-lab 2.6.26-2-686 #1 SMP Thu Nov 25 01:53:57 UTC 2010 i686 GNU/Linux
OpenSSL 是测试安装的存储库。
MacOS 环境:
i686-apple-darwin10-gcc-4.2.1 (GCC) 4.2.1 (Apple Inc. build 5666) (dot 3)
Python 2.7.1
Darwin Lees-Box.local 10.7.0 Darwin Kernel Version 10.7.0: Sat Jan 29 15:17:16 PST 2011; root:xnu-1504.9.37~1/RELEASE_I386 i386
OpenSSL 是从 MacPort 安装的。
openssl @1.0.0d (devel, security)
OpenSSL SSL/TLS cryptography library
t.c
#include <stdio.h>
#include <stdlib.h>
#include <string.h>
#include <openssl/md5.h>
static char* unsigned_to_signed_char(const unsigned char* in , int len) {
char* res = (char*)malloc(len * 2 + 1);
int i = 0;
memset(res , 0 , len * 2 + 1);
while(i < len) {
sprintf(res + i * 2 , "%02x" , in[i]);
i ++;
};
return res;
}
static unsigned char * md5(const unsigned char * in) {
MD5_CTX ctx;
unsigned char * result1 = (unsigned char *)malloc(MD5_DIGEST_LENGTH);
MD5_Init(&ctx);
printf("len: %lu \n", strlen(in));
MD5_Update(&ctx, in, strlen(in));
MD5_Final(result1, &ctx);
return result1;
}
int main(int argc, char *argv[])
{
const char * i = "abcdef";
unsigned char * data = (unsigned char *)malloc(strlen(i) + 1);
strncpy(data, i, strlen(i));
unsigned char * result1 = md5(data);
free(data);
printf("%s\n", unsigned_to_signed_char(result1, MD5_DIGEST_LENGTH));
unsigned char * result2 = md5(result1);
free(result1);
printf("%s\n", unsigned_to_signed_char(result2, MD5_DIGEST_LENGTH));
unsigned char * result3 = md5(result2);
free(result2);
printf("%s\n", unsigned_to_signed_char(result3, MD5_DIGEST_LENGTH));
return 0;
}
makeflle
all:
cc t.c -Wall -L/usr/local/lib -lcrypto
and t.py
#!/usr/bin/env python
import hashlib
import binascii
src = 'abcdef'
a = hashlib.md5(src).digest()
b = hashlib.md5(a).digest()
c = hashlib.md5(b).hexdigest().upper()
print binascii.b2a_hex(a)
print binascii.b2a_hex(b)
print c
The results of python script on Debian6 x86 and MacOS 10.6 are the same:
e80b5017098950fc58aad83c8c14978e
b91282813df47352f7fe2c0c1fe9e5bd
85E4FBD1BD400329009162A8023E1E4B
the c version on MacOS is:
len: 6
e80b5017098950fc58aad83c8c14978e
len: 48
eac9eaa9a4e5673c5d3773d7a3108c18
len: 64
73f83fa79e53e9415446c66802a0383f
Why it is different from Debian6 ?
Debian environment:
gcc (Debian 4.4.5-8) 4.4.5
Python 2.6.6
Linux shuge-lab 2.6.26-2-686 #1 SMP Thu Nov 25 01:53:57 UTC 2010 i686 GNU/Linux
OpenSSL was installed from testing repository.
MacOS environment:
i686-apple-darwin10-gcc-4.2.1 (GCC) 4.2.1 (Apple Inc. build 5666) (dot 3)
Python 2.7.1
Darwin Lees-Box.local 10.7.0 Darwin Kernel Version 10.7.0: Sat Jan 29 15:17:16 PST 2011; root:xnu-1504.9.37~1/RELEASE_I386 i386
OpenSSL was installed from MacPort.
openssl @1.0.0d (devel, security)
OpenSSL SSL/TLS cryptography library
如果你对这篇内容有疑问,欢迎到本站社区发帖提问 参与讨论,获取更多帮助,或者扫码二维码加入 Web 技术交流群。
绑定邮箱获取回复消息
由于您还没有绑定你的真实邮箱,如果其他用户或者作者回复了您的评论,将不能在第一时间通知您!
发布评论
评论(3)
我认为您正在为 MD5 结果精确分配字节,而不以
\0
结尾。然后,您正在计算内存块的 MD5,该内存块以先前 MD5 计算的结果开始,但后面有一些随机字节。您应该为结果多分配一个字节并将其设置为\0
。我的建议:
I think you are allocating bytes exactly for MD5 result, without ending
\0
. Then you are calculating MD5 of block of memory that starts with result from previous MD5 calculating but with some random bytes after it. You should allocate one byte more for result and set it to\0
.My proposal:
到目前为止,我认为答案似乎还没有足够清楚地说明这个问题。具体来说,问题在于这一行:
您传入的数据块不是
'\0'
终止的,因此对 update 的调用可能会尝试处理超出 MD5_DIGEST_LENGTH 缓冲区末尾的更多字节。简而言之,停止使用 strlen() 来计算任意字节缓冲区的长度:您知道缓冲区应该有多长,因此可以传递长度。The answers so far don't seem to me to have stated the issue clearly enough. Specifically the problem is the line:
The data block you pass in is not
'\0'
terminated, so the call to update may try to process further bytes beyond the end of the MD5_DIGEST_LENGTH buffer. In short, stop usingstrlen()
to work out the length of an arbitrary buffer of bytes: you know how long the buffers are supposed to be so pass the length around.您不会
'\0'
终止您传递给md5
的字符串(我假设采用
'\0'
终止字符串,因为您没有将其传递给长度)。代码
完全损坏:
sizeof( strlen( i ) )
与sizeof( size_t )
,在典型机器上为 4 或 8。但你不想要无论如何,memset 。尝试将它们替换为:
或者更好:
,然后将
i.c_str()
传递给md5
(并声明md5
以获取字符
。 (我也会在常量*
md5()
中使用std::vector
,并让它归还。并且
unsigned_to_signed_char
将采用std::vector
并返回std::string
。)You don't
'\0'
terminate the string you're passing tomd5
(which Isuppose takes a
'\0'
terminated string, since you don't pass it thelength). The code
is completely broken:
sizeof( strlen( i ) )
is the same assizeof( size_t )
, 4 or 8 on typical machines. But you don't want thememset
anyway. Try replacing these with:Or better yet:
, then pass
i.c_str()
tomd5
(and declaremd5
to take achar
. (I'd use anconst*
std::vector<unsigned char>
inmd5()
as well,and have it return it. And
unsigned_to_signed_char
would take thestd::vector<unsigned char>
and returnstd::string
.)