Define DC_TIMEZONE_NONE as a signed integer
The hexadecimal value 0x80000000 is too large to be represented as a signed 32-bit integer, so the constant's default type is an unsigned 32-bit integer. This is a bit annoying because the timezone field is defined as a signed integer, and comparisons against the constant therefore produce -Wsign-compare compiler warnings. Fixed by switching to INT_MIN, which has the same underlying value but is interpreted as a signed integer.
parent 0688b74099
commit 10a4ec0b08
@@ -22,11 +22,13 @@
 #ifndef DC_DATETIME_H
 #define DC_DATETIME_H
 
+#include <limits.h>
+
 #ifdef __cplusplus
 extern "C" {
 #endif /* __cplusplus */
 
-#define DC_TIMEZONE_NONE 0x80000000
+#define DC_TIMEZONE_NONE INT_MIN
 
 #if defined (_WIN32) && !defined (__GNUC__)
 typedef __int64 dc_ticks_t;
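
The sketch below is not part of the commit; it only illustrates the warning described in the commit message. OLD_TIMEZONE_NONE, NEW_TIMEZONE_NONE and tz are made-up names used to compare the two candidate definitions side by side. Assuming a platform with 32-bit int, 0x80000000 cannot be represented as int and therefore has type unsigned int, while INT_MIN is a plain int with the same bit pattern.

#include <limits.h>
#include <stdio.h>

#define OLD_TIMEZONE_NONE 0x80000000  /* does not fit in int, so its type is unsigned int */
#define NEW_TIMEZONE_NONE INT_MIN     /* same bit pattern, but type int */

int main (void)
{
	/* Hypothetical signed timezone field, holding the "no timezone" sentinel. */
	int tz = INT_MIN;

	/* Signed/unsigned comparison: tz is converted to unsigned int first,
	 * so the compiler emits a -Wsign-compare warning here. */
	if (tz == OLD_TIMEZONE_NONE)
		printf ("old constant matches (with a warning)\n");

	/* Both operands are signed int: no warning. */
	if (tz == NEW_TIMEZONE_NONE)
		printf ("new constant matches (no warning)\n");

	return 0;
}

Compiling with gcc -Wextra (which enables -Wsign-compare) warns only on the first comparison; both comparisons are true at runtime, since the sentinel's bit pattern is identical either way.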