3rdparty: Sync libchdr to commit cb077337d53392454e7100a0fd07139ca678e527

TheLastRar 2024-12-18 22:03:40 +00:00 committed by Ty
parent 7f59757eea
commit 72a9f18456
5 changed files with 495 additions and 312 deletions

View File

@@ -58,67 +58,67 @@ extern "C" {
V1 header:
[ 0] char tag[8]; // 'MComprHD'
[ 8] UINT32 length; // length of header (including tag and length fields)
[ 12] UINT32 version; // drive format version
[ 16] UINT32 flags; // flags (see below)
[ 20] UINT32 compression; // compression type
[ 24] UINT32 hunksize; // 512-byte sectors per hunk
[ 28] UINT32 totalhunks; // total # of hunks represented
[ 32] UINT32 cylinders; // number of cylinders on hard disk
[ 36] UINT32 heads; // number of heads on hard disk
[ 40] UINT32 sectors; // number of sectors on hard disk
[ 44] UINT8 md5[16]; // MD5 checksum of raw data
[ 60] UINT8 parentmd5[16]; // MD5 checksum of parent file
[ 8] uint32_t length; // length of header (including tag and length fields)
[ 12] uint32_t version; // drive format version
[ 16] uint32_t flags; // flags (see below)
[ 20] uint32_t compression; // compression type
[ 24] uint32_t hunksize; // 512-byte sectors per hunk
[ 28] uint32_t totalhunks; // total # of hunks represented
[ 32] uint32_t cylinders; // number of cylinders on hard disk
[ 36] uint32_t heads; // number of heads on hard disk
[ 40] uint32_t sectors; // number of sectors on hard disk
[ 44] uint8_t md5[16]; // MD5 checksum of raw data
[ 60] uint8_t parentmd5[16]; // MD5 checksum of parent file
[ 76] (V1 header length)
V2 header:
[ 0] char tag[8]; // 'MComprHD'
[ 8] UINT32 length; // length of header (including tag and length fields)
[ 12] UINT32 version; // drive format version
[ 16] UINT32 flags; // flags (see below)
[ 20] UINT32 compression; // compression type
[ 24] UINT32 hunksize; // seclen-byte sectors per hunk
[ 28] UINT32 totalhunks; // total # of hunks represented
[ 32] UINT32 cylinders; // number of cylinders on hard disk
[ 36] UINT32 heads; // number of heads on hard disk
[ 40] UINT32 sectors; // number of sectors on hard disk
[ 44] UINT8 md5[16]; // MD5 checksum of raw data
[ 60] UINT8 parentmd5[16]; // MD5 checksum of parent file
[ 76] UINT32 seclen; // number of bytes per sector
[ 8] uint32_t length; // length of header (including tag and length fields)
[ 12] uint32_t version; // drive format version
[ 16] uint32_t flags; // flags (see below)
[ 20] uint32_t compression; // compression type
[ 24] uint32_t hunksize; // seclen-byte sectors per hunk
[ 28] uint32_t totalhunks; // total # of hunks represented
[ 32] uint32_t cylinders; // number of cylinders on hard disk
[ 36] uint32_t heads; // number of heads on hard disk
[ 40] uint32_t sectors; // number of sectors on hard disk
[ 44] uint8_t md5[16]; // MD5 checksum of raw data
[ 60] uint8_t parentmd5[16]; // MD5 checksum of parent file
[ 76] uint32_t seclen; // number of bytes per sector
[ 80] (V2 header length)
V3 header:
[ 0] char tag[8]; // 'MComprHD'
[ 8] UINT32 length; // length of header (including tag and length fields)
[ 12] UINT32 version; // drive format version
[ 16] UINT32 flags; // flags (see below)
[ 20] UINT32 compression; // compression type
[ 24] UINT32 totalhunks; // total # of hunks represented
[ 28] UINT64 logicalbytes; // logical size of the data (in bytes)
[ 36] UINT64 metaoffset; // offset to the first blob of metadata
[ 44] UINT8 md5[16]; // MD5 checksum of raw data
[ 60] UINT8 parentmd5[16]; // MD5 checksum of parent file
[ 76] UINT32 hunkbytes; // number of bytes per hunk
[ 80] UINT8 sha1[20]; // SHA1 checksum of raw data
[100] UINT8 parentsha1[20];// SHA1 checksum of parent file
[ 8] uint32_t length; // length of header (including tag and length fields)
[ 12] uint32_t version; // drive format version
[ 16] uint32_t flags; // flags (see below)
[ 20] uint32_t compression; // compression type
[ 24] uint32_t totalhunks; // total # of hunks represented
[ 28] uint64_t logicalbytes; // logical size of the data (in bytes)
[ 36] uint64_t metaoffset; // offset to the first blob of metadata
[ 44] uint8_t md5[16]; // MD5 checksum of raw data
[ 60] uint8_t parentmd5[16]; // MD5 checksum of parent file
[ 76] uint32_t hunkbytes; // number of bytes per hunk
[ 80] uint8_t sha1[20]; // SHA1 checksum of raw data
[100] uint8_t parentsha1[20];// SHA1 checksum of parent file
[120] (V3 header length)
V4 header:
[ 0] char tag[8]; // 'MComprHD'
[ 8] UINT32 length; // length of header (including tag and length fields)
[ 12] UINT32 version; // drive format version
[ 16] UINT32 flags; // flags (see below)
[ 20] UINT32 compression; // compression type
[ 24] UINT32 totalhunks; // total # of hunks represented
[ 28] UINT64 logicalbytes; // logical size of the data (in bytes)
[ 36] UINT64 metaoffset; // offset to the first blob of metadata
[ 44] UINT32 hunkbytes; // number of bytes per hunk
[ 48] UINT8 sha1[20]; // combined raw+meta SHA1
[ 68] UINT8 parentsha1[20];// combined raw+meta SHA1 of parent
[ 88] UINT8 rawsha1[20]; // raw data SHA1
[ 8] uint32_t length; // length of header (including tag and length fields)
[ 12] uint32_t version; // drive format version
[ 16] uint32_t flags; // flags (see below)
[ 20] uint32_t compression; // compression type
[ 24] uint32_t totalhunks; // total # of hunks represented
[ 28] uint64_t logicalbytes; // logical size of the data (in bytes)
[ 36] uint64_t metaoffset; // offset to the first blob of metadata
[ 44] uint32_t hunkbytes; // number of bytes per hunk
[ 48] uint8_t sha1[20]; // combined raw+meta SHA1
[ 68] uint8_t parentsha1[20];// combined raw+meta SHA1 of parent
[ 88] uint8_t rawsha1[20]; // raw data SHA1
[108] (V4 header length)
Flags:
@@ -130,17 +130,17 @@ extern "C" {
V5 header:
[ 0] char tag[8]; // 'MComprHD'
[ 8] uint32_t length; // length of header (including tag and length fields)
[ 12] uint32_t version; // drive format version
[ 16] uint32_t compressors[4];// which custom compressors are used?
[ 32] uint64_t logicalbytes; // logical size of the data (in bytes)
[ 40] uint64_t mapoffset; // offset to the map
[ 48] uint64_t metaoffset; // offset to the first blob of metadata
[ 56] uint32_t hunkbytes; // number of bytes per hunk (512k maximum)
[ 60] uint32_t unitbytes; // number of bytes per unit within each hunk
[ 64] uint8_t rawsha1[20]; // raw data SHA1
[ 84] uint8_t sha1[20]; // combined raw+meta SHA1
[104] uint8_t parentsha1[20];// combined raw+meta SHA1 of parent
[ 8] uint32_t length; // length of header (including tag and length fields)
[ 12] uint32_t version; // drive format version
[ 16] uint32_t compressors[4];// which custom compressors are used?
[ 32] uint64_t logicalbytes; // logical size of the data (in bytes)
[ 40] uint64_t mapoffset; // offset to the map
[ 48] uint64_t metaoffset; // offset to the first blob of metadata
[ 56] uint32_t hunkbytes; // number of bytes per hunk (512k maximum)
[ 60] uint32_t unitbytes; // number of bytes per unit within each hunk
[ 64] uint8_t rawsha1[20]; // raw data SHA1
[ 84] uint8_t sha1[20]; // combined raw+meta SHA1
[104] uint8_t parentsha1[20];// combined raw+meta SHA1 of parent
[124] (V5 header length)
If parentsha1 != 0, we have a parent (no need for flags)
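The layouts above share their first three fields, and the multi-byte values are stored big-endian on disk. A minimal sketch of sniffing the tag and format version from a raw header, assuming that big-endian convention and using made-up helper names (an illustration, not libchdr's internal reader):

#include <stdint.h>
#include <string.h>

/* read a big-endian 32-bit value from a raw header buffer */
static uint32_t read_be32(const uint8_t *p)
{
    return ((uint32_t)p[0] << 24) | ((uint32_t)p[1] << 16) |
           ((uint32_t)p[2] << 8)  |  (uint32_t)p[3];
}

/* returns the drive format version, or 0 if the tag does not match;
   the tag, length and version fields sit at the same offsets in every
   header revision listed above */
static uint32_t peek_chd_version(const uint8_t *rawheader)
{
    if (memcmp(rawheader, "MComprHD", 8) != 0)
        return 0;
    return read_be32(rawheader + 12);
}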
@@ -148,22 +148,22 @@ extern "C" {
V5 uncompressed map format:
[ 0] uint32_t offset; // starting offset / hunk size
[ 0] uint32_t offset; // starting offset / hunk size
V5 compressed map format header:
[ 0] uint32_t length; // length of compressed map
[ 0] uint32_t length; // length of compressed map
[ 4] UINT48 datastart; // offset of first block
[ 10] uint16_t crc; // crc-16 of the map
[ 12] uint8_t lengthbits; // bits used to encode complength
[ 13] uint8_t hunkbits; // bits used to encode self-refs
[ 14] uint8_t parentunitbits; // bits used to encode parent unit refs
[ 15] uint8_t reserved; // future use
[ 12] uint8_t lengthbits; // bits used to encode complength
[ 13] uint8_t hunkbits; // bits used to encode self-refs
[ 14] uint8_t parentunitbits; // bits used to encode parent unit refs
[ 15] uint8_t reserved; // future use
[ 16] (compressed header length)
Each compressed map entry, once expanded, looks like:
[ 0] uint8_t compression; // compression type
[ 0] uint8_t compression; // compression type
[ 1] UINT24 complength; // compressed length
[ 4] UINT48 offset; // offset
[ 10] uint16_t crc; // crc-16 of the data
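Per the comment above, an uncompressed V5 map entry stores the hunk's starting offset divided by the hunk size. A small sketch of turning such an entry back into an absolute file offset, assuming the entries are big-endian like the rest of the format; the helper name is hypothetical:

/* file offset of hunk 'hunknum' in an uncompressed V5 CHD:
   each 4-byte map entry holds starting offset / hunk size */
static uint64_t v5_uncompressed_hunk_offset(const uint8_t *rawmap,
                                            uint32_t hunknum,
                                            uint32_t hunkbytes)
{
    const uint8_t *entry = rawmap + 4 * hunknum;
    uint32_t units = ((uint32_t)entry[0] << 24) | ((uint32_t)entry[1] << 16) |
                     ((uint32_t)entry[2] << 8)  |  (uint32_t)entry[3];
    return (uint64_t)units * hunkbytes;
}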
@@ -220,7 +220,7 @@ extern "C" {
/* metadata parameters */
#define CHDMETATAG_WILDCARD 0
#define CHD_METAINDEX_APPEND ((UINT32)-1)
#define CHD_METAINDEX_APPEND ((uint32_t)-1)
/* metadata flags */
#define CHD_MDFLAGS_CHECKSUM 0x01 /* indicates data is checksummed */
@@ -307,32 +307,32 @@ typedef struct _chd_file chd_file;
typedef struct _chd_header chd_header;
struct _chd_header
{
UINT32 length; /* length of header data */
UINT32 version; /* drive format version */
UINT32 flags; /* flags field */
UINT32 compression[4]; /* compression type */
UINT32 hunkbytes; /* number of bytes per hunk */
UINT32 totalhunks; /* total # of hunks represented */
UINT64 logicalbytes; /* logical size of the data */
UINT64 metaoffset; /* offset in file of first metadata */
UINT64 mapoffset; /* TODO V5 */
UINT8 md5[CHD_MD5_BYTES]; /* overall MD5 checksum */
UINT8 parentmd5[CHD_MD5_BYTES]; /* overall MD5 checksum of parent */
UINT8 sha1[CHD_SHA1_BYTES]; /* overall SHA1 checksum */
UINT8 rawsha1[CHD_SHA1_BYTES]; /* SHA1 checksum of raw data */
UINT8 parentsha1[CHD_SHA1_BYTES]; /* overall SHA1 checksum of parent */
UINT32 unitbytes; /* TODO V5 */
UINT64 unitcount; /* TODO V5 */
UINT32 hunkcount; /* TODO V5 */
uint32_t length; /* length of header data */
uint32_t version; /* drive format version */
uint32_t flags; /* flags field */
uint32_t compression[4]; /* compression type */
uint32_t hunkbytes; /* number of bytes per hunk */
uint32_t totalhunks; /* total # of hunks represented */
uint64_t logicalbytes; /* logical size of the data */
uint64_t metaoffset; /* offset in file of first metadata */
uint64_t mapoffset; /* TODO V5 */
uint8_t md5[CHD_MD5_BYTES]; /* overall MD5 checksum */
uint8_t parentmd5[CHD_MD5_BYTES]; /* overall MD5 checksum of parent */
uint8_t sha1[CHD_SHA1_BYTES]; /* overall SHA1 checksum */
uint8_t rawsha1[CHD_SHA1_BYTES]; /* SHA1 checksum of raw data */
uint8_t parentsha1[CHD_SHA1_BYTES]; /* overall SHA1 checksum of parent */
uint32_t unitbytes; /* TODO V5 */
uint64_t unitcount; /* TODO V5 */
uint32_t hunkcount; /* TODO V5 */
/* map information */
UINT32 mapentrybytes; /* length of each entry in a map (V5) */
UINT8* rawmap; /* raw map data */
uint32_t mapentrybytes; /* length of each entry in a map (V5) */
uint8_t* rawmap; /* raw map data */
UINT32 obsolete_cylinders; /* obsolete field -- do not use! */
UINT32 obsolete_sectors; /* obsolete field -- do not use! */
UINT32 obsolete_heads; /* obsolete field -- do not use! */
UINT32 obsolete_hunksize; /* obsolete field -- do not use! */
uint32_t obsolete_cylinders; /* obsolete field -- do not use! */
uint32_t obsolete_sectors; /* obsolete field -- do not use! */
uint32_t obsolete_heads; /* obsolete field -- do not use! */
uint32_t obsolete_hunksize; /* obsolete field -- do not use! */
};
@@ -340,10 +340,10 @@ struct _chd_header
typedef struct _chd_verify_result chd_verify_result;
struct _chd_verify_result
{
UINT8 md5[CHD_MD5_BYTES]; /* overall MD5 checksum */
UINT8 sha1[CHD_SHA1_BYTES]; /* overall SHA1 checksum */
UINT8 rawsha1[CHD_SHA1_BYTES]; /* SHA1 checksum of raw data */
UINT8 metasha1[CHD_SHA1_BYTES]; /* SHA1 checksum of metadata */
uint8_t md5[CHD_MD5_BYTES]; /* overall MD5 checksum */
uint8_t sha1[CHD_SHA1_BYTES]; /* overall SHA1 checksum */
uint8_t rawsha1[CHD_SHA1_BYTES]; /* SHA1 checksum of raw data */
uint8_t metasha1[CHD_SHA1_BYTES]; /* SHA1 checksum of metadata */
};
@@ -369,10 +369,10 @@ struct _chd_verify_result
/* ----- CHD file management ----- */
/* create a new CHD file fitting the given description */
/* chd_error chd_create(const char *filename, UINT64 logicalbytes, UINT32 hunkbytes, UINT32 compression, chd_file *parent); */
/* chd_error chd_create(const char *filename, uint64_t logicalbytes, uint32_t hunkbytes, uint32_t compression, chd_file *parent); */
/* same as chd_create(), but accepts an already-opened core_file object */
/* chd_error chd_create_file(core_file *file, UINT64 logicalbytes, UINT32 hunkbytes, UINT32 compression, chd_file *parent); */
/* chd_error chd_create_file(core_file *file, uint64_t logicalbytes, uint32_t hunkbytes, uint32_t compression, chd_file *parent); */
/* open an existing CHD file */
CHD_EXPORT chd_error chd_open_core_file(core_file *file, int mode, chd_file *parent, chd_file **chd);
@@ -408,14 +408,14 @@ CHD_EXPORT chd_error chd_read_header(const char *filename, chd_header *header);
/* ----- core data read/write ----- */
/* read one hunk from the CHD file */
CHD_EXPORT chd_error chd_read(chd_file *chd, UINT32 hunknum, void *buffer);
CHD_EXPORT chd_error chd_read(chd_file *chd, uint32_t hunknum, void *buffer);
/* ----- metadata management ----- */
/* get indexed metadata of a particular sort */
CHD_EXPORT chd_error chd_get_metadata(chd_file *chd, UINT32 searchtag, UINT32 searchindex, void *output, UINT32 outputlen, UINT32 *resultlen, UINT32 *resulttag, UINT8 *resultflags);
CHD_EXPORT chd_error chd_get_metadata(chd_file *chd, uint32_t searchtag, uint32_t searchindex, void *output, uint32_t outputlen, uint32_t *resultlen, uint32_t *resulttag, uint8_t *resultflags);
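chd_get_metadata() looks up metadata by (tag, index), and CHDMETATAG_WILDCARD from the defines above matches any tag, so all entries can be walked by incrementing the index until the call stops returning CHDERR_NONE. A rough sketch; the scratch buffer size and include path are the only assumptions beyond this header:

#include <stdint.h>
#include <libchdr/chd.h>   /* include path assumed; adjust to your tree */

/* enumerate every metadata entry of an already-opened chd_file */
static void dump_metadata_tags(chd_file *chd)
{
    char buf[256];                       /* arbitrary scratch size */
    uint32_t resultlen, resulttag, index;
    uint8_t resultflags;

    for (index = 0; ; index++)
    {
        chd_error err = chd_get_metadata(chd, CHDMETATAG_WILDCARD, index,
                                         buf, (uint32_t)sizeof(buf),
                                         &resultlen, &resulttag, &resultflags);
        if (err != CHDERR_NONE)
            break;   /* no entry at this index -- enumeration is done */
        /* resulttag identifies the metadata type, resultlen its stored length */
    }
}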
@@ -426,7 +426,7 @@ CHD_EXPORT chd_error chd_get_metadata(chd_file *chd, UINT32 searchtag, UINT32 se
CHD_EXPORT chd_error chd_codec_config(chd_file *chd, int param, void *config);
/* return a string description of a codec */
CHD_EXPORT const char *chd_get_codec_name(UINT32 codec);
CHD_EXPORT const char *chd_get_codec_name(uint32_t codec);
#ifdef __cplusplus
}
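As a usage sketch for the struct above, chd_read_header() (declared later in this header) fills a chd_header directly from a file name; only the include path and the minimal error handling here are assumptions:

#include <stdio.h>
#include <libchdr/chd.h>   /* include path assumed; adjust to your tree */

static int print_chd_info(const char *path)
{
    chd_header header;
    chd_error err = chd_read_header(path, &header);
    if (err != CHDERR_NONE)
    {
        fprintf(stderr, "chd_read_header failed (%d)\n", (int)err);
        return -1;
    }
    printf("CHD v%u: %u hunks x %u bytes, %llu logical bytes\n",
           (unsigned)header.version, (unsigned)header.totalhunks,
           (unsigned)header.hunkbytes,
           (unsigned long long)header.logicalbytes);
    return 0;
}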

View File

@@ -8,26 +8,13 @@
#include <streams/file_stream_transforms.h>
#endif
#ifndef ARRAY_LENGTH
#define ARRAY_LENGTH(x) (sizeof(x)/sizeof(x[0]))
#if defined(__PS3__) || defined(__PSL1GHT__)
#undef UINT32
#undef UINT16
#undef UINT8
#undef INT32
#undef INT16
#undef INT8
#endif
typedef uint64_t UINT64;
typedef uint32_t UINT32;
typedef uint16_t UINT16;
typedef uint8_t UINT8;
typedef int64_t INT64;
typedef int32_t INT32;
typedef int16_t INT16;
typedef int8_t INT8;
#ifndef ARRAY_SIZE
#define ARRAY_SIZE(x) (sizeof(x)/sizeof(x[0]))
#endif
typedef struct chd_core_file {
/*
@@ -41,9 +28,9 @@ typedef struct chd_core_file {
* undefined because many implementations will seek to the end of the
* file and call ftell.
*
* on error, (UINT64)-1 is returned.
* on error, (uint64_t)-1 is returned.
*/
UINT64(*fsize)(struct chd_core_file*);
uint64_t(*fsize)(struct chd_core_file*);
/*
* should match the behavior of fread, except the FILE* argument at the end
@@ -55,7 +42,7 @@ typedef struct chd_core_file {
int (*fclose)(struct chd_core_file*);
// fseek clone
int (*fseek)(struct chd_core_file*, INT64, int);
int (*fseek)(struct chd_core_file*, int64_t, int);
} core_file;
static inline int core_fclose(core_file *fp) {
@@ -66,11 +53,11 @@ static inline size_t core_fread(core_file *fp, void *ptr, size_t len) {
return fp->fread(ptr, 1, len, fp);
}
static inline int core_fseek(core_file* fp, INT64 offset, int whence) {
static inline int core_fseek(core_file* fp, int64_t offset, int whence) {
return fp->fseek(fp, offset, whence);
}
static inline UINT64 core_fsize(core_file *fp)
static inline uint64_t core_fsize(core_file *fp)
{
return fp->fsize(fp);
}
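A sketch of backing this interface with stdio so that chd_open_core_file() from chd.h can operate on a plain FILE*. It assumes the struct's user-data pointer is the argp member of chd_core_file (not visible in this hunk) and that CHD_OPEN_READ is the read-only mode constant:

#include <stdio.h>
#include <stdlib.h>
#include <stdint.h>
#include <libchdr/coretypes.h>   /* include path assumed */

static uint64_t stdio_fsize(core_file *cf)
{
    FILE *f = (FILE *)cf->argp;
    long cur = ftell(f);
    long end;
    if (fseek(f, 0, SEEK_END) != 0)
        return (uint64_t)-1;            /* matches the error convention above */
    end = ftell(f);
    fseek(f, cur, SEEK_SET);
    return (end < 0) ? (uint64_t)-1 : (uint64_t)end;
}

static size_t stdio_fread(void *ptr, size_t size, size_t count, core_file *cf)
{
    return fread(ptr, size, count, (FILE *)cf->argp);
}

static int stdio_fclose(core_file *cf)
{
    int ret = fclose((FILE *)cf->argp);
    free(cf);                           /* allocated by core_file_from_stdio() */
    return ret;
}

static int stdio_fseek(core_file *cf, int64_t offset, int whence)
{
    return fseek((FILE *)cf->argp, (long)offset, whence);
}

static core_file *core_file_from_stdio(FILE *f)
{
    core_file *cf = (core_file *)calloc(1, sizeof(*cf));
    if (!cf)
        return NULL;
    cf->argp   = f;
    cf->fsize  = stdio_fsize;
    cf->fread  = stdio_fread;
    cf->fclose = stdio_fclose;
    cf->fseek  = stdio_fseek;
    return cf;
}

/* usage (assumed mode constant):
 *   core_file *cf = core_file_from_stdio(fopen("game.chd", "rb"));
 *   chd_file *chd = NULL;
 *   chd_error err = chd_open_core_file(cf, CHD_OPEN_READ, NULL, &chd);
 */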

View File

@@ -85,6 +85,6 @@ int huffman_build_tree(struct huffman_decoder* decoder, uint32_t totaldata, uint
enum huffman_error huffman_assign_canonical_codes(struct huffman_decoder* decoder);
enum huffman_error huffman_compute_tree_from_histo(struct huffman_decoder* decoder);
void huffman_build_lookup_table(struct huffman_decoder* decoder);
enum huffman_error huffman_build_lookup_table(struct huffman_decoder* decoder);
#endif

File diff suppressed because it is too large

View File

@@ -230,7 +230,9 @@ enum huffman_error huffman_import_tree_rle(struct huffman_decoder* decoder, stru
return error;
/* build the lookup table */
huffman_build_lookup_table(decoder);
error = huffman_build_lookup_table(decoder);
if (error != HUFFERR_NONE)
return error;
/* determine final input length and report errors */
return bitstream_overflow(bitbuf) ? HUFFERR_INPUT_BUFFER_TOO_SMALL : HUFFERR_NONE;
@@ -271,8 +273,16 @@ enum huffman_error huffman_import_tree_huffman(struct huffman_decoder* decoder,
/* then regenerate the tree */
error = huffman_assign_canonical_codes(smallhuff);
if (error != HUFFERR_NONE)
{
delete_huffman_decoder(smallhuff);
return error;
huffman_build_lookup_table(smallhuff);
}
error = huffman_build_lookup_table(smallhuff);
if (error != HUFFERR_NONE)
{
delete_huffman_decoder(smallhuff);
return error;
}
/* determine the maximum length of an RLE count */
temp = decoder->numcodes - 9;
@@ -308,7 +318,9 @@ enum huffman_error huffman_import_tree_huffman(struct huffman_decoder* decoder,
return error;
/* build the lookup table */
huffman_build_lookup_table(decoder);
error = huffman_build_lookup_table(decoder);
if (error != HUFFERR_NONE)
return error;
/* determine final input length and report errors */
return bitstream_overflow(bitbuf) ? HUFFERR_INPUT_BUFFER_TOO_SMALL : HUFFERR_NONE;
@@ -523,8 +535,9 @@ enum huffman_error huffman_assign_canonical_codes(struct huffman_decode
*-------------------------------------------------
*/
void huffman_build_lookup_table(struct huffman_decoder* decoder)
enum huffman_error huffman_build_lookup_table(struct huffman_decoder* decoder)
{
const lookup_value* lookupend = &decoder->lookup[(1u << decoder->maxbits)];
uint32_t curcode;
/* iterate over all codes */
for (curcode = 0; curcode < decoder->numcodes; curcode++)
@@ -533,9 +546,10 @@ void huffman_build_lookup_table(struct huffman_decoder* decoder)
struct node_t* node = &decoder->huffnode[curcode];
if (node->numbits > 0)
{
int shift;
lookup_value *dest;
lookup_value *destend;
int shift;
lookup_value *dest;
lookup_value *destend;
/* set up the entry */
lookup_value value = MAKE_LOOKUP(curcode, node->numbits);
@@ -543,8 +557,12 @@ void huffman_build_lookup_table(struct huffman_decoder* decoder)
shift = decoder->maxbits - node->numbits;
dest = &decoder->lookup[node->bits << shift];
destend = &decoder->lookup[((node->bits + 1) << shift) - 1];
if (dest >= lookupend || destend >= lookupend || destend < dest)
return HUFFERR_INTERNAL_INCONSISTENCY;
while (dest <= destend)
*dest++ = value;
}
}
return HUFFERR_NONE;
}
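The new lookupend check rejects a malformed tree whose bits value has more significant bits than numbits claims, since such a code would index past the 1 << maxbits table entries. A small worked example of the arithmetic, with values chosen purely for illustration:

#include <assert.h>

/* illustration of the arithmetic behind the new bounds check */
static void lookup_range_example(void)
{
    const int maxbits = 10;              /* table has 1 << 10 = 1024 slots */
    const int numbits = 3;               /* example code length */
    const unsigned bits = 5;             /* 0b101 */
    const int shift = maxbits - numbits; /* 7 */

    assert((bits << shift) == 640);              /* first slot written */
    assert((((bits + 1) << shift) - 1) == 767);  /* last slot: 128 slots total */

    /* a corrupted tree with bits = 13 (0b1101) but numbits still 3 would start
     * at slot 13 << 7 = 1664, past the 1024-entry table; the new check catches
     * this and returns HUFFERR_INTERNAL_INCONSISTENCY instead of writing
     * out of bounds */
}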