/*
 * Private includes and definitions for userspace use of XZ Embedded
 *
 * Author: Lasse Collin <lasse.collin@tukaani.org>
 *
 * This file has been put into the public domain.
 * You can do whatever you want with this file.
 */

#ifndef XZ_CONFIG_H
#define XZ_CONFIG_H

/* Uncomment to enable building of xz_dec_catrun(). */
/* #define XZ_DEC_CONCATENATED */

/* Uncomment to enable CRC64 support. */
/* #define XZ_USE_CRC64 */

/* Uncomment as needed to enable BCJ filter decoders. */
/* #define XZ_DEC_X86 */
/* #define XZ_DEC_POWERPC */
/* #define XZ_DEC_IA64 */
/* #define XZ_DEC_ARM */
/* #define XZ_DEC_ARMTHUMB */
/* #define XZ_DEC_SPARC */

/*
 * MSVC doesn't support modern C, but XZ Embedded is mostly C89,
 * so these definitions are enough.
 */
#ifdef _MSC_VER
typedef unsigned char bool;
#	define true 1
#	define false 0
#	define inline __inline
#else
#	include <stdbool.h>
#endif

#include <stdlib.h>
#include <string.h>

#include "xz.h"

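/*
 * The decoder sources call the Linux kernel allocation interface; in
 * userspace these wrappers map it onto plain malloc()/free(). The flags
 * argument of kmalloc() (e.g. GFP_KERNEL in kernel code) is dropped
 * because malloc() takes no flags, and vmalloc()/vfree() lose their
 * special meaning and become ordinary heap allocations.
 */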
#define kmalloc(size, flags) malloc(size)
#define kfree(ptr) free(ptr)
#define vmalloc(size) malloc(size)
#define vfree(ptr) free(ptr)

#define memeq(a, b, size) (memcmp(a, b, size) == 0)
#define memzero(buf, size) memset(buf, 0, size)

#ifndef min
#	define min(x, y) ((x) < (y) ? (x) : (y))
#endif
#define min_t(type, x, y) min(x, y)

/*
 * Some functions have been marked with __always_inline to keep the
 * performance reasonable even when the compiler is optimizing for
 * small code size. You may be able to save a few bytes by #defining
 * __always_inline to plain inline, but don't complain if the code
 * becomes slow.
 *
 * NOTE: System headers on GNU/Linux may #define this macro already,
 * so if you want to change it, you need to #undef it first.
 */
#ifndef __always_inline
#	ifdef __GNUC__
#		define __always_inline \
			inline __attribute__((__always_inline__))
#	else
#		define __always_inline inline
#	endif
#endif
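
/*
 * A minimal sketch of the size-over-speed override mentioned above; it
 * is left commented out because the default favors decoding speed. Note
 * the #undef, which is needed if a system header already defined the
 * macro:
 *
 * #undef __always_inline
 * #define __always_inline inline
 */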

/* Inline functions to access unaligned unsigned 32-bit integers */
#ifndef get_unaligned_le32
static inline uint32_t get_unaligned_le32(const uint8_t *buf)
{
	return (uint32_t)buf[0]
			| ((uint32_t)buf[1] << 8)
			| ((uint32_t)buf[2] << 16)
			| ((uint32_t)buf[3] << 24);
}
#endif

#ifndef get_unaligned_be32
static inline uint32_t get_unaligned_be32(const uint8_t *buf)
{
	/* Cast before shifting so buf[0] isn't shifted into the sign
	 * bit of a promoted int. */
	return ((uint32_t)buf[0] << 24)
			| ((uint32_t)buf[1] << 16)
			| ((uint32_t)buf[2] << 8)
			| (uint32_t)buf[3];
}
#endif

#ifndef put_unaligned_le32
static inline void put_unaligned_le32(uint32_t val, uint8_t *buf)
{
	buf[0] = (uint8_t)val;
	buf[1] = (uint8_t)(val >> 8);
	buf[2] = (uint8_t)(val >> 16);
	buf[3] = (uint8_t)(val >> 24);
}
#endif

#ifndef put_unaligned_be32
static inline void put_unaligned_be32(uint32_t val, uint8_t *buf)
{
	buf[0] = (uint8_t)(val >> 24);
	buf[1] = (uint8_t)(val >> 16);
	buf[2] = (uint8_t)(val >> 8);
	buf[3] = (uint8_t)val;
}
#endif

/*
 * Use get_unaligned_le32() also for aligned access for simplicity. On
 * little endian systems, #define get_le32(ptr) (*(const uint32_t *)(ptr))
 * could save a few bytes in code size.
 */
#ifndef get_le32
#	define get_le32 get_unaligned_le32
#endif
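
/*
 * A hedged sketch of the size optimization described above, assuming a
 * GCC/Clang-style compiler that predefines __BYTE_ORDER__ and that the
 * pointers given to get_le32() are suitably aligned. Kept commented
 * out; the portable fallback above is always correct.
 *
 * #if defined(__BYTE_ORDER__) && __BYTE_ORDER__ == __ORDER_LITTLE_ENDIAN__
 * #	undef get_le32
 * #	define get_le32(ptr) (*(const uint32_t *)(ptr))
 * #endif
 */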

#endif