#include <linux/blkdev.h>
#include <linux/fs.h>
#include <linux/log2.h>
#include "debug.h"
#include "ntfs.h"
#include "ntfs_fs.h"
#define NTFS3_RUN_MAX_BYTES …
struct ntfs_run { … };
static bool run_lookup(const struct runs_tree *run, CLST vcn, size_t *index)
{ … }
static void run_consolidate(struct runs_tree *run, size_t index)
{ … }
bool run_is_mapped_full(const struct runs_tree *run, CLST svcn, CLST evcn)
{ … }
bool run_lookup_entry(const struct runs_tree *run, CLST vcn, CLST *lcn,
CLST *len, size_t *index)
{ … }
void run_truncate_head(struct runs_tree *run, CLST vcn)
{ … }
void run_truncate(struct runs_tree *run, CLST vcn)
{ … }
void run_truncate_around(struct runs_tree *run, CLST vcn)
{ … }
bool run_add_entry(struct runs_tree *run, CLST vcn, CLST lcn, CLST len,
bool is_mft)
{ … }
bool run_collapse_range(struct runs_tree *run, CLST vcn, CLST len)
{ … }
bool run_insert_range(struct runs_tree *run, CLST vcn, CLST len)
{ … }
bool run_get_entry(const struct runs_tree *run, size_t index, CLST *vcn,
CLST *lcn, CLST *len)
{ … }
#ifdef __BIG_ENDIAN
/*
 * run_packed_size - Calculate the number of bytes needed to pack @n.
 *
 * Big-endian variant.  On a big-endian host the most-significant byte of
 * @n sits at the lowest address, so we scan from it towards the
 * least-significant byte, skipping leading sign-fill bytes (0x00 for
 * non-negative values, 0xff for negative ones).  One extra byte is needed
 * when the sign bit of the first retained byte disagrees with the sign of
 * @n, so that the unpacked value keeps the correct sign.
 */
static inline int run_packed_size(const s64 n)
{
	const u8 *b = (const u8 *)&n;	/* b[0] is the MSB on big-endian */
	const u8 fill = n < 0 ? 0xff : 0;
	int i = 0;

	/* Skip leading sign-fill bytes; always keep at least one byte. */
	while (i < 7 && b[i] == fill)
		i += 1;

	/* Add a sign byte when the kept top byte's high bit is "wrong". */
	return 8 - i + (((b[i] ^ fill) & 0x80) ? 1 : 0);
}
/*
 * run_pack_s64 - Store the low @size bytes of @v into @run_buf.
 *
 * Big-endian variant.  The on-disk run format is little-endian while the
 * bytes of @v sit most-significant first in memory, so the copy reverses
 * byte order: run_buf[i] receives the i-th least-significant byte of @v.
 * A @size outside 1..8 stores nothing, exactly as the switch-based form.
 */
static inline void run_pack_s64(u8 *run_buf, u8 size, s64 v)
{
	const u8 *p = (u8 *)&v;	/* p[7] is the LSB on big-endian */
	u8 i;

	if (size > 8)
		return;

	for (i = 0; i < size; i++)
		run_buf[i] = p[7 - i];
}
/*
 * run_unpack_s64 - Load @size little-endian bytes from @run_buf into @v.
 *
 * Big-endian variant, the inverse of run_pack_s64(): the i-th byte of
 * @run_buf becomes the i-th least-significant byte of @v.  Bytes of @v
 * above @size are left exactly as passed in — presumably the caller
 * pre-fills @v with the sign extension; only the preservation itself is
 * visible here.  A @size outside 1..8 returns @v unchanged, exactly as
 * the switch-based form.
 */
static inline s64 run_unpack_s64(const u8 *run_buf, u8 size, s64 v)
{
	u8 *p = (u8 *)&v;	/* p[7] is the LSB on big-endian */
	u8 i;

	if (size > 8)
		return v;

	for (i = 0; i < size; i++)
		p[7 - i] = run_buf[i];

	return v;
}
#else
static inline int run_packed_size(const s64 n)
{ … }
static inline void run_pack_s64(u8 *run_buf, u8 size, s64 v)
{ … }
static inline s64 run_unpack_s64(const u8 *run_buf, u8 size, s64 v)
{ … }
#endif
int run_pack(const struct runs_tree *run, CLST svcn, CLST len, u8 *run_buf,
u32 run_buf_size, CLST *packed_vcns)
{ … }
int run_unpack(struct runs_tree *run, struct ntfs_sb_info *sbi, CLST ino,
CLST svcn, CLST evcn, CLST vcn, const u8 *run_buf,
int run_buf_size)
{ … }
#ifdef NTFS3_CHECK_FREE_CLST
int run_unpack_ex(struct runs_tree *run, struct ntfs_sb_info *sbi, CLST ino,
CLST svcn, CLST evcn, CLST vcn, const u8 *run_buf,
int run_buf_size)
{ … }
#endif
int run_get_highest_vcn(CLST vcn, const u8 *run_buf, u64 *highest_vcn)
{ … }
int run_clone(const struct runs_tree *run, struct runs_tree *new_run)
{ … }