#include <algorithm>
#include <cstddef>
#include <cstdint>
#include <cstdio>
#include <cstring>
#include <string>
#include <utility>

#include <fcntl.h>
#include <sys/mman.h>
#include <unistd.h>

/// \brief Contains the text compression and encoding framework.
namespace tdc {
namespace io {

inline size_t pagesize() {
    return sysconf(_SC_PAGESIZE);
}
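
// Note: CHECK/DCHECK, string_ref, View, GenericView, the ""_v literal,
// read_file_size and malloc_callback come from the framework's other headers;
// IF_STATS(x) compiles x only when the STATS_DISABLED macro is undefined.

/// \brief A handle for a memory-mapped region, backed either by a file or by
///        anonymous memory. A handle owns its mapping, releases it on
///        destruction, and can be moved but not copied.
///
/// NOTE: this declaration is reconstructed; the State and Mode enumerators
/// are exactly those referenced by the member functions below.
class MMap {
public:
    /// Mapping state of the handle.
    enum class State {
        Unmapped, ///< no active mapping
        Shared,   ///< read-only mapping shared with the file
        Private   ///< private (copy-on-write or anonymous) mapping
    };

    /// Access mode requested for the mapping.
    enum class Mode {
        Read,
        ReadWrite
    };

private: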
    // Sentinel for the unmapped/empty state, so m_ptr is never null.
    static constexpr const void* EMPTY = "";
    inline static size_t adj_size(size_t v) {
        return std::max(size_t(1), v);
    }
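
    /// Aborts with a descriptive message if an mmap-family call returned
    /// MAP_FAILED.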
    inline static void check_mmap_error(void* ptr, string_ref descr) {
        if (ptr == MAP_FAILED) {
            perror("mmap"); // report errno before the fatal CHECK below
        }
        CHECK(ptr != MAP_FAILED) << "Error at " << descr;
    }

    uint8_t* m_ptr   = (uint8_t*) EMPTY;
    size_t   m_size  = 0;
    State    m_state = State::Unmapped;
    Mode     m_mode  = Mode::Read;

public:
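    /// Checks whether `offset` is page aligned and therefore usable as a
    /// file offset for mmap().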
    inline static bool is_offset_valid(size_t offset) {
        return (offset % pagesize()) == 0;
    }
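
    /// Rounds `offset` down to the previous page boundary, i.e. the largest
    /// valid mapping offset that does not exceed it.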
    inline static size_t next_valid_offset(size_t offset) {
        auto ps = pagesize();
        auto diff = offset % ps;
        auto ok = offset - diff;
        DCHECK(is_offset_valid(ok));
        return ok;
    }
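
    /// Maps a region of the file at `path` into memory.
    ///
    /// If the requested region fits inside the file, it is mapped directly
    /// (privately for ReadWrite, shared for Read). If it extends past the end
    /// of the file, or if the direct mapping fails, anonymous memory is
    /// allocated instead and the available file contents are copied into it.
    ///
    /// NOTE: the parameters after `path` are reconstructed from how `mode`,
    /// `size` and `offset` are used in the body.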
    inline MMap(const std::string& path,
                Mode mode,
                size_t size,
                size_t offset):
        m_size(size),
        m_mode(mode)
    {
        DCHECK(is_offset_valid(offset))
            << "Offset must be page aligned, use MMap::next_valid_offset() to ensure this.";

        size_t file_size = read_file_size(path);
        bool needs_to_overallocate = (offset + m_size) > file_size;

        // The file descriptor is only ever read from; writes go to a private copy.
        auto fd = open(path.c_str(), O_RDONLY);
        CHECK(fd != -1) << "Error at opening file";

        bool try_next = false;

        if (!needs_to_overallocate) {
            int mmap_prot;
            int mmap_flags;
            State state;

            if (m_mode == Mode::ReadWrite) {
                // Copy-on-write: changes are visible only to this mapping.
                mmap_prot = PROT_READ | PROT_WRITE;
                mmap_flags = MAP_PRIVATE;
                state = State::Private;
            } else {
                mmap_prot = PROT_READ;
                mmap_flags = MAP_SHARED;
                state = State::Shared;
            }

            void* ptr = mmap(NULL, adj_size(m_size), mmap_prot, mmap_flags, fd, offset);

            if (ptr != MAP_FAILED) {
                m_ptr = (uint8_t*) ptr;
                m_state = state;
                IF_STATS(if (m_state == State::Private) {
                    malloc_callback::on_alloc(adj_size(m_size));
                })
            } else {
                // Fall back to the anonymous-copy path below.
                try_next = true;
            }
        }

        if (try_next || needs_to_overallocate) {
            // Map anonymous memory instead and copy the file contents in;
            // bytes past the end of the file stay zero-initialized.
            *this = MMap(m_size);

            auto seek_ret = lseek(fd, offset, SEEK_SET);
            if (seek_ret == -1) {
                perror("Seeking fd");
            }
            CHECK(seek_ret != -1);

            // NB: assumes read() transfers the whole region in one call.
            auto size = file_size - offset;
            auto read_ret = read(fd, m_ptr, size);
            if (read_ret == -1) {
                perror("Reading fd into mapped memory");
            }
            CHECK(read_ret != -1);
        }

        close(fd);
    }
    inline MMap(size_t size):
        m_size(size)
    {
        m_mode = Mode::ReadWrite;

        int mmap_prot = PROT_READ | PROT_WRITE;
        int mmap_flags = MAP_PRIVATE | MAP_ANONYMOUS;

        void* ptr = mmap(NULL, adj_size(m_size), mmap_prot, mmap_flags, -1, 0);
        check_mmap_error(ptr, "creating anon. memory map");

        m_ptr = (uint8_t*) ptr;
        m_state = State::Private;
        IF_STATS(if (m_state == State::Private) {
            malloc_callback::on_alloc(adj_size(m_size));
        })
    }
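
    /// Resizes a private, read-write mapping to `new_size` bytes. The data
    /// may end up at a different address, so previously obtained views become
    /// invalid.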
    inline void remap(size_t new_size) {
        DCHECK(m_mode == Mode::ReadWrite);
        DCHECK(m_state == State::Private);

        // Assumed platform guard: use mremap() where available, copy otherwise.
#ifdef MREMAP_MAYMOVE
        auto p = mremap(m_ptr, adj_size(m_size), adj_size(new_size), MREMAP_MAYMOVE);
        check_mmap_error(p, "remapping memory");
        IF_STATS(if (m_state == State::Private) {
            malloc_callback::on_free(adj_size(m_size));
            malloc_callback::on_alloc(adj_size(new_size));
        })
        m_ptr = (uint8_t*) p;
        m_size = new_size;
#else
        // No mremap(): allocate a fresh anonymous mapping and copy the
        // overlapping prefix over.
        auto new_map = MMap(new_size);
        size_t common_size = std::min(new_size, m_size);
        std::memcpy(new_map.view().data(), view().data(), common_size);
        *this = std::move(new_map);
#endif
    }
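
    /// Read-only view of the mapped bytes.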
    inline View view() const {
        return View(m_ptr, m_size);
    }
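
    /// Mutable view of the mapped bytes; only valid for private, read-write
    /// mappings.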
    inline GenericView<uint8_t> view() {
        const auto err = "Attempting to get a mutable view into a read-only mapping. Call the const overload of view() instead"_v;
        DCHECK(m_state == State::Private) << err;
        DCHECK(m_mode == Mode::ReadWrite) << err;
        return GenericView<uint8_t>(m_ptr, m_size);
    }

    // MMap handles own their mapping and are therefore move-only.
    inline MMap(const MMap& other) = delete;
    inline MMap& operator=(const MMap& other) = delete;
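
    /// Takes over `other`'s mapping and leaves it unmapped, so that its
    /// destructor becomes a no-op.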
    inline void move_from(MMap&& other) {
        m_ptr   = other.m_ptr;
        m_size  = other.m_size;
        m_state = other.m_state;
        m_mode  = other.m_mode;

        other.m_state = State::Unmapped;
        other.m_ptr = (uint8_t*) EMPTY;
    }
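
    /// Move construction and move assignment transfer ownership of the
    /// mapping.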
    inline MMap(MMap&& other) {
        move_from(std::move(other));
    }

    inline MMap& operator=(MMap&& other) {
        move_from(std::move(other));
        return *this;
    }
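
    /// Unmaps the region, if any, and reports freed memory for private
    /// mappings to the statistics callback.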
    inline ~MMap() {
        if (m_state != State::Unmapped) {
            DCHECK(m_ptr != EMPTY);

            int rc = munmap(m_ptr, adj_size(m_size));
            CHECK(rc == 0) << "Error at unmapping";

            IF_STATS(if (m_state == State::Private) {
                malloc_callback::on_free(adj_size(m_size));
            })
        }
    }
};

}} // namespace tdc::io
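
// Usage sketch (illustrative only, not from the original header): map the
// first `len` bytes of a file read-write and edit them through a mutable
// view. The file name and `len` are placeholders.
//
//     using tdc::io::MMap;
//
//     size_t len = 1024;
//     MMap map("input.txt", MMap::Mode::ReadWrite, len, 0);
//     auto data = map.view();   // GenericView<uint8_t>
//     data[0] = 'x';            // copy-on-write: the file on disk is untouched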