map_ptr_x64.h
Go to the documentation of this file.
1 //
2 // Bareflank Hypervisor
3 //
4 // Copyright (C) 2015 Assured Information Security, Inc.
5 // Author: Rian Quinn <quinnr@ainfosec.com>
6 // Author: Brendan Kerrigan <kerriganb@ainfosec.com>
7 //
8 // This library is free software; you can redistribute it and/or
9 // modify it under the terms of the GNU Lesser General Public
10 // License as published by the Free Software Foundation; either
11 // version 2.1 of the License, or (at your option) any later version.
12 //
13 // This library is distributed in the hope that it will be useful,
14 // but WITHOUT ANY WARRANTY; without even the implied warranty of
15 // MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
16 // Lesser General Public License for more details.
17 //
18 // You should have received a copy of the GNU Lesser General Public
19 // License along with this library; if not, write to the Free Software
20 // Foundation, Inc., 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA
21 
22 #ifndef MAP_PTR_H
23 #define MAP_PTR_H
24 
25 #include <gsl/gsl>
26 
27 #include <vector>
28 #include <utility>
29 #include <cstdint>
30 #include <type_traits>
31 
32 #include <memory.h>
33 #include <upper_lower.h>
34 #include <guard_exceptions.h>
35 
36 #include <memory_manager/pat_x64.h>
40 
41 #include <intrinsics/x64.h>
42 #include <intrinsics/tlb_x64.h>
43 #include <intrinsics/msrs_x64.h>
44 #include <intrinsics/cache_x64.h>
45 
46 // -----------------------------------------------------------------------------
47 // Definition
48 // -----------------------------------------------------------------------------
49 
50 namespace bfn
51 {
52 
53 template <class T>
55 
74 template<class T>
77 {
78  auto &&vmap = g_mm->alloc_map(x64::page_size);
79 
80  try
81  {
82  return unique_map_ptr_x64<T>(reinterpret_cast<typename unique_map_ptr_x64<T>::integer_pointer>(vmap),
83  reinterpret_cast<typename unique_map_ptr_x64<T>::integer_pointer>(phys),
84  attr);
85  }
86  catch (...)
87  {
88  g_mm->free_map(vmap);
89  throw;
90  }
91 }
92 
111 template<class T>
114 {
115  auto &&vmap = g_mm->alloc_map(x64::page_size);
116 
117  try
118  {
119  return unique_map_ptr_x64<T>(reinterpret_cast<typename unique_map_ptr_x64<T>::integer_pointer>(vmap),
120  phys, attr);
121  }
122  catch (...)
123  {
124  g_mm->free_map(vmap);
125  throw;
126  }
127 }
128 
167 template<class T>
168 auto make_unique_map_x64(const std::vector<std::pair<typename unique_map_ptr_x64<T>::integer_pointer, typename unique_map_ptr_x64<T>::size_type>> &list,
170 {
172 
173  for (const auto &p : list)
174  size += p.second;
175 
176  auto &&vmap = g_mm->alloc_map(size);
177 
178  try
179  {
180  return unique_map_ptr_x64<T>(reinterpret_cast<typename unique_map_ptr_x64<T>::integer_pointer>(vmap),
181  list, attr);
182  }
183  catch (...)
184  {
185  g_mm->free_map(vmap);
186  throw;
187  }
188 }
189 
221 template<class T>
226 {
227  auto &&vmap = g_mm->alloc_map(size + lower(virt));
228 
229 #ifdef MAP_PTR_TESTING
230 
231  (void) cr3;
232  (void) pat;
233 
234  expects(virt != 0xDEADBEEF);
235  return unique_map_ptr_x64<T> {reinterpret_cast<typename unique_map_ptr_x64<T>::integer_pointer>(vmap), size};
236 
237 #else
238 
239  try
240  {
241  return unique_map_ptr_x64<T>(reinterpret_cast<typename unique_map_ptr_x64<T>::integer_pointer>(vmap),
242  virt, cr3, size, pat);
243  }
244  catch (...)
245  {
246  g_mm->free_map(vmap);
247  throw;
248  }
249 
250 #endif
251 }
252 
275 uintptr_t virt_to_phys_with_cr3(uintptr_t virt, uintptr_t cr3);
276 
312 void map_with_cr3(uintptr_t vmap, uintptr_t virt, uintptr_t cr3, size_t size, x64::msrs::value_type pat);
313 
338 template <class T>
339 class unique_map_ptr_x64
340 {
341 public:
342 
343  using pointer = T*;
344  using integer_pointer = uintptr_t;
345  using size_type = size_t;
346  using element_type = T;
347 
354  m_virt(0),
355  m_size(0),
356  m_unaligned_size(0)
357  { }
358 
364  unique_map_ptr_x64(std::nullptr_t donotcare) :
365  m_virt(0),
366  m_size(0),
367  m_unaligned_size(0)
368  { (void) donotcare; }
369 
378  m_virt(virt),
379  m_size(size),
380  m_unaligned_size(size)
381  { }
382 
405  m_virt(vmap),
406  m_size(x64::page_size),
407  m_unaligned_size(x64::page_size)
408  {
409  // [[ensures: get() != nullptr]]
410  expects(vmap != 0);
411  expects(lower(vmap) == 0);
412  expects(phys != 0);
413  expects(lower(phys) == 0);
414 
415  g_pt->map_4k(vmap, upper(phys), attr);
416 
417  flush();
418  }
419 
469  unique_map_ptr_x64(integer_pointer vmap, const std::vector<std::pair<integer_pointer, size_type>> &list, x64::memory_attr::attr_type attr) :
470  m_virt(0),
471  m_size(0),
472  m_unaligned_size(0)
473  {
474  // [[ensures: get() != nullptr]]
475  expects(vmap != 0);
476  expects(lower(vmap) == 0);
477  expects(!list.empty());
478 
479  for (const auto &p : list)
480  {
481  expects(p.first != 0);
482  expects(p.second != 0);
483  expects(lower(p.second) == 0);
484 
485  m_size += p.second;
486  m_unaligned_size += p.second;
487  }
488 
489  m_virt |= lower(list.front().first);
490  m_virt |= upper(vmap);
491 
492  auto &&voff = 0UL;
493  auto &&poff = 0UL;
494 
495  for (const auto &p : list)
496  {
497  auto &&phys = upper(p.first);
498  auto &&size = p.second;
499 
500  for (poff = 0; poff < size; poff += x64::page_size, voff += x64::page_size)
501  g_pt->map_4k(vmap + voff, phys + poff, attr);
502  }
503 
504  flush();
505  }
506 
543  m_virt(0),
544  m_size(size),
545  m_unaligned_size(size)
546  {
547  // [[ensures: get() != nullptr]]
548 
549  m_virt |= lower(virt);
550  m_virt |= upper(vmap);
551 
552  m_unaligned_size += lower(virt);
553 
554  map_with_cr3(vmap, virt, cr3, m_unaligned_size, pat);
555 
556  flush();
557  }
558 
580  m_virt(0),
581  m_size(0),
582  m_unaligned_size(0)
583  { reset(other.release()); }
584 
595  {
596  guard_exceptions([&]
597  { cleanup(m_virt, m_unaligned_size); });
598 
599  m_virt = 0;
600  m_size = 0;
601  m_unaligned_size = 0;
602  }
603 
626  {
627  reset(other.release());
628  return *this;
629  }
630 
649  unique_map_ptr_x64 &operator=(std::nullptr_t dontcare) noexcept
650  {
651  (void) dontcare;
652 
653  reset();
654  return *this;
655  }
656 
668  { return *reinterpret_cast<pointer>(m_virt); }
669 
680  auto operator->() const noexcept
681  { return reinterpret_cast<pointer>(m_virt); }
682 
693  virtual pointer get() const noexcept
694  { return reinterpret_cast<pointer>(m_virt); }
695 
703  operator bool() const noexcept
704  { return m_virt != 0 && m_size != 0 && m_unaligned_size != 0; }
705 
714  virtual size_type size() const noexcept
715  { return m_size; }
716 
734  {
735  auto old_virt = m_virt;
736  auto old_size = m_size;
737  auto old_unaligned_size = m_unaligned_size;
738 
739  m_virt = 0;
740  m_size = 0;
741  m_unaligned_size = 0;
742 
743  return std::make_tuple(reinterpret_cast<pointer>(old_virt), old_size, old_unaligned_size);
744  }
745 
768  void reset(pointer ptr = pointer(), size_type size = size_type(), size_type unaligned_size = size_type()) noexcept
769  {
770  auto old_virt = m_virt;
771  auto old_unaligned_size = m_unaligned_size;
772 
773  m_virt = reinterpret_cast<integer_pointer>(ptr);
774  m_size = size;
775  m_unaligned_size = unaligned_size;
776 
777  cleanup(old_virt, old_unaligned_size);
778  }
779 
797  void reset(const std::tuple<pointer, size_type, size_type> &p) noexcept
798  { reset(std::get<0>(p), std::get<1>(p), std::get<2>(p)); }
799 
810  {
811  std::swap(m_virt, other.m_virt);
812  std::swap(m_size, other.m_size);
813  std::swap(m_unaligned_size, other.m_unaligned_size);
814  }
815 
827  {
828  auto &&vmap = upper(m_virt);
829  for (auto vadr = vmap; vadr < vmap + m_unaligned_size; vadr += x64::page_size)
830  x64::tlb::invlpg(reinterpret_cast<pointer>(vadr));
831  }
832 
843  {
844  auto &&vmap = upper(m_virt);
845  for (auto vadr = vmap; vadr < vmap + m_unaligned_size; vadr += x64::cache_line_size)
846  x64::cache::clflush(reinterpret_cast<pointer>(vadr));
847  }
848 
849 private:
850 
852  {
853  if (virt != 0 && size != 0)
854  {
855  auto &&vmap = upper(virt);
856  for (auto vadr = vmap; vadr < vmap + size; vadr += x64::page_size)
857  g_pt->unmap(vadr);
858 
859  g_mm->free_map(reinterpret_cast<pointer>(vmap));
860  }
861  }
862 
863 private:
864 
865  integer_pointer m_virt;
866  size_type m_size;
867  size_type m_unaligned_size;
868 
869 public:
870 
871  unique_map_ptr_x64(const unique_map_ptr_x64 &) = delete;
873 };
874 
875 template <class T>
877 { x.swap(y); }
878 
879 template <class T1, class T2>
881 { return x.get() == y.get(); }
882 
883 template <class T1, class T2>
885 { return x.get() != y.get(); }
886 
887 template <class T1, class T2>
888 bool operator<(const unique_map_ptr_x64<T1> &x, const unique_map_ptr_x64<T2> &y)
889 { return x.get() < y.get(); }
890 
891 template <class T1, class T2>
892 bool operator<=(const unique_map_ptr_x64<T1> &x, const unique_map_ptr_x64<T2> &y)
893 { return x.get() <= y.get(); }
894 
895 template <class T1, class T2>
897 { return x.get() > y.get(); }
898 
899 template <class T1, class T2>
901 { return x.get() >= y.get(); }
902 
903 template <class T>
904 bool operator==(const unique_map_ptr_x64<T> &x, std::nullptr_t dontcare) noexcept
905 { (void) dontcare; return !x; }
906 
907 template <class T>
908 bool operator==(std::nullptr_t dontcare, const unique_map_ptr_x64<T> &y) noexcept
909 { (void) dontcare; return !y; }
910 
911 template <class T>
912 bool operator!=(const unique_map_ptr_x64<T> &x, std::nullptr_t dontcare) noexcept
913 { (void) dontcare; return x; }
914 
915 template <class T>
916 bool operator!=(std::nullptr_t dontcare, const unique_map_ptr_x64<T> &y) noexcept
917 { (void) dontcare; return y; }
918 
// Software page-table walk: translates a virtual address to its physical
// address by walking the 4-level x64 paging hierarchy rooted at cr3
// (PML4 -> PDPT -> PD -> PT), mapping each table into this address space
// with make_unique_map_x64.  Each expects() halts the walk if a level is
// not present or has a zero physical address.  1G pages (PDPT ps() set)
// and 2M pages (PD ps() set) short-circuit the walk early; the final
// result combines the page frame's upper bits with the lower bits of virt.
//
// NOTE(review): this extraction is missing the original lines 927, 935,
// 946 and 957, which — judging from the "constexpr const auto from"
// entries in the trailing index — presumably assigned `from` for each
// paging level before the index()/upper()/lower() calls below.  As shown,
// `from` is read uninitialized; confirm against the original header.
919 inline uintptr_t virt_to_phys_with_cr3(uintptr_t virt, uintptr_t cr3)
920 {
921  uintptr_t from;
922 
923  expects(cr3 != 0);
924  expects(lower(cr3) == 0);
925  expects(virt != 0);
926 
// Level 4: PML4 — cr3 holds the physical address of the PML4 table.
928  auto &&pml4_idx = x64::page_table::index(virt, from);
929  auto &&pml4_map = bfn::make_unique_map_x64<uintptr_t>(cr3);
930  auto &&pml4_pte = page_table_entry_x64{&pml4_map.get()[pml4_idx]};
931 
932  expects(pml4_pte.present());
933  expects(pml4_pte.phys_addr() != 0);
934 
// Level 3: PDPT — ps() here indicates a 1G page mapping.
936  auto &&pdpt_idx = x64::page_table::index(virt, from);
937  auto &&pdpt_map = bfn::make_unique_map_x64<uintptr_t>(pml4_pte.phys_addr());
938  auto &&pdpt_pte = page_table_entry_x64{&pdpt_map.get()[pdpt_idx]};
939 
940  expects(pdpt_pte.present());
941  expects(pdpt_pte.phys_addr() != 0);
942 
943  if (pdpt_pte.ps())
944  return upper(pdpt_pte.phys_addr(), from) | lower(virt, from);
945 
// Level 2: PD — ps() here indicates a 2M page mapping.
947  auto &&pd_idx = x64::page_table::index(virt, from);
948  auto &&pd_map = bfn::make_unique_map_x64<uintptr_t>(pdpt_pte.phys_addr());
949  auto &&pd_pte = page_table_entry_x64{&pd_map.get()[pd_idx]};
950 
951  expects(pd_pte.present());
952  expects(pd_pte.phys_addr() != 0);
953 
954  if (pd_pte.ps())
955  return upper(pd_pte.phys_addr(), from) | lower(virt, from);
956 
// Level 1: PT — the leaf entry maps an ordinary 4K page.
958  auto &&pt_idx = x64::page_table::index(virt, from);
959  auto &&pt_map = bfn::make_unique_map_x64<uintptr_t>(pd_pte.phys_addr());
960  auto &&pt_pte = page_table_entry_x64{&pt_map.get()[pt_idx]};
961 
962  expects(pt_pte.present());
963  expects(pt_pte.phys_addr() != 0);
964 
965  return upper(pt_pte.phys_addr(), from) | lower(virt, from);
966 }
967 
968 }
969 
970 #endif
void reset(const std::tuple< pointer, size_type, size_type > &p) noexcept
Definition: map_ptr_x64.h:797
void uintptr_t uintptr_t cr3
Definition: map_ptr_x64.cpp:33
auto make_unique_map_x64(typename unique_map_ptr_x64< T >::pointer phys, x64::memory_attr::attr_type attr=x64::memory_attr::rw_wb)
Definition: map_ptr_x64.h:75
#define g_pt
bool operator==(const unique_map_ptr_x64< T1 > &x, const unique_map_ptr_x64< T2 > &y)
Definition: map_ptr_x64.h:880
virtual size_type size() const noexcept
Definition: map_ptr_x64.h:714
constexpr const auto cache_line_size
Definition: x64.h:37
virtual ~unique_map_ptr_x64() noexcept
Definition: map_ptr_x64.h:594
unique_map_ptr_x64(integer_pointer virt, size_type size)
Definition: map_ptr_x64.h:377
constexpr const auto from
void invlpg(T val) noexcept
Definition: tlb_x64.h:34
auto upper(T ptr) noexcept
Definition: upper_lower.h:55
void clflush(integer_pointer addr) noexcept
Definition: cache_x64.h:44
bool operator!=(const unique_map_ptr_x64< T1 > &x, const unique_map_ptr_x64< T2 > &y)
Definition: map_ptr_x64.h:884
std::add_lvalue_reference< T >::type operator*() const
Definition: map_ptr_x64.h:667
void uintptr_t uintptr_t size_t x64::msrs::value_type pat
Definition: map_ptr_x64.cpp:37
unique_map_ptr_x64(integer_pointer vmap, const std::vector< std::pair< integer_pointer, size_type >> &list, x64::memory_attr::attr_type attr)
Definition: map_ptr_x64.h:469
void uint64_t uint64_t uint64_t *rdx noexcept
constexpr const auto from
Definition: cpuid_x64.h:86
constexpr const auto from
auto index(const T virt, const F from)
void uintptr_t uintptr_t size_t size
Definition: map_ptr_x64.cpp:33
unique_map_ptr_x64(std::nullptr_t donotcare)
Definition: map_ptr_x64.h:364
uint64_t value_type
Definition: msrs_x64.h:40
constexpr const auto rw_wb
Definition: mem_attr_x64.h:47
uint64_t attr_type
Definition: mem_attr_x64.h:35
void uintptr_t virt
Definition: map_ptr_x64.cpp:33
void swap(unique_map_ptr_x64 &other) noexcept
Definition: map_ptr_x64.h:809
unique_map_ptr_x64 & operator=(std::nullptr_t dontcare) noexcept
Definition: map_ptr_x64.h:649
unique_map_ptr_x64 & operator=(unique_map_ptr_x64 &&other) noexcept
Definition: map_ptr_x64.h:625
#define g_mm
unique_map_ptr_x64(integer_pointer vmap, integer_pointer phys, x64::memory_attr::attr_type attr)
Definition: map_ptr_x64.h:404
auto release() noexcept
Definition: map_ptr_x64.h:733
void flush() noexcept
Definition: map_ptr_x64.h:826
constexpr const auto page_size
Definition: x64.h:35
unique_map_ptr_x64(integer_pointer vmap, integer_pointer virt, integer_pointer cr3, size_type size, x64::msrs::value_type pat)
Definition: map_ptr_x64.h:542
bool operator>(const unique_map_ptr_x64< T1 > &x, const unique_map_ptr_x64< T2 > &y)
Definition: map_ptr_x64.h:896
uintptr_t virt_to_phys_with_cr3(uintptr_t virt, uintptr_t cr3)
Definition: map_ptr_x64.h:919
virtual pointer get() const noexcept
Definition: map_ptr_x64.h:693
void reset(pointer ptr=pointer(), size_type size=size_type(), size_type unaligned_size=size_type()) noexcept
Definition: map_ptr_x64.h:768
E guard_exceptions(E error_code, T func)
auto operator->() const noexcept
Definition: map_ptr_x64.h:680
bool operator>=(const unique_map_ptr_x64< T1 > &x, const unique_map_ptr_x64< T2 > &y)
Definition: map_ptr_x64.h:900
void cache_flush() noexcept
Definition: map_ptr_x64.h:842
auto lower(T ptr) noexcept
Definition: upper_lower.h:36
constexpr const auto from
void swap(unique_map_ptr_x64< T > &x, unique_map_ptr_x64< T > &y) noexcept
Definition: map_ptr_x64.h:876
Definition: cache_x64.h:31
constexpr const auto from
unique_map_ptr_x64(unique_map_ptr_x64 &&other) noexcept
Definition: map_ptr_x64.h:579
void map_with_cr3(uintptr_t vmap, uintptr_t virt, uintptr_t cr3, size_t size, x64::msrs::value_type pat)
expects(lower(vmap)==0)