@@ -55,6 +55,17 @@ _page_aligned_alloc :: proc(size, alignment, granularity: int,
 		return nil, .Invalid_Argument
 	}
 
+	alignment := alignment
+	alignment = max(alignment, GRANULARITY_MIN)
+	if granularity == GRANULARITY_MIN && alignment == GRANULARITY_MIN {
+		size_full := mem.align_forward_int(size, GRANULARITY_MIN)
+		memory, err = virtual.reserve_and_commit(uint(size_full))
+		if err != nil {
+			memory = memory[:size]
+		}
+		return
+	}
+
 	if .Allow_Large_Pages in flags && granularity >= mem.Gigabyte && size >= mem.Gigabyte {
 		raw_map_flags := i32(MAP_HUGE_1GB)
 		map_flags := transmute(linux.Map_Flags)(raw_map_flags)
@@ -67,18 +78,7 @@ _page_aligned_alloc :: proc(size, alignment, granularity: int,
 		}
 	}
 
-	alignment := alignment
-	alignment = max(alignment, GRANULARITY_MIN)
-	if granularity == GRANULARITY_MIN && alignment == GRANULARITY_MIN {
-		size_full := mem.align_forward_int(size, GRANULARITY_MIN)
-		memory, err = virtual.reserve_and_commit(uint(size_full))
-		if err != nil {
-			memory = memory[:size]
-		}
-		return
-	}
-
-	huge_if: if .Allow_Large_Pages in flags && granularity > 2 * mem.Megabyte && size > 2 * mem.Megabyte {
+	if .Allow_Large_Pages in flags && granularity > 2 * mem.Megabyte && size > 2 * mem.Megabyte {
 		raw_map_flags := i32(MAP_HUGE_2MB)
 		map_flags := transmute(linux.Map_Flags)(raw_map_flags)
 		map_flags += {.ANONYMOUS, .PRIVATE, .HUGETLB}
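The moved block is the plain page-granularity fast path: round the requested size up to the minimum granularity, reserve and commit that range, then slice the result back to the requested length. Hoisting it above the 1 GiB and 2 MiB huge-page branches lets ordinary small allocations return before any large-page checks run. Below is a minimal standalone sketch of that path, assuming GRANULARITY_MIN is the 4 KiB base page size and with the error handling reduced to the success case; it is an illustration, not the committed implementation.

package page_alloc_sketch

import "core:mem"
import "core:mem/virtual"

// Assumption for this sketch: the minimum page granularity is 4 KiB.
GRANULARITY_MIN :: 4 * mem.Kilobyte

// Sketch of the page-granularity fast path: align the request up to a whole
// number of pages, reserve and commit that range, then return a slice of the
// originally requested length.
page_aligned_alloc_sketch :: proc(size: int) -> (memory: []byte, err: mem.Allocator_Error) {
	if size <= 0 {
		return nil, .Invalid_Argument
	}
	size_full := mem.align_forward_int(size, GRANULARITY_MIN)
	memory, err = virtual.reserve_and_commit(uint(size_full))
	if err == nil {
		// Hand back exactly what was asked for; the full aligned range
		// stays mapped behind the slice.
		memory = memory[:size]
	}
	return
}

Slicing to size keeps the caller-visible length exact while the whole page-aligned reservation remains committed, which the corresponding free path has to account for when it returns the region to the OS.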