builtin,cgen,markused: add struct @[aligned] support for structs allocated on the heap too (#24886)
parent f06def7d6d
commit fa904c495e
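In short: a struct annotated with @[aligned: N] now keeps its N-byte alignment when instances land on the heap (via &Struct{}, auto-heap promotion, or multi-return unpacking), not only on the stack. To support this, every Windows allocation is routed through the _aligned_malloc family, a memdup_align builtin and a HEAP_align cgen macro are added, and the -prealloc and Boehm GC paths gain aligned variants. A minimal sketch of the user-visible effect (the struct name is illustrative, not from this commit):

	@[aligned: 64]
	struct Vec {
		x f32
		y f32
	}

	v := &Vec{} // heap allocated; the pointer is now 64-byte aligned too
	assert u64(voidptr(v)) % 64 == 0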
@@ -453,7 +453,15 @@ pub fn malloc(n isize) &u8 {
 		// so theoretically it is safe
 		res = unsafe { __malloc(usize(n)) }
 	} $else {
-		res = unsafe { C.malloc(n) }
+		$if windows {
+			// Warning! On windows, we always use _aligned_malloc to allocate memory.
+			// This ensures that we can later free the memory with _aligned_free
+			// without needing to track whether the memory was originally allocated
+			// by malloc or _aligned_malloc.
+			res = unsafe { C._aligned_malloc(n, 1) }
+		} $else {
+			res = unsafe { C.malloc(n) }
+		}
 	}
 	if res == 0 {
 		_memory_panic(@FN, n)
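Context for the change above: the MSVC C runtime requires that memory from _aligned_malloc be released with _aligned_free, and memory from malloc with free; mixing the two families is undefined. Allocating everything through _aligned_malloc (with a minimal alignment of 1) lets free() map unconditionally to _aligned_free(). An illustrative C sketch of the pitfall this avoids:

	/* illustration only: undefined behavior on the MSVC CRT */
	void *p = malloc(100);
	_aligned_free(p);         /* wrong: p did not come from _aligned_malloc */
	void *q = _aligned_malloc(100, 64);
	free(q);                  /* also wrong: q must go to _aligned_free */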
@@ -492,7 +500,15 @@ pub fn malloc_noscan(n isize) &u8 {
 	} $else $if freestanding {
 		res = unsafe { __malloc(usize(n)) }
 	} $else {
-		res = unsafe { C.malloc(n) }
+		$if windows {
+			// Warning! On windows, we always use _aligned_malloc to allocate memory.
+			// This ensures that we can later free the memory with _aligned_free
+			// without needing to track whether the memory was originally allocated
+			// by malloc or _aligned_malloc.
+			res = unsafe { C._aligned_malloc(n, 1) }
+		} $else {
+			res = unsafe { C.malloc(n) }
+		}
 	}
 	if res == 0 {
 		_memory_panic(@FN, n)
@@ -538,7 +554,15 @@ pub fn malloc_uncollectable(n isize) &u8 {
 	} $else $if freestanding {
 		res = unsafe { __malloc(usize(n)) }
 	} $else {
-		res = unsafe { C.malloc(n) }
+		$if windows {
+			// Warning! On windows, we always use _aligned_malloc to allocate memory.
+			// This ensures that we can later free the memory with _aligned_free
+			// without needing to track whether the memory was originally allocated
+			// by malloc or _aligned_malloc.
+			res = unsafe { C._aligned_malloc(n, 1) }
+		} $else {
+			res = unsafe { C.malloc(n) }
+		}
 	}
 	if res == 0 {
 		_memory_panic(@FN, n)
@@ -570,7 +594,15 @@ pub fn v_realloc(b &u8, n isize) &u8 {
 	} $else $if gcboehm ? {
 		new_ptr = unsafe { C.GC_REALLOC(b, n) }
 	} $else {
-		new_ptr = unsafe { C.realloc(b, n) }
+		$if windows {
+			// Warning! On windows, we always use _aligned_realloc to reallocate memory.
+			// This ensures that we can later free the memory with _aligned_free
+			// without needing to track whether the memory was originally allocated
+			// by malloc or _aligned_malloc/_aligned_realloc.
+			new_ptr = unsafe { C._aligned_realloc(b, n, 1) }
+		} $else {
+			new_ptr = unsafe { C.realloc(b, n) }
+		}
 	}
 	if new_ptr == 0 {
 		_memory_panic(@FN, n)
@@ -616,7 +648,15 @@ pub fn realloc_data(old_data &u8, old_size int, new_size int) &u8 {
 	$if gcboehm ? {
 		nptr = unsafe { C.GC_REALLOC(old_data, new_size) }
 	} $else {
-		nptr = unsafe { C.realloc(old_data, new_size) }
+		$if windows {
+			// Warning! On windows, we always use _aligned_realloc to reallocate memory.
+			// This ensures that we can later free the memory with _aligned_free
+			// without needing to track whether the memory was originally allocated
+			// by malloc or _aligned_malloc/_aligned_realloc.
+			nptr = unsafe { C._aligned_realloc(old_data, new_size, 1) }
+		} $else {
+			nptr = unsafe { C.realloc(old_data, new_size) }
+		}
 	}
 	if nptr == 0 {
 		_memory_panic(@FN, isize(new_size))
@@ -642,7 +682,19 @@ pub fn vcalloc(n isize) &u8 {
 	} $else $if gcboehm ? {
 		return unsafe { &u8(C.GC_MALLOC(n)) }
 	} $else {
-		return unsafe { C.calloc(1, n) }
+		$if windows {
+			// Warning! On windows, we always use _aligned_malloc to allocate memory.
+			// This ensures that we can later free the memory with _aligned_free
+			// without needing to track whether the memory was originally allocated
+			// by malloc or _aligned_malloc/_aligned_realloc/_aligned_recalloc.
+			ptr := unsafe { C._aligned_malloc(n, 1) }
+			if ptr != &u8(unsafe { nil }) {
+				unsafe { C.memset(ptr, 0, n) }
+			}
+			return ptr
+		} $else {
+			return unsafe { C.calloc(1, n) }
+		}
 	}
 	return &u8(unsafe { nil }) // not reached, TODO: remove when V's checker is improved
 }
@@ -704,7 +756,12 @@ pub fn free(ptr voidptr) {
 			unsafe { C.GC_FREE(ptr) }
 		}
 	} $else {
-		C.free(ptr)
+		$if windows {
+			// Warning! On windows, we always use _aligned_free to free memory.
+			unsafe { C._aligned_free(ptr) }
+		} $else {
+			C.free(ptr)
+		}
 	}
 }
 
@@ -757,6 +814,63 @@ pub fn memdup_uncollectable(src voidptr, sz isize) voidptr {
 	}
 }
 
+// memdup_align dynamically allocates a memory block of `sz` bytes on the heap,
+// copies the contents from `src` into the allocated space, and returns a pointer
+// to the newly allocated memory. The returned pointer is aligned to the specified `align` boundary.
+// - `align` must be a power of two and at least 1
+// - `sz` must be non-negative
+// - The memory regions should not overlap
+@[unsafe]
+pub fn memdup_align(src voidptr, sz isize, align isize) voidptr {
+	$if trace_memdup ? {
+		C.fprintf(C.stderr, c'memdup_align size: %10d align: %10d\n', sz, align)
+	}
+	if sz == 0 {
+		return vcalloc(1)
+	}
+	n := sz
+	$if trace_malloc ? {
+		total_m += n
+		C.fprintf(C.stderr, c'_v_memdup_align %6d total %10d\n', n, total_m)
+		// print_backtrace()
+	}
+	if n < 0 {
+		_memory_panic(@FN, n)
+	}
+	mut res := &u8(unsafe { nil })
+	$if prealloc {
+		res = prealloc_malloc_align(n, align)
+	} $else $if gcboehm ? {
+		unsafe {
+			res = C.GC_memalign(align, n)
+		}
+	} $else $if freestanding {
+		// todo: is this safe to call malloc there? We export __malloc as malloc and it uses dlmalloc behind the scenes
+		// so theoretically it is safe
+		panic('memdup_align is not implemented with -freestanding')
+		res = unsafe { __malloc(usize(n)) }
+	} $else {
+		$if windows {
+			// Warning! On windows, we always use _aligned_malloc to allocate memory.
+			// This ensures that we can later free the memory with _aligned_free
+			// without needing to track whether the memory was originally allocated
+			// by malloc or _aligned_malloc.
+			res = unsafe { C._aligned_malloc(n, align) }
+		} $else {
+			res = unsafe { C.aligned_alloc(align, n) }
+		}
+	}
+	if res == 0 {
+		_memory_panic(@FN, n)
+	}
+	$if debug_malloc ? {
+		// Fill in the memory with something != 0 i.e. `M`, so it is easier to spot
+		// when the calling code wrongly relies on it being zeroed.
+		unsafe { C.memset(res, 0x4D, n) }
+	}
+	return C.memcpy(res, src, sz)
+}
+
 // GCHeapUsage contains stats about the current heap usage of your program.
 pub struct GCHeapUsage {
 pub:
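A hedged usage sketch for the new memdup_align (the buffer and alignment are illustrative): it is marked @[unsafe], so callers need an unsafe block, align must be a power of two, and the copy is eventually released with free():

	data := [u8(1), 2, 3, 4]
	p := unsafe { memdup_align(data.data, isize(data.len), 64) }
	assert u64(p) % 64 == 0
	unsafe { free(p) }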
@@ -140,6 +140,8 @@ fn C.GC_REALLOC(ptr voidptr, n usize) voidptr
 
 fn C.GC_FREE(ptr voidptr)
 
+fn C.GC_memalign(align isize, size isize) voidptr
+
 // explicitly perform garbage collection now! Garbage collections
 // are done automatically when needed, so this function is hardly needed
 fn C.GC_gcollect()
@@ -14,6 +14,8 @@ fn C.GC_REALLOC(ptr voidptr, n usize) voidptr
 
 fn C.GC_FREE(ptr voidptr)
 
+fn C.GC_memalign(align isize, size isize) voidptr
+
 fn C.GC_get_heap_usage_safe(pheap_size &usize, pfree_bytes &usize, punmapped_bytes &usize, pbytes_since_gc &usize,
 	ptotal_bytes &usize)
 
@@ -23,6 +23,17 @@ fn C.realloc(a &u8, b int) &u8
 
 fn C.free(ptr voidptr)
 
+fn C.aligned_alloc(align isize, size isize) voidptr
+
+// windows aligned memory functions
+fn C._aligned_malloc(size isize, align isize) voidptr
+fn C._aligned_free(voidptr)
+fn C._aligned_realloc(voidptr, size isize, align isize) voidptr
+fn C._aligned_offset_malloc(size isize, align isize, offset isize) voidptr
+fn C._aligned_offset_realloc(voidptr, size isize, align isize, offset isize) voidptr
+fn C._aligned_msize(voidptr, align isize, offset isize) isize
+fn C._aligned_recalloc(voidptr, num isize, size isize, align isize) voidptr
+
 @[noreturn; trusted]
 fn C.exit(code int)
 
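Note the argument-order difference these declarations encode: C11 aligned_alloc takes (alignment, size), while the MSVC _aligned_malloc takes (size, alignment); the C11 wording additionally expects size to be an integral multiple of alignment. A small illustrative C comparison:

	void *a = aligned_alloc(64, 128);   /* C11: alignment first */
	void *b = _aligned_malloc(128, 64); /* MSVC: size first */
	free(a);
	_aligned_free(b);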
@@ -37,7 +37,7 @@ fn vmemory_abort_on_nil(p voidptr, bytes isize) {
 }
 
 @[unsafe]
-fn vmemory_block_new(prev &VMemoryBlock, at_least isize) &VMemoryBlock {
+fn vmemory_block_new(prev &VMemoryBlock, at_least isize, align isize) &VMemoryBlock {
 	vmem_block_size := sizeof(VMemoryBlock)
 	mut v := unsafe { &VMemoryBlock(C.calloc(1, vmem_block_size)) }
 	vmemory_abort_on_nil(v, vmem_block_size)
@@ -49,16 +49,35 @@ fn vmemory_block_new(prev &VMemoryBlock, at_least isize) &VMemoryBlock {
 	if unsafe { prev != 0 } {
 		prev.next = v
 	}
-	block_size := if at_least < isize(prealloc_block_size) {
+	base_block_size := if at_least < isize(prealloc_block_size) {
 		isize(prealloc_block_size)
 	} else {
 		at_least
 	}
-	$if prealloc_trace_malloc ? {
-		C.fprintf(C.stderr, c'vmemory_block_new id: %d, block_size: %lld, at_least: %lld\n',
-			v.id, block_size, at_least)
+	block_size := if align > 0 {
+		if base_block_size % align == 0 {
+			base_block_size
+		} else {
+			base_block_size + align - (base_block_size % align)
+		}
+	} else {
+		base_block_size
+	}
+	$if prealloc_trace_malloc ? {
+		C.fprintf(C.stderr, c'vmemory_block_new id: %d, block_size: %lld, at_least: %lld, align: %lld\n',
+			v.id, block_size, at_least, align)
 	}
-	v.start = unsafe { C.malloc(block_size) }
+
+	fixed_align := if align <= 1 { 1 } else { align }
+	$if windows {
+		v.start = unsafe { C._aligned_malloc(block_size, fixed_align) }
+	} $else {
+		if fixed_align == 1 {
+			v.start = unsafe { C.malloc(block_size) }
+		} else {
+			v.start = unsafe { C.aligned_alloc(fixed_align, block_size) }
+		}
+	}
 	vmemory_abort_on_nil(v.start, block_size)
 	$if prealloc_memset ? {
 		unsafe { C.memset(v.start, int($d('prealloc_memset_value', 0)), block_size) }
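The block_size computation above is plain round-up-to-a-multiple arithmetic. Worked example: with base_block_size = 1048577 and align = 512, 1048577 % 512 == 1, so block_size becomes 1048577 + 512 - 1 = 1049088, which is 512 * 2049. Rounding up keeps the allocation size an integral multiple of the alignment, matching what C11 aligned_alloc expects.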
@@ -69,15 +88,15 @@ fn vmemory_block_new(prev &VMemoryBlock, at_least isize) &VMemoryBlock {
 }
 
 @[unsafe]
-fn vmemory_block_malloc(n isize) &u8 {
+fn vmemory_block_malloc(n isize, align isize) &u8 {
 	$if prealloc_trace_malloc ? {
-		C.fprintf(C.stderr, c'vmemory_block_malloc g_memory_block.id: %d, n: %lld\n',
-			g_memory_block.id, n)
+		C.fprintf(C.stderr, c'vmemory_block_malloc g_memory_block.id: %d, n: %lld align: %d\n',
+			g_memory_block.id, n, align)
 	}
 	unsafe {
 		remaining := i64(g_memory_block.stop) - i64(g_memory_block.current)
 		if _unlikely_(remaining < n) {
-			g_memory_block = vmemory_block_new(g_memory_block, n)
+			g_memory_block = vmemory_block_new(g_memory_block, n, align)
 		}
 		res := &u8(g_memory_block.current)
 		g_memory_block.current += n
@@ -96,7 +115,7 @@ fn prealloc_vinit() {
 		C.fprintf(C.stderr, c'prealloc_vinit started\n')
 	}
 	unsafe {
-		g_memory_block = vmemory_block_new(nil, isize(prealloc_block_size))
+		g_memory_block = vmemory_block_new(nil, isize(prealloc_block_size), 0)
 		at_exit(prealloc_vcleanup) or {}
 	}
 }
@@ -170,20 +189,28 @@ fn prealloc_vcleanup() {
 	}
 	unsafe {
 		for g_memory_block != 0 {
-			C.free(g_memory_block.start)
+			$if windows {
+				// Warning! On windows, we always use _aligned_free to free memory.
+				C._aligned_free(g_memory_block.start)
+			} $else {
+				C.free(g_memory_block.start)
+			}
+			tmp := g_memory_block
 			g_memory_block = g_memory_block.previous
+			// free the link node
+			C.free(tmp)
 		}
 	}
 }
 
 @[unsafe]
 fn prealloc_malloc(n isize) &u8 {
-	return unsafe { vmemory_block_malloc(n) }
+	return unsafe { vmemory_block_malloc(n, 0) }
 }
 
 @[unsafe]
 fn prealloc_realloc(old_data &u8, old_size isize, new_size isize) &u8 {
-	new_ptr := unsafe { vmemory_block_malloc(new_size) }
+	new_ptr := unsafe { vmemory_block_malloc(new_size, 0) }
 	min_size := if old_size < new_size { old_size } else { new_size }
 	unsafe { C.memcpy(new_ptr, old_data, min_size) }
 	return new_ptr
@@ -191,7 +218,12 @@ fn prealloc_realloc(old_data &u8, old_size isize, new_size isize) &u8 {
 
 @[unsafe]
 fn prealloc_calloc(n isize) &u8 {
-	new_ptr := unsafe { vmemory_block_malloc(n) }
+	new_ptr := unsafe { vmemory_block_malloc(n, 0) }
 	unsafe { C.memset(new_ptr, 0, n) }
 	return new_ptr
 }
+
+@[unsafe]
+fn prealloc_malloc_align(n isize, align isize) &u8 {
+	return unsafe { vmemory_block_malloc(n, align) }
+}
@@ -5,8 +5,17 @@ module builtin
 @[unsafe]
 pub fn __malloc(size usize) voidptr {
 	unsafe {
-		return C.malloc(int(size))
+		$if windows {
+			// Warning! On windows, we always use _aligned_malloc to allocate memory.
+			// This ensures that we can later free the memory with _aligned_free
+			// without needing to track whether the memory was originally allocated
+			// by malloc or _aligned_malloc.
+			return C._aligned_malloc(size, 1)
+		} $else {
+			return C.malloc(int(size))
+		}
 	}
+	return unsafe { nil }
 }
 
 @[unsafe]
@@ -46,6 +46,7 @@ pub mut:
 	debugger       bool            // debugger is used
 	comptime_calls map[string]bool // resolved name to call on comptime
 	comptime_for   bool            // uses $for
+	memory_align   bool            // @[aligned] for struct
 }
 
 @[unsafe]
@@ -67,6 +67,10 @@ fn (mut c Checker) struct_decl(mut node ast.StructDecl) {
 		if node.language != .c && attr.name == 'typedef' {
 			c.error('`typedef` attribute can only be used with C structs', node.pos)
 		}
+		aligned := if attr.arg == '' { 0 } else { attr.arg.int() }
+		if aligned > 1 {
+			c.table.used_features.memory_align = true
+		}
 	}
 
 	// Evaluate the size of the unresolved fixed array
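For reference: with @[aligned: 64] the attribute argument is the string '64', so aligned becomes 64 and the memory_align feature is marked as used; a bare @[aligned] leaves arg empty, aligned stays 0, and the plain (unaligned) heap-copy path is kept, since only values above 1 mark the feature.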
@@ -55,12 +55,12 @@ pub fn (mut e Eval) expr(expr ast.Expr, expecting ast.Type) Object {
 				}
 				'malloc' {
 					return Ptr{
-						val: unsafe { C.malloc(args[0].int_val()) }
+						val: unsafe { malloc(isize(args[0].int_val())) }
 					}
 				}
 				'calloc' {
 					return Ptr{
-						val: unsafe { C.calloc(args[0].int_val(), args[1].int_val()) }
+						val: unsafe { vcalloc(isize(args[0].int_val() * args[1].int_val())) }
 					}
 				}
 				'getcwd' {
@@ -720,6 +720,13 @@ fn (mut g Gen) assign_stmt(node_ ast.AssignStmt) {
 	}
 	final_left_sym := g.table.final_sym(g.unwrap_generic(var_type))
 	final_right_sym := g.table.final_sym(unwrapped_val_type)
+	mut aligned := 0
+	if final_left_sym.info is ast.Struct {
+		if attr := final_left_sym.info.attrs.find_first('aligned') {
+			aligned = if attr.arg == '' { 0 } else { attr.arg.int() }
+		}
+	}
+
 	if final_left_sym.kind == .bool && final_right_sym.kind == .bool
 		&& node.op in [.boolean_or_assign, .boolean_and_assign] {
 		extracted_op := match node.op {
@@ -947,7 +954,11 @@ fn (mut g Gen) assign_stmt(node_ ast.AssignStmt) {
 		is_option_unwrapped := val is ast.Ident && val.or_expr.kind != .absent
 		is_option_auto_heap := is_auto_heap && is_option_unwrapped
 		if is_auto_heap {
-			g.write('HEAP(${styp}, (')
+			if aligned != 0 {
+				g.write('HEAP_align(${styp}, (')
+			} else {
+				g.write('HEAP(${styp}, (')
+			}
 		}
 		if val.is_auto_deref_var() && !is_option_unwrapped {
 			g.write('*')
@@ -978,7 +989,11 @@ fn (mut g Gen) assign_stmt(node_ ast.AssignStmt) {
 				g.expr(val)
 			}
 			if is_auto_heap && !is_option_auto_heap {
-				g.write('))')
+				if aligned != 0 {
+					g.write('), ${aligned})')
+				} else {
+					g.write('))')
+				}
 			}
 		}
 	} else {
@@ -1073,10 +1088,21 @@ fn (mut g Gen) gen_multi_return_assign(node &ast.AssignStmt, return_type ast.Typ
 			g.write('*')
 		}
 		noscan := if is_auto_heap { g.check_noscan(return_type) } else { '' }
+		mut aligned := 0
+		sym := g.table.final_sym(node.left_types[i])
+		if sym.info is ast.Struct {
+			if attr := sym.info.attrs.find_first('aligned') {
+				aligned = if attr.arg == '' { 0 } else { attr.arg.int() }
+			}
+		}
 		if node.left_types[i].has_flag(.option) {
 			base_typ := g.base_type(node.left_types[i])
 			tmp_var := if is_auto_heap {
-				'HEAP${noscan}(${styp}, ${mr_var_name}.arg${i})'
+				if aligned != 0 {
+					'HEAP_align(${styp}, ${mr_var_name}.arg${i}, ${aligned})'
+				} else {
+					'HEAP${noscan}(${styp}, ${mr_var_name}.arg${i})'
+				}
 			} else if is_option {
 				'(*((${g.base_type(return_type)}*)${mr_var_name}.data)).arg${i}'
 			} else {
@@ -1098,13 +1124,16 @@ fn (mut g Gen) gen_multi_return_assign(node &ast.AssignStmt, return_type ast.Typ
 			}
 		} else {
 			g.expr(lx)
-			sym := g.table.final_sym(node.left_types[i])
 			if sym.kind == .array_fixed {
 				g.writeln2(';', 'memcpy(&${g.expr_string(lx)}, &${mr_var_name}.arg${i}, sizeof(${styp}));')
 			} else {
 				if cur_indexexpr != -1 {
 					if is_auto_heap {
-						g.writeln('HEAP${noscan}(${styp}, ${mr_var_name}.arg${i}) });')
+						if aligned != 0 {
+							g.writeln('HEAP_align(${styp}, ${mr_var_name}.arg${i}, ${aligned}) });')
+						} else {
+							g.writeln('HEAP${noscan}(${styp}, ${mr_var_name}.arg${i}) });')
+						}
 					} else if is_option {
 						g.writeln('(*((${g.base_type(return_type)}*)${mr_var_name}.data)).arg${i} });')
 					} else {
@@ -1113,7 +1142,11 @@ fn (mut g Gen) gen_multi_return_assign(node &ast.AssignStmt, return_type ast.Typ
 					g.cur_indexexpr.delete(cur_indexexpr)
 				} else {
 					if is_auto_heap {
-						g.writeln(' = HEAP${noscan}(${styp}, ${mr_var_name}.arg${i});')
+						if aligned != 0 {
+							g.writeln(' = HEAP_align(${styp}, ${mr_var_name}.arg${i}, ${aligned});')
+						} else {
+							g.writeln(' = HEAP${noscan}(${styp}, ${mr_var_name}.arg${i});')
+						}
 					} else if is_option {
 						g.writeln(' = (*((${g.base_type(return_type)}*)${mr_var_name}.data)).arg${i};')
 					} else {
@@ -581,6 +581,7 @@ const c_helper_macros = '//============================== HELPER C MACROS ======
 // copy something to the heap
 #define HEAP(type, expr) ((type*)memdup((void*)&((type[]){expr}[0]), sizeof(type)))
 #define HEAP_noscan(type, expr) ((type*)memdup_noscan((void*)&((type[]){expr}[0]), sizeof(type)))
+#define HEAP_align(type, expr, align) ((type*)memdup_align((void*)&((type[]){expr}[0]), sizeof(type), align))
 
 #define _PUSH_MANY(arr, val, tmp, tmp_typ) {tmp_typ tmp = (val); array_push_many(arr, tmp.data, tmp.len);}
 #define _PUSH_MANY_noscan(arr, val, tmp, tmp_typ) {tmp_typ tmp = (val); array_push_many_noscan(arr, tmp.data, tmp.len);}
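To make the new macro concrete, a hedged expansion sketch (the struct and field are illustrative, not from this commit): for a struct annotated @[aligned: 512], cgen now emits HEAP_align instead of HEAP, and the C preprocessor turns it into a memdup_align call:

	MyStruct* p = HEAP_align(MyStruct, ((MyStruct){.a = 1}), 512);
	/* expands to: */
	MyStruct* p = ((MyStruct*)memdup_align((void*)&((MyStruct[]){((MyStruct){.a = 1})}[0]), sizeof(MyStruct), 512));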
@@ -58,9 +58,13 @@ fn (mut g Gen) struct_init(node ast.StructInit) {
 	if is_amp {
 		g.go_back(1) // delete the `&` already generated in `prefix_expr()
 	}
+	mut aligned := 0
 	mut is_anon := false
 	mut is_array_fixed_struct_init := false // return T{} where T is fixed array
 	if mut sym.info is ast.Struct {
+		if attr := sym.info.attrs.find_first('aligned') {
+			aligned = if attr.arg == '' { 0 } else { attr.arg.int() }
+		}
 		is_anon = sym.info.is_anon
 	}
 	is_generic_default := sym.kind !in [.struct, .array_fixed] && node.typ.has_flag(.generic) // T{}
@@ -92,9 +96,17 @@ fn (mut g Gen) struct_init(node ast.StructInit) {
 	} else if is_amp || g.inside_cast_in_heap > 0 {
 		if node.typ.has_flag(.option) {
 			basetyp := g.base_type(node.typ)
-			g.write('(${basetyp}*)memdup(&(${basetyp}){')
+			if aligned != 0 {
+				g.write('(${basetyp}*)memdup_align(&(${basetyp}){')
+			} else {
+				g.write('(${basetyp}*)memdup(&(${basetyp}){')
+			}
 		} else {
-			g.write('(${styp}*)memdup(&(${styp}){')
+			if aligned != 0 {
+				g.write('(${styp}*)memdup_align(&(${styp}){')
+			} else {
+				g.write('(${styp}*)memdup(&(${styp}){')
+			}
 		}
 	} else if node.typ.is_ptr() {
 		basetyp := g.styp(node.typ.set_nr_muls(0))
@@ -389,13 +401,25 @@ fn (mut g Gen) struct_init(node ast.StructInit) {
 		g.write('}')
 	}
 	if g.is_shared && !g.inside_opt_data && !g.is_arraymap_set {
-		g.write('}, sizeof(${shared_styp}))')
+		if aligned != 0 {
+			g.write('}, sizeof(${shared_styp}), ${aligned})')
+		} else {
+			g.write('}, sizeof(${shared_styp}))')
+		}
 	} else if is_amp || g.inside_cast_in_heap > 0 {
 		if node.typ.has_flag(.option) {
 			basetyp := g.base_type(node.typ)
-			g.write(', sizeof(${basetyp}))')
+			if aligned != 0 {
+				g.write(', sizeof(${basetyp}), ${aligned})')
+			} else {
+				g.write(', sizeof(${basetyp}))')
+			}
 		} else {
-			g.write(', sizeof(${styp}))')
+			if aligned != 0 {
+				g.write(', sizeof(${styp}), ${aligned})')
+			} else {
+				g.write(', sizeof(${styp}))')
+			}
 		}
 	}
 }
@@ -219,6 +219,9 @@ pub fn mark_used(mut table ast.Table, mut pref_ pref.Preferences, ast_files []&a
 	if table.used_features.type_name {
 		core_fns << charptr_idx_str + '.vstring_literal'
 	}
+	if table.used_features.memory_align {
+		core_fns << 'memdup_align'
+	}
 	if pref_.trace_calls || pref_.trace_fns.len > 0 {
 		include_panic_deps = true
 		core_fns << 'vgettid'
vlib/v/tests/structs/struct_aligned_test.v (new file, +13 lines)
@@ -0,0 +1,13 @@
+@[aligned: 512]
+struct MyStruct {
+	a int
+}
+
+fn test_struct_aligned() {
+	x := u64(voidptr(&MyStruct{}))
+	assert x % 512 == 0
+
+	y := MyStruct{}
+	ptr := u64(voidptr(&y))
+	assert ptr % 512 == 0
+}
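The new test can be run directly, e.g. with v vlib/v/tests/structs/struct_aligned_test.v (or v test vlib/v/tests/structs/).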