v: adjust some checks, based on branch prediction analysis (#22848)

Felipe Pena 2024-11-15 17:55:02 -03:00 committed by GitHub
parent 561efd1463
commit c4fd61cec7
8 changed files with 119 additions and 126 deletions
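The pattern that repeats across these diffs is to fold guard clauses and nested ifs into a single condition, so the common path runs through one branch instead of several. Below is a minimal before/after sketch in V, modeled loosely on the Scope.register hunk that follows; the simplified field type and the _old/_new names are illustrative only, not part of the actual code.

struct Scope {
mut:
	objects map[string]int
}

// before: guard clause with an early return
fn (mut s Scope) register_old(name string, obj int) {
	if name == '_' || name in s.objects {
		return
	}
	s.objects[name] = obj
}

// after: the guard is inverted and merged into one condition; the common
// "register it" path is the only branch in the function body
fn (mut s Scope) register_new(name string, obj int) {
	if !(name == '_' || name in s.objects) {
		s.objects[name] = obj
	}
}

fn main() {
	mut s := Scope{}
	s.register_new('x', 1)
	s.register_new('_', 2) // ignored, like the blank identifier in the real code
	println(s.objects) // {'x': 1}
}

The same fold shows up later in the scanner's comment check and in KeywordsMatcherTrie.find, where two separate checks become one combined condition.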

View File

@@ -158,11 +158,10 @@ pub fn (mut s Scope) register_struct_field(name string, field ScopeStructField)
}
pub fn (mut s Scope) register(obj ScopeObject) {
if obj.name == '_' || obj.name in s.objects {
return
}
if !(obj.name == '_' || obj.name in s.objects) {
s.objects[obj.name] = obj
}
}
// returns the innermost scope containing pos
// pub fn (s &Scope) innermost(pos int) ?&Scope {

View File

@@ -307,6 +307,7 @@ fn (mut c Checker) assign_stmt(mut node ast.AssignStmt) {
for left is ast.ParExpr {
left = (left as ast.ParExpr).expr
}
is_assign := node.op in [.assign, .decl_assign]
match mut left {
ast.Ident {
if (is_decl || left.kind == .blank_ident) && left_type.is_ptr()
@@ -326,7 +327,7 @@ fn (mut c Checker) assign_stmt(mut node ast.AssignStmt) {
}
left_type = right_type
node.left_types[i] = right_type
if node.op !in [.assign, .decl_assign] {
if !is_assign {
c.error('cannot modify blank `_` identifier', left.pos)
}
} else if left.info !is ast.IdentVar {
@@ -601,18 +602,8 @@ or use an explicit `unsafe{ a[..] }`, if you do not want a copy of the slice.',
}
}
}
if left_sym.kind == .array_fixed && !c.inside_unsafe && node.op in [.assign, .decl_assign]
&& right_sym.kind == .array_fixed && left is ast.Ident && !left.is_blank_ident()
&& right is ast.Ident {
if right_sym.info is ast.ArrayFixed {
if right_sym.info.elem_type.is_ptr() {
c.error('assignment from one fixed array to another with a pointer element type is prohibited outside of `unsafe`',
node.pos)
}
}
}
if left_sym.kind == .map && node.op in [.assign, .decl_assign] && right_sym.kind == .map
&& !left.is_blank_ident() && right.is_lvalue() && right !is ast.ComptimeSelector
if left_sym.kind == .map && is_assign && right_sym.kind == .map && !left.is_blank_ident()
&& right.is_lvalue() && right !is ast.ComptimeSelector
&& (!right_type.is_ptr() || (right is ast.Ident && right.is_auto_deref_var())) {
// Do not allow `a = b`
c.error('cannot copy map: call `move` or `clone` method (or use a reference)',
@@ -628,6 +619,16 @@ or use an explicit `unsafe{ a[..] }`, if you do not want a copy of the slice.',
c.error('cannot assign `${right}` as a generic function variable', right.pos())
}
}
if left_sym.kind == .array_fixed && !c.inside_unsafe && is_assign
&& right_sym.kind == .array_fixed && left is ast.Ident && !left.is_blank_ident()
&& right is ast.Ident {
if right_sym.info is ast.ArrayFixed {
if right_sym.info.elem_type.is_ptr() {
c.error('assignment from one fixed array to another with a pointer element type is prohibited outside of `unsafe`',
node.pos)
}
}
}
if left_type.is_any_kind_of_pointer() && !left.is_auto_deref_var() {
if !c.inside_unsafe && node.op !in [.assign, .decl_assign] {
// ptr op=

View File

@@ -2748,11 +2748,9 @@ fn (mut c Checker) stmts_ending_with_expression(mut stmts []ast.Stmt, expected_o
c.stmt_level++
for i, mut stmt in stmts {
c.is_last_stmt = i == stmts.len - 1
if c.scope_returns {
if unreachable.line_nr == -1 {
if c.scope_returns && unreachable.line_nr == -1 {
unreachable = stmt.pos
}
}
prev_expected_or_type := c.expected_or_type
c.expected_or_type = expected_or_type
c.stmt(mut stmt)

View File

@@ -2169,31 +2169,26 @@ fn (mut g Gen) stmt(node ast.Stmt) {
g.set_current_pos_as_last_stmt_pos()
}
match node {
ast.AsmStmt {
g.write_v_source_line_info_stmt(node)
g.asm_stmt(node)
ast.FnDecl {
g.fn_decl(node)
}
ast.AssertStmt {
ast.Block {
g.write_v_source_line_info_stmt(node)
g.assert_stmt(node)
if !node.is_unsafe {
g.writeln('{')
} else {
g.writeln('{ // Unsafe block')
}
g.stmts(node.stmts)
g.writeln('}')
}
ast.AssignStmt {
g.write_v_source_line_info_stmt(node)
g.assign_stmt(node)
}
ast.Block {
ast.AssertStmt {
g.write_v_source_line_info_stmt(node)
if node.is_unsafe {
g.writeln('{ // Unsafe block')
} else {
g.writeln('{')
}
g.stmts(node.stmts)
g.writeln('}')
}
ast.BranchStmt {
g.write_v_source_line_info_stmt(node)
g.branch_stmt(node)
g.assert_stmt(node)
}
ast.ConstDecl {
g.write_v_source_line_info_stmt(node)
@@ -2202,18 +2197,9 @@ fn (mut g Gen) stmt(node ast.Stmt) {
ast.ComptimeFor {
g.comptime_for(node)
}
ast.DebuggerStmt {
g.debugger_stmt(node)
}
ast.DeferStmt {
mut defer_stmt := node
defer_stmt.ifdef = g.defer_ifdef
g.writeln('${g.defer_flag_var(defer_stmt)} = true;')
g.defer_stmts << defer_stmt
}
ast.EmptyStmt {}
ast.EnumDecl {
g.enum_decl(node)
ast.BranchStmt {
g.write_v_source_line_info_stmt(node)
g.branch_stmt(node)
}
ast.ExprStmt {
g.write_v_source_line_info_stmt(node)
@@ -2246,9 +2232,6 @@ fn (mut g Gen) stmt(node ast.Stmt) {
}
}
}
ast.FnDecl {
g.fn_decl(node)
}
ast.ForCStmt {
prev_branch_parent_pos := g.branch_parent_pos
g.branch_parent_pos = node.pos.pos
@@ -2297,19 +2280,21 @@ fn (mut g Gen) stmt(node ast.Stmt) {
g.labeled_loops.delete(node.label)
g.inner_loop = save_inner_loop
}
ast.Return {
g.return_stmt(node)
}
ast.DeferStmt {
mut defer_stmt := node
defer_stmt.ifdef = g.defer_ifdef
g.writeln('${g.defer_flag_var(defer_stmt)} = true;')
g.defer_stmts << defer_stmt
}
ast.EnumDecl {
g.enum_decl(node)
}
ast.GlobalDecl {
g.global_decl(node)
}
ast.GotoLabel {
g.writeln('${c_name(node.name)}: {}')
}
ast.GotoStmt {
g.write_v_source_line_info_stmt(node)
g.writeln('goto ${c_name(node.name)};')
}
ast.HashStmt {
g.hash_stmt(node)
}
ast.Import {}
ast.InterfaceDecl {
// definitions are sorted and added in write_types
@@ -2327,20 +2312,6 @@ fn (mut g Gen) stmt(node ast.Stmt) {
}
}
}
ast.Module {
g.is_builtin_mod = util.module_is_builtin(node.name)
g.cur_mod = node
}
ast.NodeError {}
ast.Return {
g.return_stmt(node)
}
ast.SemicolonStmt {
g.writeln(';')
}
ast.SqlStmt {
g.sql_stmt(node)
}
ast.StructDecl {
name := if node.language == .c {
util.no_dots(node.name)
@@ -2369,11 +2340,40 @@ fn (mut g Gen) stmt(node ast.Stmt) {
g.typedefs.writeln('typedef struct ${name} ${name};')
}
}
ast.GotoLabel {
g.writeln('${c_name(node.name)}: {}')
}
ast.GotoStmt {
g.write_v_source_line_info_stmt(node)
g.writeln('goto ${c_name(node.name)};')
}
ast.AsmStmt {
g.write_v_source_line_info_stmt(node)
g.asm_stmt(node)
}
ast.HashStmt {
g.hash_stmt(node)
}
ast.TypeDecl {
if !g.pref.skip_unused {
g.writeln('// TypeDecl')
}
}
ast.SemicolonStmt {
g.writeln(';')
}
ast.SqlStmt {
g.sql_stmt(node)
}
ast.Module {
g.is_builtin_mod = util.module_is_builtin(node.name)
g.cur_mod = node
}
ast.EmptyStmt {}
ast.DebuggerStmt {
g.debugger_stmt(node)
}
ast.NodeError {}
}
if !g.skip_stmt_pos { // && g.stmt_path_pos.len > 0 {
g.stmt_path_pos.delete_last()
@@ -3555,7 +3555,11 @@ fn (mut g Gen) expr(node_ ast.Expr) {
g.ident(node)
}
ast.IfExpr {
if !node.is_comptime {
g.if_expr(node)
} else {
g.comptime_if(node)
}
}
ast.IfGuardExpr {
g.write('/* guard */')
@@ -3564,12 +3568,12 @@ fn (mut g Gen) expr(node_ ast.Expr) {
g.index_expr(node)
}
ast.InfixExpr {
if node.op in [.left_shift, .plus_assign, .minus_assign] {
if node.op !in [.left_shift, .plus_assign, .minus_assign] {
g.infix_expr(node)
} else {
g.inside_map_infix = true
g.infix_expr(node)
g.inside_map_infix = false
} else {
g.infix_expr(node)
}
}
ast.IntegerLiteral {

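The large Gen.stmt hunk above is mostly a reordering rather than a behavior change: statement kinds that are generated constantly (ast.FnDecl, ast.Block, ast.AssignStmt, ast.ExprStmt) move toward the top of the match, while rare kinds (ast.AsmStmt, ast.DebuggerStmt, ast.NodeError) sink to the bottom. A toy V sketch of that idea follows; the node kinds, their assumed frequencies, and the gen_stmt name are made up, and it assumes the arms are checked in source order, which is what the reordering itself suggests for the C backend.

struct ExprStmt {}
struct AssignStmt {}
struct AsmStmt {}

type Stmt = AsmStmt | AssignStmt | ExprStmt

fn gen_stmt(node Stmt) string {
	return match node {
		// hot arms first: these kinds dominate typical programs
		ExprStmt { 'expr' }
		AssignStmt { 'assign' }
		// cold arm last: inline asm is comparatively rare
		AsmStmt { 'asm' }
	}
}

fn main() {
	println(gen_stmt(Stmt(ExprStmt{}))) // expr
}

The parser's top_stmt and stmt hunks below get the same treatment: their token-kind arms are reordered, with arms like .key_asm and .key_assert moving toward the end.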
View File

@@ -176,10 +176,6 @@ fn (mut g Gen) needs_conds_order(node ast.IfExpr) bool {
}
fn (mut g Gen) if_expr(node ast.IfExpr) {
if node.is_comptime {
g.comptime_if(node)
return
}
// For simple if expressions we can use C's `?:`
// `if x > 0 { 1 } else { 2 }` => `(x > 0)? (1) : (2)`
// For if expressions with multiple statements or another if expression inside, it's much

View File

@@ -804,9 +804,6 @@ fn (mut p Parser) top_stmt() ast.Stmt {
p.attributes()
continue
}
.key_asm {
return p.asm_stmt(true)
}
.key_interface {
return p.interface_decl()
}
@@ -867,6 +864,9 @@ fn (mut p Parser) top_stmt() ast.Stmt {
.semicolon {
return p.semicolon_stmt()
}
.key_asm {
return p.asm_stmt(true)
}
else {
return p.other_stmts(ast.empty_stmt)
}
@@ -1032,31 +1032,6 @@ fn (mut p Parser) stmt(is_top_level bool) ast.Stmt {
}
}
}
.key_assert {
p.next()
mut pos := p.tok.pos()
expr := p.expr(0)
pos.update_last_line(p.prev_tok.line_nr)
mut extra := ast.empty_expr
mut extra_pos := p.tok.pos()
if p.tok.kind == .comma {
p.next()
extra_pos = p.tok.pos()
extra = p.expr(0)
// dump(extra)
extra_pos = extra_pos.extend(p.tok.pos())
}
return ast.AssertStmt{
expr: expr
extra: extra
extra_pos: extra_pos
pos: pos.extend(p.tok.pos())
is_used: p.inside_test_file || !p.pref.is_prod
}
}
.key_for {
return p.for_stmt()
}
.name {
if p.peek_tok.kind == .name && p.tok.lit == 'sql' {
return p.sql_stmt()
@@ -1103,14 +1078,17 @@ fn (mut p Parser) stmt(is_top_level bool) ast.Stmt {
}
return p.parse_multi_expr(is_top_level)
}
.key_for {
return p.for_stmt()
}
.comment {
return p.comment_stmt()
}
.key_return {
if p.inside_defer {
return p.error_with_pos('`return` not allowed inside `defer` block', p.tok.pos())
} else {
if !p.inside_defer {
return p.return_stmt()
} else {
return p.error_with_pos('`return` not allowed inside `defer` block', p.tok.pos())
}
}
.dollar {
@@ -1166,10 +1144,30 @@ fn (mut p Parser) stmt(is_top_level bool) ast.Stmt {
.hash {
return p.hash()
}
.key_assert {
p.next()
mut pos := p.tok.pos()
expr := p.expr(0)
pos.update_last_line(p.prev_tok.line_nr)
mut extra := ast.empty_expr
mut extra_pos := p.tok.pos()
if p.tok.kind == .comma {
p.next()
extra_pos = p.tok.pos()
extra = p.expr(0)
// dump(extra)
extra_pos = extra_pos.extend(p.tok.pos())
}
return ast.AssertStmt{
expr: expr
extra: extra
extra_pos: extra_pos
pos: pos.extend(p.tok.pos())
is_used: p.inside_test_file || !p.pref.is_prod
}
}
.key_defer {
if p.inside_defer {
return p.error_with_pos('`defer` blocks cannot be nested', p.tok.pos())
} else {
if !p.inside_defer {
p.next()
spos := p.tok.pos()
p.inside_defer = true
@@ -1181,6 +1179,8 @@ fn (mut p Parser) stmt(is_top_level bool) ast.Stmt {
defer_vars: p.defer_vars.clone()
pos: spos.extend_with_last_line(p.tok.pos(), p.prev_tok.line_nr)
}
} else {
return p.error_with_pos('`defer` blocks cannot be nested', p.tok.pos())
}
}
.key_go, .key_spawn {

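Besides the arm reordering, several if/else pairs in the parser hunks above are flipped so the common case is tested first and the error path moves into the else branch (the .key_return and .key_defer handling); the Gen.expr hunks earlier do the same for ast.IfExpr and ast.InfixExpr. A hypothetical before/after pair in the same shape; the handle_return name, the bool parameter, and returning strings instead of AST nodes are simplifications for the sketch.

// before: the rare error case is the first branch
fn handle_return_old(inside_defer bool) string {
	if inside_defer {
		return 'error: `return` not allowed inside `defer` block'
	} else {
		return 'return statement'
	}
}

// after: the common case is the first branch, the error path is the else
fn handle_return_new(inside_defer bool) string {
	if !inside_defer {
		return 'return statement'
	} else {
		return 'error: `return` not allowed inside `defer` block'
	}
}

fn main() {
	println(handle_return_new(false)) // return statement
	println(handle_return_new(true)) // error: `return` not allowed inside `defer` block
}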
View File

@@ -623,11 +623,9 @@ pub fn (mut s Scanner) scan() token.Token {
if cidx >= s.all_tokens.len || s.should_abort {
return s.end_of_file()
}
if s.all_tokens[cidx].kind == .comment {
if !s.should_parse_comment() {
if s.all_tokens[cidx].kind == .comment && !s.should_parse_comment() {
continue
}
}
return s.all_tokens[cidx]
}
return s.new_eof_token()

View File

@@ -25,10 +25,7 @@ pub mut:
@[direct_array_access]
pub fn (km &KeywordsMatcherTrie) find(word string) int {
wlen := word.len
if wlen < km.min_len {
return -1
}
if wlen > km.max_len {
if wlen < km.min_len || wlen > km.max_len {
return -1
}
node := km.nodes[wlen]