diff --git a/cmd/tools/vdoc/html.v b/cmd/tools/vdoc/html.v
index f98e5571ab..38f52972d3 100644
--- a/cmd/tools/vdoc/html.v
+++ b/cmd/tools/vdoc/html.v
@@ -37,6 +37,11 @@ enum HighlightTokenTyp {
operator
punctuation
string
+ // For string interpolation
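+ // opening_string: the segment before the first ${
+ // string_interp: the $ of a ${} interpolation
+ // partial_string: a segment between two interpolations
+ // closing_string: the segment after the last interpolation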
+ opening_string
+ string_interp
+ partial_string
+ closing_string
symbol
none_
module_
@@ -330,87 +335,166 @@ fn get_src_link(repo_url string, file_name string, line_nr int) string {
return url.str()
}
-fn html_highlight(code string, tb &ast.Table) string {
- builtin := ['bool', 'string', 'i8', 'i16', 'int', 'i64', 'i128', 'byte', 'u16', 'u32', 'u64',
- 'u128', 'rune', 'f32', 'f64', 'int_literal', 'float_literal', 'byteptr', 'voidptr', 'any']
- highlight_code := fn (tok token.Token, typ HighlightTokenTyp) string {
- lit := if typ in [.unone, .operator, .punctuation] {
- tok.kind.str()
- } else if typ == .string {
- "'${tok.lit}'"
- } else if typ == .char {
- '`${tok.lit}`'
- } else if typ == .comment {
- if tok.lit != '' && tok.lit[0] == 1 { '//${tok.lit[1..]}' } else { '//${tok.lit}' }
- } else {
- tok.lit
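+// write_token writes the display form of `tok` to `buf` according to `typ`:
+// operators and punctuation are written via their token kind, string and char
+// literals get their quotes back, and comments are prefixed with `//`.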
+fn write_token(tok token.Token, typ HighlightTokenTyp, mut buf strings.Builder) {
+ match typ {
+ .unone, .operator, .punctuation {
+ buf.write_string(tok.kind.str())
}
- if typ in [.unone, .name] {
- return lit
+ .string_interp {
+ // tok.kind.str() for .str_dollar returns '$2' instead of '$'
+ buf.write_byte(`$`)
+ }
+ .opening_string {
+ buf.write_string("'${tok.lit}")
+ }
+ .closing_string {
+ // A string token that directly follows the expression
+ // inside a string interpolation marks the end of the
+ // interpolated string literal.
+ buf.write_string("${tok.lit}'")
+ }
+ .string {
+ buf.write_string("'${tok.lit}'")
+ }
+ .char {
+ buf.write_string('`${tok.lit}`')
+ }
+ .comment {
+ buf.write_string('//')
+ if tok.lit != '' && tok.lit[0] == 1 {
+ buf.write_string(tok.lit[1..])
+ } else {
+ buf.write_string(tok.lit)
+ }
+ }
+ else {
+ buf.write_string(tok.lit)
}
- return '<span class="token ${typ}">${lit}</span>'
}
+}
+
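+// html_highlight highlights `code` for the HTML output by wrapping each
+// recognised token in an HTML <span> element. Interpolated strings such as
+// 'hi ${name}!' are split into opening/partial/closing string parts, so the
+// expressions inside ${} are highlighted like regular code instead of as
+// part of the string literal.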
+fn html_highlight(code string, tb &ast.Table) string {
mut s := scanner.new_scanner(code, .parse_comments, &pref.Preferences{ output_mode: .silent })
mut tok := s.scan()
mut next_tok := s.scan()
mut buf := strings.new_builder(200)
mut i := 0
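+ // inside_string_interp is true between a str_dollar token and the string
+ // part that closes the interpolation; it drives the special handling below.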
+ mut inside_string_interp := false
for i < code.len {
- if i == tok.pos {
- mut tok_typ := HighlightTokenTyp.unone
- match tok.kind {
- .name {
- if tok.lit in builtin || tb.known_type(tok.lit) {
- tok_typ = .builtin
- } else if next_tok.kind == .lcbr {
- tok_typ = .symbol
- } else if next_tok.kind == .lpar || (!tok.lit[0].is_capital()
- && next_tok.kind == .lt && next_tok.pos == tok.pos + tok.lit.len) {
- tok_typ = .function
- } else {
- tok_typ = .name
- }
- }
- .comment {
- tok_typ = .comment
- }
- .chartoken {
- tok_typ = .char
- }
- .string {
- tok_typ = .string
- }
- .number {
- tok_typ = .number
- }
- .key_true, .key_false {
- tok_typ = .boolean
- }
- .lpar, .lcbr, .rpar, .rcbr, .lsbr, .rsbr, .semicolon, .colon, .comma, .dot,
- .dotdot, .ellipsis {
- tok_typ = .punctuation
- }
- else {
- if token.is_key(tok.lit) || token.is_decl(tok.kind) {
- tok_typ = .keyword
- } else if tok.kind == .decl_assign || tok.kind.is_assign() || tok.is_unary()
- || tok.kind.is_relational() || tok.kind.is_infix() || tok.kind.is_postfix() {
- tok_typ = .operator
- }
- }
- }
- buf.write_string(highlight_code(tok, tok_typ))
- if next_tok.kind != .eof {
- i = tok.pos + tok.len
- tok = next_tok
- next_tok = s.scan()
- } else {
- break
- }
- } else {
+ if i != tok.pos {
+ // Characters not covered by any scanner token
+ // (mostly whitespace) are copied through verbatim.
buf.write_u8(code[i])
i++
+ continue
}
+
+ mut tok_typ := HighlightTokenTyp.unone
+ match tok.kind {
+ .name {
+ if tok.lit in highlight_builtin_types || tb.known_type(tok.lit) {
+ tok_typ = .builtin
+ } else if next_tok.kind == .lcbr {
+ tok_typ = .symbol
+ } else if next_tok.kind == .lpar || (!tok.lit[0].is_capital()
+ && next_tok.kind == .lt && next_tok.pos == tok.pos + tok.lit.len) {
+ tok_typ = .function
+ } else {
+ tok_typ = .name
+ }
+ }
+ .comment {
+ tok_typ = .comment
+ }
+ .chartoken {
+ tok_typ = .char
+ }
+ .str_dollar {
+ tok_typ = .string_interp
+ inside_string_interp = true
+ }
+ .string {
+ if inside_string_interp {
+ if next_tok.kind == .str_dollar {
+ // the " hello " in "${a} hello ${b} world"
+ tok_typ = .partial_string
+ } else {
+ // the " world" in "${a} hello ${b} world"
+ tok_typ = .closing_string
+ }
+
+ // NOTE: Do not reset inside_string_interp yet!
+ // It is still needed below for the special handling
+ // of the closing `}` when generating the output.
+ } else if next_tok.kind == .str_dollar {
+ tok_typ = .opening_string
+ } else {
+ tok_typ = .string
+ }
+ }
+ .number {
+ tok_typ = .number
+ }
+ .key_true, .key_false {
+ tok_typ = .boolean
+ }
+ .lpar, .lcbr, .rpar, .rcbr, .lsbr, .rsbr, .semicolon, .colon, .comma, .dot, .dotdot,
+ .ellipsis {
+ tok_typ = .punctuation
+ }
+ else {
+ if token.is_key(tok.lit) || token.is_decl(tok.kind) {
+ tok_typ = .keyword
+ } else if tok.kind == .decl_assign || tok.kind.is_assign() || tok.is_unary()
+ || tok.kind.is_relational() || tok.kind.is_infix() || tok.kind.is_postfix() {
+ tok_typ = .operator
+ }
+ }
+ }
+
+ if tok_typ in [.unone, .name] {
+ write_token(tok, tok_typ, mut buf)
+ } else {
+ // Special handling for "complex" string literals
+ if tok_typ in [.partial_string, .closing_string] && inside_string_interp {
+ // The closing rcbr is not rendered by the regular token
+ // flow when the string part to the right of the interpolated
+ // expression is not empty, so it is written manually here.
+ // e.g. "${a}.${b}${c}"
+ // expectation: "${a}.${b}${c}"
+ // reality: "${a.${b}${c}"
+ if tok.lit.len != 0 {
+ write_token(token.Token{ kind: .rcbr }, .unone, mut buf)
+ }
+
+ inside_string_interp = false
+ }
+
+ // Highlight the string-related token types as if they
+ // were plain .string tokens.
+ final_tok_typ := match tok_typ {
+ .opening_string, .partial_string, .closing_string { HighlightTokenTyp.string }
+ else { tok_typ }
+ }
+
+ buf.write_string('<span class="token ${final_tok_typ}">')
+ write_token(tok, tok_typ, mut buf)
+ buf.write_string('</span>')
+ }
+
+ if next_tok.kind == .eof {
+ break
+ }
+
+ i = tok.pos + tok.len
+
+ // Step back if the next token starts one character before the
+ // resume position, so that no token is skipped, e.g. in a call
+ // expression with interpolated string literals as arguments.
+ if i - 1 == next_tok.pos {
+ i--
+ }
+
+ tok = next_tok
+ next_tok = s.scan()
}
return buf.str()
}
diff --git a/cmd/tools/vdoc/tests/testdata/output_formats/main.html b/cmd/tools/vdoc/tests/testdata/output_formats/main.html
index d4a63bb161..49ffe33402 100644
--- a/cmd/tools/vdoc/tests/testdata/output_formats/main.html
+++ b/cmd/tools/vdoc/tests/testdata/output_formats/main.html
@@ -28,28 +28,27 @@ secret := 'your-2
token := make_token(secret)
ok := auth_verify(secret, token)
dt := sw.elapsed().microseconds()
-println('token: ${token}')
-println('auth_verify(secret, token): ${ok}')
-println('Elapsed time: ${dt} uS')
-}
+println('token: ${token}')
+println('auth_verify(secret, token): ${ok}')
+println('Elapsed time: ${dt} uS')
+}
-fn make_token(secret string) string {
-header := base64.url_encode(json.encode(JwtHeader{'HS256', 'JWT'}).bytes())
-payload := base64.url_encode(json.encode(JwtPayload{'1234567890', 'John Doe', 1516239022}).bytes())
-signature := base64.url_encode(hmac.new(secret.bytes(), '${header}.${payload}'.bytes(),
-sha256.sum, sha256.block_size))
-jwt := '${header}.${payload}.${signature}'
-return jwt
-}
+fn make_token(secret string) string {
+header := base64.url_encode(json.encode(JwtHeader{'HS256', 'JWT'}).bytes())
+payload := base64.url_encode(json.encode(JwtPayload{'1234567890', 'John Doe', 1516239022}).bytes())
+signature := base64.url_encode(hmac.new(secret.bytes(),'${header}.${payload}'.bytes(),
+sha256.sum, sha256.block_size))
+jwt :='${header}.${payload}.${signature}'
+return jwt
+}
-fn auth_verify(secret string, token string) bool {
-token_split := token.split('.')
-signature_mirror := hmac.new(secret.bytes(), '${token_split[0]}.${token_split[1]}'.bytes(),
-sha256.sum, sha256.block_size)
-signature_from_token := base64.url_decode(token_split[2])
-return hmac.equal(signature_from_token, signature_mirror)
-}
-
+fn auth_verify(secret string, token string) bool {
+token_split := token.split('.')
+signature_mirror := hmac.new(secret.bytes(),'${token_split[0]}.${token_split[1]}'.bytes(),
+sha256.sum, sha256.block_size)
+signature_from_token := base64.url_decode(token_split[2])
+return hmac.equal(signature_from_token, signature_mirror)
+}
diff --git a/cmd/tools/vdoc/utils.v b/cmd/tools/vdoc/utils.v
index 1ab5bfbafb..bcfb8e9d23 100644
--- a/cmd/tools/vdoc/utils.v
+++ b/cmd/tools/vdoc/utils.v
@@ -137,9 +137,11 @@ fn gen_footer_text(d &doc.Doc, include_timestamp bool) string {
return '${footer_text} Generated on: ${time_str}'
}
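+// highlight_builtin_types lists the type names that are highlighted as
+// builtin types by both color_highlight and html_highlight.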
+const highlight_builtin_types = ['bool', 'string', 'i8', 'i16', 'int', 'i64', 'i128', 'isize',
+ 'byte', 'u8', 'u16', 'u32', 'u64', 'usize', 'u128', 'rune', 'f32', 'f64', 'byteptr', 'voidptr',
+ 'any']
+
fn color_highlight(code string, tb &ast.Table) string {
- builtin := ['bool', 'string', 'i8', 'i16', 'int', 'i64', 'i128', 'isize', 'byte', 'u8', 'u16',
- 'u32', 'u64', 'usize', 'u128', 'rune', 'f32', 'f64', 'byteptr', 'voidptr', 'any']
highlight_code := fn (tok token.Token, typ HighlightTokenTyp) string {
mut lit := ''
match typ {
@@ -207,14 +209,15 @@ fn color_highlight(code string, tb &ast.Table) string {
mut tok_typ := HighlightTokenTyp.unone
match tok.kind {
.name {
- if (tok.lit in builtin || tb.known_type(tok.lit))
+ if (tok.lit in highlight_builtin_types || tb.known_type(tok.lit))
&& (next_tok.kind != .lpar || prev.kind !in [.key_fn, .rpar]) {
tok_typ = .builtin
} else if
(next_tok.kind in [.lcbr, .rpar, .eof, .comma, .pipe, .name, .rcbr, .assign, .key_pub, .key_mut, .pipe, .comma, .comment, .lt, .lsbr]
- && next_tok.lit !in builtin)
+ && next_tok.lit !in highlight_builtin_types)
&& (prev.kind in [.name, .amp, .lcbr, .rsbr, .key_type, .assign, .dot, .not, .question, .rpar, .key_struct, .key_enum, .pipe, .key_interface, .comment, .ellipsis, .comma]
- && prev.lit !in builtin) && ((tok.lit != '' && tok.lit[0].is_capital())
+ && prev.lit !in highlight_builtin_types)
+ && ((tok.lit != '' && tok.lit[0].is_capital())
|| prev_prev.lit in ['C', 'JS']) {
tok_typ = .symbol
} else if tok.lit[0].is_capital() && prev.kind == .lpar
diff --git a/vlib/v/doc/doc.v b/vlib/v/doc/doc.v
index 5a4a29a4de..8bab33decb 100644
--- a/vlib/v/doc/doc.v
+++ b/vlib/v/doc/doc.v
@@ -103,8 +103,8 @@ pub mut:
base_path string
table &ast.Table = ast.new_table()
checker checker.Checker = checker.Checker{
- table: 0
- pref: 0
+ table: unsafe { nil }
+ pref: unsafe { nil }
}
fmt fmt.Fmt
filename string