This file is indexed.

/usr/share/gocode/src/github.com/alecthomas/chroma/lexer_test.go is part of the package golang-github-alecthomas-chroma-dev, version 0.4.0+git20180402.51d250f-1.

This file is owned by root:root, with mode 0o644.

The actual contents of the file can be viewed below.

package chroma

import (
	"testing"

	"github.com/alecthomas/assert"
)

// TestTokenTypeClassifiers checks token-type category and sub-category
// membership and the String() representation of a token type.
func TestTokenTypeClassifiers(t *testing.T) {
	assert.True(t, GenericDeleted.InCategory(Generic))
	assert.True(t, LiteralStringBacktick.InSubCategory(String))
	assert.Equal(t, LiteralStringBacktick.String(), "LiteralStringBacktick")
}

// TestSimpleLexer builds a minimal INI lexer from regex rules and checks
// the token stream it produces for a small input.
func TestSimpleLexer(t *testing.T) {
	lexer, err := NewLexer(
		&Config{
			Name:      "INI",
			Aliases:   []string{"ini", "cfg"},
			Filenames: []string{"*.ini", "*.cfg"},
		},
		map[string][]Rule{
			"root": {
				{`\s+`, Whitespace, nil},
				{`;.*?$`, Comment, nil},
				{`\[.*?\]$`, Keyword, nil},
				{`(.*?)(\s*)(=)(\s*)(.*?)$`, ByGroups(Name, Whitespace, Operator, Whitespace, String), nil},
			},
		},
	)
	assert.NoError(t, err)
	actual, err := Tokenise(lexer, nil, `
	; this is a comment
	[section]
	a = 10
`)
	assert.NoError(t, err)
	expected := []*Token{
		{Whitespace, "\n\t"},
		{Comment, "; this is a comment"},
		{Whitespace, "\n\t"},
		{Keyword, "[section]"},
		{Whitespace, "\n\t"},
		{Name, "a"},
		{Whitespace, " "},
		{Operator, "="},
		{Whitespace, " "},
		{LiteralString, "10"},
		{Whitespace, "\n"},
	}
	assert.Equal(t, expected, actual)
}
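
For context, the Tokenise helper exercised by this test is exported, so the same tokenisation can be driven from outside the chroma package. The following is a minimal, hypothetical sketch, not part of the packaged file above; it assumes the companion lexers subpackage from the same chroma version provides lexers.Get, and that Token exposes Type and Value fields as suggested by the positional literals in the test.

package main

import (
	"fmt"

	"github.com/alecthomas/chroma"
	"github.com/alecthomas/chroma/lexers"
)

func main() {
	// Look up the built-in INI lexer by alias; Get returns nil if
	// no lexer matches the name.
	lexer := lexers.Get("ini")
	if lexer == nil {
		panic("ini lexer not found")
	}

	// A nil *TokeniseOptions uses the default "root" state, as in
	// the test above.
	tokens, err := chroma.Tokenise(lexer, nil, "a = 10\n")
	if err != nil {
		panic(err)
	}
	for _, token := range tokens {
		fmt.Printf("%-20s %q\n", token.Type, token.Value)
	}
}

With /usr/share/gocode on the GOPATH (the path used by Debian-style Go -dev packages), the packaged test file itself can be exercised with: go test github.com/alecthomas/chroma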