qiniu / logkit
Showing 1 of 2 files from the diff.
Other files ignored by Codecov

@@ -29,6 +29,11 @@
Loading
29 29
	ValidateJsonRawMessage: true,
30 30
}.Froze()
31 31
32 +
// Package-level configuration captured during parser construction and read
// again by Parser.parse when the header must be derived from the first data
// line.
//
// NOTE(review): these are mutable package-level variables, assigned with `=`
// in the constructor and shared by every Parser instance in this package.
// If more than one CSV parser is created (or parsers run concurrently), the
// most recently constructed parser silently overwrites the labels and
// splitter key of the others. Consider storing both as Parser fields
// instead — TODO confirm against the constructor and parse() call sites.
var (
	labelList          []string
	containSplitterKey string
)
36 +
32 37
type Parser struct {
33 38
	name                 string
34 39
	schema               []field
@@ -44,6 +49,7 @@
Loading
44 49
	numRoutine           int
45 50
	keepRawData          bool
46 51
	containSplitterIndex int
52 +
	hasHeader            bool
47 53
}
48 54
49 55
type field struct {
@@ -61,36 +67,6 @@
Loading
61 67
	name, _ := c.GetStringOr(KeyParserName, "")
62 68
	splitter, _ := c.GetStringOr(KeyCSVSplitter, "\t")
63 69
64 -
	schema, err := c.GetString(KeyCSVSchema)
65 -
	if err != nil {
66 -
		return nil, err
67 -
	}
68 -
	timeZoneOffsetRaw, _ := c.GetStringOr(KeyTimeZoneOffset, "")
69 -
	timeZoneOffset := ParseTimeZoneOffset(timeZoneOffsetRaw)
70 -
	isAutoRename, _ := c.GetBoolOr(KeyCSVAutoRename, false)
71 -
72 -
	fieldList, err := parseSchemaFieldList(schema)
73 -
	if err != nil {
74 -
		return nil, err
75 -
	}
76 -
	fields, err := parseSchemaFields(fieldList)
77 -
	if err != nil {
78 -
		return nil, err
79 -
	}
80 -
	nameMap := map[string]struct{}{}
81 -
	for _, newField := range fields {
82 -
		_, exist := nameMap[newField.name]
83 -
		if exist {
84 -
			return nil, errors.New("column conf error: duplicated column " + newField.name)
85 -
		}
86 -
		nameMap[newField.name] = struct{}{}
87 -
	}
88 -
	labelList, _ := c.GetStringListOr(KeyLabels, []string{})
89 -
	if len(labelList) < 1 {
90 -
		labelList, _ = c.GetStringListOr(KeyCSVLabels, []string{}) //向前兼容老的配置
91 -
	}
92 -
	labels := GetGrokLabels(labelList, nameMap)
93 -
94 70
	disableRecordErrData, _ := c.GetBoolOr(KeyDisableRecordErrData, false)
95 71
96 72
	allowNotMatch, _ := c.GetBoolOr(KeyCSVAllowNoMatch, false)
@@ -101,17 +77,33 @@
Loading
101 77
	allmoreStartNumber, _ := c.GetIntOr(KeyCSVAllowMoreStartNum, 0)
102 78
	ignoreInvalid, _ := c.GetBoolOr(KeyCSVIgnoreInvalidField, false)
103 79
	keepRawData, _ := c.GetBoolOr(KeyKeepRawData, false)
104 -
	containSplitterKey, _ := c.GetStringOr(KeyCSVContainSplitterKey, "")
80 +
81 +
	timeZoneOffsetRaw, _ := c.GetStringOr(KeyTimeZoneOffset, "")
82 +
	timeZoneOffset := ParseTimeZoneOffset(timeZoneOffsetRaw)
83 +
	isAutoRename, _ := c.GetBoolOr(KeyCSVAutoRename, false)
84 +
	containSplitterKey, _ = c.GetStringOr(KeyCSVContainSplitterKey, "")
105 85
	containSplitterIndex := -1
106 -
	if containSplitterKey != "" {
107 -
		for index, f := range fields {
108 -
			if f.name == containSplitterKey {
109 -
				containSplitterIndex = index
110 -
				break
111 -
			}
112 -
		}
113 -
		if containSplitterIndex == -1 {
114 -
			return nil, errors.New("containSplitterKey:" + containSplitterKey + " not exists in column")
86 +
87 +
	labelList, _ = c.GetStringListOr(KeyLabels, []string{})
88 +
	if len(labelList) < 1 {
89 +
		labelList, _ = c.GetStringListOr(KeyCSVLabels, []string{}) //向前兼容老的配置
90 +
	}
91 +
92 +
	// 预先根据schema的长度和获取配置状态判断,len=0或者ErrConfMissingKey 使用列表第一行作为表头;否则 使用schema作为表头;
93 +
	// 使用第一行作为表头时,会造成数据类型的不明确;暂时使用string类型;
94 +
	hasHeader := true
95 +
	fields := make([]field, 0, 20)
96 +
	labels := make([]GrokLabel, 0, 20)
97 +
	schema, err := c.GetString(KeyCSVSchema)
98 +
	if err != nil || len(schema) == 0 {
99 +
		hasHeader = false
100 +
	}
101 +
102 +
	if hasHeader {
103 +
		// 头部处理
104 +
		fields, containSplitterIndex, labels, err = checkHeader(schema, labelList, containSplitterKey, splitter, hasHeader)
105 +
		if err != nil {
106 +
			return nil, err
115 107
		}
116 108
	}
117 109
@@ -134,9 +126,66 @@
Loading
134 126
		numRoutine:           numRoutine,
135 127
		keepRawData:          keepRawData,
136 128
		containSplitterIndex: containSplitterIndex,
129 +
		hasHeader:            hasHeader,
137 130
	}, nil
138 131
}
139 132
133 +
func checkHeader(schema string, labelList []string, containSplitterKey string, delim string,
134 +
	hasHeader bool) (fields []field, containSplitterIndex int, labels []GrokLabel, err error) {
135 +
	// 包装头部处理
136 +
	containSplitterIndex = -1
137 +
	// 用户设置的表头处理
138 +
	if hasHeader {
139 +
		fieldList, err := parseSchemaFieldList(schema)
140 +
		if err != nil {
141 +
			return nil, containSplitterIndex, nil, err
142 +
		}
143 +
		fields, err = parseSchemaFields(fieldList)
144 +
		if err != nil {
145 +
			return nil, containSplitterIndex, nil, err
146 +
		}
147 +
	} else {
148 +
		// 从第一行获取的表头处理
149 +
		fieldList := strings.Split(strings.TrimSpace(schema), delim)
150 +
		if len(fieldList) == 0 {
151 +
			return nil, containSplitterIndex, nil, fmt.Errorf("cannot parsed csv header, "+
152 +
				"hasHeader is %v, the csv first line is %v, the delim is %v", hasHeader, schema, delim)
153 +
		}
154 +
		fields = make([]field, len(fieldList))
155 +
		for i := 0; i < len(fieldList); i++ {
156 +
			fields[i], err = newCsvField(strings.TrimSpace(fieldList[i]), "string")
157 +
			if err != nil {
158 +
				return nil, containSplitterIndex, nil, err
159 +
			}
160 +
		}
161 +
	}
162 +
163 +
	nameMap := map[string]struct{}{}
164 +
	for _, newField := range fields {
165 +
		_, exist := nameMap[newField.name]
166 +
		if exist {
167 +
			return nil, containSplitterIndex, nil, fmt.Errorf("column conf error: duplicated column %s", newField.name)
168 +
		}
169 +
		nameMap[newField.name] = struct{}{}
170 +
	}
171 +
	// 合并labelList 和 schema
172 +
	labels = GetGrokLabels(labelList, nameMap)
173 +
174 +
	// 字段名是分隔符;
175 +
	if containSplitterKey != "" {
176 +
		for index, f := range fields {
177 +
			if f.name == containSplitterKey {
178 +
				containSplitterIndex = index
179 +
				break
180 +
			}
181 +
		}
182 +
		if containSplitterIndex == -1 {
183 +
			return nil, containSplitterIndex, nil, fmt.Errorf("containSplitterKey: %s not exists in column", containSplitterKey)
184 +
		}
185 +
	}
186 +
	return
187 +
}
188 +
140 189
func parseSchemaFieldList(schema string) (fieldList []string, err error) {
141 190
	fieldList = make([]string, 0)
142 191
	schema = strings.TrimSpace(schema)
@@ -437,6 +486,18 @@
Loading
437 486
}
438 487
439 488
func (p *Parser) parse(line string) (d Data, err error) {
489 +
490 +
	// 1.判断是否使用schema; 针对第一行转换为表头;
491 +
	if !p.hasHeader {
492 +
		// 转换表头
493 +
		p.schema, p.containSplitterIndex, p.labels, err = checkHeader(line, labelList, containSplitterKey, p.delim, p.hasHeader)
494 +
		if err != nil {
495 +
			return nil, err
496 +
		}
497 +
		p.hasHeader = true
498 +
		return nil, nil
499 +
	}
500 +
440 501
	d = make(Data)
441 502
	parts := strings.Split(line, p.delim)
442 503
	partsLength := len(parts)
@@ -592,6 +653,10 @@
Loading
592 653
			continue
593 654
		}
594 655
		if len(parseResult.Data) < 1 { //数据为空时不发送
656 +
			if p.hasHeader {
657 +
				// 忽略第一次数据
658 +
				continue
659 +
			}
595 660
			se.LastError = "parsed no data by line " + parseResult.Line
596 661
			se.AddErrors()
597 662
			continue
Files Coverage
conf 97.67%
mgr 66.28%
parser 87.32%
queue 74.13%
rateio 95.74%
reader 47.81%
router 54.13%
samples 100.00%
sender 44.94%
transforms 55.85%
utils 57.20%
audit/audit.go 73.63%
cleaner/cleaner.go 58.82%
cli/upgrade.go 47.56%
logkit.go 36.31%
self/logrunner.go 53.88%
times/times.go 91.30%
Project Totals (138 files) 57.05%
3948.1
TRAVIS_OS_NAME=linux
1.12.9=.12.9

No yaml found.

Create your codecov.yml to customize your Codecov experience

Sunburst
The inner-most circle is the entire project; moving away from the center are folders and, finally, a single file. The size and color of each slice represent the number of statements and the coverage, respectively.
Icicle
The top section represents the entire project, proceeding with folders and finally individual files. The size and color of each slice represent the number of statements and the coverage, respectively.
Grid
Each block represents a single file in the project. The size and color of each block represent the number of statements and the coverage, respectively.
Loading