Commit 0529f0c

tweaks
1 parent 216de72 commit 0529f0c

5 files changed: +33 -38 lines changed


src/cli/reporters/pretty.ts

Lines changed: 1 addition & 1 deletion
@@ -61,7 +61,7 @@ export function print({ report, context }: Report, params: CliArguments) {
   if (line_coverage[i] === 0) {
     // Rewind cursor N lines to render N previous lines
     for (let j = i - NUM_LEADING_LINES; j < i; j++) {
-      console.log(styleText('dim', line_number(j)), styleText('dim', lines[j]!))
+      console.log(styleText('dim', line_number(j)), styleText('dim', lines[j] || ''))
     }
     // Render uncovered lines while increasing cursor until reaching next covered block
     while (line_coverage[i] === 0) {
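
The only change in this file swaps a non-null assertion for an empty-string fallback. Presumably the rewind loop can start before index 0 when fewer than NUM_LEADING_LINES lines precede the uncovered block, in which case lines[j] is undefined. A minimal standalone sketch of the difference, not part of the commit:

const lines: string[] = ['a {}']
const j = -1 // index before the start of the file
console.log(lines[j]!)      // prints the literal string "undefined"
console.log(lines[j] || '') // prints an empty line instead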

src/decuplicate.ts

Lines changed: 2 additions & 2 deletions
@@ -6,7 +6,7 @@ import type { Coverage, Range } from './parse-coverage.ts'
  * - if a duplicate stylesheet enters the room, we add it's ranges to the existing stylesheet's ranges
  * - only bytes of deduplicated stylesheets are counted
  */
-export function deduplicate_entries(entries: Coverage[]): Map<Coverage['text'], Pick<Coverage, 'ranges' | 'url'>> {
+export function deduplicate_entries(entries: Coverage[]): Coverage[] {
   let checked_stylesheets = new Map<string, { url: string; ranges: Range[] }>()
 
   for (let entry of entries) {
@@ -36,5 +36,5 @@ export function deduplicate_entries(entries: Coverage[]): Map<Coverage['text'],
     }
   }
 
-  return checked_stylesheets
+  return Array.from(checked_stylesheets, ([text, { url, ranges }]) => ({ text, url, ranges }))
 }
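
For reference, a small standalone sketch of the new return shape (the Coverage and Range types here are assumed to match parse-coverage.ts): the internal Map keyed by stylesheet text is flattened back into plain Coverage objects, so callers iterate an array instead of Map entries.

type Range = { start: number; end: number }
type Coverage = { url: string; text: string; ranges: Range[] }

// Internal accumulator, keyed by stylesheet text so duplicates collapse onto one entry
let checked_stylesheets = new Map<string, { url: string; ranges: Range[] }>([
  ['a {}', { url: 'example.com', ranges: [{ start: 0, end: 4 }] }],
])

// Same conversion as the new return statement above
let deduplicated: Coverage[] = Array.from(checked_stylesheets, ([text, { url, ranges }]) => ({ text, url, ranges }))
// -> [{ text: 'a {}', url: 'example.com', ranges: [{ start: 0, end: 4 }] }]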

src/deduplicate.test.ts

Lines changed: 16 additions & 22 deletions
@@ -7,16 +7,16 @@ test('handles a single entry', () => {
     ranges: [{ start: 0, end: 4 }],
     url: 'example.com',
   }
-  expect(deduplicate_entries([entry])).toEqual(new Map([[entry.text, { url: entry.url, ranges: entry.ranges }]]))
+  expect(deduplicate_entries([entry])).toEqual([entry])
 })
 
-test('deduplicats a simple duplicate entry', () => {
+test('deduplicates a simple duplicate entry', () => {
   let entry = {
     text: 'a {}',
     ranges: [{ start: 0, end: 4 }],
     url: 'example.com',
   }
-  expect(deduplicate_entries([entry, entry])).toEqual(new Map([[entry.text, { url: entry.url, ranges: entry.ranges }]]))
+  expect(deduplicate_entries([entry, entry])).toEqual([entry])
 })
 
 test('merges two identical texts with different URLs and identical ranges', () => {
@@ -33,7 +33,7 @@ test('merges two identical texts with different URLs and identical ranges', () =
     },
   ]
   let first = entries.at(0)!
-  expect(deduplicate_entries(entries)).toEqual(new Map([[first.text, { url: first.url, ranges: first.ranges }]]))
+  expect(deduplicate_entries(entries)).toEqual([{ text: first.text, url: first.url, ranges: first.ranges }])
 })
 
 test('merges different ranges on identical CSS, different URLs', () => {
@@ -50,9 +50,7 @@ test('merges different ranges on identical CSS, different URLs', () => {
     },
   ]
   let first = entries.at(0)!
-  expect(deduplicate_entries(entries)).toEqual(
-    new Map([[first.text, { url: first.url, ranges: [first.ranges[0], entries[1]!.ranges[0]] }]]),
-  )
+  expect(deduplicate_entries(entries)).toEqual([{ text: first.text, url: first.url, ranges: [first.ranges[0], entries[1]!.ranges[0]] }])
 })
 
 test('merges different ranges on identical CSS, identical URLs', () => {
@@ -68,9 +66,9 @@ test('merges different ranges on identical CSS, identical URLs', () => {
       url: 'example.com',
     },
   ]
-  expect(deduplicate_entries(entries)).toEqual(
-    new Map([[entries[0]!.text, { url: entries[0]!.url, ranges: [entries[0]!.ranges[0], entries[1]!.ranges[0]] }]]),
-  )
+  expect(deduplicate_entries(entries)).toEqual([
+    { text: entries[0]!.text, url: entries[0]!.url, ranges: [entries[0]!.ranges[0], entries[1]!.ranges[0]] },
+  ])
 })
 
 test('does not merge different CSS with different URLs and identical ranges', () => {
@@ -86,12 +84,10 @@ test('does not merge different CSS with different URLs and identical ranges', ()
       url: 'example.com/b',
     },
   ]
-  expect(deduplicate_entries(entries)).toEqual(
-    new Map([
-      [entries[0]!.text, { url: entries[0]!.url, ranges: entries[0]!.ranges }],
-      [entries[1]!.text, { url: entries[1]!.url, ranges: entries[1]!.ranges }],
-    ]),
-  )
+  expect(deduplicate_entries(entries)).toEqual([
+    { text: entries[0]!.text, url: entries[0]!.url, ranges: entries[0]!.ranges },
+    { text: entries[1]!.text, url: entries[1]!.url, ranges: entries[1]!.ranges },
+  ])
 })
 
 test('does not merge different CSS with same URLs and identical ranges', () => {
@@ -107,10 +103,8 @@ test('does not merge different CSS with same URLs and identical ranges', () => {
       url: 'example.com',
     },
   ]
-  expect(deduplicate_entries(entries)).toEqual(
-    new Map([
-      [entries[0]!.text, { url: entries[0]!.url, ranges: entries[0]!.ranges }],
-      [entries[1]!.text, { url: entries[1]!.url, ranges: entries[1]!.ranges }],
-    ]),
-  )
+  expect(deduplicate_entries(entries)).toEqual([
+    { text: entries[0]!.text, url: entries[0]!.url, ranges: entries[0]!.ranges },
+    { text: entries[1]!.text, url: entries[1]!.url, ranges: entries[1]!.ranges },
+  ])
 })

src/index.ts

Lines changed: 4 additions & 4 deletions
@@ -57,12 +57,12 @@ export function calculate_coverage(coverage: Coverage[], parse_html?: Parser): C
     throw new TypeError('No valid coverage data found')
   }
 
-  let filtered_coverage = filter_coverage(coverage, parse_html)
-  let prettified_coverage = prettify(filtered_coverage)
-  let deduplicated = deduplicate_entries(prettified_coverage)
+  let filtered_coverage: Coverage[] = filter_coverage(coverage, parse_html)
+  let prettified_coverage: Coverage[] = prettify(filtered_coverage)
+  let deduplicated: Coverage[] = deduplicate_entries(prettified_coverage)
 
   // Calculate coverage for each individual stylesheet we found
-  let coverage_per_stylesheet = Array.from(deduplicated).map(([text, { url, ranges }]) => {
+  let coverage_per_stylesheet = deduplicated.map(({ text, url, ranges }) => {
     function is_line_covered(line: string, start_offset: number) {
       let end = start_offset + line.length
       let next_offset = end + 1 // account for newline character

src/prettify.ts

Lines changed: 10 additions & 9 deletions
@@ -3,18 +3,19 @@ import type { Range, Coverage } from './parse-coverage.ts'
 // css-tree tokens: https://github.com/csstree/csstree/blob/be5ea1257009960c04cccdb58bb327263e27e3b3/lib/tokenizer/types.js
 import { tokenize, tokenTypes } from 'css-tree/tokenizer'
 
+let irrelevant_tokens: Set<number> = new Set([
+  tokenTypes.EOF,
+  tokenTypes.BadString,
+  tokenTypes.BadUrl,
+  tokenTypes.WhiteSpace,
+  tokenTypes.Semicolon,
+  tokenTypes.Comment,
+  tokenTypes.Colon,
+])
+
 export function prettify(coverage: Coverage[]): Coverage[] {
   return coverage.map(({ url, text, ranges }) => {
     let formatted = format(text)
-    let irrelevant_tokens: Set<number> = new Set([
-      tokenTypes.EOF,
-      tokenTypes.BadString,
-      tokenTypes.BadUrl,
-      tokenTypes.WhiteSpace,
-      tokenTypes.Semicolon,
-      tokenTypes.Comment,
-      tokenTypes.Colon,
-    ])
 
     // Initialize the ranges with an empty array of token indexes
     let ext_ranges: (Range & { tokens: number[] })[] = ranges.map(({ start, end }) => ({ start, end, tokens: [] }))
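
Hoisting the set of irrelevant token types to module scope means it is allocated once at import time rather than once per stylesheet inside the .map() callback; the contents are unchanged. A tiny sketch of the pattern with made-up names and values (hypothetical, not the project's code):

// Built once at module load instead of on every call
const SKIPPED = new Set<number>([0, 9, 13])

function keep_relevant(token_types: number[]): number[] {
  // Before such a change, a Set like SKIPPED would be re-created here on each call
  return token_types.filter((type) => !SKIPPED.has(type))
}

keep_relevant([0, 1, 9, 25]) // -> [1, 25]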
