Skip to content

Commit dd87cac

Browse files
NullVoxPopuli and claude committed
Fix corrupted files from git checkout
Co-Authored-By: Claude Opus 4.6 (1M context) <[email protected]>
1 parent af42b50 commit dd87cac

3 files changed

Lines changed: 10 additions & 420 deletions

File tree

src/parser/hbs-parser.js

Lines changed: 2 additions & 7 deletions
Original file line numberDiff line numberDiff line change
@@ -1,6 +1,5 @@
11
import * as eslintScope from 'eslint-scope';
2-
import DocumentLines from '../utils/document.js';
3-
import { processGlimmerTemplate, buildGlimmerVisitorKeys } from './transforms.js';
2+
import { toTree, buildGlimmerVisitorKeys, DocumentLines } from 'ember-estree';
43

54
// Constant: Program + all Glimmer node types. Computed once at module load.
65
const hbsVisitorKeys = { Program: ['body'], ...buildGlimmerVisitorKeys() };
@@ -28,11 +27,7 @@ export function parseForESLint(code, options) {
2827

2928
let result;
3029
try {
31-
result = processGlimmerTemplate({
32-
templateContent: code,
33-
codeLines,
34-
templateRange: [0, code.length],
35-
});
30+
result = toTree(code, { templateOnly: true });
3631
} catch (e) {
3732
// Transform glimmer parse error to ESLint-compatible error
3833
const loc = e.location || (e.hash && e.hash.loc);

src/parser/transforms.js

Lines changed: 8 additions & 311 deletions
Original file line numberDiff line numberDiff line change
@@ -1,12 +1,7 @@
11
import { createRequire } from 'node:module';
22
import ContentTag from 'content-tag';
3-
import {
4-
visitorKeys as glimmerVisitorKeys,
5-
traverse as glimmerTraverse,
6-
preprocess as glimmerPreprocess,
7-
isKeyword as glimmerIsKeyword,
8-
} from '@glimmer/syntax';
9-
import DocumentLines from '../utils/document.js';
3+
import { isKeyword as glimmerIsKeyword } from '@glimmer/syntax';
4+
import { toTree, buildGlimmerVisitorKeys, DocumentLines } from 'ember-estree';
105
import { Reference, Scope, Variable, Definition } from 'eslint-scope';
116
import htmlTags from 'html-tags';
127
import svgTags from 'svg-tags';
@@ -104,28 +99,6 @@ function registerNodeInScope(node, scope, variable) {
10499
scope.references.push(ref);
105100
}
106101

107-
/**
108-
* Builds the complete Glimmer visitor keys map with "Glimmer" prefix and
109-
* additional keys needed for traversal (blockParamNodes, parts, etc).
110-
* Result is cached since glimmerVisitorKeys is a constant.
111-
* @return {object}
112-
*/
113-
let _cachedGlimmerVisitorKeys = null;
114-
function buildGlimmerVisitorKeys() {
115-
if (_cachedGlimmerVisitorKeys) return _cachedGlimmerVisitorKeys;
116-
const keys = {};
117-
for (const [k, v] of Object.entries(glimmerVisitorKeys)) {
118-
keys[`Glimmer${k}`] = [...v];
119-
}
120-
if (!keys.GlimmerElementNode.includes('blockParamNodes')) {
121-
keys.GlimmerElementNode.push('blockParamNodes', 'parts');
122-
}
123-
keys.GlimmerProgram = ['body', 'blockParamNodes'];
124-
keys.GlimmerTemplate = ['body'];
125-
_cachedGlimmerVisitorKeys = keys;
126-
return keys;
127-
}
128-
129102
/**
130103
* traverses all nodes using the {visitorKeys} calling the callback function, visitor
131104
* @param visitorKeys
@@ -188,283 +161,6 @@ function isUpperCase(char) {
188161
return char.toUpperCase() === char;
189162
}
190163

191-
function isAlphaNumeric(code) {
192-
return !(
193-
!(code > 47 && code < 58) && // numeric (0-9)
194-
!(code > 64 && code < 91) && // upper alpha (A-Z)
195-
!(code > 96 && code < 123)
196-
);
197-
}
198-
199-
function isWhiteSpaceCode(code) {
200-
return (
201-
code === 32 /* space */ ||
202-
code === 9 /* tab */ ||
203-
code === 13 /* carriageReturn */ ||
204-
code === 10 /* lineFeed */ ||
205-
code === 11 /* verticalTab */
206-
);
207-
}
208-
209-
/**
210-
* simple tokenizer for templates, just splits it up into words and punctuators
211-
* @param template {string}
212-
* @param startOffset {number}
213-
* @param doc {DocumentLines}
214-
* @return {Token[]}
215-
*/
216-
function tokenize(template, doc, startOffset) {
217-
const tokens = [];
218-
let wordStart = -1;
219-
function pushToken(value, type, range) {
220-
const t = {
221-
type,
222-
value,
223-
range,
224-
start: range[0],
225-
end: range[1],
226-
loc: {
227-
start: { ...doc.offsetToPosition(range[0]), index: range[0] },
228-
end: { ...doc.offsetToPosition(range[1]), index: range[1] },
229-
},
230-
};
231-
tokens.push(t);
232-
}
233-
for (let i = 0; i < template.length; i++) {
234-
const code = template.charCodeAt(i);
235-
if (isAlphaNumeric(code)) {
236-
if (wordStart < 0) {
237-
wordStart = i;
238-
}
239-
} else {
240-
if (wordStart >= 0) {
241-
pushToken(template.slice(wordStart, i), 'word', [startOffset + wordStart, startOffset + i]);
242-
wordStart = -1;
243-
}
244-
if (!isWhiteSpaceCode(code)) {
245-
pushToken(template[i], 'Punctuator', [startOffset + i, startOffset + i + 1]);
246-
}
247-
}
248-
}
249-
if (wordStart >= 0) {
250-
pushToken(template.slice(wordStart), 'word', [
251-
startOffset + wordStart,
252-
startOffset + template.length,
253-
]);
254-
}
255-
return tokens;
256-
}
257-
258-
/**
259-
* Traverses a Glimmer AST, sets parent references, and categorizes nodes.
260-
* @param {object} ast
261-
* @return {{ allNodes: object[], comments: object[], textNodes: object[], emptyTextNodes: object[] }}
262-
*/
263-
function collectNodes(ast) {
264-
const allNodes = [];
265-
const comments = [];
266-
const textNodes = [];
267-
const emptyTextNodes = [];
268-
269-
glimmerTraverse(ast, {
270-
All(node, path) {
271-
node.parent = path.parentNode;
272-
allNodes.push(node);
273-
if (node.type === 'CommentStatement' || node.type === 'MustacheCommentStatement') {
274-
comments.push(node);
275-
}
276-
if (node.type === 'TextNode') {
277-
node.value = node.chars;
278-
if (node.value.trim().length !== 0 || (node.parent && node.parent.type === 'AttrNode')) {
279-
textNodes.push(node);
280-
} else {
281-
emptyTextNodes.push(node);
282-
}
283-
}
284-
},
285-
});
286-
287-
return { allNodes, comments, textNodes, emptyTextNodes };
288-
}
289-
290-
/**
291-
* Removes nodes from their parent's children/body/parts arrays.
292-
* @param {object[]} nodes
293-
*/
294-
function removeFromParent(nodes) {
295-
for (const node of nodes) {
296-
const children =
297-
(node.parent && (node.parent.children || node.parent.body || node.parent.parts)) || [];
298-
const idx = children.indexOf(node);
299-
if (idx >= 0) {
300-
children.splice(idx, 1);
301-
}
302-
}
303-
}
304-
305-
/**
306-
* Builds the final token stream by filtering out tokens covered by comments
307-
* or text nodes, then merging text nodes back in sorted order.
308-
* @param {object[]} rawTokens
309-
* @param {object[]} comments
310-
* @param {object[]} textNodes
311-
* @return {object[]}
312-
*/
313-
function buildTokenStream(rawTokens, comments, textNodes) {
314-
// Build sorted interval arrays for O(log n) exclusion checks
315-
const commentIntervals = comments.map((c) => c.range).sort((a, b) => a[0] - b[0]);
316-
const textNodeIntervals = textNodes.map((t) => t.range).sort((a, b) => a[0] - b[0]);
317-
318-
/**
319-
* Binary-search: is the token's range fully covered by any interval in `intervals`?
320-
* Intervals must be sorted by start offset.
321-
* @param {number[]} tokenRange
322-
* @param {number[][]} intervals
323-
*/
324-
function isCovered(tokenRange, intervals) {
325-
let lo = 0;
326-
let hi = intervals.length - 1;
327-
while (lo <= hi) {
328-
const mid = (lo + hi) >> 1;
329-
const iv = intervals[mid];
330-
if (iv[0] <= tokenRange[0] && iv[1] >= tokenRange[1]) {
331-
return true;
332-
}
333-
if (iv[0] > tokenRange[0]) {
334-
hi = mid - 1;
335-
} else {
336-
lo = mid + 1;
337-
}
338-
}
339-
return false;
340-
}
341-
342-
// Single-pass filter: drop tokens covered by a comment or text node
343-
const filteredTokens = rawTokens.filter(
344-
(t) => !isCovered(t.range, commentIntervals) && !isCovered(t.range, textNodeIntervals)
345-
);
346-
347-
// Merge text nodes (already sorted by position from the AST) into filteredTokens
348-
// using a single linear merge pass instead of repeated splice calls.
349-
const sortedTextNodes = [...textNodes].sort((a, b) => a.range[0] - b.range[0]);
350-
const result = [];
351-
let ti = 0;
352-
for (const token of filteredTokens) {
353-
while (ti < sortedTextNodes.length && sortedTextNodes[ti].range[0] < token.range[0]) {
354-
result.push(sortedTextNodes[ti++]);
355-
}
356-
result.push(token);
357-
}
358-
while (ti < sortedTextNodes.length) {
359-
result.push(sortedTextNodes[ti++]);
360-
}
361-
362-
return result;
363-
}
364-
365-
/**
366-
* Parses a Glimmer template and produces a processed AST ready for ESLint.
367-
* Shared between hbs-parser (standalone .hbs files) and gjs/gts parser (embedded templates).
368-
*
369-
* @param {object} options
370-
* @param {string} options.templateContent - The template string to parse with glimmer
371-
* @param {DocumentLines} options.codeLines - DocumentLines for the full source file
372-
* @param {[number, number]} options.templateRange - Range [start, end] for the Template root node
373-
* @param {string} [options.tokenSource] - String to tokenize (defaults to templateContent)
374-
* @return {{ ast: object, comments: object[] }}
375-
*/
376-
function processGlimmerTemplate({ templateContent, codeLines, templateRange, tokenSource }) {
377-
const offset = templateRange[0];
378-
const docLines = new DocumentLines(templateContent);
379-
380-
/** Convert a Glimmer loc to a file-level [start, end] range */
381-
const toFileRange = (loc) => [
382-
offset + docLines.positionToOffset(loc.start),
383-
offset + docLines.positionToOffset(loc.end),
384-
];
385-
/** Convert a file-level range to a file-level loc */
386-
const toFileLoc = (range) => ({
387-
start: codeLines.offsetToPosition(range[0]),
388-
end: codeLines.offsetToPosition(range[1]),
389-
});
390-
391-
const ast = glimmerPreprocess(templateContent, { mode: 'codemod' });
392-
const { allNodes, comments, textNodes, emptyTextNodes } = collectNodes(ast);
393-
394-
// Fix ranges, locs, and prefix types with "Glimmer"
395-
for (const n of allNodes) {
396-
if (n.type === 'PathExpression') {
397-
n.head.range = toFileRange(n.head.loc);
398-
n.head.loc = toFileLoc(n.head.range);
399-
}
400-
401-
n.range = n.type === 'Template' ? [...templateRange] : toFileRange(n.loc);
402-
n.start = n.range[0];
403-
n.end = n.range[1];
404-
n.loc = toFileLoc(n.range);
405-
406-
if (n.type === 'ElementNode') {
407-
n.name = n.tag;
408-
n.parts = [n.path.head].map((p) => {
409-
const range = toFileRange(p.loc);
410-
return {
411-
...p,
412-
name: p.original,
413-
parent: n,
414-
type: 'GlimmerElementNodePart',
415-
range,
416-
loc: toFileLoc(range),
417-
};
418-
});
419-
}
420-
421-
if ('blockParams' in n) {
422-
n.params = (n.params || []).map((p) => {
423-
const range = toFileRange(p.loc);
424-
return {
425-
...p,
426-
type: 'BlockParam',
427-
name: p.original,
428-
parent: n,
429-
range,
430-
loc: toFileLoc(range),
431-
};
432-
});
433-
}
434-
435-
// Nullify empty hashes before the type is renamed
436-
if (
437-
(n.type === 'MustacheStatement' ||
438-
n.type === 'BlockStatement' ||
439-
n.type === 'SubExpression') &&
440-
n.hash &&
441-
n.hash.pairs &&
442-
n.hash.pairs.length === 0
443-
) {
444-
n.hash = null;
445-
}
446-
447-
n.type = `Glimmer${n.type}`;
448-
}
449-
450-
// Clean up AST structure
451-
removeFromParent(emptyTextNodes);
452-
removeFromParent(comments);
453-
for (const comment of comments) {
454-
comment.type = 'Block';
455-
}
456-
457-
// Build final token stream
458-
ast.tokens = buildTokenStream(
459-
tokenize(tokenSource || templateContent, codeLines, offset),
460-
comments,
461-
textNodes
462-
);
463-
ast.contents = templateContent;
464-
465-
return { ast, comments };
466-
}
467-
468164
/**
469165
* Preprocesses the template info, parsing the template content to Glimmer AST,
470166
* fixing the offsets and locations of all nodes
@@ -484,8 +180,8 @@ export function preprocessGlimmerTemplates(info, code) {
484180
for (const tpl of templateInfos) {
485181
const template = code.slice(...tpl.utf16Range);
486182

487-
const { ast, comments } = processGlimmerTemplate({
488-
templateContent: template,
183+
const { ast, comments } = toTree(template, {
184+
templateOnly: true,
489185
codeLines,
490186
templateRange: [...tpl.utf16Range],
491187
});
@@ -608,6 +304,7 @@ export function convertAst(result, preprocessedResult, visitorKeys) {
608304
}
609305

610306
if ('blockParams' in node) {
307+
const blockParamNodes = node.blockParamNodes || node.params || [];
611308
const upperScope = findParentScope(result.scopeManager, path);
612309
const scope = result.isTypescript
613310
? new TypescriptScope.BlockScope(result.scopeManager, upperScope, node)
@@ -618,12 +315,12 @@ export function convertAst(result, preprocessedResult, visitorKeys) {
618315
declaredVariables.set(node, vars);
619316
const virtualJSParentNode = {
620317
type: 'FunctionDeclaration',
621-
params: node.params,
318+
params: blockParamNodes,
622319
range: node.range,
623320
loc: node.loc,
624321
parent: path.parent,
625322
};
626-
for (const [i, b] of node.params.entries()) {
323+
for (const [i, b] of blockParamNodes.entries()) {
627324
const v = new Variable(b.name, scope);
628325
v.identifiers.push(b);
629326
scope.variables.push(v);
@@ -785,4 +482,4 @@ export function transformForLint(code, fileName) {
785482
};
786483
}
787484

788-
export { traverse, tokenize, processGlimmerTemplate, buildGlimmerVisitorKeys };
485+
export { traverse, buildGlimmerVisitorKeys };

0 commit comments

Comments (0)