Commit 0979798a authored by David Sveningsson

feat(lexer): add attribute key-value delimiter to attribute value token

parent 5c1ef9ea
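In short: the ATTR_VALUE token now captures the key-value delimiter (the "=" plus any surrounding whitespace) as its own entry, so the value and quote move from data[1]/data[2] to data[2]/data[3]. A rough sketch of the resulting token data, as implied by the updated tests below (the labels are illustrative only; the lexer stores the raw regex match array):

type AttrValueData = [
  raw: string,        // full match, e.g. ' = "baz"'
  delimiter: string,  // new: the "=" with surrounding whitespace, e.g. ' = '
  value: string,      // the attribute value, e.g. 'baz'
  quote?: string,     // '"' or "'", absent for unquoted values
];

const quoted: AttrValueData = [' = "baz"', " = ", "baz", '"'];
const unquoted: AttrValueData = ["=baz", "=", "baz"];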
@@ -283,7 +283,10 @@ describe("lexer", () => {
expect(token.next()).toBeToken({ type: TokenType.TAG_OPEN });
expect(token.next()).toBeToken({ type: TokenType.WHITESPACE });
expect(token.next()).toBeToken({ type: TokenType.ATTR_NAME });
expect(token.next()).toBeToken({ type: TokenType.ATTR_VALUE });
expect(token.next()).toBeToken({
type: TokenType.ATTR_VALUE,
data: ['="baz"', "=", "baz", '"'],
});
expect(token.next()).toBeToken({ type: TokenType.TAG_CLOSE });
expect(token.next()).toBeToken({ type: TokenType.EOF });
expect(token.next().done).toBeTruthy();
@@ -297,7 +300,7 @@ describe("lexer", () => {
expect(token.next()).toBeToken({ type: TokenType.ATTR_NAME });
expect(token.next()).toBeToken({
type: TokenType.ATTR_VALUE,
data: [' = "baz"', "baz", '"'],
data: [' = "baz"', " = ", "baz", '"'],
});
expect(token.next()).toBeToken({ type: TokenType.TAG_CLOSE });
expect(token.next()).toBeToken({ type: TokenType.EOF });
@@ -310,7 +313,10 @@ describe("lexer", () => {
expect(token.next()).toBeToken({ type: TokenType.TAG_OPEN });
expect(token.next()).toBeToken({ type: TokenType.WHITESPACE });
expect(token.next()).toBeToken({ type: TokenType.ATTR_NAME });
expect(token.next()).toBeToken({ type: TokenType.ATTR_VALUE });
expect(token.next()).toBeToken({
type: TokenType.ATTR_VALUE,
data: ["='baz'", "=", "baz", "'"],
});
expect(token.next()).toBeToken({ type: TokenType.TAG_CLOSE });
expect(token.next()).toBeToken({ type: TokenType.EOF });
expect(token.next().done).toBeTruthy();
@@ -324,7 +330,7 @@ describe("lexer", () => {
expect(token.next()).toBeToken({ type: TokenType.ATTR_NAME });
expect(token.next()).toBeToken({
type: TokenType.ATTR_VALUE,
data: [" = 'baz'", "baz", "'"],
data: [" = 'baz'", " = ", "baz", "'"],
});
expect(token.next()).toBeToken({ type: TokenType.TAG_CLOSE });
expect(token.next()).toBeToken({ type: TokenType.EOF });
@@ -340,7 +346,7 @@ describe("lexer", () => {
expect(token.next()).toBeToken({ type: TokenType.ATTR_NAME });
expect(token.next()).toBeToken({
type: TokenType.ATTR_VALUE,
data: ["=baz", "baz"],
data: ["=baz", "=", "baz"],
});
expect(token.next()).toBeToken({ type: TokenType.TAG_CLOSE });
expect(token.next()).toBeToken({ type: TokenType.EOF });
@@ -358,7 +364,7 @@ describe("lexer", () => {
});
expect(token.next()).toBeToken({
type: TokenType.ATTR_VALUE,
data: ["=5", "5"],
data: ["=5", "=", "5"],
});
expect(token.next()).toBeToken({ type: TokenType.TAG_CLOSE });
expect(token.next()).toBeToken({ type: TokenType.EOF });
@@ -373,7 +379,7 @@ describe("lexer", () => {
expect(token.next()).toBeToken({ type: TokenType.ATTR_NAME });
expect(token.next()).toBeToken({
type: TokenType.ATTR_VALUE,
data: [" = baz", "baz"],
data: [" = baz", " = ", "baz"],
});
expect(token.next()).toBeToken({ type: TokenType.TAG_CLOSE });
expect(token.next()).toBeToken({ type: TokenType.EOF });
@@ -394,7 +400,7 @@ describe("lexer", () => {
});
expect(token.next()).toBeToken({
type: TokenType.ATTR_VALUE,
data: [`=${char}`, char],
data: [`=${char}`, "=", char],
});
expect(token.next()).toBeToken({ type: TokenType.TAG_CLOSE });
expect(token.next()).toBeToken({ type: TokenType.EOF });
......
@@ -18,9 +18,9 @@ const MATCH_TEXT = /^[^]*?(?=(?:[ \t]*(?:\r\n|\r|\n)|<[^ ]|$))/;
const MATCH_TEMPLATING = /^(?:<%.*?%>|<\?.*?\?>|<\$.*?\$>)/;
const MATCH_TAG_LOOKAHEAD = /^[^]*?(?=<|$)/;
const MATCH_ATTR_START = /^([^\t\r\n\f \/><"'=]+)/; // https://www.w3.org/TR/html/syntax.html#elements-attributes
const MATCH_ATTR_SINGLE = /^\s*=\s*'([^']*?)(')/;
const MATCH_ATTR_DOUBLE = /^\s*=\s*"([^"]*?)(")/;
const MATCH_ATTR_UNQUOTED = /^\s*=\s*([^\t\r\n\f "'<>][^\t\r\n\f <>]*)/;
const MATCH_ATTR_SINGLE = /^(\s*=\s*)'([^']*?)(')/;
const MATCH_ATTR_DOUBLE = /^(\s*=\s*)"([^"]*?)(")/;
const MATCH_ATTR_UNQUOTED = /^(\s*=\s*)([^\t\r\n\f "'<>][^\t\r\n\f <>]*)/;
const MATCH_CDATA_BEGIN = /^<!\[CDATA\[/;
const MATCH_CDATA_END = /^[^]*?]]>/;
const MATCH_SCRIPT_DATA = /^[^]*?(?=<\/script)/;
......
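For reference, the lexer change above simply wraps the delimiter in a capture group, shifting the later groups by one. A quick check of MATCH_ATTR_DOUBLE (copied verbatim from the hunk above) against one of the test inputs:

const MATCH_ATTR_DOUBLE = /^(\s*=\s*)"([^"]*?)(")/;
const match = ' = "baz"'.match(MATCH_ATTR_DOUBLE);
// match => [' = "baz"', ' = ', 'baz', '"']
// i.e. data[1] is now the delimiter, pushing the value to [2] and the quote to [3].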
@@ -361,8 +361,9 @@ export class Parser {
};
if (next && haveValue) {
attrData.value = next.data[1] ?? null;
attrData.quote = next.data[2] ?? null;
const [, , value, quote] = next.data;
attrData.value = value ?? null;
attrData.quote = quote ?? null;
}
/* get callback to process attributes, default is to just return attribute
@@ -418,10 +419,10 @@
* ^^^ ^^^ ^^^ (null) (null)
*/
private getAttributeValueLocation(token?: Token): Location | null {
if (!token || token.type !== TokenType.ATTR_VALUE || token.data[1] === "") {
if (!token || token.type !== TokenType.ATTR_VALUE || token.data[2] === "") {
return null;
}
const quote = token.data[2];
const quote = token.data[3];
if (quote) {
return sliceLocation(token.location, 2, -1);
} else {
......
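Consumers of the token therefore skip the new delimiter entry, which is what the parser change above does with its destructuring. A minimal illustration using the same data shape (values are illustrative only):

const data = [' = "baz"', " = ", "baz", '"'];
const [, , value, quote] = data;
// value === "baz", quote === '"'
// getAttributeValueLocation likewise reads the value from data[2] and the quote from data[3].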