format: prettify entire project
node_modules/css-tree/cjs/convertor/create.cjs (generated, vendored): 44 lines changed
@@ -3,30 +3,30 @@

const List = require('../utils/List.cjs');

function createConvertor(walk) {
  return {
    fromPlainObject(ast) {
      walk(ast, {
        enter(node) {
          if (node.children && node.children instanceof List.List === false) {
            node.children = new List.List().fromArray(node.children);
          }
        },
      });

      return ast;
    },
    toPlainObject(ast) {
      walk(ast, {
        leave(node) {
          if (node.children && node.children instanceof List.List) {
            node.children = node.children.toArray();
          }
        },
      });

      return ast;
    },
  };
}

exports.createConvertor = createConvertor;
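Note (added for context, not part of the commit): a minimal usage sketch of the convertor above, assuming css-tree's documented top-level fromPlainObject/toPlainObject exports, which are built with createConvertor.

// Minimal sketch: converting List-based children to plain arrays and back.
const csstree = require('css-tree');

const ast = csstree.parse('a { color: red }');
const plain = csstree.toPlainObject(ast);        // List children -> plain arrays
console.log(Array.isArray(plain.children));      // true

const restored = csstree.fromPlainObject(plain); // arrays -> List instances again
console.log(Array.isArray(restored.children));   // false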
node_modules/css-tree/cjs/data.cjs (generated, vendored): 137 lines changed
@@ -9,89 +9,102 @@ const mdnSyntaxes = require('mdn-data/css/syntaxes.json');

const extendSyntax = /^\s*\|\s*/;

function preprocessAtrules(dict) {
  const result = Object.create(null);

  for (const atruleName in dict) {
    const atrule = dict[atruleName];
    let descriptors = null;

    if (atrule.descriptors) {
      descriptors = Object.create(null);

      for (const descriptor in atrule.descriptors) {
        descriptors[descriptor] = atrule.descriptors[descriptor].syntax;
      }
    }

    result[atruleName.substr(1)] = {
      prelude:
        atrule.syntax
          .trim()
          .replace(/\{(.|\s)+\}/, '')
          .match(/^@\S+\s+([^;\{]*)/)[1]
          .trim() || null,
      descriptors,
    };
  }

  return result;
}

function patchDictionary(dict, patchDict) {
  const result = {};

  // copy all syntaxes for an original dict
  for (const key in dict) {
    result[key] = dict[key].syntax || dict[key];
  }

  // apply a patch
  for (const key in patchDict) {
    if (key in dict) {
      if (patchDict[key].syntax) {
        result[key] =
          extendSyntax.test(patchDict[key].syntax) ?
            result[key] + ' ' + patchDict[key].syntax.trim()
          : patchDict[key].syntax;
      } else {
        delete result[key];
      }
    } else {
      if (patchDict[key].syntax) {
        result[key] = patchDict[key].syntax.replace(extendSyntax, '');
      }
    }
  }

  return result;
}

function patchAtrules(dict, patchDict) {
  const result = {};

  // copy all syntaxes for an original dict
  for (const key in dict) {
    const patchDescriptors =
      (patchDict[key] && patchDict[key].descriptors) || null;

    result[key] = {
      prelude:
        key in patchDict && 'prelude' in patchDict[key] ?
          patchDict[key].prelude
        : dict[key].prelude || null,
      descriptors: patchDictionary(
        dict[key].descriptors || {},
        patchDescriptors || {}
      ),
    };
  }

  // apply a patch
  for (const key in patchDict) {
    if (!hasOwnProperty.call(dict, key)) {
      result[key] = {
        prelude: patchDict[key].prelude || null,
        descriptors:
          patchDict[key].descriptors &&
          patchDictionary({}, patchDict[key].descriptors),
      };
    }
  }

  return result;
}

const definitions = {
  types: patchDictionary(mdnSyntaxes, dataPatch.types),
  atrules: patchAtrules(preprocessAtrules(mdnAtrules), dataPatch.atrules),
  properties: patchDictionary(mdnProperties, dataPatch.properties),
};

module.exports = definitions;
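Note (added for context, not part of the commit): a standalone, hypothetical sketch of the patch rule used by patchDictionary above; a patch syntax that starts with "|" extends the original definition, anything else replaces it.

const extendSyntax = /^\s*\|\s*/;

function applyPatch(originalSyntax, patchSyntax) {
  // "|"-prefixed patches are appended to the original syntax definition,
  // other patches replace it outright.
  return extendSyntax.test(patchSyntax)
    ? originalSyntax + ' ' + patchSyntax.trim()
    : patchSyntax;
}

console.log(applyPatch('<length>', '| <percentage>')); // "<length> | <percentage>"
console.log(applyPatch('<length>', '<number>'));        // "<number>"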
node_modules/css-tree/cjs/definition-syntax/SyntaxError.cjs (generated, vendored): 25 lines changed
@@ -3,14 +3,23 @@

const createCustomError = require('../utils/create-custom-error.cjs');

function SyntaxError(message, input, offset) {
  return Object.assign(
    createCustomError.createCustomError('SyntaxError', message),
    {
      input,
      offset,
      rawMessage: message,
      message:
        message +
        '\n' +
        ' ' +
        input +
        '\n' +
        '--' +
        new Array((offset || input.length) + 1).join('-') +
        '^',
    }
  );
}

exports.SyntaxError = SyntaxError;
node_modules/css-tree/cjs/definition-syntax/generate.cjs (generated, vendored): 194 lines changed
@@ -1,135 +1,145 @@

'use strict';

function noop(value) {
  return value;
}

function generateMultiplier(multiplier) {
  const { min, max, comma } = multiplier;

  if (min === 0 && max === 0) {
    return comma ? '#?' : '*';
  }

  if (min === 0 && max === 1) {
    return '?';
  }

  if (min === 1 && max === 0) {
    return comma ? '#' : '+';
  }

  if (min === 1 && max === 1) {
    return '';
  }

  return (
    (comma ? '#' : '') +
    (min === max ?
      '{' + min + '}'
    : '{' + min + ',' + (max !== 0 ? max : '') + '}')
  );
}

function generateTypeOpts(node) {
  switch (node.type) {
    case 'Range':
      return (
        ' [' +
        (node.min === null ? '-∞' : node.min) +
        ',' +
        (node.max === null ? '∞' : node.max) +
        ']'
      );

    default:
      throw new Error('Unknown node type `' + node.type + '`');
  }
}

function generateSequence(node, decorate, forceBraces, compact) {
  const combinator =
    node.combinator === ' ' || compact ?
      node.combinator
    : ' ' + node.combinator + ' ';
  const result = node.terms
    .map((term) => internalGenerate(term, decorate, forceBraces, compact))
    .join(combinator);

  if (node.explicit || forceBraces) {
    return (
      (compact || result[0] === ',' ? '[' : '[ ') +
      result +
      (compact ? ']' : ' ]')
    );
  }

  return result;
}

function internalGenerate(node, decorate, forceBraces, compact) {
  let result;

  switch (node.type) {
    case 'Group':
      result =
        generateSequence(node, decorate, forceBraces, compact) +
        (node.disallowEmpty ? '!' : '');
      break;

    case 'Multiplier':
      // return since node is a composition
      return (
        internalGenerate(node.term, decorate, forceBraces, compact) +
        decorate(generateMultiplier(node), node)
      );

    case 'Type':
      result =
        '<' +
        node.name +
        (node.opts ? decorate(generateTypeOpts(node.opts), node.opts) : '') +
        '>';
      break;

    case 'Property':
      result = "<'" + node.name + "'>";
      break;

    case 'Keyword':
      result = node.name;
      break;

    case 'AtKeyword':
      result = '@' + node.name;
      break;

    case 'Function':
      result = node.name + '(';
      break;

    case 'String':
    case 'Token':
      result = node.value;
      break;

    case 'Comma':
      result = ',';
      break;

    default:
      throw new Error('Unknown node type `' + node.type + '`');
  }

  return decorate(result, node);
}

function generate(node, options) {
  let decorate = noop;
  let forceBraces = false;
  let compact = false;

  if (typeof options === 'function') {
    decorate = options;
  } else if (options) {
    forceBraces = Boolean(options.forceBraces);
    compact = Boolean(options.compact);
    if (typeof options.decorate === 'function') {
      decorate = options.decorate;
    }
  }

  return internalGenerate(node, decorate, forceBraces, compact);
}

exports.generate = generate;
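Note (added for context, not part of the commit): a small usage sketch for the generator above, assuming css-tree's public definitionSyntax namespace.

const { definitionSyntax } = require('css-tree');

const ast = definitionSyntax.parse('<length> && <color>?');
console.log(definitionSyntax.generate(ast));                     // "<length> && <color>?"
console.log(definitionSyntax.generate(ast, { compact: true }));  // combinators without surrounding spaces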
node_modules/css-tree/cjs/definition-syntax/index.cjs (generated, vendored): 2 lines changed
@@ -5,8 +5,6 @@ const generate = require('./generate.cjs');

const parse = require('./parse.cjs');
const walk = require('./walk.cjs');

exports.SyntaxError = SyntaxError.SyntaxError;
exports.generate = generate.generate;
exports.parse = parse.parse;
node_modules/css-tree/cjs/definition-syntax/parse.cjs (generated, vendored): 834 lines changed
@@ -7,244 +7,244 @@

const F = 12;
const R = 13;
const SPACE = 32;
const EXCLAMATIONMARK = 33; // !
const NUMBERSIGN = 35; // #
const AMPERSAND = 38; // &
const APOSTROPHE = 39; // '
const LEFTPARENTHESIS = 40; // (
const RIGHTPARENTHESIS = 41; // )
const ASTERISK = 42; // *
const PLUSSIGN = 43; // +
const COMMA = 44; // ,
const HYPERMINUS = 45; // -
const LESSTHANSIGN = 60; // <
const GREATERTHANSIGN = 62; // >
const QUESTIONMARK = 63; // ?
const COMMERCIALAT = 64; // @
const LEFTSQUAREBRACKET = 91; // [
const RIGHTSQUAREBRACKET = 93; // ]
const LEFTCURLYBRACKET = 123; // {
const VERTICALLINE = 124; // |
const RIGHTCURLYBRACKET = 125; // }
const INFINITY = 8734; // ∞
const NAME_CHAR = new Uint8Array(128).map((_, idx) =>
  /[a-zA-Z0-9\-]/.test(String.fromCharCode(idx)) ? 1 : 0
);
const COMBINATOR_PRECEDENCE = {
  ' ': 1,
  '&&': 2,
  '||': 3,
  '|': 4,
};

function scanSpaces(tokenizer) {
  return tokenizer.substringToPos(tokenizer.findWsEnd(tokenizer.pos));
}

function scanWord(tokenizer) {
  let end = tokenizer.pos;

  for (; end < tokenizer.str.length; end++) {
    const code = tokenizer.str.charCodeAt(end);
    if (code >= 128 || NAME_CHAR[code] === 0) {
      break;
    }
  }

  if (tokenizer.pos === end) {
    tokenizer.error('Expect a keyword');
  }

  return tokenizer.substringToPos(end);
}

function scanNumber(tokenizer) {
  let end = tokenizer.pos;

  for (; end < tokenizer.str.length; end++) {
    const code = tokenizer.str.charCodeAt(end);
    if (code < 48 || code > 57) {
      break;
    }
  }

  if (tokenizer.pos === end) {
    tokenizer.error('Expect a number');
  }

  return tokenizer.substringToPos(end);
}

function scanString(tokenizer) {
  const end = tokenizer.str.indexOf("'", tokenizer.pos + 1);

  if (end === -1) {
    tokenizer.pos = tokenizer.str.length;
    tokenizer.error('Expect an apostrophe');
  }

  return tokenizer.substringToPos(end + 1);
}

function readMultiplierRange(tokenizer) {
  let min = null;
  let max = null;

  tokenizer.eat(LEFTCURLYBRACKET);

  min = scanNumber(tokenizer);

  if (tokenizer.charCode() === COMMA) {
    tokenizer.pos++;
    if (tokenizer.charCode() !== RIGHTCURLYBRACKET) {
      max = scanNumber(tokenizer);
    }
  } else {
    max = min;
  }

  tokenizer.eat(RIGHTCURLYBRACKET);

  return {
    min: Number(min),
    max: max ? Number(max) : 0,
  };
}

function readMultiplier(tokenizer) {
  let range = null;
  let comma = false;

  switch (tokenizer.charCode()) {
    case ASTERISK:
      tokenizer.pos++;

      range = {
        min: 0,
        max: 0,
      };

      break;

    case PLUSSIGN:
      tokenizer.pos++;

      range = {
        min: 1,
        max: 0,
      };

      break;

    case QUESTIONMARK:
      tokenizer.pos++;

      range = {
        min: 0,
        max: 1,
      };

      break;

    case NUMBERSIGN:
      tokenizer.pos++;

      comma = true;

      if (tokenizer.charCode() === LEFTCURLYBRACKET) {
        range = readMultiplierRange(tokenizer);
      } else if (tokenizer.charCode() === QUESTIONMARK) {
        // https://www.w3.org/TR/css-values-4/#component-multipliers
        // > the # and ? multipliers may be stacked as #?
        // In this case just treat "#?" as a single multiplier
        // { min: 0, max: 0, comma: true }
        tokenizer.pos++;
        range = {
          min: 0,
          max: 0,
        };
      } else {
        range = {
          min: 1,
          max: 0,
        };
      }

      break;

    case LEFTCURLYBRACKET:
      range = readMultiplierRange(tokenizer);
      break;

    default:
      return null;
  }

  return {
    type: 'Multiplier',
    comma,
    min: range.min,
    max: range.max,
    term: null,
  };
}

function maybeMultiplied(tokenizer, node) {
  const multiplier = readMultiplier(tokenizer);

  if (multiplier !== null) {
    multiplier.term = node;

    // https://www.w3.org/TR/css-values-4/#component-multipliers
    // > The + and # multipliers may be stacked as +#;
    // Represent "+#" as nested multipliers:
    // { ...<multiplier #>,
    //   term: {
    //     ...<multipler +>,
    //     term: node
    //   }
    // }
    if (
      tokenizer.charCode() === NUMBERSIGN &&
      tokenizer.charCodeAt(tokenizer.pos - 1) === PLUSSIGN
    ) {
      return maybeMultiplied(tokenizer, multiplier);
    }

    return multiplier;
  }

  return node;
}

function maybeToken(tokenizer) {
  const ch = tokenizer.peek();

  if (ch === '') {
    return null;
  }

  return {
    type: 'Token',
    value: ch,
  };
}

function readProperty(tokenizer) {
  let name;

  tokenizer.eat(LESSTHANSIGN);
  tokenizer.eat(APOSTROPHE);

  name = scanWord(tokenizer);

  tokenizer.eat(APOSTROPHE);
  tokenizer.eat(GREATERTHANSIGN);

  return maybeMultiplied(tokenizer, {
    type: 'Property',
    name,
  });
}

// https://drafts.csswg.org/css-values-3/#numeric-ranges

@@ -255,334 +255,340 @@ function readProperty(tokenizer) {

// indicating a closed range between (and including) min and max.
// For example, <integer [0, 10]> indicates an integer between 0 and 10, inclusive.
function readTypeRange(tokenizer) {
  // use null for Infinity to make AST format JSON serializable/deserializable
  let min = null; // -Infinity
  let max = null; // Infinity
  let sign = 1;

  tokenizer.eat(LEFTSQUAREBRACKET);

  if (tokenizer.charCode() === HYPERMINUS) {
    tokenizer.peek();
    sign = -1;
  }

  if (sign == -1 && tokenizer.charCode() === INFINITY) {
    tokenizer.peek();
  } else {
    min = sign * Number(scanNumber(tokenizer));

    if (NAME_CHAR[tokenizer.charCode()] !== 0) {
      min += scanWord(tokenizer);
    }
  }

  scanSpaces(tokenizer);
  tokenizer.eat(COMMA);
  scanSpaces(tokenizer);

  if (tokenizer.charCode() === INFINITY) {
    tokenizer.peek();
  } else {
    sign = 1;

    if (tokenizer.charCode() === HYPERMINUS) {
      tokenizer.peek();
      sign = -1;
    }

    max = sign * Number(scanNumber(tokenizer));

    if (NAME_CHAR[tokenizer.charCode()] !== 0) {
      max += scanWord(tokenizer);
    }
  }

  tokenizer.eat(RIGHTSQUAREBRACKET);

  return {
    type: 'Range',
    min,
    max,
  };
}

function readType(tokenizer) {
  let name;
  let opts = null;

  tokenizer.eat(LESSTHANSIGN);
  name = scanWord(tokenizer);

  if (
    tokenizer.charCode() === LEFTPARENTHESIS &&
    tokenizer.nextCharCode() === RIGHTPARENTHESIS
  ) {
    tokenizer.pos += 2;
    name += '()';
  }

  if (
    tokenizer.charCodeAt(tokenizer.findWsEnd(tokenizer.pos)) ===
    LEFTSQUAREBRACKET
  ) {
    scanSpaces(tokenizer);
    opts = readTypeRange(tokenizer);
  }

  tokenizer.eat(GREATERTHANSIGN);

  return maybeMultiplied(tokenizer, {
    type: 'Type',
    name,
    opts,
  });
}

function readKeywordOrFunction(tokenizer) {
  const name = scanWord(tokenizer);

  if (tokenizer.charCode() === LEFTPARENTHESIS) {
    tokenizer.pos++;

    return {
      type: 'Function',
      name,
    };
  }

  return maybeMultiplied(tokenizer, {
    type: 'Keyword',
    name,
  });
}

function regroupTerms(terms, combinators) {
  function createGroup(terms, combinator) {
    return {
      type: 'Group',
      terms,
      combinator,
      disallowEmpty: false,
      explicit: false,
    };
  }

  let combinator;

  combinators = Object.keys(combinators).sort(
    (a, b) => COMBINATOR_PRECEDENCE[a] - COMBINATOR_PRECEDENCE[b]
  );

  while (combinators.length > 0) {
    combinator = combinators.shift();

    let i = 0;
    let subgroupStart = 0;

    for (; i < terms.length; i++) {
      const term = terms[i];

      if (term.type === 'Combinator') {
        if (term.value === combinator) {
          if (subgroupStart === -1) {
            subgroupStart = i - 1;
          }
          terms.splice(i, 1);
          i--;
        } else {
          if (subgroupStart !== -1 && i - subgroupStart > 1) {
            terms.splice(
              subgroupStart,
              i - subgroupStart,
              createGroup(terms.slice(subgroupStart, i), combinator)
            );
            i = subgroupStart + 1;
          }
          subgroupStart = -1;
        }
      }
    }

    if (subgroupStart !== -1 && combinators.length) {
      terms.splice(
        subgroupStart,
        i - subgroupStart,
        createGroup(terms.slice(subgroupStart, i), combinator)
      );
    }
  }

  return combinator;
}

function readImplicitGroup(tokenizer) {
  const terms = [];
  const combinators = {};
  let token;
  let prevToken = null;
  let prevTokenPos = tokenizer.pos;

  while ((token = peek(tokenizer))) {
    if (token.type !== 'Spaces') {
      if (token.type === 'Combinator') {
        // check for combinator in group beginning and double combinator sequence
        if (prevToken === null || prevToken.type === 'Combinator') {
          tokenizer.pos = prevTokenPos;
          tokenizer.error('Unexpected combinator');
        }

        combinators[token.value] = true;
      } else if (prevToken !== null && prevToken.type !== 'Combinator') {
        combinators[' '] = true; // a b
        terms.push({
          type: 'Combinator',
          value: ' ',
        });
      }

      terms.push(token);
      prevToken = token;
      prevTokenPos = tokenizer.pos;
    }
  }

  // check for combinator in group ending
  if (prevToken !== null && prevToken.type === 'Combinator') {
    tokenizer.pos -= prevTokenPos;
    tokenizer.error('Unexpected combinator');
  }

  return {
    type: 'Group',
    terms,
    combinator: regroupTerms(terms, combinators) || ' ',
    disallowEmpty: false,
    explicit: false,
  };
}

function readGroup(tokenizer) {
  let result;

  tokenizer.eat(LEFTSQUAREBRACKET);
  result = readImplicitGroup(tokenizer);
  tokenizer.eat(RIGHTSQUAREBRACKET);

  result.explicit = true;

  if (tokenizer.charCode() === EXCLAMATIONMARK) {
    tokenizer.pos++;
    result.disallowEmpty = true;
  }

  return result;
}

function peek(tokenizer) {
  let code = tokenizer.charCode();

  if (code < 128 && NAME_CHAR[code] === 1) {
    return readKeywordOrFunction(tokenizer);
  }

  switch (code) {
    case RIGHTSQUAREBRACKET:
      // don't eat, stop scan a group
      break;

    case LEFTSQUAREBRACKET:
      return maybeMultiplied(tokenizer, readGroup(tokenizer));

    case LESSTHANSIGN:
      return tokenizer.nextCharCode() === APOSTROPHE ?
          readProperty(tokenizer)
        : readType(tokenizer);

    case VERTICALLINE:
      return {
        type: 'Combinator',
        value: tokenizer.substringToPos(
          tokenizer.pos + (tokenizer.nextCharCode() === VERTICALLINE ? 2 : 1)
        ),
      };

    case AMPERSAND:
      tokenizer.pos++;
      tokenizer.eat(AMPERSAND);

      return {
        type: 'Combinator',
        value: '&&',
      };

    case COMMA:
      tokenizer.pos++;
      return {
        type: 'Comma',
      };

    case APOSTROPHE:
      return maybeMultiplied(tokenizer, {
        type: 'String',
        value: scanString(tokenizer),
      });

    case SPACE:
    case TAB:
    case N:
    case R:
    case F:
      return {
        type: 'Spaces',
        value: scanSpaces(tokenizer),
      };

    case COMMERCIALAT:
      code = tokenizer.nextCharCode();

      if (code < 128 && NAME_CHAR[code] === 1) {
        tokenizer.pos++;
        return {
          type: 'AtKeyword',
          name: scanWord(tokenizer),
        };
      }

      return maybeToken(tokenizer);

    case ASTERISK:
    case PLUSSIGN:
    case QUESTIONMARK:
    case NUMBERSIGN:
    case EXCLAMATIONMARK:
      // prohibited tokens (used as a multiplier start)
      break;

    case LEFTCURLYBRACKET:
      // LEFTCURLYBRACKET is allowed since mdn/data uses it w/o quoting
      // check next char isn't a number, because it's likely a disjoined multiplier
      code = tokenizer.nextCharCode();

      if (code < 48 || code > 57) {
        return maybeToken(tokenizer);
      }

      break;

    default:
      return maybeToken(tokenizer);
  }
}

function parse(source) {
  const tokenizer$1 = new tokenizer.Tokenizer(source);
  const result = readImplicitGroup(tokenizer$1);

  if (tokenizer$1.pos !== source.length) {
    tokenizer$1.error('Unexpected input');
  }

  // reduce redundant groups with single group term
  if (result.terms.length === 1 && result.terms[0].type === 'Group') {
    return result.terms[0];
  }

  return result;
}

exports.parse = parse;
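Note (added for context, not part of the commit): a sketch of the AST shape the parser above produces, assuming css-tree's public definitionSyntax.parse.

const { definitionSyntax } = require('css-tree');

const ast = definitionSyntax.parse('none | <image>#');
console.log(ast.type);           // "Group"
console.log(ast.combinator);     // "|"
console.log(ast.terms[0].type);  // "Keyword" (none)
console.log(ast.terms[1].type);  // "Multiplier" (comma-separated list of <image>)
console.log(ast.terms[1].comma); // true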
node_modules/css-tree/cjs/definition-syntax/tokenizer.cjs (generated, vendored): 82 lines changed
@@ -9,48 +9,54 @@ const R = 13;

const SPACE = 32;

class Tokenizer {
  constructor(str) {
    this.str = str;
    this.pos = 0;
  }
  charCodeAt(pos) {
    return pos < this.str.length ? this.str.charCodeAt(pos) : 0;
  }
  charCode() {
    return this.charCodeAt(this.pos);
  }
  nextCharCode() {
    return this.charCodeAt(this.pos + 1);
  }
  nextNonWsCode(pos) {
    return this.charCodeAt(this.findWsEnd(pos));
  }
  findWsEnd(pos) {
    for (; pos < this.str.length; pos++) {
      const code = this.str.charCodeAt(pos);
      if (
        code !== R &&
        code !== N &&
        code !== F &&
        code !== SPACE &&
        code !== TAB
      ) {
        break;
      }
    }

    return pos;
  }
  substringToPos(end) {
    return this.str.substring(this.pos, (this.pos = end));
  }
  eat(code) {
    if (this.charCode() !== code) {
      this.error('Expect `' + String.fromCharCode(code) + '`');
    }

    this.pos++;
  }
  peek() {
    return this.pos < this.str.length ? this.str.charAt(this.pos++) : '';
  }
  error(message) {
    throw new SyntaxError.SyntaxError(message, this.str, this.pos);
  }
}

exports.Tokenizer = Tokenizer;
node_modules/css-tree/cjs/definition-syntax/walk.cjs (generated, vendored): 76 lines changed
@@ -1,56 +1,58 @@

'use strict';

const noop = function () {};

function ensureFunction(value) {
  return typeof value === 'function' ? value : noop;
}

function walk(node, options, context) {
  function walk(node) {
    enter.call(context, node);

    switch (node.type) {
      case 'Group':
        node.terms.forEach(walk);
        break;

      case 'Multiplier':
        walk(node.term);
        break;

      case 'Type':
      case 'Property':
      case 'Keyword':
      case 'AtKeyword':
      case 'Function':
      case 'String':
      case 'Token':
      case 'Comma':
        break;

      default:
        throw new Error('Unknown type: ' + node.type);
    }

    leave.call(context, node);
  }

  let enter = noop;
  let leave = noop;

  if (typeof options === 'function') {
    enter = options;
  } else if (options) {
    enter = ensureFunction(options.enter);
    leave = ensureFunction(options.leave);
  }

  if (enter === noop && leave === noop) {
    throw new Error(
      "Neither `enter` nor `leave` walker handler is set or both aren't a function"
    );
  }

  walk(node);
}

exports.walk = walk;
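Note (added for context, not part of the commit): a sketch of walking a definition-syntax AST with the walker above, assuming css-tree's public definitionSyntax namespace.

const { definitionSyntax } = require('css-tree');

const ast = definitionSyntax.parse('<length> | <percentage>');
const seen = [];

definitionSyntax.walk(ast, {
  enter(node) {
    seen.push(node.type); // collect node types in enter order
  },
});

console.log(seen); // [ 'Group', 'Type', 'Type' ]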
node_modules/css-tree/cjs/generator/create.cjs (generated, vendored): 148 lines changed
@@ -8,96 +8,96 @@ const types = require('../tokenizer/types.cjs');

const REVERSESOLIDUS = 0x005c; // U+005C REVERSE SOLIDUS (\)

function processChildren(node, delimeter) {
  if (typeof delimeter === 'function') {
    let prev = null;

    node.children.forEach((node) => {
      if (prev !== null) {
        delimeter.call(this, prev);
      }

      this.node(node);
      prev = node;
    });

    return;
  }

  node.children.forEach(this.node, this);
}

function processChunk(chunk) {
  index.tokenize(chunk, (type, start, end) => {
    this.token(type, chunk.slice(start, end));
  });
}

function createGenerator(config) {
  const types$1 = new Map();

  for (let name in config.node) {
    const item = config.node[name];
    const fn = item.generate || item;

    if (typeof fn === 'function') {
      types$1.set(name, item.generate || item);
    }
  }

  return function (node, options) {
    let buffer = '';
    let prevCode = 0;
    let handlers = {
      node(node) {
        if (types$1.has(node.type)) {
          types$1.get(node.type).call(publicApi, node);
        } else {
          throw new Error('Unknown node type: ' + node.type);
        }
      },
      tokenBefore: tokenBefore.safe,
      token(type, value) {
        prevCode = this.tokenBefore(prevCode, type, value);

        this.emit(value, type, false);

        if (type === types.Delim && value.charCodeAt(0) === REVERSESOLIDUS) {
          this.emit('\n', types.WhiteSpace, true);
        }
      },
      emit(value) {
        buffer += value;
      },
      result() {
        return buffer;
      },
    };

    if (options) {
      if (typeof options.decorator === 'function') {
        handlers = options.decorator(handlers);
      }

      if (options.sourceMap) {
        handlers = sourceMap.generateSourceMap(handlers);
      }

      if (options.mode in tokenBefore) {
        handlers.tokenBefore = tokenBefore[options.mode];
      }
    }

    const publicApi = {
      node: (node) => handlers.node(node),
      children: processChildren,
      token: (type, value) => handlers.token(type, value),
      tokenize: processChunk,
    };

    handlers.node(node);

    return handlers.result();
  };
}

exports.createGenerator = createGenerator;
node_modules/css-tree/cjs/generator/sourceMap.cjs (generated, vendored): 156 lines changed
@@ -5,92 +5,94 @@ const sourceMapGenerator_js = require('source-map-js/lib/source-map-generator.js

const trackNodes = new Set(['Atrule', 'Selector', 'Declaration']);

function generateSourceMap(handlers) {
  const map = new sourceMapGenerator_js.SourceMapGenerator();
  const generated = {
    line: 1,
    column: 0,
  };
  const original = {
    line: 0, // should be zero to add first mapping
    column: 0,
  };
  const activatedGenerated = {
    line: 1,
    column: 0,
  };
  const activatedMapping = {
    generated: activatedGenerated,
  };
  let line = 1;
  let column = 0;
  let sourceMappingActive = false;

  const origHandlersNode = handlers.node;
  handlers.node = function (node) {
    if (node.loc && node.loc.start && trackNodes.has(node.type)) {
      const nodeLine = node.loc.start.line;
      const nodeColumn = node.loc.start.column - 1;

      if (original.line !== nodeLine || original.column !== nodeColumn) {
        original.line = nodeLine;
        original.column = nodeColumn;

        generated.line = line;
        generated.column = column;

        if (sourceMappingActive) {
          sourceMappingActive = false;
          if (
            generated.line !== activatedGenerated.line ||
            generated.column !== activatedGenerated.column
          ) {
            map.addMapping(activatedMapping);
          }
        }

        sourceMappingActive = true;
        map.addMapping({
          source: node.loc.source,
          original,
          generated,
        });
      }
    }

    origHandlersNode.call(this, node);

    if (sourceMappingActive && trackNodes.has(node.type)) {
      activatedGenerated.line = line;
      activatedGenerated.column = column;
    }
  };

  const origHandlersEmit = handlers.emit;
  handlers.emit = function (value, type, auto) {
    for (let i = 0; i < value.length; i++) {
      if (value.charCodeAt(i) === 10) {
        // \n
        line++;
        column = 0;
      } else {
        column++;
      }
    }

    origHandlersEmit(value, type, auto);
  };

  const origHandlersResult = handlers.result;
  handlers.result = function () {
    if (sourceMappingActive) {
      map.addMapping(activatedMapping);
    }

    return {
      css: origHandlersResult(),
      map,
    };
  };

  return handlers;
}

exports.generateSourceMap = generateSourceMap;
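Note (added for context, not part of the commit): a usage sketch for the source map decorator above, assuming css-tree's documented parse/generate API with the sourceMap option.

const csstree = require('css-tree');

const ast = csstree.parse('.a { color: red }', { positions: true });
const { css, map } = csstree.generate(ast, { sourceMap: true });

console.log(css);            // ".a{color:red}"
console.log(map.toString()); // JSON source map built by SourceMapGenerator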
node_modules/css-tree/cjs/generator/token-before.cjs (generated, vendored): 246 lines changed
@@ -2,20 +2,20 @@

const types = require('../tokenizer/types.cjs');

const PLUSSIGN = 0x002b; // U+002B PLUS SIGN (+)
const HYPHENMINUS = 0x002d; // U+002D HYPHEN-MINUS (-)

const code = (type, value) => {
  if (type === types.Delim) {
    type = value;
  }

  if (typeof type === 'string') {
    const charCode = type.charCodeAt(0);
    return charCode > 0x7f ? 0x8000 : charCode << 8;
  }

  return type;
};

// https://www.w3.org/TR/css-syntax-3/#serialization

@@ -25,142 +25,144 @@ const code = (type, value) => {

// which may be collapsed into a single token.

const specPairs = [
  [types.Ident, types.Ident],
  [types.Ident, types.Function],
  [types.Ident, types.Url],
  [types.Ident, types.BadUrl],
  [types.Ident, '-'],
  [types.Ident, types.Number],
  [types.Ident, types.Percentage],
  [types.Ident, types.Dimension],
  [types.Ident, types.CDC],
  [types.Ident, types.LeftParenthesis],

  [types.AtKeyword, types.Ident],
  [types.AtKeyword, types.Function],
  [types.AtKeyword, types.Url],
  [types.AtKeyword, types.BadUrl],
  [types.AtKeyword, '-'],
  [types.AtKeyword, types.Number],
  [types.AtKeyword, types.Percentage],
  [types.AtKeyword, types.Dimension],
  [types.AtKeyword, types.CDC],

  [types.Hash, types.Ident],
  [types.Hash, types.Function],
  [types.Hash, types.Url],
  [types.Hash, types.BadUrl],
  [types.Hash, '-'],
  [types.Hash, types.Number],
  [types.Hash, types.Percentage],
  [types.Hash, types.Dimension],
  [types.Hash, types.CDC],

  [types.Dimension, types.Ident],
  [types.Dimension, types.Function],
  [types.Dimension, types.Url],
  [types.Dimension, types.BadUrl],
  [types.Dimension, '-'],
  [types.Dimension, types.Number],
  [types.Dimension, types.Percentage],
  [types.Dimension, types.Dimension],
  [types.Dimension, types.CDC],

  ['#', types.Ident],
  ['#', types.Function],
  ['#', types.Url],
  ['#', types.BadUrl],
  ['#', '-'],
  ['#', types.Number],
|
||||
['#', types.Percentage],
|
||||
['#', types.Dimension],
|
||||
['#', types.CDC], // https://github.com/w3c/csswg-drafts/pull/6874
|
||||
|
||||
['-', types.Ident],
|
||||
['-', types.Function],
|
||||
['-', types.Url],
|
||||
['-', types.BadUrl],
|
||||
['-', '-'],
|
||||
['-', types.Number],
|
||||
['-', types.Percentage],
|
||||
['-', types.Dimension],
|
||||
['-', types.CDC], // https://github.com/w3c/csswg-drafts/pull/6874
|
||||
['-', types.Ident],
|
||||
['-', types.Function],
|
||||
['-', types.Url],
|
||||
['-', types.BadUrl],
|
||||
['-', '-'],
|
||||
['-', types.Number],
|
||||
['-', types.Percentage],
|
||||
['-', types.Dimension],
|
||||
['-', types.CDC], // https://github.com/w3c/csswg-drafts/pull/6874
|
||||
|
||||
[types.Number, types.Ident],
|
||||
[types.Number, types.Function],
|
||||
[types.Number, types.Url],
|
||||
[types.Number, types.BadUrl],
|
||||
[types.Number, types.Number],
|
||||
[types.Number, types.Percentage],
|
||||
[types.Number, types.Dimension],
|
||||
[types.Number, '%'],
|
||||
[types.Number, types.CDC], // https://github.com/w3c/csswg-drafts/pull/6874
|
||||
[types.Number, types.Ident],
|
||||
[types.Number, types.Function],
|
||||
[types.Number, types.Url],
|
||||
[types.Number, types.BadUrl],
|
||||
[types.Number, types.Number],
|
||||
[types.Number, types.Percentage],
|
||||
[types.Number, types.Dimension],
|
||||
[types.Number, '%'],
|
||||
[types.Number, types.CDC], // https://github.com/w3c/csswg-drafts/pull/6874
|
||||
|
||||
['@', types.Ident],
|
||||
['@', types.Function],
|
||||
['@', types.Url],
|
||||
['@', types.BadUrl],
|
||||
['@', '-'],
|
||||
['@', types.CDC], // https://github.com/w3c/csswg-drafts/pull/6874
|
||||
['@', types.Ident],
|
||||
['@', types.Function],
|
||||
['@', types.Url],
|
||||
['@', types.BadUrl],
|
||||
['@', '-'],
|
||||
['@', types.CDC], // https://github.com/w3c/csswg-drafts/pull/6874
|
||||
|
||||
['.', types.Number],
|
||||
['.', types.Percentage],
|
||||
['.', types.Dimension],
|
||||
['.', types.Number],
|
||||
['.', types.Percentage],
|
||||
['.', types.Dimension],
|
||||
|
||||
['+', types.Number],
|
||||
['+', types.Percentage],
|
||||
['+', types.Dimension],
|
||||
['+', types.Number],
|
||||
['+', types.Percentage],
|
||||
['+', types.Dimension],
|
||||
|
||||
['/', '*']
|
||||
['/', '*'],
|
||||
];
|
||||
// validate with scripts/generate-safe
|
||||
const safePairs = specPairs.concat([
|
||||
[types.Ident, types.Hash],
|
||||
[types.Ident, types.Hash],
|
||||
|
||||
[types.Dimension, types.Hash],
|
||||
[types.Dimension, types.Hash],
|
||||
|
||||
[types.Hash, types.Hash],
|
||||
[types.Hash, types.Hash],
|
||||
|
||||
[types.AtKeyword, types.LeftParenthesis],
|
||||
[types.AtKeyword, types.String],
|
||||
[types.AtKeyword, types.Colon],
|
||||
[types.AtKeyword, types.LeftParenthesis],
|
||||
[types.AtKeyword, types.String],
|
||||
[types.AtKeyword, types.Colon],
|
||||
|
||||
[types.Percentage, types.Percentage],
|
||||
[types.Percentage, types.Dimension],
|
||||
[types.Percentage, types.Function],
|
||||
[types.Percentage, '-'],
|
||||
[types.Percentage, types.Percentage],
|
||||
[types.Percentage, types.Dimension],
|
||||
[types.Percentage, types.Function],
|
||||
[types.Percentage, '-'],
|
||||
|
||||
[types.RightParenthesis, types.Ident],
|
||||
[types.RightParenthesis, types.Function],
|
||||
[types.RightParenthesis, types.Percentage],
|
||||
[types.RightParenthesis, types.Dimension],
|
||||
[types.RightParenthesis, types.Hash],
|
||||
[types.RightParenthesis, '-']
|
||||
[types.RightParenthesis, types.Ident],
|
||||
[types.RightParenthesis, types.Function],
|
||||
[types.RightParenthesis, types.Percentage],
|
||||
[types.RightParenthesis, types.Dimension],
|
||||
[types.RightParenthesis, types.Hash],
|
||||
[types.RightParenthesis, '-'],
|
||||
]);
|
||||
|
||||
function createMap(pairs) {
|
||||
const isWhiteSpaceRequired = new Set(
|
||||
pairs.map(([prev, next]) => (code(prev) << 16 | code(next)))
|
||||
);
|
||||
const isWhiteSpaceRequired = new Set(
|
||||
pairs.map(([prev, next]) => (code(prev) << 16) | code(next))
|
||||
);
|
||||
|
||||
return function(prevCode, type, value) {
|
||||
const nextCode = code(type, value);
|
||||
const nextCharCode = value.charCodeAt(0);
|
||||
const emitWs =
|
||||
(nextCharCode === HYPHENMINUS &&
|
||||
type !== types.Ident &&
|
||||
type !== types.Function &&
|
||||
type !== types.CDC) ||
|
||||
(nextCharCode === PLUSSIGN)
|
||||
? isWhiteSpaceRequired.has(prevCode << 16 | nextCharCode << 8)
|
||||
: isWhiteSpaceRequired.has(prevCode << 16 | nextCode);
|
||||
return function (prevCode, type, value) {
|
||||
const nextCode = code(type, value);
|
||||
const nextCharCode = value.charCodeAt(0);
|
||||
const emitWs =
|
||||
(
|
||||
(nextCharCode === HYPHENMINUS &&
|
||||
type !== types.Ident &&
|
||||
type !== types.Function &&
|
||||
type !== types.CDC) ||
|
||||
nextCharCode === PLUSSIGN
|
||||
) ?
|
||||
isWhiteSpaceRequired.has((prevCode << 16) | (nextCharCode << 8))
|
||||
: isWhiteSpaceRequired.has((prevCode << 16) | nextCode);
|
||||
|
||||
if (emitWs) {
|
||||
this.emit(' ', types.WhiteSpace, true);
|
||||
}
|
||||
if (emitWs) {
|
||||
this.emit(' ', types.WhiteSpace, true);
|
||||
}
|
||||
|
||||
return nextCode;
|
||||
};
|
||||
return nextCode;
|
||||
};
|
||||
}
|
||||
|
||||
const spec = createMap(specPairs);
|
||||
|
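A minimal sketch, with illustrative token codes rather than css-tree's real type constants, of the pair-encoding trick used by createMap above: each (previous token, next token) pair is packed into one integer so that "does serialization need a space between these two tokens?" becomes a single Set lookup.

const Ident = 1;
const NumberToken = 2;

function pairKey(prevCode, nextCode) {
  // pack both 16-bit codes into one integer key
  return (prevCode << 16) | nextCode;
}

// pairs that would merge (or change meaning) if emitted back to back
const wsRequired = new Set([
  pairKey(Ident, Ident),
  pairKey(NumberToken, Ident)
]);

function needsWhitespace(prevCode, nextCode) {
  return wsRequired.has(pairKey(prevCode, nextCode));
}

// needsWhitespace(Ident, Ident)             -> true
// needsWhitespace(Ident, NumberToken)       -> false here (only two pairs registered in this sketch)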
24
node_modules/css-tree/cjs/index.cjs
generated
vendored
@ -16,21 +16,21 @@ const names = require('./tokenizer/names.cjs');
const TokenStream = require('./tokenizer/TokenStream.cjs');

const {
tokenize,
parse,
generate,
lexer,
createLexer,
tokenize,
parse,
generate,
lexer,
createLexer,

walk,
find,
findLast,
findAll,
walk,
find,
findLast,
findAll,

toPlainObject,
fromPlainObject,
toPlainObject,
fromPlainObject,

fork
fork,
} = index$1;

exports.version = version.version;
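A hedged usage sketch of the re-exported API shown above (parse, walk, generate and friends), assuming the css-tree package from this vendored tree:

const { parse, walk, generate } = require('css-tree');

const ast = parse('.a { color: red; }');

walk(ast, (node) => {
  if (node.type === 'Declaration') {
    console.log(node.property); // "color"
  }
});

console.log(generate(ast)); // ".a{color:red}"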
854
node_modules/css-tree/cjs/lexer/Lexer.cjs
generated
vendored
@ -14,446 +14,520 @@ const parse = require('../definition-syntax/parse.cjs');
|
||||
const generate = require('../definition-syntax/generate.cjs');
|
||||
const walk = require('../definition-syntax/walk.cjs');
|
||||
|
||||
const cssWideKeywordsSyntax = matchGraph.buildMatchGraph(genericConst.cssWideKeywords.join(' | '));
|
||||
const cssWideKeywordsSyntax = matchGraph.buildMatchGraph(
|
||||
genericConst.cssWideKeywords.join(' | ')
|
||||
);
|
||||
|
||||
function dumpMapSyntax(map, compact, syntaxAsAst) {
|
||||
const result = {};
|
||||
const result = {};
|
||||
|
||||
for (const name in map) {
|
||||
if (map[name].syntax) {
|
||||
result[name] = syntaxAsAst
|
||||
? map[name].syntax
|
||||
: generate.generate(map[name].syntax, { compact });
|
||||
}
|
||||
for (const name in map) {
|
||||
if (map[name].syntax) {
|
||||
result[name] =
|
||||
syntaxAsAst ?
|
||||
map[name].syntax
|
||||
: generate.generate(map[name].syntax, { compact });
|
||||
}
|
||||
}
|
||||
|
||||
return result;
|
||||
return result;
|
||||
}
|
||||
|
||||
function dumpAtruleMapSyntax(map, compact, syntaxAsAst) {
|
||||
const result = {};
|
||||
const result = {};
|
||||
|
||||
for (const [name, atrule] of Object.entries(map)) {
|
||||
result[name] = {
|
||||
prelude: atrule.prelude && (
|
||||
syntaxAsAst
|
||||
? atrule.prelude.syntax
|
||||
: generate.generate(atrule.prelude.syntax, { compact })
|
||||
),
|
||||
descriptors: atrule.descriptors && dumpMapSyntax(atrule.descriptors, compact, syntaxAsAst)
|
||||
};
|
||||
}
|
||||
for (const [name, atrule] of Object.entries(map)) {
|
||||
result[name] = {
|
||||
prelude:
|
||||
atrule.prelude &&
|
||||
(syntaxAsAst ?
|
||||
atrule.prelude.syntax
|
||||
: generate.generate(atrule.prelude.syntax, { compact })),
|
||||
descriptors:
|
||||
atrule.descriptors &&
|
||||
dumpMapSyntax(atrule.descriptors, compact, syntaxAsAst),
|
||||
};
|
||||
}
|
||||
|
||||
return result;
|
||||
return result;
|
||||
}
|
||||
|
||||
function valueHasVar(tokens) {
|
||||
for (let i = 0; i < tokens.length; i++) {
|
||||
if (tokens[i].value.toLowerCase() === 'var(') {
|
||||
return true;
|
||||
}
|
||||
for (let i = 0; i < tokens.length; i++) {
|
||||
if (tokens[i].value.toLowerCase() === 'var(') {
|
||||
return true;
|
||||
}
|
||||
}
|
||||
|
||||
return false;
|
||||
return false;
|
||||
}
|
||||
|
||||
function buildMatchResult(matched, error, iterations) {
|
||||
return {
|
||||
matched,
|
||||
iterations,
|
||||
error,
|
||||
...trace
|
||||
};
|
||||
return {
|
||||
matched,
|
||||
iterations,
|
||||
error,
|
||||
...trace,
|
||||
};
|
||||
}
|
||||
|
||||
function matchSyntax(lexer, syntax, value, useCssWideKeywords) {
|
||||
const tokens = prepareTokens(value, lexer.syntax);
|
||||
let result;
|
||||
const tokens = prepareTokens(value, lexer.syntax);
|
||||
let result;
|
||||
|
||||
if (valueHasVar(tokens)) {
|
||||
return buildMatchResult(null, new Error('Matching for a tree with var() is not supported'));
|
||||
if (valueHasVar(tokens)) {
|
||||
return buildMatchResult(
|
||||
null,
|
||||
new Error('Matching for a tree with var() is not supported')
|
||||
);
|
||||
}
|
||||
|
||||
if (useCssWideKeywords) {
|
||||
result = match.matchAsTree(tokens, lexer.cssWideKeywordsSyntax, lexer);
|
||||
}
|
||||
|
||||
if (!useCssWideKeywords || !result.match) {
|
||||
result = match.matchAsTree(tokens, syntax.match, lexer);
|
||||
if (!result.match) {
|
||||
return buildMatchResult(
|
||||
null,
|
||||
new error.SyntaxMatchError(result.reason, syntax.syntax, value, result),
|
||||
result.iterations
|
||||
);
|
||||
}
|
||||
}
|
||||
|
||||
if (useCssWideKeywords) {
|
||||
result = match.matchAsTree(tokens, lexer.cssWideKeywordsSyntax, lexer);
|
||||
}
|
||||
|
||||
if (!useCssWideKeywords || !result.match) {
|
||||
result = match.matchAsTree(tokens, syntax.match, lexer);
|
||||
if (!result.match) {
|
||||
return buildMatchResult(
|
||||
null,
|
||||
new error.SyntaxMatchError(result.reason, syntax.syntax, value, result),
|
||||
result.iterations
|
||||
);
|
||||
}
|
||||
}
|
||||
|
||||
return buildMatchResult(result.match, null, result.iterations);
|
||||
return buildMatchResult(result.match, null, result.iterations);
|
||||
}
|
||||
|
||||
class Lexer {
|
||||
constructor(config, syntax, structure$1) {
|
||||
this.cssWideKeywordsSyntax = cssWideKeywordsSyntax;
|
||||
this.syntax = syntax;
|
||||
this.generic = false;
|
||||
this.atrules = Object.create(null);
|
||||
this.properties = Object.create(null);
|
||||
this.types = Object.create(null);
|
||||
this.structure = structure$1 || structure.getStructureFromConfig(config);
|
||||
constructor(config, syntax, structure$1) {
|
||||
this.cssWideKeywordsSyntax = cssWideKeywordsSyntax;
|
||||
this.syntax = syntax;
|
||||
this.generic = false;
|
||||
this.atrules = Object.create(null);
|
||||
this.properties = Object.create(null);
|
||||
this.types = Object.create(null);
|
||||
this.structure = structure$1 || structure.getStructureFromConfig(config);
|
||||
|
||||
if (config) {
|
||||
if (config.types) {
|
||||
for (const name in config.types) {
|
||||
this.addType_(name, config.types[name]);
|
||||
}
|
||||
}
|
||||
|
||||
if (config.generic) {
|
||||
this.generic = true;
|
||||
for (const name in generic) {
|
||||
this.addType_(name, generic[name]);
|
||||
}
|
||||
}
|
||||
|
||||
if (config.atrules) {
|
||||
for (const name in config.atrules) {
|
||||
this.addAtrule_(name, config.atrules[name]);
|
||||
}
|
||||
}
|
||||
|
||||
if (config.properties) {
|
||||
for (const name in config.properties) {
|
||||
this.addProperty_(name, config.properties[name]);
|
||||
}
|
||||
}
|
||||
if (config) {
|
||||
if (config.types) {
|
||||
for (const name in config.types) {
|
||||
this.addType_(name, config.types[name]);
|
||||
}
|
||||
}
|
||||
|
||||
if (config.generic) {
|
||||
this.generic = true;
|
||||
for (const name in generic) {
|
||||
this.addType_(name, generic[name]);
|
||||
}
|
||||
}
|
||||
|
||||
if (config.atrules) {
|
||||
for (const name in config.atrules) {
|
||||
this.addAtrule_(name, config.atrules[name]);
|
||||
}
|
||||
}
|
||||
|
||||
if (config.properties) {
|
||||
for (const name in config.properties) {
|
||||
this.addProperty_(name, config.properties[name]);
|
||||
}
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
checkStructure(ast) {
|
||||
function collectWarning(node, message) {
|
||||
warns.push({ node, message });
|
||||
}
|
||||
|
||||
checkStructure(ast) {
|
||||
function collectWarning(node, message) {
|
||||
warns.push({ node, message });
|
||||
}
|
||||
const structure = this.structure;
|
||||
const warns = [];
|
||||
|
||||
const structure = this.structure;
|
||||
const warns = [];
|
||||
this.syntax.walk(ast, function (node) {
|
||||
if (structure.hasOwnProperty(node.type)) {
|
||||
structure[node.type].check(node, collectWarning);
|
||||
} else {
|
||||
collectWarning(node, 'Unknown node type `' + node.type + '`');
|
||||
}
|
||||
});
|
||||
|
||||
this.syntax.walk(ast, function(node) {
|
||||
if (structure.hasOwnProperty(node.type)) {
|
||||
structure[node.type].check(node, collectWarning);
|
||||
} else {
|
||||
collectWarning(node, 'Unknown node type `' + node.type + '`');
|
||||
}
|
||||
});
|
||||
return warns.length ? warns : false;
|
||||
}
|
||||
|
||||
return warns.length ? warns : false;
|
||||
}
|
||||
createDescriptor(syntax, type, name, parent = null) {
|
||||
const ref = {
|
||||
type,
|
||||
name,
|
||||
};
|
||||
const descriptor = {
|
||||
type,
|
||||
name,
|
||||
parent,
|
||||
serializable:
|
||||
typeof syntax === 'string' ||
|
||||
(syntax && typeof syntax.type === 'string'),
|
||||
syntax: null,
|
||||
match: null,
|
||||
};
|
||||
|
||||
createDescriptor(syntax, type, name, parent = null) {
|
||||
const ref = {
|
||||
type,
|
||||
name
|
||||
};
|
||||
const descriptor = {
|
||||
type,
|
||||
name,
|
||||
parent,
|
||||
serializable: typeof syntax === 'string' || (syntax && typeof syntax.type === 'string'),
|
||||
syntax: null,
|
||||
match: null
|
||||
};
|
||||
|
||||
if (typeof syntax === 'function') {
|
||||
descriptor.match = matchGraph.buildMatchGraph(syntax, ref);
|
||||
} else {
|
||||
if (typeof syntax === 'string') {
|
||||
// lazy parsing on first access
|
||||
Object.defineProperty(descriptor, 'syntax', {
|
||||
get() {
|
||||
Object.defineProperty(descriptor, 'syntax', {
|
||||
value: parse.parse(syntax)
|
||||
});
|
||||
|
||||
return descriptor.syntax;
|
||||
}
|
||||
});
|
||||
} else {
|
||||
descriptor.syntax = syntax;
|
||||
}
|
||||
|
||||
// lazy graph build on first access
|
||||
Object.defineProperty(descriptor, 'match', {
|
||||
get() {
|
||||
Object.defineProperty(descriptor, 'match', {
|
||||
value: matchGraph.buildMatchGraph(descriptor.syntax, ref)
|
||||
});
|
||||
|
||||
return descriptor.match;
|
||||
}
|
||||
if (typeof syntax === 'function') {
|
||||
descriptor.match = matchGraph.buildMatchGraph(syntax, ref);
|
||||
} else {
|
||||
if (typeof syntax === 'string') {
|
||||
// lazy parsing on first access
|
||||
Object.defineProperty(descriptor, 'syntax', {
|
||||
get() {
|
||||
Object.defineProperty(descriptor, 'syntax', {
|
||||
value: parse.parse(syntax),
|
||||
});
|
||||
}
|
||||
|
||||
return descriptor;
|
||||
}
|
||||
addAtrule_(name, syntax) {
|
||||
if (!syntax) {
|
||||
return;
|
||||
}
|
||||
|
||||
this.atrules[name] = {
|
||||
type: 'Atrule',
|
||||
name: name,
|
||||
prelude: syntax.prelude ? this.createDescriptor(syntax.prelude, 'AtrulePrelude', name) : null,
|
||||
descriptors: syntax.descriptors
|
||||
? Object.keys(syntax.descriptors).reduce(
|
||||
(map, descName) => {
|
||||
map[descName] = this.createDescriptor(syntax.descriptors[descName], 'AtruleDescriptor', descName, name);
|
||||
return map;
|
||||
},
|
||||
Object.create(null)
|
||||
)
|
||||
: null
|
||||
};
|
||||
}
|
||||
addProperty_(name, syntax) {
|
||||
if (!syntax) {
|
||||
return;
|
||||
}
|
||||
|
||||
this.properties[name] = this.createDescriptor(syntax, 'Property', name);
|
||||
}
|
||||
addType_(name, syntax) {
|
||||
if (!syntax) {
|
||||
return;
|
||||
}
|
||||
|
||||
this.types[name] = this.createDescriptor(syntax, 'Type', name);
|
||||
}
|
||||
|
||||
checkAtruleName(atruleName) {
|
||||
if (!this.getAtrule(atruleName)) {
|
||||
return new error.SyntaxReferenceError('Unknown at-rule', '@' + atruleName);
|
||||
}
|
||||
}
|
||||
checkAtrulePrelude(atruleName, prelude) {
|
||||
const error = this.checkAtruleName(atruleName);
|
||||
|
||||
if (error) {
|
||||
return error;
|
||||
}
|
||||
|
||||
const atrule = this.getAtrule(atruleName);
|
||||
|
||||
if (!atrule.prelude && prelude) {
|
||||
return new SyntaxError('At-rule `@' + atruleName + '` should not contain a prelude');
|
||||
}
|
||||
|
||||
if (atrule.prelude && !prelude) {
|
||||
if (!matchSyntax(this, atrule.prelude, '', false).matched) {
|
||||
return new SyntaxError('At-rule `@' + atruleName + '` should contain a prelude');
|
||||
}
|
||||
}
|
||||
}
|
||||
checkAtruleDescriptorName(atruleName, descriptorName) {
|
||||
const error$1 = this.checkAtruleName(atruleName);
|
||||
|
||||
if (error$1) {
|
||||
return error$1;
|
||||
}
|
||||
|
||||
const atrule = this.getAtrule(atruleName);
|
||||
const descriptor = names.keyword(descriptorName);
|
||||
|
||||
if (!atrule.descriptors) {
|
||||
return new SyntaxError('At-rule `@' + atruleName + '` has no known descriptors');
|
||||
}
|
||||
|
||||
if (!atrule.descriptors[descriptor.name] &&
|
||||
!atrule.descriptors[descriptor.basename]) {
|
||||
return new error.SyntaxReferenceError('Unknown at-rule descriptor', descriptorName);
|
||||
}
|
||||
}
|
||||
checkPropertyName(propertyName) {
|
||||
if (!this.getProperty(propertyName)) {
|
||||
return new error.SyntaxReferenceError('Unknown property', propertyName);
|
||||
}
|
||||
}
|
||||
|
||||
matchAtrulePrelude(atruleName, prelude) {
|
||||
const error = this.checkAtrulePrelude(atruleName, prelude);
|
||||
|
||||
if (error) {
|
||||
return buildMatchResult(null, error);
|
||||
}
|
||||
|
||||
const atrule = this.getAtrule(atruleName);
|
||||
|
||||
if (!atrule.prelude) {
|
||||
return buildMatchResult(null, null);
|
||||
}
|
||||
|
||||
return matchSyntax(this, atrule.prelude, prelude || '', false);
|
||||
}
|
||||
matchAtruleDescriptor(atruleName, descriptorName, value) {
|
||||
const error = this.checkAtruleDescriptorName(atruleName, descriptorName);
|
||||
|
||||
if (error) {
|
||||
return buildMatchResult(null, error);
|
||||
}
|
||||
|
||||
const atrule = this.getAtrule(atruleName);
|
||||
const descriptor = names.keyword(descriptorName);
|
||||
|
||||
return matchSyntax(this, atrule.descriptors[descriptor.name] || atrule.descriptors[descriptor.basename], value, false);
|
||||
}
|
||||
matchDeclaration(node) {
|
||||
if (node.type !== 'Declaration') {
|
||||
return buildMatchResult(null, new Error('Not a Declaration node'));
|
||||
}
|
||||
|
||||
return this.matchProperty(node.property, node.value);
|
||||
}
|
||||
matchProperty(propertyName, value) {
|
||||
// don't match syntax for a custom property at the moment
|
||||
if (names.property(propertyName).custom) {
|
||||
return buildMatchResult(null, new Error('Lexer matching doesn\'t applicable for custom properties'));
|
||||
}
|
||||
|
||||
const error = this.checkPropertyName(propertyName);
|
||||
|
||||
if (error) {
|
||||
return buildMatchResult(null, error);
|
||||
}
|
||||
|
||||
return matchSyntax(this, this.getProperty(propertyName), value, true);
|
||||
}
|
||||
matchType(typeName, value) {
|
||||
const typeSyntax = this.getType(typeName);
|
||||
|
||||
if (!typeSyntax) {
|
||||
return buildMatchResult(null, new error.SyntaxReferenceError('Unknown type', typeName));
|
||||
}
|
||||
|
||||
return matchSyntax(this, typeSyntax, value, false);
|
||||
}
|
||||
match(syntax, value) {
|
||||
if (typeof syntax !== 'string' && (!syntax || !syntax.type)) {
|
||||
return buildMatchResult(null, new error.SyntaxReferenceError('Bad syntax'));
|
||||
}
|
||||
|
||||
if (typeof syntax === 'string' || !syntax.match) {
|
||||
syntax = this.createDescriptor(syntax, 'Type', 'anonymous');
|
||||
}
|
||||
|
||||
return matchSyntax(this, syntax, value, false);
|
||||
}
|
||||
|
||||
findValueFragments(propertyName, value, type, name) {
|
||||
return search.matchFragments(this, value, this.matchProperty(propertyName, value), type, name);
|
||||
}
|
||||
findDeclarationValueFragments(declaration, type, name) {
|
||||
return search.matchFragments(this, declaration.value, this.matchDeclaration(declaration), type, name);
|
||||
}
|
||||
findAllFragments(ast, type, name) {
|
||||
const result = [];
|
||||
|
||||
this.syntax.walk(ast, {
|
||||
visit: 'Declaration',
|
||||
enter: (declaration) => {
|
||||
result.push.apply(result, this.findDeclarationValueFragments(declaration, type, name));
|
||||
}
|
||||
return descriptor.syntax;
|
||||
},
|
||||
});
|
||||
} else {
|
||||
descriptor.syntax = syntax;
|
||||
}
|
||||
|
||||
return result;
|
||||
// lazy graph build on first access
|
||||
Object.defineProperty(descriptor, 'match', {
|
||||
get() {
|
||||
Object.defineProperty(descriptor, 'match', {
|
||||
value: matchGraph.buildMatchGraph(descriptor.syntax, ref),
|
||||
});
|
||||
|
||||
return descriptor.match;
|
||||
},
|
||||
});
|
||||
}
|
||||
|
||||
getAtrule(atruleName, fallbackBasename = true) {
|
||||
const atrule = names.keyword(atruleName);
|
||||
const atruleEntry = atrule.vendor && fallbackBasename
|
||||
? this.atrules[atrule.name] || this.atrules[atrule.basename]
|
||||
: this.atrules[atrule.name];
|
||||
|
||||
return atruleEntry || null;
|
||||
}
|
||||
getAtrulePrelude(atruleName, fallbackBasename = true) {
|
||||
const atrule = this.getAtrule(atruleName, fallbackBasename);
|
||||
|
||||
return atrule && atrule.prelude || null;
|
||||
}
|
||||
getAtruleDescriptor(atruleName, name) {
|
||||
return this.atrules.hasOwnProperty(atruleName) && this.atrules.declarators
|
||||
? this.atrules[atruleName].declarators[name] || null
|
||||
: null;
|
||||
}
|
||||
getProperty(propertyName, fallbackBasename = true) {
|
||||
const property = names.property(propertyName);
|
||||
const propertyEntry = property.vendor && fallbackBasename
|
||||
? this.properties[property.name] || this.properties[property.basename]
|
||||
: this.properties[property.name];
|
||||
|
||||
return propertyEntry || null;
|
||||
}
|
||||
getType(name) {
|
||||
return hasOwnProperty.call(this.types, name) ? this.types[name] : null;
|
||||
return descriptor;
|
||||
}
|
||||
addAtrule_(name, syntax) {
|
||||
if (!syntax) {
|
||||
return;
|
||||
}
|
||||
|
||||
validate() {
|
||||
function validate(syntax, name, broken, descriptor) {
|
||||
if (broken.has(name)) {
|
||||
return broken.get(name);
|
||||
this.atrules[name] = {
|
||||
type: 'Atrule',
|
||||
name: name,
|
||||
prelude:
|
||||
syntax.prelude ?
|
||||
this.createDescriptor(syntax.prelude, 'AtrulePrelude', name)
|
||||
: null,
|
||||
descriptors:
|
||||
syntax.descriptors ?
|
||||
Object.keys(syntax.descriptors).reduce((map, descName) => {
|
||||
map[descName] = this.createDescriptor(
|
||||
syntax.descriptors[descName],
|
||||
'AtruleDescriptor',
|
||||
descName,
|
||||
name
|
||||
);
|
||||
return map;
|
||||
}, Object.create(null))
|
||||
: null,
|
||||
};
|
||||
}
|
||||
addProperty_(name, syntax) {
|
||||
if (!syntax) {
|
||||
return;
|
||||
}
|
||||
|
||||
this.properties[name] = this.createDescriptor(syntax, 'Property', name);
|
||||
}
|
||||
addType_(name, syntax) {
|
||||
if (!syntax) {
|
||||
return;
|
||||
}
|
||||
|
||||
this.types[name] = this.createDescriptor(syntax, 'Type', name);
|
||||
}
|
||||
|
||||
checkAtruleName(atruleName) {
|
||||
if (!this.getAtrule(atruleName)) {
|
||||
return new error.SyntaxReferenceError(
|
||||
'Unknown at-rule',
|
||||
'@' + atruleName
|
||||
);
|
||||
}
|
||||
}
|
||||
checkAtrulePrelude(atruleName, prelude) {
|
||||
const error = this.checkAtruleName(atruleName);
|
||||
|
||||
if (error) {
|
||||
return error;
|
||||
}
|
||||
|
||||
const atrule = this.getAtrule(atruleName);
|
||||
|
||||
if (!atrule.prelude && prelude) {
|
||||
return new SyntaxError(
|
||||
'At-rule `@' + atruleName + '` should not contain a prelude'
|
||||
);
|
||||
}
|
||||
|
||||
if (atrule.prelude && !prelude) {
|
||||
if (!matchSyntax(this, atrule.prelude, '', false).matched) {
|
||||
return new SyntaxError(
|
||||
'At-rule `@' + atruleName + '` should contain a prelude'
|
||||
);
|
||||
}
|
||||
}
|
||||
}
|
||||
checkAtruleDescriptorName(atruleName, descriptorName) {
|
||||
const error$1 = this.checkAtruleName(atruleName);
|
||||
|
||||
if (error$1) {
|
||||
return error$1;
|
||||
}
|
||||
|
||||
const atrule = this.getAtrule(atruleName);
|
||||
const descriptor = names.keyword(descriptorName);
|
||||
|
||||
if (!atrule.descriptors) {
|
||||
return new SyntaxError(
|
||||
'At-rule `@' + atruleName + '` has no known descriptors'
|
||||
);
|
||||
}
|
||||
|
||||
if (
|
||||
!atrule.descriptors[descriptor.name] &&
|
||||
!atrule.descriptors[descriptor.basename]
|
||||
) {
|
||||
return new error.SyntaxReferenceError(
|
||||
'Unknown at-rule descriptor',
|
||||
descriptorName
|
||||
);
|
||||
}
|
||||
}
|
||||
checkPropertyName(propertyName) {
|
||||
if (!this.getProperty(propertyName)) {
|
||||
return new error.SyntaxReferenceError('Unknown property', propertyName);
|
||||
}
|
||||
}
|
||||
|
||||
matchAtrulePrelude(atruleName, prelude) {
|
||||
const error = this.checkAtrulePrelude(atruleName, prelude);
|
||||
|
||||
if (error) {
|
||||
return buildMatchResult(null, error);
|
||||
}
|
||||
|
||||
const atrule = this.getAtrule(atruleName);
|
||||
|
||||
if (!atrule.prelude) {
|
||||
return buildMatchResult(null, null);
|
||||
}
|
||||
|
||||
return matchSyntax(this, atrule.prelude, prelude || '', false);
|
||||
}
|
||||
matchAtruleDescriptor(atruleName, descriptorName, value) {
|
||||
const error = this.checkAtruleDescriptorName(atruleName, descriptorName);
|
||||
|
||||
if (error) {
|
||||
return buildMatchResult(null, error);
|
||||
}
|
||||
|
||||
const atrule = this.getAtrule(atruleName);
|
||||
const descriptor = names.keyword(descriptorName);
|
||||
|
||||
return matchSyntax(
|
||||
this,
|
||||
atrule.descriptors[descriptor.name] ||
|
||||
atrule.descriptors[descriptor.basename],
|
||||
value,
|
||||
false
|
||||
);
|
||||
}
|
||||
matchDeclaration(node) {
|
||||
if (node.type !== 'Declaration') {
|
||||
return buildMatchResult(null, new Error('Not a Declaration node'));
|
||||
}
|
||||
|
||||
return this.matchProperty(node.property, node.value);
|
||||
}
|
||||
matchProperty(propertyName, value) {
|
||||
// don't match syntax for a custom property at the moment
|
||||
if (names.property(propertyName).custom) {
|
||||
return buildMatchResult(
|
||||
null,
|
||||
new Error("Lexer matching doesn't applicable for custom properties")
|
||||
);
|
||||
}
|
||||
|
||||
const error = this.checkPropertyName(propertyName);
|
||||
|
||||
if (error) {
|
||||
return buildMatchResult(null, error);
|
||||
}
|
||||
|
||||
return matchSyntax(this, this.getProperty(propertyName), value, true);
|
||||
}
|
||||
matchType(typeName, value) {
|
||||
const typeSyntax = this.getType(typeName);
|
||||
|
||||
if (!typeSyntax) {
|
||||
return buildMatchResult(
|
||||
null,
|
||||
new error.SyntaxReferenceError('Unknown type', typeName)
|
||||
);
|
||||
}
|
||||
|
||||
return matchSyntax(this, typeSyntax, value, false);
|
||||
}
|
||||
match(syntax, value) {
|
||||
if (typeof syntax !== 'string' && (!syntax || !syntax.type)) {
|
||||
return buildMatchResult(
|
||||
null,
|
||||
new error.SyntaxReferenceError('Bad syntax')
|
||||
);
|
||||
}
|
||||
|
||||
if (typeof syntax === 'string' || !syntax.match) {
|
||||
syntax = this.createDescriptor(syntax, 'Type', 'anonymous');
|
||||
}
|
||||
|
||||
return matchSyntax(this, syntax, value, false);
|
||||
}
|
||||
|
||||
findValueFragments(propertyName, value, type, name) {
|
||||
return search.matchFragments(
|
||||
this,
|
||||
value,
|
||||
this.matchProperty(propertyName, value),
|
||||
type,
|
||||
name
|
||||
);
|
||||
}
|
||||
findDeclarationValueFragments(declaration, type, name) {
|
||||
return search.matchFragments(
|
||||
this,
|
||||
declaration.value,
|
||||
this.matchDeclaration(declaration),
|
||||
type,
|
||||
name
|
||||
);
|
||||
}
|
||||
findAllFragments(ast, type, name) {
|
||||
const result = [];
|
||||
|
||||
this.syntax.walk(ast, {
|
||||
visit: 'Declaration',
|
||||
enter: (declaration) => {
|
||||
result.push.apply(
|
||||
result,
|
||||
this.findDeclarationValueFragments(declaration, type, name)
|
||||
);
|
||||
},
|
||||
});
|
||||
|
||||
return result;
|
||||
}
|
||||
|
||||
getAtrule(atruleName, fallbackBasename = true) {
|
||||
const atrule = names.keyword(atruleName);
|
||||
const atruleEntry =
|
||||
atrule.vendor && fallbackBasename ?
|
||||
this.atrules[atrule.name] || this.atrules[atrule.basename]
|
||||
: this.atrules[atrule.name];
|
||||
|
||||
return atruleEntry || null;
|
||||
}
|
||||
getAtrulePrelude(atruleName, fallbackBasename = true) {
|
||||
const atrule = this.getAtrule(atruleName, fallbackBasename);
|
||||
|
||||
return (atrule && atrule.prelude) || null;
|
||||
}
|
||||
getAtruleDescriptor(atruleName, name) {
|
||||
return this.atrules.hasOwnProperty(atruleName) && this.atrules.declarators ?
|
||||
this.atrules[atruleName].declarators[name] || null
|
||||
: null;
|
||||
}
|
||||
getProperty(propertyName, fallbackBasename = true) {
|
||||
const property = names.property(propertyName);
|
||||
const propertyEntry =
|
||||
property.vendor && fallbackBasename ?
|
||||
this.properties[property.name] || this.properties[property.basename]
|
||||
: this.properties[property.name];
|
||||
|
||||
return propertyEntry || null;
|
||||
}
|
||||
getType(name) {
|
||||
return hasOwnProperty.call(this.types, name) ? this.types[name] : null;
|
||||
}
|
||||
|
||||
validate() {
|
||||
function validate(syntax, name, broken, descriptor) {
|
||||
if (broken.has(name)) {
|
||||
return broken.get(name);
|
||||
}
|
||||
|
||||
broken.set(name, false);
|
||||
if (descriptor.syntax !== null) {
|
||||
walk.walk(
|
||||
descriptor.syntax,
|
||||
function (node) {
|
||||
if (node.type !== 'Type' && node.type !== 'Property') {
|
||||
return;
|
||||
}
|
||||
|
||||
broken.set(name, false);
|
||||
if (descriptor.syntax !== null) {
|
||||
walk.walk(descriptor.syntax, function(node) {
|
||||
if (node.type !== 'Type' && node.type !== 'Property') {
|
||||
return;
|
||||
}
|
||||
const map = node.type === 'Type' ? syntax.types : syntax.properties;
|
||||
const brokenMap =
|
||||
node.type === 'Type' ? brokenTypes : brokenProperties;
|
||||
|
||||
const map = node.type === 'Type' ? syntax.types : syntax.properties;
|
||||
const brokenMap = node.type === 'Type' ? brokenTypes : brokenProperties;
|
||||
|
||||
if (!hasOwnProperty.call(map, node.name) || validate(syntax, node.name, brokenMap, map[node.name])) {
|
||||
broken.set(name, true);
|
||||
}
|
||||
}, this);
|
||||
if (
|
||||
!hasOwnProperty.call(map, node.name) ||
|
||||
validate(syntax, node.name, brokenMap, map[node.name])
|
||||
) {
|
||||
broken.set(name, true);
|
||||
}
|
||||
}
|
||||
|
||||
let brokenTypes = new Map();
|
||||
let brokenProperties = new Map();
|
||||
|
||||
for (const key in this.types) {
|
||||
validate(this, key, brokenTypes, this.types[key]);
|
||||
}
|
||||
|
||||
for (const key in this.properties) {
|
||||
validate(this, key, brokenProperties, this.properties[key]);
|
||||
}
|
||||
|
||||
brokenTypes = [...brokenTypes.keys()].filter(name => brokenTypes.get(name));
|
||||
brokenProperties = [...brokenProperties.keys()].filter(name => brokenProperties.get(name));
|
||||
|
||||
if (brokenTypes.length || brokenProperties.length) {
|
||||
return {
|
||||
types: brokenTypes,
|
||||
properties: brokenProperties
|
||||
};
|
||||
}
|
||||
|
||||
return null;
|
||||
},
|
||||
this
|
||||
);
|
||||
}
|
||||
}
|
||||
dump(syntaxAsAst, pretty) {
|
||||
return {
|
||||
generic: this.generic,
|
||||
types: dumpMapSyntax(this.types, !pretty, syntaxAsAst),
|
||||
properties: dumpMapSyntax(this.properties, !pretty, syntaxAsAst),
|
||||
atrules: dumpAtruleMapSyntax(this.atrules, !pretty, syntaxAsAst)
|
||||
};
|
||||
|
||||
let brokenTypes = new Map();
|
||||
let brokenProperties = new Map();
|
||||
|
||||
for (const key in this.types) {
|
||||
validate(this, key, brokenTypes, this.types[key]);
|
||||
}
|
||||
toString() {
|
||||
return JSON.stringify(this.dump());
|
||||
|
||||
for (const key in this.properties) {
|
||||
validate(this, key, brokenProperties, this.properties[key]);
|
||||
}
|
||||
|
||||
brokenTypes = [...brokenTypes.keys()].filter((name) =>
|
||||
brokenTypes.get(name)
|
||||
);
|
||||
brokenProperties = [...brokenProperties.keys()].filter((name) =>
|
||||
brokenProperties.get(name)
|
||||
);
|
||||
|
||||
if (brokenTypes.length || brokenProperties.length) {
|
||||
return {
|
||||
types: brokenTypes,
|
||||
properties: brokenProperties,
|
||||
};
|
||||
}
|
||||
|
||||
return null;
|
||||
}
|
||||
dump(syntaxAsAst, pretty) {
|
||||
return {
|
||||
generic: this.generic,
|
||||
types: dumpMapSyntax(this.types, !pretty, syntaxAsAst),
|
||||
properties: dumpMapSyntax(this.properties, !pretty, syntaxAsAst),
|
||||
atrules: dumpAtruleMapSyntax(this.atrules, !pretty, syntaxAsAst),
|
||||
};
|
||||
}
|
||||
toString() {
|
||||
return JSON.stringify(this.dump());
|
||||
}
|
||||
}
|
||||
|
||||
exports.Lexer = Lexer;
|
||||
|
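A minimal sketch of the lazy-initialization pattern used by createDescriptor above: a getter that replaces itself with a plain value property on first access, so syntax parsing and match-graph building only happen when the field is actually read. defineLazy and the stand-in compute function are illustrative names, not css-tree exports.

function defineLazy(obj, key, compute) {
  Object.defineProperty(obj, key, {
    configurable: true,
    get() {
      const value = compute();
      // redefine the property as a cached plain value
      Object.defineProperty(obj, key, { value });
      return value;
    }
  });
}

const descriptor = { type: 'Property', name: 'color' };

defineLazy(descriptor, 'syntax', () => {
  console.log('parsed once');
  return '<color>'; // stand-in for parse.parse(syntax)
});

descriptor.syntax; // logs "parsed once" and caches the value
descriptor.syntax; // no second parse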
196
node_modules/css-tree/cjs/lexer/error.cjs
generated
vendored
@ -6,122 +6,138 @@ const generate = require('../definition-syntax/generate.cjs');
|
||||
const defaultLoc = { offset: 0, line: 1, column: 1 };
|
||||
|
||||
function locateMismatch(matchResult, node) {
|
||||
const tokens = matchResult.tokens;
|
||||
const longestMatch = matchResult.longestMatch;
|
||||
const mismatchNode = longestMatch < tokens.length ? tokens[longestMatch].node || null : null;
|
||||
const badNode = mismatchNode !== node ? mismatchNode : null;
|
||||
let mismatchOffset = 0;
|
||||
let mismatchLength = 0;
|
||||
let entries = 0;
|
||||
let css = '';
|
||||
let start;
|
||||
let end;
|
||||
const tokens = matchResult.tokens;
|
||||
const longestMatch = matchResult.longestMatch;
|
||||
const mismatchNode =
|
||||
longestMatch < tokens.length ? tokens[longestMatch].node || null : null;
|
||||
const badNode = mismatchNode !== node ? mismatchNode : null;
|
||||
let mismatchOffset = 0;
|
||||
let mismatchLength = 0;
|
||||
let entries = 0;
|
||||
let css = '';
|
||||
let start;
|
||||
let end;
|
||||
|
||||
for (let i = 0; i < tokens.length; i++) {
|
||||
const token = tokens[i].value;
|
||||
for (let i = 0; i < tokens.length; i++) {
|
||||
const token = tokens[i].value;
|
||||
|
||||
if (i === longestMatch) {
|
||||
mismatchLength = token.length;
|
||||
mismatchOffset = css.length;
|
||||
}
|
||||
|
||||
if (badNode !== null && tokens[i].node === badNode) {
|
||||
if (i <= longestMatch) {
|
||||
entries++;
|
||||
} else {
|
||||
entries = 0;
|
||||
}
|
||||
}
|
||||
|
||||
css += token;
|
||||
if (i === longestMatch) {
|
||||
mismatchLength = token.length;
|
||||
mismatchOffset = css.length;
|
||||
}
|
||||
|
||||
if (longestMatch === tokens.length || entries > 1) { // last
|
||||
start = fromLoc(badNode || node, 'end') || buildLoc(defaultLoc, css);
|
||||
end = buildLoc(start);
|
||||
} else {
|
||||
start = fromLoc(badNode, 'start') ||
|
||||
buildLoc(fromLoc(node, 'start') || defaultLoc, css.slice(0, mismatchOffset));
|
||||
end = fromLoc(badNode, 'end') ||
|
||||
buildLoc(start, css.substr(mismatchOffset, mismatchLength));
|
||||
if (badNode !== null && tokens[i].node === badNode) {
|
||||
if (i <= longestMatch) {
|
||||
entries++;
|
||||
} else {
|
||||
entries = 0;
|
||||
}
|
||||
}
|
||||
|
||||
return {
|
||||
css,
|
||||
mismatchOffset,
|
||||
mismatchLength,
|
||||
start,
|
||||
end
|
||||
};
|
||||
css += token;
|
||||
}
|
||||
|
||||
if (longestMatch === tokens.length || entries > 1) {
|
||||
// last
|
||||
start = fromLoc(badNode || node, 'end') || buildLoc(defaultLoc, css);
|
||||
end = buildLoc(start);
|
||||
} else {
|
||||
start =
|
||||
fromLoc(badNode, 'start') ||
|
||||
buildLoc(
|
||||
fromLoc(node, 'start') || defaultLoc,
|
||||
css.slice(0, mismatchOffset)
|
||||
);
|
||||
end =
|
||||
fromLoc(badNode, 'end') ||
|
||||
buildLoc(start, css.substr(mismatchOffset, mismatchLength));
|
||||
}
|
||||
|
||||
return {
|
||||
css,
|
||||
mismatchOffset,
|
||||
mismatchLength,
|
||||
start,
|
||||
end,
|
||||
};
|
||||
}
|
||||
|
||||
function fromLoc(node, point) {
|
||||
const value = node && node.loc && node.loc[point];
|
||||
const value = node && node.loc && node.loc[point];
|
||||
|
||||
if (value) {
|
||||
return 'line' in value ? buildLoc(value) : value;
|
||||
}
|
||||
if (value) {
|
||||
return 'line' in value ? buildLoc(value) : value;
|
||||
}
|
||||
|
||||
return null;
|
||||
return null;
|
||||
}
|
||||
|
||||
function buildLoc({ offset, line, column }, extra) {
|
||||
const loc = {
|
||||
offset,
|
||||
line,
|
||||
column
|
||||
};
|
||||
const loc = {
|
||||
offset,
|
||||
line,
|
||||
column,
|
||||
};
|
||||
|
||||
if (extra) {
|
||||
const lines = extra.split(/\n|\r\n?|\f/);
|
||||
if (extra) {
|
||||
const lines = extra.split(/\n|\r\n?|\f/);
|
||||
|
||||
loc.offset += extra.length;
|
||||
loc.line += lines.length - 1;
|
||||
loc.column = lines.length === 1 ? loc.column + extra.length : lines.pop().length + 1;
|
||||
}
|
||||
loc.offset += extra.length;
|
||||
loc.line += lines.length - 1;
|
||||
loc.column =
|
||||
lines.length === 1 ? loc.column + extra.length : lines.pop().length + 1;
|
||||
}
|
||||
|
||||
return loc;
|
||||
return loc;
|
||||
}
|
||||
|
||||
const SyntaxReferenceError = function(type, referenceName) {
|
||||
const error = createCustomError.createCustomError(
|
||||
'SyntaxReferenceError',
|
||||
type + (referenceName ? ' `' + referenceName + '`' : '')
|
||||
);
|
||||
const SyntaxReferenceError = function (type, referenceName) {
|
||||
const error = createCustomError.createCustomError(
|
||||
'SyntaxReferenceError',
|
||||
type + (referenceName ? ' `' + referenceName + '`' : '')
|
||||
);
|
||||
|
||||
error.reference = referenceName;
|
||||
error.reference = referenceName;
|
||||
|
||||
return error;
|
||||
return error;
|
||||
};
|
||||
|
||||
const SyntaxMatchError = function(message, syntax, node, matchResult) {
|
||||
const error = createCustomError.createCustomError('SyntaxMatchError', message);
|
||||
const {
|
||||
css,
|
||||
mismatchOffset,
|
||||
mismatchLength,
|
||||
start,
|
||||
end
|
||||
} = locateMismatch(matchResult, node);
|
||||
const SyntaxMatchError = function (message, syntax, node, matchResult) {
|
||||
const error = createCustomError.createCustomError(
|
||||
'SyntaxMatchError',
|
||||
message
|
||||
);
|
||||
const { css, mismatchOffset, mismatchLength, start, end } = locateMismatch(
|
||||
matchResult,
|
||||
node
|
||||
);
|
||||
|
||||
error.rawMessage = message;
|
||||
error.syntax = syntax ? generate.generate(syntax) : '<generic>';
|
||||
error.css = css;
|
||||
error.mismatchOffset = mismatchOffset;
|
||||
error.mismatchLength = mismatchLength;
|
||||
error.message = message + '\n' +
|
||||
' syntax: ' + error.syntax + '\n' +
|
||||
' value: ' + (css || '<empty string>') + '\n' +
|
||||
' --------' + new Array(error.mismatchOffset + 1).join('-') + '^';
|
||||
error.rawMessage = message;
|
||||
error.syntax = syntax ? generate.generate(syntax) : '<generic>';
|
||||
error.css = css;
|
||||
error.mismatchOffset = mismatchOffset;
|
||||
error.mismatchLength = mismatchLength;
|
||||
error.message =
|
||||
message +
|
||||
'\n' +
|
||||
' syntax: ' +
|
||||
error.syntax +
|
||||
'\n' +
|
||||
' value: ' +
|
||||
(css || '<empty string>') +
|
||||
'\n' +
|
||||
' --------' +
|
||||
new Array(error.mismatchOffset + 1).join('-') +
|
||||
'^';
|
||||
|
||||
Object.assign(error, start);
|
||||
error.loc = {
|
||||
source: (node && node.loc && node.loc.source) || '<unknown>',
|
||||
start,
|
||||
end
|
||||
};
|
||||
Object.assign(error, start);
|
||||
error.loc = {
|
||||
source: (node && node.loc && node.loc.source) || '<unknown>',
|
||||
start,
|
||||
end,
|
||||
};
|
||||
|
||||
return error;
|
||||
return error;
|
||||
};
|
||||
|
||||
exports.SyntaxMatchError = SyntaxMatchError;
|
||||
|
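A minimal sketch of the position bookkeeping done by buildLoc above: given a start location and a consumed text fragment, advance offset, line and column to the end of the fragment. advanceLoc is an illustrative name for the same arithmetic.

function advanceLoc({ offset, line, column }, text) {
  const lines = text.split(/\n|\r\n?|\f/);

  return {
    offset: offset + text.length,
    line: line + lines.length - 1,
    // stay on the same line: extend the column; otherwise restart after the last newline
    column: lines.length === 1 ? column + text.length : lines[lines.length - 1].length + 1
  };
}

// advanceLoc({ offset: 0, line: 1, column: 1 }, 'a {\n  color')
// -> { offset: 11, line: 2, column: 8 }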
361
node_modules/css-tree/cjs/lexer/generic-an-plus-b.cjs
generated
vendored
@ -4,232 +4,245 @@ const charCodeDefinitions = require('../tokenizer/char-code-definitions.cjs');
|
||||
const types = require('../tokenizer/types.cjs');
|
||||
const utils = require('../tokenizer/utils.cjs');
|
||||
|
||||
const PLUSSIGN = 0x002B; // U+002B PLUS SIGN (+)
|
||||
const HYPHENMINUS = 0x002D; // U+002D HYPHEN-MINUS (-)
|
||||
const N = 0x006E; // U+006E LATIN SMALL LETTER N (n)
|
||||
const PLUSSIGN = 0x002b; // U+002B PLUS SIGN (+)
|
||||
const HYPHENMINUS = 0x002d; // U+002D HYPHEN-MINUS (-)
|
||||
const N = 0x006e; // U+006E LATIN SMALL LETTER N (n)
|
||||
const DISALLOW_SIGN = true;
|
||||
const ALLOW_SIGN = false;
|
||||
|
||||
function isDelim(token, code) {
|
||||
return token !== null && token.type === types.Delim && token.value.charCodeAt(0) === code;
|
||||
return (
|
||||
token !== null &&
|
||||
token.type === types.Delim &&
|
||||
token.value.charCodeAt(0) === code
|
||||
);
|
||||
}
|
||||
|
||||
function skipSC(token, offset, getNextToken) {
|
||||
while (token !== null && (token.type === types.WhiteSpace || token.type === types.Comment)) {
|
||||
token = getNextToken(++offset);
|
||||
}
|
||||
while (
|
||||
token !== null &&
|
||||
(token.type === types.WhiteSpace || token.type === types.Comment)
|
||||
) {
|
||||
token = getNextToken(++offset);
|
||||
}
|
||||
|
||||
return offset;
|
||||
return offset;
|
||||
}
|
||||
|
||||
function checkInteger(token, valueOffset, disallowSign, offset) {
|
||||
if (!token) {
|
||||
return 0;
|
||||
if (!token) {
|
||||
return 0;
|
||||
}
|
||||
|
||||
const code = token.value.charCodeAt(valueOffset);
|
||||
|
||||
if (code === PLUSSIGN || code === HYPHENMINUS) {
|
||||
if (disallowSign) {
|
||||
// Number sign is not allowed
|
||||
return 0;
|
||||
}
|
||||
valueOffset++;
|
||||
}
|
||||
|
||||
const code = token.value.charCodeAt(valueOffset);
|
||||
|
||||
if (code === PLUSSIGN || code === HYPHENMINUS) {
|
||||
if (disallowSign) {
|
||||
// Number sign is not allowed
|
||||
return 0;
|
||||
}
|
||||
valueOffset++;
|
||||
for (; valueOffset < token.value.length; valueOffset++) {
|
||||
if (!charCodeDefinitions.isDigit(token.value.charCodeAt(valueOffset))) {
|
||||
// Integer is expected
|
||||
return 0;
|
||||
}
|
||||
}
|
||||
|
||||
for (; valueOffset < token.value.length; valueOffset++) {
|
||||
if (!charCodeDefinitions.isDigit(token.value.charCodeAt(valueOffset))) {
|
||||
// Integer is expected
|
||||
return 0;
|
||||
}
|
||||
}
|
||||
|
||||
return offset + 1;
|
||||
return offset + 1;
|
||||
}
|
||||
|
||||
// ... <signed-integer>
|
||||
// ... ['+' | '-'] <signless-integer>
|
||||
function consumeB(token, offset_, getNextToken) {
|
||||
let sign = false;
|
||||
let offset = skipSC(token, offset_, getNextToken);
|
||||
let sign = false;
|
||||
let offset = skipSC(token, offset_, getNextToken);
|
||||
|
||||
token = getNextToken(offset);
|
||||
token = getNextToken(offset);
|
||||
|
||||
if (token === null) {
|
||||
return offset_;
|
||||
if (token === null) {
|
||||
return offset_;
|
||||
}
|
||||
|
||||
if (token.type !== types.Number) {
|
||||
if (isDelim(token, PLUSSIGN) || isDelim(token, HYPHENMINUS)) {
|
||||
sign = true;
|
||||
offset = skipSC(getNextToken(++offset), offset, getNextToken);
|
||||
token = getNextToken(offset);
|
||||
|
||||
if (token === null || token.type !== types.Number) {
|
||||
return 0;
|
||||
}
|
||||
} else {
|
||||
return offset_;
|
||||
}
|
||||
}
|
||||
|
||||
if (token.type !== types.Number) {
|
||||
if (isDelim(token, PLUSSIGN) || isDelim(token, HYPHENMINUS)) {
|
||||
sign = true;
|
||||
offset = skipSC(getNextToken(++offset), offset, getNextToken);
|
||||
token = getNextToken(offset);
|
||||
|
||||
if (token === null || token.type !== types.Number) {
|
||||
return 0;
|
||||
}
|
||||
} else {
|
||||
return offset_;
|
||||
}
|
||||
if (!sign) {
|
||||
const code = token.value.charCodeAt(0);
|
||||
if (code !== PLUSSIGN && code !== HYPHENMINUS) {
|
||||
// Number sign is expected
|
||||
return 0;
|
||||
}
|
||||
}
|
||||
|
||||
if (!sign) {
|
||||
const code = token.value.charCodeAt(0);
|
||||
if (code !== PLUSSIGN && code !== HYPHENMINUS) {
|
||||
// Number sign is expected
|
||||
return 0;
|
||||
}
|
||||
}
|
||||
|
||||
return checkInteger(token, sign ? 0 : 1, sign, offset);
|
||||
return checkInteger(token, sign ? 0 : 1, sign, offset);
|
||||
}
|
||||
|
||||
// An+B microsyntax https://www.w3.org/TR/css-syntax-3/#anb
|
||||
function anPlusB(token, getNextToken) {
|
||||
/* eslint-disable brace-style*/
|
||||
let offset = 0;
|
||||
/* eslint-disable brace-style*/
|
||||
let offset = 0;
|
||||
|
||||
if (!token) {
|
||||
return 0;
|
||||
if (!token) {
|
||||
return 0;
|
||||
}
|
||||
|
||||
// <integer>
|
||||
if (token.type === types.Number) {
|
||||
return checkInteger(token, 0, ALLOW_SIGN, offset); // b
|
||||
}
|
||||
|
||||
// -n
|
||||
// -n <signed-integer>
|
||||
// -n ['+' | '-'] <signless-integer>
|
||||
// -n- <signless-integer>
|
||||
// <dashndashdigit-ident>
|
||||
else if (
|
||||
token.type === types.Ident &&
|
||||
token.value.charCodeAt(0) === HYPHENMINUS
|
||||
) {
|
||||
// expect 1st char is N
|
||||
if (!utils.cmpChar(token.value, 1, N)) {
|
||||
return 0;
|
||||
}
|
||||
|
||||
// <integer>
|
||||
if (token.type === types.Number) {
|
||||
return checkInteger(token, 0, ALLOW_SIGN, offset); // b
|
||||
switch (token.value.length) {
|
||||
// -n
|
||||
// -n <signed-integer>
|
||||
// -n ['+' | '-'] <signless-integer>
|
||||
case 2:
|
||||
return consumeB(getNextToken(++offset), offset, getNextToken);
|
||||
|
||||
// -n- <signless-integer>
|
||||
case 3:
|
||||
if (token.value.charCodeAt(2) !== HYPHENMINUS) {
|
||||
return 0;
|
||||
}
|
||||
|
||||
offset = skipSC(getNextToken(++offset), offset, getNextToken);
|
||||
token = getNextToken(offset);
|
||||
|
||||
return checkInteger(token, 0, DISALLOW_SIGN, offset);
|
||||
|
||||
// <dashndashdigit-ident>
|
||||
default:
|
||||
if (token.value.charCodeAt(2) !== HYPHENMINUS) {
|
||||
return 0;
|
||||
}
|
||||
|
||||
return checkInteger(token, 3, DISALLOW_SIGN, offset);
|
||||
}
|
||||
}
|
||||
|
||||
// '+'? n
|
||||
// '+'? n <signed-integer>
|
||||
// '+'? n ['+' | '-'] <signless-integer>
|
||||
// '+'? n- <signless-integer>
|
||||
// '+'? <ndashdigit-ident>
|
||||
else if (
|
||||
token.type === types.Ident ||
|
||||
(isDelim(token, PLUSSIGN) && getNextToken(offset + 1).type === types.Ident)
|
||||
) {
|
||||
// just ignore a plus
|
||||
if (token.type !== types.Ident) {
|
||||
token = getNextToken(++offset);
|
||||
}
|
||||
|
||||
// -n
|
||||
// -n <signed-integer>
|
||||
// -n ['+' | '-'] <signless-integer>
|
||||
// -n- <signless-integer>
|
||||
// <dashndashdigit-ident>
|
||||
else if (token.type === types.Ident && token.value.charCodeAt(0) === HYPHENMINUS) {
|
||||
// expect 1st char is N
|
||||
if (!utils.cmpChar(token.value, 1, N)) {
|
||||
return 0;
|
||||
}
|
||||
|
||||
switch (token.value.length) {
|
||||
// -n
|
||||
// -n <signed-integer>
|
||||
// -n ['+' | '-'] <signless-integer>
|
||||
case 2:
|
||||
return consumeB(getNextToken(++offset), offset, getNextToken);
|
||||
|
||||
// -n- <signless-integer>
|
||||
case 3:
|
||||
if (token.value.charCodeAt(2) !== HYPHENMINUS) {
|
||||
return 0;
|
||||
}
|
||||
|
||||
offset = skipSC(getNextToken(++offset), offset, getNextToken);
|
||||
token = getNextToken(offset);
|
||||
|
||||
return checkInteger(token, 0, DISALLOW_SIGN, offset);
|
||||
|
||||
// <dashndashdigit-ident>
|
||||
default:
|
||||
if (token.value.charCodeAt(2) !== HYPHENMINUS) {
|
||||
return 0;
|
||||
}
|
||||
|
||||
return checkInteger(token, 3, DISALLOW_SIGN, offset);
|
||||
}
|
||||
if (token === null || !utils.cmpChar(token.value, 0, N)) {
|
||||
return 0;
|
||||
}
|
||||
|
||||
// '+'? n
|
||||
// '+'? n <signed-integer>
|
||||
// '+'? n ['+' | '-'] <signless-integer>
|
||||
// '+'? n- <signless-integer>
|
||||
// '+'? <ndashdigit-ident>
|
||||
else if (token.type === types.Ident || (isDelim(token, PLUSSIGN) && getNextToken(offset + 1).type === types.Ident)) {
|
||||
// just ignore a plus
|
||||
if (token.type !== types.Ident) {
|
||||
token = getNextToken(++offset);
|
||||
switch (token.value.length) {
|
||||
// '+'? n
|
||||
// '+'? n <signed-integer>
|
||||
// '+'? n ['+' | '-'] <signless-integer>
|
||||
case 1:
|
||||
return consumeB(getNextToken(++offset), offset, getNextToken);
|
||||
|
||||
// '+'? n- <signless-integer>
|
||||
case 2:
|
||||
if (token.value.charCodeAt(1) !== HYPHENMINUS) {
|
||||
return 0;
|
||||
}
|
||||
|
||||
if (token === null || !utils.cmpChar(token.value, 0, N)) {
|
||||
return 0;
|
||||
offset = skipSC(getNextToken(++offset), offset, getNextToken);
|
||||
token = getNextToken(offset);
|
||||
|
||||
return checkInteger(token, 0, DISALLOW_SIGN, offset);
|
||||
|
||||
// '+'? <ndashdigit-ident>
|
||||
default:
|
||||
if (token.value.charCodeAt(1) !== HYPHENMINUS) {
|
||||
return 0;
|
||||
}
|
||||
|
||||
switch (token.value.length) {
|
||||
// '+'? n
|
||||
// '+'? n <signed-integer>
|
||||
// '+'? n ['+' | '-'] <signless-integer>
|
||||
case 1:
|
||||
return consumeB(getNextToken(++offset), offset, getNextToken);
|
||||
return checkInteger(token, 2, DISALLOW_SIGN, offset);
|
||||
}
|
||||
}
|
||||
|
||||
// '+'? n- <signless-integer>
|
||||
case 2:
|
||||
if (token.value.charCodeAt(1) !== HYPHENMINUS) {
|
||||
return 0;
|
||||
}
|
||||
// <ndashdigit-dimension>
|
||||
// <ndash-dimension> <signless-integer>
|
||||
// <n-dimension>
|
||||
// <n-dimension> <signed-integer>
|
||||
// <n-dimension> ['+' | '-'] <signless-integer>
|
||||
else if (token.type === types.Dimension) {
|
||||
let code = token.value.charCodeAt(0);
|
||||
let sign = code === PLUSSIGN || code === HYPHENMINUS ? 1 : 0;
|
||||
let i = sign;
|
||||
|
||||
offset = skipSC(getNextToken(++offset), offset, getNextToken);
|
||||
token = getNextToken(offset);
|
||||
|
||||
return checkInteger(token, 0, DISALLOW_SIGN, offset);
|
||||
|
||||
// '+'? <ndashdigit-ident>
|
||||
default:
|
||||
if (token.value.charCodeAt(1) !== HYPHENMINUS) {
|
||||
return 0;
|
||||
}
|
||||
|
||||
return checkInteger(token, 2, DISALLOW_SIGN, offset);
|
||||
}
|
||||
for (; i < token.value.length; i++) {
|
||||
if (!charCodeDefinitions.isDigit(token.value.charCodeAt(i))) {
|
||||
break;
|
||||
}
|
||||
}
|
||||
|
||||
if (i === sign) {
|
||||
// Integer is expected
|
||||
return 0;
|
||||
}
|
||||
|
||||
if (!utils.cmpChar(token.value, i, N)) {
|
||||
return 0;
|
||||
}
|
||||
|
||||
// <ndashdigit-dimension>
|
||||
// <ndash-dimension> <signless-integer>
|
||||
// <n-dimension>
|
||||
// <n-dimension> <signed-integer>
|
||||
// <n-dimension> ['+' | '-'] <signless-integer>
|
||||
else if (token.type === types.Dimension) {
|
||||
let code = token.value.charCodeAt(0);
|
||||
let sign = code === PLUSSIGN || code === HYPHENMINUS ? 1 : 0;
|
||||
let i = sign;
|
||||
if (i + 1 === token.value.length) {
|
||||
return consumeB(getNextToken(++offset), offset, getNextToken);
|
||||
} else {
|
||||
if (token.value.charCodeAt(i + 1) !== HYPHENMINUS) {
|
||||
return 0;
|
||||
}
|
||||
|
||||
for (; i < token.value.length; i++) {
|
||||
if (!charCodeDefinitions.isDigit(token.value.charCodeAt(i))) {
|
||||
break;
|
||||
}
|
||||
}
|
||||
// <ndash-dimension> <signless-integer>
|
||||
if (i + 2 === token.value.length) {
|
||||
offset = skipSC(getNextToken(++offset), offset, getNextToken);
|
||||
token = getNextToken(offset);
|
||||
|
||||
if (i === sign) {
|
||||
// Integer is expected
|
||||
return 0;
|
||||
}
|
||||
|
||||
if (!utils.cmpChar(token.value, i, N)) {
|
||||
return 0;
|
||||
}
|
||||
|
||||
// <n-dimension>
|
||||
// <n-dimension> <signed-integer>
|
||||
// <n-dimension> ['+' | '-'] <signless-integer>
|
||||
if (i + 1 === token.value.length) {
|
||||
return consumeB(getNextToken(++offset), offset, getNextToken);
|
||||
} else {
|
||||
if (token.value.charCodeAt(i + 1) !== HYPHENMINUS) {
|
||||
return 0;
|
||||
}
|
||||
|
||||
// <ndash-dimension> <signless-integer>
|
||||
if (i + 2 === token.value.length) {
|
||||
offset = skipSC(getNextToken(++offset), offset, getNextToken);
|
||||
token = getNextToken(offset);
|
||||
|
||||
return checkInteger(token, 0, DISALLOW_SIGN, offset);
|
||||
}
|
||||
// <ndashdigit-dimension>
|
||||
else {
|
||||
return checkInteger(token, i + 2, DISALLOW_SIGN, offset);
|
||||
}
|
||||
}
|
||||
return checkInteger(token, 0, DISALLOW_SIGN, offset);
}
// <ndashdigit-dimension>
else {
return checkInteger(token, i + 2, DISALLOW_SIGN, offset);
}
}
}

return 0;
return 0;
}

module.exports = anPlusB;

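The anPlusB matcher above consumes tokens for the An+B microsyntax used by :nth-child()-style selectors and returns the number of tokens matched (0 on mismatch). A rough string-level sketch of the accepted forms, assuming a plain input string rather than a token stream; matchAnPlusB is illustrative and not part of css-tree:

// Hypothetical sketch: recognize common An+B forms from a plain string.
// css-tree itself works token by token; this only illustrates the grammar shapes.
function matchAnPlusB(input) {
  const source = input.trim().toLowerCase();

  // keyword forms
  if (source === 'odd' || source === 'even') {
    return true;
  }

  // <integer>, '+'? n, '+'? n ['+' | '-'] <signless-integer>, and similar forms
  return /^[+-]?(?:\d+n(?:\s*[+-]\s*\d+)?|n(?:\s*[+-]\s*\d+)?|\d+)$/.test(source);
}

console.log(matchAnPlusB('2n+1')); // true
console.log(matchAnPlusB('-n+3')); // true
console.log(matchAnPlusB('odd'));  // true
console.log(matchAnPlusB('3m'));   // false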
10
node_modules/css-tree/cjs/lexer/generic-const.cjs
generated
vendored
10
node_modules/css-tree/cjs/lexer/generic-const.cjs
generated
vendored
@ -2,11 +2,11 @@

// https://drafts.csswg.org/css-cascade-5/
const cssWideKeywords = [
'initial',
'inherit',
'unset',
'revert',
'revert-layer'
'initial',
'inherit',
'unset',
'revert',
'revert-layer',
];

exports.cssWideKeywords = cssWideKeywords;

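The css-wide keywords above are reused further down (in generic.cjs) to reject values such as initial wherever a <custom-ident> is expected. A small sketch of that check with a local copy of the list; isValidCustomIdent is a hypothetical helper, not a css-tree export:

// Local copy of the list for illustration; the module above exports it as cssWideKeywords.
const CSS_WIDE_KEYWORDS = ['initial', 'inherit', 'unset', 'revert', 'revert-layer'];

function isValidCustomIdent(name) {
  const lower = name.toLowerCase();
  // CSS-wide keywords and the reserved `default` keyword are not valid <custom-ident>s
  return !CSS_WIDE_KEYWORDS.includes(lower) && lower !== 'default';
}

console.log(isValidCustomIdent('fade-in')); // true
console.log(isValidCustomIdent('inherit')); // false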
189
node_modules/css-tree/cjs/lexer/generic-urange.cjs
generated
vendored
189
node_modules/css-tree/cjs/lexer/generic-urange.cjs
generated
vendored
@ -4,55 +4,60 @@ const charCodeDefinitions = require('../tokenizer/char-code-definitions.cjs');
const types = require('../tokenizer/types.cjs');
const utils = require('../tokenizer/utils.cjs');

const PLUSSIGN = 0x002B; // U+002B PLUS SIGN (+)
const HYPHENMINUS = 0x002D; // U+002D HYPHEN-MINUS (-)
const QUESTIONMARK = 0x003F; // U+003F QUESTION MARK (?)
const U = 0x0075; // U+0075 LATIN SMALL LETTER U (u)
const PLUSSIGN = 0x002b; // U+002B PLUS SIGN (+)
const HYPHENMINUS = 0x002d; // U+002D HYPHEN-MINUS (-)
const QUESTIONMARK = 0x003f; // U+003F QUESTION MARK (?)
const U = 0x0075; // U+0075 LATIN SMALL LETTER U (u)

function isDelim(token, code) {
return token !== null && token.type === types.Delim && token.value.charCodeAt(0) === code;
return (
token !== null &&
token.type === types.Delim &&
token.value.charCodeAt(0) === code
);
}

function startsWith(token, code) {
|
||||
return token.value.charCodeAt(0) === code;
|
||||
return token.value.charCodeAt(0) === code;
|
||||
}
|
||||
|
||||
function hexSequence(token, offset, allowDash) {
|
||||
let hexlen = 0;
|
||||
let hexlen = 0;
|
||||
|
||||
for (let pos = offset; pos < token.value.length; pos++) {
|
||||
const code = token.value.charCodeAt(pos);
|
||||
for (let pos = offset; pos < token.value.length; pos++) {
|
||||
const code = token.value.charCodeAt(pos);
|
||||
|
||||
if (code === HYPHENMINUS && allowDash && hexlen !== 0) {
|
||||
hexSequence(token, offset + hexlen + 1, false);
|
||||
return 6; // dissallow following question marks
|
||||
}
|
||||
if (code === HYPHENMINUS && allowDash && hexlen !== 0) {
|
||||
hexSequence(token, offset + hexlen + 1, false);
|
||||
return 6; // dissallow following question marks
|
||||
}
|
||||
|
||||
if (!charCodeDefinitions.isHexDigit(code)) {
|
||||
return 0; // not a hex digit
|
||||
}
|
||||
if (!charCodeDefinitions.isHexDigit(code)) {
|
||||
return 0; // not a hex digit
|
||||
}
|
||||
|
||||
if (++hexlen > 6) {
|
||||
return 0; // too many hex digits
|
||||
} }
|
||||
if (++hexlen > 6) {
|
||||
return 0; // too many hex digits
|
||||
}
|
||||
}
|
||||
|
||||
return hexlen;
|
||||
return hexlen;
|
||||
}
|
||||
|
||||
function withQuestionMarkSequence(consumed, length, getNextToken) {
|
||||
if (!consumed) {
|
||||
return 0; // nothing consumed
|
||||
if (!consumed) {
|
||||
return 0; // nothing consumed
|
||||
}
|
||||
|
||||
while (isDelim(getNextToken(length), QUESTIONMARK)) {
|
||||
if (++consumed > 6) {
|
||||
return 0; // too many question marks
|
||||
}
|
||||
|
||||
while (isDelim(getNextToken(length), QUESTIONMARK)) {
|
||||
if (++consumed > 6) {
|
||||
return 0; // too many question marks
|
||||
}
|
||||
length++;
|
||||
}
|
||||
|
||||
length++;
|
||||
}
|
||||
|
||||
return length;
|
||||
return length;
|
||||
}
|
||||
|
||||
// https://drafts.csswg.org/css-syntax/#urange
|
||||
@ -75,75 +80,87 @@ function withQuestionMarkSequence(consumed, length, getNextToken) {
|
||||
// u <number-token> <number-token> |
|
||||
// u '+' '?'+
|
||||
function urange(token, getNextToken) {
|
||||
let length = 0;
|
||||
let length = 0;
|
||||
|
||||
// should start with `u` or `U`
|
||||
if (token === null || token.type !== types.Ident || !utils.cmpChar(token.value, 0, U)) {
|
||||
return 0;
|
||||
// should start with `u` or `U`
|
||||
if (
|
||||
token === null ||
|
||||
token.type !== types.Ident ||
|
||||
!utils.cmpChar(token.value, 0, U)
|
||||
) {
|
||||
return 0;
|
||||
}
|
||||
|
||||
token = getNextToken(++length);
|
||||
if (token === null) {
|
||||
return 0;
|
||||
}
|
||||
|
||||
// u '+' <ident-token> '?'*
|
||||
// u '+' '?'+
|
||||
if (isDelim(token, PLUSSIGN)) {
|
||||
token = getNextToken(++length);
|
||||
if (token === null) {
|
||||
return 0;
|
||||
}
|
||||
|
||||
if (token.type === types.Ident) {
|
||||
// u '+' <ident-token> '?'*
|
||||
return withQuestionMarkSequence(
|
||||
hexSequence(token, 0, true),
|
||||
++length,
|
||||
getNextToken
|
||||
);
|
||||
}
|
||||
|
||||
if (isDelim(token, QUESTIONMARK)) {
|
||||
// u '+' '?'+
|
||||
return withQuestionMarkSequence(1, ++length, getNextToken);
|
||||
}
|
||||
|
||||
// Hex digit or question mark is expected
|
||||
return 0;
|
||||
}
|
||||
|
||||
// u <number-token> '?'*
|
||||
// u <number-token> <dimension-token>
|
||||
// u <number-token> <number-token>
|
||||
if (token.type === types.Number) {
|
||||
const consumedHexLength = hexSequence(token, 1, true);
|
||||
if (consumedHexLength === 0) {
|
||||
return 0;
|
||||
}
|
||||
|
||||
token = getNextToken(++length);
|
||||
if (token === null) {
|
||||
return 0;
|
||||
// u <number-token> <eof>
|
||||
return length;
|
||||
}
|
||||
|
||||
// u '+' <ident-token> '?'*
|
||||
// u '+' '?'+
|
||||
if (isDelim(token, PLUSSIGN)) {
|
||||
token = getNextToken(++length);
|
||||
if (token === null) {
|
||||
return 0;
|
||||
}
|
||||
|
||||
if (token.type === types.Ident) {
|
||||
// u '+' <ident-token> '?'*
|
||||
return withQuestionMarkSequence(hexSequence(token, 0, true), ++length, getNextToken);
|
||||
}
|
||||
|
||||
if (isDelim(token, QUESTIONMARK)) {
|
||||
// u '+' '?'+
|
||||
return withQuestionMarkSequence(1, ++length, getNextToken);
|
||||
}
|
||||
|
||||
// Hex digit or question mark is expected
|
||||
if (token.type === types.Dimension || token.type === types.Number) {
|
||||
// u <number-token> <dimension-token>
|
||||
// u <number-token> <number-token>
|
||||
if (!startsWith(token, HYPHENMINUS) || !hexSequence(token, 1, false)) {
|
||||
return 0;
|
||||
}
|
||||
|
||||
return length + 1;
|
||||
}
|
||||
|
||||
// u <number-token> '?'*
|
||||
// u <number-token> <dimension-token>
|
||||
// u <number-token> <number-token>
|
||||
if (token.type === types.Number) {
|
||||
const consumedHexLength = hexSequence(token, 1, true);
|
||||
if (consumedHexLength === 0) {
|
||||
return 0;
|
||||
}
|
||||
return withQuestionMarkSequence(consumedHexLength, length, getNextToken);
|
||||
}
|
||||
|
||||
token = getNextToken(++length);
|
||||
if (token === null) {
|
||||
// u <number-token> <eof>
|
||||
return length;
|
||||
}
|
||||
// u <dimension-token> '?'*
|
||||
if (token.type === types.Dimension) {
|
||||
return withQuestionMarkSequence(
|
||||
hexSequence(token, 1, true),
|
||||
++length,
|
||||
getNextToken
|
||||
);
|
||||
}
|
||||
|
||||
if (token.type === types.Dimension || token.type === types.Number) {
|
||||
// u <number-token> <dimension-token>
|
||||
// u <number-token> <number-token>
|
||||
if (!startsWith(token, HYPHENMINUS) || !hexSequence(token, 1, false)) {
|
||||
return 0;
|
||||
}
|
||||
|
||||
return length + 1;
|
||||
}
|
||||
|
||||
// u <number-token> '?'*
|
||||
return withQuestionMarkSequence(consumedHexLength, length, getNextToken);
|
||||
}
|
||||
|
||||
// u <dimension-token> '?'*
|
||||
if (token.type === types.Dimension) {
|
||||
return withQuestionMarkSequence(hexSequence(token, 1, true), ++length, getNextToken);
|
||||
}
|
||||
|
||||
return 0;
return 0;
}

module.exports = urange;

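The urange matcher above returns the number of tokens consumed by a <urange> production such as U+26, U+0025-00FF or U+4??, or 0 on mismatch. A rough single-string sketch of the accepted shapes, assuming at most six hex digits per part; isUnicodeRange is illustrative and not part of css-tree:

// Hypothetical sketch: validate a unicode-range written as one string.
// The real matcher walks a token stream and counts consumed tokens instead.
const URANGE = /^u\+(?:[0-9a-f]{1,6}(?:-[0-9a-f]{1,6})?|[0-9a-f]{0,5}\?{1,6})$/i;

function isUnicodeRange(text) {
  // Approximation: does not re-check that hex digits plus '?' stay within six characters.
  return URANGE.test(text);
}

console.log(isUnicodeRange('U+0025-00FF')); // true
console.log(isUnicodeRange('U+4??'));       // true
console.log(isUnicodeRange('U+26'));        // true
console.log(isUnicodeRange('U+zzz'));       // false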
815
node_modules/css-tree/cjs/lexer/generic.cjs
generated
vendored
815
node_modules/css-tree/cjs/lexer/generic.cjs
generated
vendored
@ -9,165 +9,202 @@ const utils = require('../tokenizer/utils.cjs');

const calcFunctionNames = ['calc(', '-moz-calc(', '-webkit-calc('];
const balancePair = new Map([
[types.Function, types.RightParenthesis],
[types.LeftParenthesis, types.RightParenthesis],
[types.LeftSquareBracket, types.RightSquareBracket],
[types.LeftCurlyBracket, types.RightCurlyBracket]
[types.Function, types.RightParenthesis],
[types.LeftParenthesis, types.RightParenthesis],
[types.LeftSquareBracket, types.RightSquareBracket],
[types.LeftCurlyBracket, types.RightCurlyBracket],
]);
|
||||
// units
|
||||
const LENGTH = [
|
||||
// absolute length units https://www.w3.org/TR/css-values-3/#lengths
|
||||
'cm', 'mm', 'q', 'in', 'pt', 'pc', 'px',
|
||||
// font-relative length units https://drafts.csswg.org/css-values-4/#font-relative-lengths
|
||||
'em', 'rem',
|
||||
'ex', 'rex',
|
||||
'cap', 'rcap',
|
||||
'ch', 'rch',
|
||||
'ic', 'ric',
|
||||
'lh', 'rlh',
|
||||
// viewport-percentage lengths https://drafts.csswg.org/css-values-4/#viewport-relative-lengths
|
||||
'vw', 'svw', 'lvw', 'dvw',
|
||||
'vh', 'svh', 'lvh', 'dvh',
|
||||
'vi', 'svi', 'lvi', 'dvi',
|
||||
'vb', 'svb', 'lvb', 'dvb',
|
||||
'vmin', 'svmin', 'lvmin', 'dvmin',
|
||||
'vmax', 'svmax', 'lvmax', 'dvmax',
|
||||
// container relative lengths https://drafts.csswg.org/css-contain-3/#container-lengths
|
||||
'cqw', 'cqh', 'cqi', 'cqb', 'cqmin', 'cqmax'
|
||||
// absolute length units https://www.w3.org/TR/css-values-3/#lengths
|
||||
'cm',
|
||||
'mm',
|
||||
'q',
|
||||
'in',
|
||||
'pt',
|
||||
'pc',
|
||||
'px',
|
||||
// font-relative length units https://drafts.csswg.org/css-values-4/#font-relative-lengths
|
||||
'em',
|
||||
'rem',
|
||||
'ex',
|
||||
'rex',
|
||||
'cap',
|
||||
'rcap',
|
||||
'ch',
|
||||
'rch',
|
||||
'ic',
|
||||
'ric',
|
||||
'lh',
|
||||
'rlh',
|
||||
// viewport-percentage lengths https://drafts.csswg.org/css-values-4/#viewport-relative-lengths
|
||||
'vw',
|
||||
'svw',
|
||||
'lvw',
|
||||
'dvw',
|
||||
'vh',
|
||||
'svh',
|
||||
'lvh',
|
||||
'dvh',
|
||||
'vi',
|
||||
'svi',
|
||||
'lvi',
|
||||
'dvi',
|
||||
'vb',
|
||||
'svb',
|
||||
'lvb',
|
||||
'dvb',
|
||||
'vmin',
|
||||
'svmin',
|
||||
'lvmin',
|
||||
'dvmin',
|
||||
'vmax',
|
||||
'svmax',
|
||||
'lvmax',
|
||||
'dvmax',
|
||||
// container relative lengths https://drafts.csswg.org/css-contain-3/#container-lengths
|
||||
'cqw',
|
||||
'cqh',
|
||||
'cqi',
|
||||
'cqb',
|
||||
'cqmin',
|
||||
'cqmax',
|
||||
];
|
||||
const ANGLE = ['deg', 'grad', 'rad', 'turn']; // https://www.w3.org/TR/css-values-3/#angles
|
||||
const TIME = ['s', 'ms']; // https://www.w3.org/TR/css-values-3/#time
|
||||
const FREQUENCY = ['hz', 'khz']; // https://www.w3.org/TR/css-values-3/#frequency
|
||||
const TIME = ['s', 'ms']; // https://www.w3.org/TR/css-values-3/#time
|
||||
const FREQUENCY = ['hz', 'khz']; // https://www.w3.org/TR/css-values-3/#frequency
|
||||
const RESOLUTION = ['dpi', 'dpcm', 'dppx', 'x']; // https://www.w3.org/TR/css-values-3/#resolution
|
||||
const FLEX = ['fr']; // https://drafts.csswg.org/css-grid/#fr-unit
|
||||
const DECIBEL = ['db']; // https://www.w3.org/TR/css3-speech/#mixing-props-voice-volume
|
||||
const SEMITONES = ['st']; // https://www.w3.org/TR/css3-speech/#voice-props-voice-pitch
|
||||
const FLEX = ['fr']; // https://drafts.csswg.org/css-grid/#fr-unit
|
||||
const DECIBEL = ['db']; // https://www.w3.org/TR/css3-speech/#mixing-props-voice-volume
|
||||
const SEMITONES = ['st']; // https://www.w3.org/TR/css3-speech/#voice-props-voice-pitch
|
||||
|
||||
// safe char code getter
|
||||
function charCodeAt(str, index) {
|
||||
return index < str.length ? str.charCodeAt(index) : 0;
|
||||
return index < str.length ? str.charCodeAt(index) : 0;
|
||||
}
|
||||
|
||||
function eqStr(actual, expected) {
|
||||
return utils.cmpStr(actual, 0, actual.length, expected);
|
||||
return utils.cmpStr(actual, 0, actual.length, expected);
|
||||
}
|
||||
|
||||
function eqStrAny(actual, expected) {
|
||||
for (let i = 0; i < expected.length; i++) {
|
||||
if (eqStr(actual, expected[i])) {
|
||||
return true;
|
||||
}
|
||||
for (let i = 0; i < expected.length; i++) {
|
||||
if (eqStr(actual, expected[i])) {
|
||||
return true;
|
||||
}
|
||||
}
|
||||
|
||||
return false;
|
||||
return false;
|
||||
}
|
||||
|
||||
// IE postfix hack, i.e. 123\0 or 123px\9
|
||||
function isPostfixIeHack(str, offset) {
|
||||
if (offset !== str.length - 2) {
|
||||
return false;
|
||||
}
|
||||
if (offset !== str.length - 2) {
|
||||
return false;
|
||||
}
|
||||
|
||||
return (
|
||||
charCodeAt(str, offset) === 0x005C && // U+005C REVERSE SOLIDUS (\)
|
||||
charCodeDefinitions.isDigit(charCodeAt(str, offset + 1))
|
||||
);
|
||||
return (
|
||||
charCodeAt(str, offset) === 0x005c && // U+005C REVERSE SOLIDUS (\)
|
||||
charCodeDefinitions.isDigit(charCodeAt(str, offset + 1))
|
||||
);
|
||||
}
|
||||
|
||||
function outOfRange(opts, value, numEnd) {
|
||||
if (opts && opts.type === 'Range') {
|
||||
const num = Number(
|
||||
numEnd !== undefined && numEnd !== value.length
|
||||
? value.substr(0, numEnd)
|
||||
: value
|
||||
);
|
||||
if (opts && opts.type === 'Range') {
|
||||
const num = Number(
|
||||
numEnd !== undefined && numEnd !== value.length ?
|
||||
value.substr(0, numEnd)
|
||||
: value
|
||||
);
|
||||
|
||||
if (isNaN(num)) {
|
||||
return true;
|
||||
}
|
||||
|
||||
// FIXME: when opts.min is a string it's a dimension, skip a range validation
|
||||
// for now since it requires a type covertation which is not implmented yet
|
||||
if (opts.min !== null && num < opts.min && typeof opts.min !== 'string') {
|
||||
return true;
|
||||
}
|
||||
|
||||
// FIXME: when opts.max is a string it's a dimension, skip a range validation
|
||||
// for now since it requires a type covertation which is not implmented yet
|
||||
if (opts.max !== null && num > opts.max && typeof opts.max !== 'string') {
|
||||
return true;
|
||||
}
|
||||
if (isNaN(num)) {
|
||||
return true;
|
||||
}
|
||||
|
||||
return false;
|
||||
// FIXME: when opts.min is a string it's a dimension, skip a range validation
|
||||
// for now since it requires a type covertation which is not implmented yet
|
||||
if (opts.min !== null && num < opts.min && typeof opts.min !== 'string') {
|
||||
return true;
|
||||
}
|
||||
|
||||
// FIXME: when opts.max is a string it's a dimension, skip a range validation
|
||||
// for now since it requires a type covertation which is not implmented yet
|
||||
if (opts.max !== null && num > opts.max && typeof opts.max !== 'string') {
|
||||
return true;
|
||||
}
|
||||
}
|
||||
|
||||
return false;
|
||||
}
|
||||
|
||||
function consumeFunction(token, getNextToken) {
|
||||
let balanceCloseType = 0;
|
||||
let balanceStash = [];
|
||||
let length = 0;
|
||||
let balanceCloseType = 0;
|
||||
let balanceStash = [];
|
||||
let length = 0;
|
||||
|
||||
// balanced token consuming
|
||||
scan:
|
||||
do {
|
||||
switch (token.type) {
|
||||
case types.RightCurlyBracket:
|
||||
case types.RightParenthesis:
|
||||
case types.RightSquareBracket:
|
||||
if (token.type !== balanceCloseType) {
|
||||
break scan;
|
||||
}
|
||||
|
||||
balanceCloseType = balanceStash.pop();
|
||||
|
||||
if (balanceStash.length === 0) {
|
||||
length++;
|
||||
break scan;
|
||||
}
|
||||
|
||||
break;
|
||||
|
||||
case types.Function:
|
||||
case types.LeftParenthesis:
|
||||
case types.LeftSquareBracket:
|
||||
case types.LeftCurlyBracket:
|
||||
balanceStash.push(balanceCloseType);
|
||||
balanceCloseType = balancePair.get(token.type);
|
||||
break;
|
||||
// balanced token consuming
|
||||
scan: do {
|
||||
switch (token.type) {
|
||||
case types.RightCurlyBracket:
|
||||
case types.RightParenthesis:
|
||||
case types.RightSquareBracket:
|
||||
if (token.type !== balanceCloseType) {
|
||||
break scan;
|
||||
}
|
||||
|
||||
length++;
|
||||
} while (token = getNextToken(length));
|
||||
balanceCloseType = balanceStash.pop();
|
||||
|
||||
return length;
|
||||
if (balanceStash.length === 0) {
|
||||
length++;
|
||||
break scan;
|
||||
}
|
||||
|
||||
break;
|
||||
|
||||
case types.Function:
|
||||
case types.LeftParenthesis:
|
||||
case types.LeftSquareBracket:
|
||||
case types.LeftCurlyBracket:
|
||||
balanceStash.push(balanceCloseType);
|
||||
balanceCloseType = balancePair.get(token.type);
|
||||
break;
|
||||
}
|
||||
|
||||
length++;
|
||||
} while ((token = getNextToken(length)));
|
||||
|
||||
return length;
|
||||
}
|
||||
|
||||
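consumeFunction above (and declarationValue / anyValue further down) all rely on the same balanced-token discipline: push the expected closing type when an opening token is seen, pop it when the matching closer arrives, and stop once the stash empties. A stripped-down sketch of that stack over plain characters; consumeBalanced and CLOSER are hypothetical names:

// Hypothetical sketch: consume a balanced (...) / [...] / {...} run from a string,
// returning how many characters belong to it (0 when it never balances).
const CLOSER = { '(': ')', '[': ']', '{': '}' };

function consumeBalanced(text) {
  const stash = [];
  let length = 0;

  for (const ch of text) {
    if (ch in CLOSER) {
      stash.push(CLOSER[ch]);      // remember which closer is now expected
    } else if (ch === ')' || ch === ']' || ch === '}') {
      if (ch !== stash.pop()) {
        return 0;                  // unmatched or unexpected closer
      }
      if (stash.length === 0) {
        return length + 1;         // the outermost group just closed
      }
    }
    length++;
  }

  return 0;                        // input ended before the group closed
}

console.log(consumeBalanced('calc(1px + (2 * 3)) 10px')); // 19, the "calc(...)" part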
// TODO: implement
|
||||
// can be used wherever <length>, <frequency>, <angle>, <time>, <percentage>, <number>, or <integer> values are allowed
|
||||
// https://drafts.csswg.org/css-values/#calc-notation
|
||||
function calc(next) {
|
||||
return function(token, getNextToken, opts) {
|
||||
if (token === null) {
|
||||
return 0;
|
||||
}
|
||||
return function (token, getNextToken, opts) {
|
||||
if (token === null) {
|
||||
return 0;
|
||||
}
|
||||
|
||||
if (token.type === types.Function && eqStrAny(token.value, calcFunctionNames)) {
|
||||
return consumeFunction(token, getNextToken);
|
||||
}
|
||||
if (
|
||||
token.type === types.Function &&
|
||||
eqStrAny(token.value, calcFunctionNames)
|
||||
) {
|
||||
return consumeFunction(token, getNextToken);
|
||||
}
|
||||
|
||||
return next(token, getNextToken, opts);
|
||||
};
|
||||
return next(token, getNextToken, opts);
|
||||
};
|
||||
}
|
||||
|
||||
function tokenType(expectedTokenType) {
|
||||
return function(token) {
|
||||
if (token === null || token.type !== expectedTokenType) {
|
||||
return 0;
|
||||
}
|
||||
return function (token) {
|
||||
if (token === null || token.type !== expectedTokenType) {
|
||||
return 0;
|
||||
}
|
||||
|
||||
return 1;
|
||||
};
|
||||
return 1;
|
||||
};
|
||||
}
|
||||
|
||||
// =========================
|
||||
@ -182,29 +219,29 @@ function tokenType(expectedTokenType) {
|
||||
//
|
||||
// See also: https://developer.mozilla.org/en-US/docs/Web/CSS/custom-ident
|
||||
function customIdent(token) {
|
||||
if (token === null || token.type !== types.Ident) {
|
||||
return 0;
|
||||
}
|
||||
if (token === null || token.type !== types.Ident) {
|
||||
return 0;
|
||||
}
|
||||
|
||||
const name = token.value.toLowerCase();
|
||||
const name = token.value.toLowerCase();
|
||||
|
||||
// The CSS-wide keywords are not valid <custom-ident>s
|
||||
if (eqStrAny(name, genericConst.cssWideKeywords)) {
|
||||
return 0;
|
||||
}
|
||||
// The CSS-wide keywords are not valid <custom-ident>s
|
||||
if (eqStrAny(name, genericConst.cssWideKeywords)) {
|
||||
return 0;
|
||||
}
|
||||
|
||||
// The default keyword is reserved and is also not a valid <custom-ident>
|
||||
if (eqStr(name, 'default')) {
|
||||
return 0;
|
||||
}
|
||||
// The default keyword is reserved and is also not a valid <custom-ident>
|
||||
if (eqStr(name, 'default')) {
|
||||
return 0;
|
||||
}
|
||||
|
||||
// TODO: ignore property specific keywords (as described https://developer.mozilla.org/en-US/docs/Web/CSS/custom-ident)
|
||||
// Specifications using <custom-ident> must specify clearly what other keywords
|
||||
// are excluded from <custom-ident>, if any—for example by saying that any pre-defined keywords
|
||||
// in that property’s value definition are excluded. Excluded keywords are excluded
|
||||
// in all ASCII case permutations.
|
||||
// TODO: ignore property specific keywords (as described https://developer.mozilla.org/en-US/docs/Web/CSS/custom-ident)
|
||||
// Specifications using <custom-ident> must specify clearly what other keywords
|
||||
// are excluded from <custom-ident>, if any—for example by saying that any pre-defined keywords
|
||||
// in that property’s value definition are excluded. Excluded keywords are excluded
|
||||
// in all ASCII case permutations.
|
||||
|
||||
return 1;
|
||||
return 1;
|
||||
}
|
||||
|
||||
// https://drafts.csswg.org/css-variables/#typedef-custom-property-name
|
||||
@ -213,17 +250,20 @@ function customIdent(token) {
|
||||
// that starts with two dashes, except -- itself, which is reserved for future use by CSS.
|
||||
// NOTE: Current implementation treat `--` as a valid name since most (all?) major browsers treat it as valid.
|
||||
function customPropertyName(token) {
|
||||
// ... defined as any valid identifier
|
||||
if (token === null || token.type !== types.Ident) {
|
||||
return 0;
|
||||
}
|
||||
// ... defined as any valid identifier
|
||||
if (token === null || token.type !== types.Ident) {
|
||||
return 0;
|
||||
}
|
||||
|
||||
// ... that starts with two dashes (U+002D HYPHEN-MINUS)
|
||||
if (charCodeAt(token.value, 0) !== 0x002D || charCodeAt(token.value, 1) !== 0x002D) {
|
||||
return 0;
|
||||
}
|
||||
// ... that starts with two dashes (U+002D HYPHEN-MINUS)
|
||||
if (
|
||||
charCodeAt(token.value, 0) !== 0x002d ||
|
||||
charCodeAt(token.value, 1) !== 0x002d
|
||||
) {
|
||||
return 0;
|
||||
}
|
||||
|
||||
return 1;
|
||||
return 1;
|
||||
}
|
||||
|
||||
// https://drafts.csswg.org/css-color-4/#hex-notation
|
||||
@ -231,99 +271,104 @@ function customPropertyName(token) {
|
||||
// In other words, a hex color is written as a hash character, "#", followed by some number of digits 0-9 or
|
||||
// letters a-f (the case of the letters doesn’t matter - #00ff00 is identical to #00FF00).
|
||||
function hexColor(token) {
|
||||
if (token === null || token.type !== types.Hash) {
|
||||
return 0;
|
||||
if (token === null || token.type !== types.Hash) {
|
||||
return 0;
|
||||
}
|
||||
|
||||
const length = token.value.length;
|
||||
|
||||
// valid values (length): #rgb (4), #rgba (5), #rrggbb (7), #rrggbbaa (9)
|
||||
if (length !== 4 && length !== 5 && length !== 7 && length !== 9) {
|
||||
return 0;
|
||||
}
|
||||
|
||||
for (let i = 1; i < length; i++) {
|
||||
if (!charCodeDefinitions.isHexDigit(charCodeAt(token.value, i))) {
|
||||
return 0;
|
||||
}
|
||||
}
|
||||
|
||||
const length = token.value.length;
|
||||
|
||||
// valid values (length): #rgb (4), #rgba (5), #rrggbb (7), #rrggbbaa (9)
|
||||
if (length !== 4 && length !== 5 && length !== 7 && length !== 9) {
|
||||
return 0;
|
||||
}
|
||||
|
||||
for (let i = 1; i < length; i++) {
|
||||
if (!charCodeDefinitions.isHexDigit(charCodeAt(token.value, i))) {
|
||||
return 0;
|
||||
}
|
||||
}
|
||||
|
||||
return 1;
|
||||
return 1;
|
||||
}
|
||||
|
||||
function idSelector(token) {
|
||||
if (token === null || token.type !== types.Hash) {
|
||||
return 0;
|
||||
}
|
||||
if (token === null || token.type !== types.Hash) {
|
||||
return 0;
|
||||
}
|
||||
|
||||
if (!charCodeDefinitions.isIdentifierStart(charCodeAt(token.value, 1), charCodeAt(token.value, 2), charCodeAt(token.value, 3))) {
|
||||
return 0;
|
||||
}
|
||||
if (
|
||||
!charCodeDefinitions.isIdentifierStart(
|
||||
charCodeAt(token.value, 1),
|
||||
charCodeAt(token.value, 2),
|
||||
charCodeAt(token.value, 3)
|
||||
)
|
||||
) {
|
||||
return 0;
|
||||
}
|
||||
|
||||
return 1;
|
||||
return 1;
|
||||
}
|
||||
|
||||
// https://drafts.csswg.org/css-syntax/#any-value
|
||||
// It represents the entirety of what a valid declaration can have as its value.
|
||||
function declarationValue(token, getNextToken) {
|
||||
if (!token) {
|
||||
return 0;
|
||||
}
|
||||
if (!token) {
|
||||
return 0;
|
||||
}
|
||||
|
||||
let balanceCloseType = 0;
|
||||
let balanceStash = [];
|
||||
let length = 0;
|
||||
let balanceCloseType = 0;
|
||||
let balanceStash = [];
|
||||
let length = 0;
|
||||
|
||||
// The <declaration-value> production matches any sequence of one or more tokens,
|
||||
// so long as the sequence does not contain ...
|
||||
scan:
|
||||
do {
|
||||
switch (token.type) {
|
||||
// ... <bad-string-token>, <bad-url-token>,
|
||||
case types.BadString:
|
||||
case types.BadUrl:
|
||||
break scan;
|
||||
// The <declaration-value> production matches any sequence of one or more tokens,
|
||||
// so long as the sequence does not contain ...
|
||||
scan: do {
|
||||
switch (token.type) {
|
||||
// ... <bad-string-token>, <bad-url-token>,
|
||||
case types.BadString:
|
||||
case types.BadUrl:
|
||||
break scan;
|
||||
|
||||
// ... unmatched <)-token>, <]-token>, or <}-token>,
|
||||
case types.RightCurlyBracket:
|
||||
case types.RightParenthesis:
|
||||
case types.RightSquareBracket:
|
||||
if (token.type !== balanceCloseType) {
|
||||
break scan;
|
||||
}
|
||||
|
||||
balanceCloseType = balanceStash.pop();
|
||||
break;
|
||||
|
||||
// ... or top-level <semicolon-token> tokens
|
||||
case types.Semicolon:
|
||||
if (balanceCloseType === 0) {
|
||||
break scan;
|
||||
}
|
||||
|
||||
break;
|
||||
|
||||
// ... or <delim-token> tokens with a value of "!"
|
||||
case types.Delim:
|
||||
if (balanceCloseType === 0 && token.value === '!') {
|
||||
break scan;
|
||||
}
|
||||
|
||||
break;
|
||||
|
||||
case types.Function:
|
||||
case types.LeftParenthesis:
|
||||
case types.LeftSquareBracket:
|
||||
case types.LeftCurlyBracket:
|
||||
balanceStash.push(balanceCloseType);
|
||||
balanceCloseType = balancePair.get(token.type);
|
||||
break;
|
||||
// ... unmatched <)-token>, <]-token>, or <}-token>,
|
||||
case types.RightCurlyBracket:
|
||||
case types.RightParenthesis:
|
||||
case types.RightSquareBracket:
|
||||
if (token.type !== balanceCloseType) {
|
||||
break scan;
|
||||
}
|
||||
|
||||
length++;
|
||||
} while (token = getNextToken(length));
|
||||
balanceCloseType = balanceStash.pop();
|
||||
break;
|
||||
|
||||
return length;
|
||||
// ... or top-level <semicolon-token> tokens
|
||||
case types.Semicolon:
|
||||
if (balanceCloseType === 0) {
|
||||
break scan;
|
||||
}
|
||||
|
||||
break;
|
||||
|
||||
// ... or <delim-token> tokens with a value of "!"
|
||||
case types.Delim:
|
||||
if (balanceCloseType === 0 && token.value === '!') {
|
||||
break scan;
|
||||
}
|
||||
|
||||
break;
|
||||
|
||||
case types.Function:
|
||||
case types.LeftParenthesis:
|
||||
case types.LeftSquareBracket:
|
||||
case types.LeftCurlyBracket:
|
||||
balanceStash.push(balanceCloseType);
|
||||
balanceCloseType = balancePair.get(token.type);
|
||||
break;
|
||||
}
|
||||
|
||||
length++;
|
||||
} while ((token = getNextToken(length)));
|
||||
|
||||
return length;
|
||||
}
|
||||
|
||||
// https://drafts.csswg.org/css-syntax/#any-value
|
||||
@ -331,48 +376,47 @@ function declarationValue(token, getNextToken) {
|
||||
// allows top-level <semicolon-token> tokens and <delim-token> tokens
|
||||
// with a value of "!". It represents the entirety of what valid CSS can be in any context.
|
||||
function anyValue(token, getNextToken) {
|
||||
if (!token) {
|
||||
return 0;
|
||||
}
|
||||
if (!token) {
|
||||
return 0;
|
||||
}
|
||||
|
||||
let balanceCloseType = 0;
|
||||
let balanceStash = [];
|
||||
let length = 0;
|
||||
let balanceCloseType = 0;
|
||||
let balanceStash = [];
|
||||
let length = 0;
|
||||
|
||||
// The <any-value> production matches any sequence of one or more tokens,
|
||||
// so long as the sequence ...
|
||||
scan:
|
||||
do {
|
||||
switch (token.type) {
|
||||
// ... does not contain <bad-string-token>, <bad-url-token>,
|
||||
case types.BadString:
|
||||
case types.BadUrl:
|
||||
break scan;
|
||||
// The <any-value> production matches any sequence of one or more tokens,
|
||||
// so long as the sequence ...
|
||||
scan: do {
|
||||
switch (token.type) {
|
||||
// ... does not contain <bad-string-token>, <bad-url-token>,
|
||||
case types.BadString:
|
||||
case types.BadUrl:
|
||||
break scan;
|
||||
|
||||
// ... unmatched <)-token>, <]-token>, or <}-token>,
|
||||
case types.RightCurlyBracket:
|
||||
case types.RightParenthesis:
|
||||
case types.RightSquareBracket:
|
||||
if (token.type !== balanceCloseType) {
|
||||
break scan;
|
||||
}
|
||||
|
||||
balanceCloseType = balanceStash.pop();
|
||||
break;
|
||||
|
||||
case types.Function:
|
||||
case types.LeftParenthesis:
|
||||
case types.LeftSquareBracket:
|
||||
case types.LeftCurlyBracket:
|
||||
balanceStash.push(balanceCloseType);
|
||||
balanceCloseType = balancePair.get(token.type);
|
||||
break;
|
||||
// ... unmatched <)-token>, <]-token>, or <}-token>,
|
||||
case types.RightCurlyBracket:
|
||||
case types.RightParenthesis:
|
||||
case types.RightSquareBracket:
|
||||
if (token.type !== balanceCloseType) {
|
||||
break scan;
|
||||
}
|
||||
|
||||
length++;
|
||||
} while (token = getNextToken(length));
|
||||
balanceCloseType = balanceStash.pop();
|
||||
break;
|
||||
|
||||
return length;
|
||||
case types.Function:
|
||||
case types.LeftParenthesis:
|
||||
case types.LeftSquareBracket:
|
||||
case types.LeftCurlyBracket:
|
||||
balanceStash.push(balanceCloseType);
|
||||
balanceCloseType = balancePair.get(token.type);
|
||||
break;
|
||||
}
|
||||
|
||||
length++;
|
||||
} while ((token = getNextToken(length)));
|
||||
|
||||
return length;
|
||||
}
|
||||
|
||||
// =========================
|
||||
@ -380,37 +424,41 @@ function anyValue(token, getNextToken) {
|
||||
//
|
||||
|
||||
function dimension(type) {
|
||||
if (type) {
|
||||
type = new Set(type);
|
||||
if (type) {
|
||||
type = new Set(type);
|
||||
}
|
||||
|
||||
return function (token, getNextToken, opts) {
|
||||
if (token === null || token.type !== types.Dimension) {
|
||||
return 0;
|
||||
}
|
||||
|
||||
return function(token, getNextToken, opts) {
|
||||
if (token === null || token.type !== types.Dimension) {
|
||||
return 0;
|
||||
}
|
||||
const numberEnd = utils.consumeNumber(token.value, 0);
|
||||
|
||||
const numberEnd = utils.consumeNumber(token.value, 0);
|
||||
// check unit
|
||||
if (type !== null) {
|
||||
// check for IE postfix hack, i.e. 123px\0 or 123px\9
|
||||
const reverseSolidusOffset = token.value.indexOf('\\', numberEnd);
|
||||
const unit =
|
||||
(
|
||||
reverseSolidusOffset === -1 ||
|
||||
!isPostfixIeHack(token.value, reverseSolidusOffset)
|
||||
) ?
|
||||
token.value.substr(numberEnd)
|
||||
: token.value.substring(numberEnd, reverseSolidusOffset);
|
||||
|
||||
// check unit
|
||||
if (type !== null) {
|
||||
// check for IE postfix hack, i.e. 123px\0 or 123px\9
|
||||
const reverseSolidusOffset = token.value.indexOf('\\', numberEnd);
|
||||
const unit = reverseSolidusOffset === -1 || !isPostfixIeHack(token.value, reverseSolidusOffset)
|
||||
? token.value.substr(numberEnd)
|
||||
: token.value.substring(numberEnd, reverseSolidusOffset);
|
||||
if (type.has(unit.toLowerCase()) === false) {
|
||||
return 0;
|
||||
}
|
||||
}
|
||||
|
||||
if (type.has(unit.toLowerCase()) === false) {
|
||||
return 0;
|
||||
}
|
||||
}
|
||||
// check range if specified
|
||||
if (outOfRange(opts, token.value, numberEnd)) {
|
||||
return 0;
|
||||
}
|
||||
|
||||
// check range if specified
|
||||
if (outOfRange(opts, token.value, numberEnd)) {
|
||||
return 0;
|
||||
}
|
||||
|
||||
return 1;
|
||||
};
|
||||
return 1;
|
||||
};
|
||||
}
|
||||
|
||||
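The dimension() factory above validates a <dimension-token> in two steps: split the numeric part from the unit (ignoring an optional IE \0 / \9 postfix hack), then compare the lower-cased unit against the allowed set. A self-contained sketch of that split, with a hypothetical regex standing in for utils.consumeNumber and an abbreviated unit list:

// Hypothetical sketch: check a dimension such as "12px" against an allowed unit set.
const LENGTH_UNITS = new Set(['px', 'em', 'rem', 'vw', 'vh']); // abbreviated list

function isLength(value) {
  const match = /^[+-]?(?:\d+\.?\d*|\.\d+)(?:e[+-]?\d+)?([a-z]+)$/i.exec(value);
  if (match === null) {
    return false; // not a number followed by a unit
  }
  return LENGTH_UNITS.has(match[1].toLowerCase());
}

console.log(isLength('12px'));  // true
console.log(isLength('1.5EM')); // true
console.log(isLength('10pt'));  // false, unit not in the abbreviated set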
// =========================
|
||||
@ -420,17 +468,17 @@ function dimension(type) {
|
||||
// §5.5. Percentages: the <percentage> type
|
||||
// https://drafts.csswg.org/css-values-4/#percentages
|
||||
function percentage(token, getNextToken, opts) {
|
||||
// ... corresponds to the <percentage-token> production
|
||||
if (token === null || token.type !== types.Percentage) {
|
||||
return 0;
|
||||
}
|
||||
// ... corresponds to the <percentage-token> production
|
||||
if (token === null || token.type !== types.Percentage) {
|
||||
return 0;
|
||||
}
|
||||
|
||||
// check range if specified
|
||||
if (outOfRange(opts, token.value, token.value.length - 1)) {
|
||||
return 0;
|
||||
}
|
||||
// check range if specified
|
||||
if (outOfRange(opts, token.value, token.value.length - 1)) {
|
||||
return 0;
|
||||
}
|
||||
|
||||
return 1;
|
||||
return 1;
|
||||
}
|
||||
|
||||
// =========================
|
||||
@ -442,21 +490,21 @@ function percentage(token, getNextToken, opts) {
|
||||
// evaluate to a <number> with the value 0 (for example, calc(0)) do not match <zero>;
|
||||
// only literal <number-token>s do.
|
||||
function zero(next) {
|
||||
if (typeof next !== 'function') {
|
||||
next = function() {
|
||||
return 0;
|
||||
};
|
||||
if (typeof next !== 'function') {
|
||||
next = function () {
|
||||
return 0;
|
||||
};
|
||||
}
|
||||
|
||||
return function (token, getNextToken, opts) {
|
||||
if (token !== null && token.type === types.Number) {
|
||||
if (Number(token.value) === 0) {
|
||||
return 1;
|
||||
}
|
||||
}
|
||||
|
||||
return function(token, getNextToken, opts) {
|
||||
if (token !== null && token.type === types.Number) {
|
||||
if (Number(token.value) === 0) {
|
||||
return 1;
|
||||
}
|
||||
}
|
||||
|
||||
return next(token, getNextToken, opts);
|
||||
};
|
||||
return next(token, getNextToken, opts);
|
||||
};
|
||||
}
|
||||
|
||||
// § 5.3. Real Numbers: the <number> type
|
||||
@ -464,110 +512,115 @@ function zero(next) {
|
||||
// Number values are denoted by <number>, and represent real numbers, possibly with a fractional component.
|
||||
// ... It corresponds to the <number-token> production
|
||||
function number(token, getNextToken, opts) {
|
||||
if (token === null) {
|
||||
return 0;
|
||||
}
|
||||
if (token === null) {
|
||||
return 0;
|
||||
}
|
||||
|
||||
const numberEnd = utils.consumeNumber(token.value, 0);
|
||||
const isNumber = numberEnd === token.value.length;
|
||||
if (!isNumber && !isPostfixIeHack(token.value, numberEnd)) {
|
||||
return 0;
|
||||
}
|
||||
const numberEnd = utils.consumeNumber(token.value, 0);
|
||||
const isNumber = numberEnd === token.value.length;
|
||||
if (!isNumber && !isPostfixIeHack(token.value, numberEnd)) {
|
||||
return 0;
|
||||
}
|
||||
|
||||
// check range if specified
|
||||
if (outOfRange(opts, token.value, numberEnd)) {
|
||||
return 0;
|
||||
}
|
||||
// check range if specified
|
||||
if (outOfRange(opts, token.value, numberEnd)) {
|
||||
return 0;
|
||||
}
|
||||
|
||||
return 1;
|
||||
return 1;
|
||||
}
|
||||
|
||||
// §5.2. Integers: the <integer> type
|
||||
// https://drafts.csswg.org/css-values-4/#integers
|
||||
function integer(token, getNextToken, opts) {
|
||||
// ... corresponds to a subset of the <number-token> production
|
||||
if (token === null || token.type !== types.Number) {
|
||||
return 0;
|
||||
// ... corresponds to a subset of the <number-token> production
|
||||
if (token === null || token.type !== types.Number) {
|
||||
return 0;
|
||||
}
|
||||
|
||||
// The first digit of an integer may be immediately preceded by `-` or `+` to indicate the integer’s sign.
|
||||
let i =
|
||||
(
|
||||
charCodeAt(token.value, 0) === 0x002b || // U+002B PLUS SIGN (+)
|
||||
charCodeAt(token.value, 0) === 0x002d
|
||||
) ?
|
||||
1
|
||||
: 0; // U+002D HYPHEN-MINUS (-)
|
||||
|
||||
// When written literally, an integer is one or more decimal digits 0 through 9 ...
|
||||
for (; i < token.value.length; i++) {
|
||||
if (!charCodeDefinitions.isDigit(charCodeAt(token.value, i))) {
|
||||
return 0;
|
||||
}
|
||||
}
|
||||
|
||||
// The first digit of an integer may be immediately preceded by `-` or `+` to indicate the integer’s sign.
|
||||
let i = charCodeAt(token.value, 0) === 0x002B || // U+002B PLUS SIGN (+)
|
||||
charCodeAt(token.value, 0) === 0x002D ? 1 : 0; // U+002D HYPHEN-MINUS (-)
|
||||
// check range if specified
|
||||
if (outOfRange(opts, token.value, i)) {
|
||||
return 0;
|
||||
}
|
||||
|
||||
// When written literally, an integer is one or more decimal digits 0 through 9 ...
|
||||
for (; i < token.value.length; i++) {
|
||||
if (!charCodeDefinitions.isDigit(charCodeAt(token.value, i))) {
|
||||
return 0;
|
||||
}
|
||||
}
|
||||
|
||||
// check range if specified
|
||||
if (outOfRange(opts, token.value, i)) {
|
||||
return 0;
|
||||
}
|
||||
|
||||
return 1;
|
||||
return 1;
|
||||
}
|
||||
|
||||
const genericSyntaxes = {
|
||||
// token types
|
||||
'ident-token': tokenType(types.Ident),
|
||||
'function-token': tokenType(types.Function),
|
||||
'at-keyword-token': tokenType(types.AtKeyword),
|
||||
'hash-token': tokenType(types.Hash),
|
||||
'string-token': tokenType(types.String),
|
||||
'bad-string-token': tokenType(types.BadString),
|
||||
'url-token': tokenType(types.Url),
|
||||
'bad-url-token': tokenType(types.BadUrl),
|
||||
'delim-token': tokenType(types.Delim),
|
||||
'number-token': tokenType(types.Number),
|
||||
'percentage-token': tokenType(types.Percentage),
|
||||
'dimension-token': tokenType(types.Dimension),
|
||||
'whitespace-token': tokenType(types.WhiteSpace),
|
||||
'CDO-token': tokenType(types.CDO),
|
||||
'CDC-token': tokenType(types.CDC),
|
||||
'colon-token': tokenType(types.Colon),
|
||||
'semicolon-token': tokenType(types.Semicolon),
|
||||
'comma-token': tokenType(types.Comma),
|
||||
'[-token': tokenType(types.LeftSquareBracket),
|
||||
']-token': tokenType(types.RightSquareBracket),
|
||||
'(-token': tokenType(types.LeftParenthesis),
|
||||
')-token': tokenType(types.RightParenthesis),
|
||||
'{-token': tokenType(types.LeftCurlyBracket),
|
||||
'}-token': tokenType(types.RightCurlyBracket),
|
||||
// token types
|
||||
'ident-token': tokenType(types.Ident),
|
||||
'function-token': tokenType(types.Function),
|
||||
'at-keyword-token': tokenType(types.AtKeyword),
|
||||
'hash-token': tokenType(types.Hash),
|
||||
'string-token': tokenType(types.String),
|
||||
'bad-string-token': tokenType(types.BadString),
|
||||
'url-token': tokenType(types.Url),
|
||||
'bad-url-token': tokenType(types.BadUrl),
|
||||
'delim-token': tokenType(types.Delim),
|
||||
'number-token': tokenType(types.Number),
|
||||
'percentage-token': tokenType(types.Percentage),
|
||||
'dimension-token': tokenType(types.Dimension),
|
||||
'whitespace-token': tokenType(types.WhiteSpace),
|
||||
'CDO-token': tokenType(types.CDO),
|
||||
'CDC-token': tokenType(types.CDC),
|
||||
'colon-token': tokenType(types.Colon),
|
||||
'semicolon-token': tokenType(types.Semicolon),
|
||||
'comma-token': tokenType(types.Comma),
|
||||
'[-token': tokenType(types.LeftSquareBracket),
|
||||
']-token': tokenType(types.RightSquareBracket),
|
||||
'(-token': tokenType(types.LeftParenthesis),
|
||||
')-token': tokenType(types.RightParenthesis),
|
||||
'{-token': tokenType(types.LeftCurlyBracket),
|
||||
'}-token': tokenType(types.RightCurlyBracket),
|
||||
|
||||
// token type aliases
|
||||
'string': tokenType(types.String),
|
||||
'ident': tokenType(types.Ident),
|
||||
// token type aliases
|
||||
string: tokenType(types.String),
|
||||
ident: tokenType(types.Ident),
|
||||
|
||||
// complex types
|
||||
'custom-ident': customIdent,
|
||||
'custom-property-name': customPropertyName,
|
||||
'hex-color': hexColor,
|
||||
'id-selector': idSelector, // element( <id-selector> )
|
||||
'an-plus-b': genericAnPlusB,
|
||||
'urange': genericUrange,
|
||||
'declaration-value': declarationValue,
|
||||
'any-value': anyValue,
|
||||
// complex types
|
||||
'custom-ident': customIdent,
|
||||
'custom-property-name': customPropertyName,
|
||||
'hex-color': hexColor,
|
||||
'id-selector': idSelector, // element( <id-selector> )
|
||||
'an-plus-b': genericAnPlusB,
|
||||
urange: genericUrange,
|
||||
'declaration-value': declarationValue,
|
||||
'any-value': anyValue,
|
||||
|
||||
// dimensions
|
||||
'dimension': calc(dimension(null)),
|
||||
'angle': calc(dimension(ANGLE)),
|
||||
'decibel': calc(dimension(DECIBEL)),
|
||||
'frequency': calc(dimension(FREQUENCY)),
|
||||
'flex': calc(dimension(FLEX)),
|
||||
'length': calc(zero(dimension(LENGTH))),
|
||||
'resolution': calc(dimension(RESOLUTION)),
|
||||
'semitones': calc(dimension(SEMITONES)),
|
||||
'time': calc(dimension(TIME)),
|
||||
// dimensions
|
||||
dimension: calc(dimension(null)),
|
||||
angle: calc(dimension(ANGLE)),
|
||||
decibel: calc(dimension(DECIBEL)),
|
||||
frequency: calc(dimension(FREQUENCY)),
|
||||
flex: calc(dimension(FLEX)),
|
||||
length: calc(zero(dimension(LENGTH))),
|
||||
resolution: calc(dimension(RESOLUTION)),
|
||||
semitones: calc(dimension(SEMITONES)),
|
||||
time: calc(dimension(TIME)),
|
||||
|
||||
// percentage
|
||||
'percentage': calc(percentage),
|
||||
// percentage
|
||||
percentage: calc(percentage),
|
||||
|
||||
// numeric
'zero': zero(),
'number': calc(number),
'integer': calc(integer)
// numeric
zero: zero(),
number: calc(number),
integer: calc(integer),
};

module.exports = genericSyntaxes;

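Everything above is collected into the genericSyntaxes map, keyed by definition-syntax names such as length or hex-color; each entry takes (token, getNextToken, opts) and returns how many tokens it consumed. A hedged sketch of calling one matcher directly, assuming it runs next to these files so the relative requires resolve, and with a hand-built token in place of real tokenizer output:

// Assumes execution alongside this directory; the token objects are hand-built for illustration.
const genericSyntaxes = require('./generic.cjs');
const types = require('../tokenizer/types.cjs');

const getNextToken = () => null; // hex-color only inspects the current token

console.log(genericSyntaxes['hex-color']({ type: types.Hash, value: '#00ff00' }, getNextToken)); // 1
console.log(genericSyntaxes['hex-color']({ type: types.Hash, value: '#00ff0' }, getNextToken));  // 0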
2
node_modules/css-tree/cjs/lexer/index.cjs
generated
vendored
2
node_modules/css-tree/cjs/lexer/index.cjs
generated
vendored
@ -2,6 +2,4 @@

const Lexer = require('./Lexer.cjs');



exports.Lexer = Lexer.Lexer;

769
node_modules/css-tree/cjs/lexer/match-graph.cjs
generated
vendored
769
node_modules/css-tree/cjs/lexer/match-graph.cjs
generated
vendored
@ -6,451 +6,418 @@ const MATCH = { type: 'Match' };
const MISMATCH = { type: 'Mismatch' };
const DISALLOW_EMPTY = { type: 'DisallowEmpty' };

const LEFTPARENTHESIS = 40; // (
const LEFTPARENTHESIS = 40; // (
const RIGHTPARENTHESIS = 41; // )

function createCondition(match, thenBranch, elseBranch) {
|
||||
// reduce node count
|
||||
if (thenBranch === MATCH && elseBranch === MISMATCH) {
|
||||
return match;
|
||||
}
|
||||
// reduce node count
|
||||
if (thenBranch === MATCH && elseBranch === MISMATCH) {
|
||||
return match;
|
||||
}
|
||||
|
||||
if (match === MATCH && thenBranch === MATCH && elseBranch === MATCH) {
|
||||
return match;
|
||||
}
|
||||
if (match === MATCH && thenBranch === MATCH && elseBranch === MATCH) {
|
||||
return match;
|
||||
}
|
||||
|
||||
if (match.type === 'If' && match.else === MISMATCH && thenBranch === MATCH) {
|
||||
thenBranch = match.then;
|
||||
match = match.match;
|
||||
}
|
||||
if (match.type === 'If' && match.else === MISMATCH && thenBranch === MATCH) {
|
||||
thenBranch = match.then;
|
||||
match = match.match;
|
||||
}
|
||||
|
||||
return {
|
||||
type: 'If',
|
||||
match,
|
||||
then: thenBranch,
|
||||
else: elseBranch
|
||||
};
|
||||
return {
|
||||
type: 'If',
|
||||
match,
|
||||
then: thenBranch,
|
||||
else: elseBranch,
|
||||
};
|
||||
}
|
||||
|
||||
function isFunctionType(name) {
|
||||
return (
|
||||
name.length > 2 &&
|
||||
name.charCodeAt(name.length - 2) === LEFTPARENTHESIS &&
|
||||
name.charCodeAt(name.length - 1) === RIGHTPARENTHESIS
|
||||
);
|
||||
return (
|
||||
name.length > 2 &&
|
||||
name.charCodeAt(name.length - 2) === LEFTPARENTHESIS &&
|
||||
name.charCodeAt(name.length - 1) === RIGHTPARENTHESIS
|
||||
);
|
||||
}
|
||||
|
||||
function isEnumCapatible(term) {
|
||||
return (
|
||||
term.type === 'Keyword' ||
|
||||
term.type === 'AtKeyword' ||
|
||||
term.type === 'Function' ||
|
||||
term.type === 'Type' && isFunctionType(term.name)
|
||||
);
|
||||
return (
|
||||
term.type === 'Keyword' ||
|
||||
term.type === 'AtKeyword' ||
|
||||
term.type === 'Function' ||
|
||||
(term.type === 'Type' && isFunctionType(term.name))
|
||||
);
|
||||
}
|
||||
|
||||
function buildGroupMatchGraph(combinator, terms, atLeastOneTermMatched) {
|
||||
switch (combinator) {
|
||||
case ' ': {
|
||||
// Juxtaposing components means that all of them must occur, in the given order.
|
||||
//
|
||||
// a b c
|
||||
// =
|
||||
// match a
|
||||
// then match b
|
||||
// then match c
|
||||
// then MATCH
|
||||
// else MISMATCH
|
||||
// else MISMATCH
|
||||
// else MISMATCH
|
||||
let result = MATCH;
|
||||
switch (combinator) {
|
||||
case ' ': {
|
||||
// Juxtaposing components means that all of them must occur, in the given order.
|
||||
//
|
||||
// a b c
|
||||
// =
|
||||
// match a
|
||||
// then match b
|
||||
// then match c
|
||||
// then MATCH
|
||||
// else MISMATCH
|
||||
// else MISMATCH
|
||||
// else MISMATCH
|
||||
let result = MATCH;
|
||||
|
||||
for (let i = terms.length - 1; i >= 0; i--) {
|
||||
const term = terms[i];
|
||||
for (let i = terms.length - 1; i >= 0; i--) {
|
||||
const term = terms[i];
|
||||
|
||||
result = createCondition(
|
||||
term,
|
||||
result,
|
||||
MISMATCH
|
||||
);
|
||||
}
|
||||
return result;
|
||||
}
|
||||
|
||||
case '|': {
|
||||
// A bar (|) separates two or more alternatives: exactly one of them must occur.
|
||||
//
|
||||
// a | b | c
|
||||
// =
|
||||
// match a
|
||||
// then MATCH
|
||||
// else match b
|
||||
// then MATCH
|
||||
// else match c
|
||||
// then MATCH
|
||||
// else MISMATCH
|
||||
|
||||
let result = MISMATCH;
|
||||
let map = null;
|
||||
|
||||
for (let i = terms.length - 1; i >= 0; i--) {
|
||||
let term = terms[i];
|
||||
|
||||
// reduce sequence of keywords into a Enum
|
||||
if (isEnumCapatible(term)) {
|
||||
if (map === null && i > 0 && isEnumCapatible(terms[i - 1])) {
|
||||
map = Object.create(null);
|
||||
result = createCondition(
|
||||
{
|
||||
type: 'Enum',
|
||||
map
|
||||
},
|
||||
MATCH,
|
||||
result
|
||||
);
|
||||
}
|
||||
|
||||
if (map !== null) {
|
||||
const key = (isFunctionType(term.name) ? term.name.slice(0, -1) : term.name).toLowerCase();
|
||||
if (key in map === false) {
|
||||
map[key] = term;
|
||||
continue;
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
map = null;
|
||||
|
||||
// create a new conditonal node
|
||||
result = createCondition(
|
||||
term,
|
||||
MATCH,
|
||||
result
|
||||
);
|
||||
}
|
||||
return result;
|
||||
}
|
||||
|
||||
case '&&': {
|
||||
// A double ampersand (&&) separates two or more components,
|
||||
// all of which must occur, in any order.
|
||||
|
||||
// Use MatchOnce for groups with a large number of terms,
|
||||
// since &&-groups produces at least N!-node trees
|
||||
if (terms.length > 5) {
|
||||
return {
|
||||
type: 'MatchOnce',
|
||||
terms,
|
||||
all: true
|
||||
};
|
||||
}
|
||||
|
||||
// Use a combination tree for groups with small number of terms
|
||||
//
|
||||
// a && b && c
|
||||
// =
|
||||
// match a
|
||||
// then [b && c]
|
||||
// else match b
|
||||
// then [a && c]
|
||||
// else match c
|
||||
// then [a && b]
|
||||
// else MISMATCH
|
||||
//
|
||||
// a && b
|
||||
// =
|
||||
// match a
|
||||
// then match b
|
||||
// then MATCH
|
||||
// else MISMATCH
|
||||
// else match b
|
||||
// then match a
|
||||
// then MATCH
|
||||
// else MISMATCH
|
||||
// else MISMATCH
|
||||
let result = MISMATCH;
|
||||
|
||||
for (let i = terms.length - 1; i >= 0; i--) {
|
||||
const term = terms[i];
|
||||
let thenClause;
|
||||
|
||||
if (terms.length > 1) {
|
||||
thenClause = buildGroupMatchGraph(
|
||||
combinator,
|
||||
terms.filter(function(newGroupTerm) {
|
||||
return newGroupTerm !== term;
|
||||
}),
|
||||
false
|
||||
);
|
||||
} else {
|
||||
thenClause = MATCH;
|
||||
}
|
||||
|
||||
result = createCondition(
|
||||
term,
|
||||
thenClause,
|
||||
result
|
||||
);
|
||||
}
|
||||
return result;
|
||||
}
|
||||
|
||||
case '||': {
|
||||
// A double bar (||) separates two or more options:
|
||||
// one or more of them must occur, in any order.
|
||||
|
||||
// Use MatchOnce for groups with a large number of terms,
|
||||
// since ||-groups produces at least N!-node trees
|
||||
if (terms.length > 5) {
|
||||
return {
|
||||
type: 'MatchOnce',
|
||||
terms,
|
||||
all: false
|
||||
};
|
||||
}
|
||||
|
||||
// Use a combination tree for groups with small number of terms
|
||||
//
|
||||
// a || b || c
|
||||
// =
|
||||
// match a
|
||||
// then [b || c]
|
||||
// else match b
|
||||
// then [a || c]
|
||||
// else match c
|
||||
// then [a || b]
|
||||
// else MISMATCH
|
||||
//
|
||||
// a || b
|
||||
// =
|
||||
// match a
|
||||
// then match b
|
||||
// then MATCH
|
||||
// else MATCH
|
||||
// else match b
|
||||
// then match a
|
||||
// then MATCH
|
||||
// else MATCH
|
||||
// else MISMATCH
|
||||
let result = atLeastOneTermMatched ? MATCH : MISMATCH;
|
||||
|
||||
for (let i = terms.length - 1; i >= 0; i--) {
|
||||
const term = terms[i];
|
||||
let thenClause;
|
||||
|
||||
if (terms.length > 1) {
|
||||
thenClause = buildGroupMatchGraph(
|
||||
combinator,
|
||||
terms.filter(function(newGroupTerm) {
|
||||
return newGroupTerm !== term;
|
||||
}),
|
||||
true
|
||||
);
|
||||
} else {
|
||||
thenClause = MATCH;
|
||||
}
|
||||
|
||||
result = createCondition(
|
||||
term,
|
||||
thenClause,
|
||||
result
|
||||
);
|
||||
}
|
||||
return result;
|
||||
}
|
||||
result = createCondition(term, result, MISMATCH);
|
||||
}
|
||||
return result;
|
||||
}
|
||||
|
||||
case '|': {
|
||||
// A bar (|) separates two or more alternatives: exactly one of them must occur.
|
||||
//
|
||||
// a | b | c
|
||||
// =
|
||||
// match a
|
||||
// then MATCH
|
||||
// else match b
|
||||
// then MATCH
|
||||
// else match c
|
||||
// then MATCH
|
||||
// else MISMATCH
|
||||
|
||||
let result = MISMATCH;
|
||||
let map = null;
|
||||
|
||||
for (let i = terms.length - 1; i >= 0; i--) {
|
||||
let term = terms[i];
|
||||
|
||||
// reduce sequence of keywords into a Enum
|
||||
if (isEnumCapatible(term)) {
|
||||
if (map === null && i > 0 && isEnumCapatible(terms[i - 1])) {
|
||||
map = Object.create(null);
|
||||
result = createCondition(
|
||||
{
|
||||
type: 'Enum',
|
||||
map,
|
||||
},
|
||||
MATCH,
|
||||
result
|
||||
);
|
||||
}
|
||||
|
||||
if (map !== null) {
|
||||
const key = (
|
||||
isFunctionType(term.name) ?
|
||||
term.name.slice(0, -1)
|
||||
: term.name).toLowerCase();
|
||||
if (key in map === false) {
|
||||
map[key] = term;
|
||||
continue;
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
map = null;
|
||||
|
||||
// create a new conditonal node
|
||||
result = createCondition(term, MATCH, result);
|
||||
}
|
||||
return result;
|
||||
}
|
||||
|
||||
case '&&': {
|
||||
// A double ampersand (&&) separates two or more components,
|
||||
// all of which must occur, in any order.
|
||||
|
||||
// Use MatchOnce for groups with a large number of terms,
|
||||
// since &&-groups produces at least N!-node trees
|
||||
if (terms.length > 5) {
|
||||
return {
|
||||
type: 'MatchOnce',
|
||||
terms,
|
||||
all: true,
|
||||
};
|
||||
}
|
||||
|
||||
// Use a combination tree for groups with small number of terms
|
||||
//
|
||||
// a && b && c
|
||||
// =
|
||||
// match a
|
||||
// then [b && c]
|
||||
// else match b
|
||||
// then [a && c]
|
||||
// else match c
|
||||
// then [a && b]
|
||||
// else MISMATCH
|
||||
//
|
||||
// a && b
|
||||
// =
|
||||
// match a
|
||||
// then match b
|
||||
// then MATCH
|
||||
// else MISMATCH
|
||||
// else match b
|
||||
// then match a
|
||||
// then MATCH
|
||||
// else MISMATCH
|
||||
// else MISMATCH
|
||||
let result = MISMATCH;
|
||||
|
||||
for (let i = terms.length - 1; i >= 0; i--) {
|
||||
const term = terms[i];
|
||||
let thenClause;
|
||||
|
||||
if (terms.length > 1) {
|
||||
thenClause = buildGroupMatchGraph(
|
||||
combinator,
|
||||
terms.filter(function (newGroupTerm) {
|
||||
return newGroupTerm !== term;
|
||||
}),
|
||||
false
|
||||
);
|
||||
} else {
|
||||
thenClause = MATCH;
|
||||
}
|
||||
|
||||
result = createCondition(term, thenClause, result);
|
||||
}
|
||||
return result;
|
||||
}
|
||||
|
||||
case '||': {
|
||||
// A double bar (||) separates two or more options:
|
||||
// one or more of them must occur, in any order.
|
||||
|
||||
// Use MatchOnce for groups with a large number of terms,
|
||||
// since ||-groups produces at least N!-node trees
|
||||
if (terms.length > 5) {
|
||||
return {
|
||||
type: 'MatchOnce',
|
||||
terms,
|
||||
all: false,
|
||||
};
|
||||
}
|
||||
|
||||
// Use a combination tree for groups with small number of terms
|
||||
//
|
||||
// a || b || c
|
||||
// =
|
||||
// match a
|
||||
// then [b || c]
|
||||
// else match b
|
||||
// then [a || c]
|
||||
// else match c
|
||||
// then [a || b]
|
||||
// else MISMATCH
|
||||
//
|
||||
// a || b
|
||||
// =
|
||||
// match a
|
||||
// then match b
|
||||
// then MATCH
|
||||
// else MATCH
|
||||
// else match b
|
||||
// then match a
|
||||
// then MATCH
|
||||
// else MATCH
|
||||
// else MISMATCH
|
||||
let result = atLeastOneTermMatched ? MATCH : MISMATCH;
|
||||
|
||||
for (let i = terms.length - 1; i >= 0; i--) {
|
||||
const term = terms[i];
|
||||
let thenClause;
|
||||
|
||||
if (terms.length > 1) {
|
||||
thenClause = buildGroupMatchGraph(
|
||||
combinator,
|
||||
terms.filter(function (newGroupTerm) {
|
||||
return newGroupTerm !== term;
|
||||
}),
|
||||
true
|
||||
);
|
||||
} else {
|
||||
thenClause = MATCH;
|
||||
}
|
||||
|
||||
result = createCondition(term, thenClause, result);
|
||||
}
|
||||
return result;
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
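buildGroupMatchGraph above folds each group's terms from the tail forward into a chain of If nodes; for juxtaposition every term must match in order, while for the '|' combinator the first matching alternative wins. A tiny sketch of that fold for the '|' case, using plain objects instead of the module's helpers and omitting the Enum optimization:

// Hypothetical sketch: fold alternatives a | b | c into a right-nested If chain.
const MATCH = { type: 'Match' };
const MISMATCH = { type: 'Mismatch' };

function buildAlternatives(terms) {
  let result = MISMATCH;

  for (let i = terms.length - 1; i >= 0; i--) {
    // match the term -> then MATCH, else try the remaining alternatives
    result = { type: 'If', match: terms[i], then: MATCH, else: result };
  }

  return result;
}

const graph = buildAlternatives([
  { type: 'Keyword', name: 'auto' },
  { type: 'Keyword', name: 'none' },
]);
console.log(JSON.stringify(graph, null, 2));
// If(auto) then MATCH else If(none) then MATCH else MISMATCH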
function buildMultiplierMatchGraph(node) {
|
||||
let result = MATCH;
|
||||
let matchTerm = buildMatchGraphInternal(node.term);
|
||||
let result = MATCH;
|
||||
let matchTerm = buildMatchGraphInternal(node.term);
|
||||
|
||||
if (node.max === 0) {
|
||||
// disable repeating of empty match to prevent infinite loop
|
||||
matchTerm = createCondition(
|
||||
matchTerm,
|
||||
DISALLOW_EMPTY,
|
||||
MISMATCH
|
||||
);
|
||||
if (node.max === 0) {
|
||||
// disable repeating of empty match to prevent infinite loop
|
||||
matchTerm = createCondition(matchTerm, DISALLOW_EMPTY, MISMATCH);
|
||||
|
||||
// an occurrence count is not limited, make a cycle;
|
||||
// to collect more terms on each following matching mismatch
|
||||
result = createCondition(
|
||||
matchTerm,
|
||||
null, // will be a loop
|
||||
MISMATCH
|
||||
);
|
||||
// an occurrence count is not limited, make a cycle;
|
||||
// to collect more terms on each following matching mismatch
|
||||
result = createCondition(
|
||||
matchTerm,
|
||||
null, // will be a loop
|
||||
MISMATCH
|
||||
);
|
||||
|
||||
result.then = createCondition(
|
||||
MATCH,
|
||||
MATCH,
|
||||
result // make a loop
|
||||
);
|
||||
result.then = createCondition(
|
||||
MATCH,
|
||||
MATCH,
|
||||
result // make a loop
|
||||
);
|
||||
|
||||
if (node.comma) {
|
||||
result.then.else = createCondition(
|
||||
{ type: 'Comma', syntax: node },
|
||||
result,
|
||||
MISMATCH
|
||||
);
|
||||
}
|
||||
} else {
|
||||
// create a match node chain for [min .. max] interval with optional matches
|
||||
for (let i = node.min || 1; i <= node.max; i++) {
|
||||
if (node.comma && result !== MATCH) {
|
||||
result = createCondition(
|
||||
{ type: 'Comma', syntax: node },
|
||||
result,
|
||||
MISMATCH
|
||||
);
|
||||
}
|
||||
|
||||
result = createCondition(
|
||||
matchTerm,
|
||||
createCondition(
|
||||
MATCH,
|
||||
MATCH,
|
||||
result
|
||||
),
|
||||
MISMATCH
|
||||
);
|
||||
}
|
||||
if (node.comma) {
|
||||
result.then.else = createCondition(
|
||||
{ type: 'Comma', syntax: node },
|
||||
result,
|
||||
MISMATCH
|
||||
);
|
||||
}
|
||||
|
||||
if (node.min === 0) {
|
||||
// allow zero match
|
||||
} else {
|
||||
// create a match node chain for [min .. max] interval with optional matches
|
||||
for (let i = node.min || 1; i <= node.max; i++) {
|
||||
if (node.comma && result !== MATCH) {
|
||||
result = createCondition(
|
||||
MATCH,
|
||||
MATCH,
|
||||
result
|
||||
{ type: 'Comma', syntax: node },
|
||||
result,
|
||||
MISMATCH
|
||||
);
|
||||
} else {
|
||||
// create a match node chain to collect [0 ... min - 1] required matches
|
||||
for (let i = 0; i < node.min - 1; i++) {
|
||||
if (node.comma && result !== MATCH) {
|
||||
result = createCondition(
|
||||
{ type: 'Comma', syntax: node },
|
||||
result,
|
||||
MISMATCH
|
||||
);
|
||||
}
|
||||
}
|
||||
|
||||
result = createCondition(
|
||||
matchTerm,
|
||||
result,
|
||||
MISMATCH
|
||||
);
|
||||
}
|
||||
result = createCondition(
|
||||
matchTerm,
|
||||
createCondition(MATCH, MATCH, result),
|
||||
MISMATCH
|
||||
);
|
||||
}
|
||||
}
|
||||
|
||||
return result;
|
||||
if (node.min === 0) {
|
||||
// allow zero match
|
||||
result = createCondition(MATCH, MATCH, result);
|
||||
} else {
|
||||
// create a match node chain to collect [0 ... min - 1] required matches
|
||||
for (let i = 0; i < node.min - 1; i++) {
|
||||
if (node.comma && result !== MATCH) {
|
||||
result = createCondition(
|
||||
{ type: 'Comma', syntax: node },
|
||||
result,
|
||||
MISMATCH
|
||||
);
|
||||
}
|
||||
|
||||
result = createCondition(matchTerm, result, MISMATCH);
|
||||
}
|
||||
}
|
||||
|
||||
return result;
|
||||
}
|
||||
|
||||
function buildMatchGraphInternal(node) {
|
||||
if (typeof node === 'function') {
|
||||
if (typeof node === 'function') {
|
||||
return {
|
||||
type: 'Generic',
|
||||
fn: node,
|
||||
};
|
||||
}
|
||||
|
||||
switch (node.type) {
|
||||
case 'Group': {
|
||||
let result = buildGroupMatchGraph(
|
||||
node.combinator,
|
||||
node.terms.map(buildMatchGraphInternal),
|
||||
false
|
||||
);
|
||||
|
||||
if (node.disallowEmpty) {
|
||||
result = createCondition(result, DISALLOW_EMPTY, MISMATCH);
|
||||
}
|
||||
|
||||
return result;
|
||||
}
|
||||
|
||||
case 'Multiplier':
|
||||
return buildMultiplierMatchGraph(node);
|
||||
|
||||
case 'Type':
|
||||
case 'Property':
|
||||
return {
|
||||
type: node.type,
|
||||
name: node.name,
|
||||
syntax: node,
|
||||
};
|
||||
|
||||
case 'Keyword':
|
||||
return {
|
||||
type: node.type,
|
||||
name: node.name.toLowerCase(),
|
||||
syntax: node,
|
||||
};
|
||||
|
||||
case 'AtKeyword':
|
||||
return {
|
||||
type: node.type,
|
||||
name: '@' + node.name.toLowerCase(),
|
||||
syntax: node,
|
||||
};
|
||||
|
||||
case 'Function':
|
||||
return {
|
||||
type: node.type,
|
||||
name: node.name.toLowerCase() + '(',
|
||||
syntax: node,
|
||||
};
|
||||
|
||||
case 'String':
|
||||
// convert a one char length String to a Token
|
||||
if (node.value.length === 3) {
|
||||
return {
|
||||
type: 'Token',
|
||||
value: node.value.charAt(1),
|
||||
syntax: node,
|
||||
};
|
||||
}
|
||||
}
|
||||
|
||||
switch (node.type) {
|
||||
case 'Group': {
|
||||
let result = buildGroupMatchGraph(
|
||||
node.combinator,
|
||||
node.terms.map(buildMatchGraphInternal),
|
||||
false
|
||||
);

if (node.disallowEmpty) {
result = createCondition(
result,
DISALLOW_EMPTY,
MISMATCH
);
}

return result;
}

// otherwise use it as is
return {
type: node.type,
value: node.value.substr(1, node.value.length - 2).replace(/\\'/g, "'"),
syntax: node,
};

case 'Token':
return {
type: node.type,
value: node.value,
syntax: node,
};
|
||||
case 'Comma':
|
||||
return {
|
||||
type: node.type,
|
||||
syntax: node,
|
||||
};
|
||||
|
||||
case 'Multiplier':
|
||||
return buildMultiplierMatchGraph(node);
|
||||
|
||||
case 'Type':
|
||||
case 'Property':
|
||||
return {
|
||||
type: node.type,
|
||||
name: node.name,
|
||||
syntax: node
|
||||
};
|
||||
|
||||
case 'Keyword':
|
||||
return {
|
||||
type: node.type,
|
||||
name: node.name.toLowerCase(),
|
||||
syntax: node
|
||||
};
|
||||
|
||||
case 'AtKeyword':
|
||||
return {
|
||||
type: node.type,
|
||||
name: '@' + node.name.toLowerCase(),
|
||||
syntax: node
|
||||
};
|
||||
|
||||
case 'Function':
|
||||
return {
|
||||
type: node.type,
|
||||
name: node.name.toLowerCase() + '(',
|
||||
syntax: node
|
||||
};
|
||||
|
||||
case 'String':
|
||||
// convert a one char length String to a Token
|
||||
if (node.value.length === 3) {
|
||||
return {
|
||||
type: 'Token',
|
||||
value: node.value.charAt(1),
|
||||
syntax: node
|
||||
};
|
||||
}
|
||||
|
||||
// otherwise use it as is
|
||||
return {
|
||||
type: node.type,
|
||||
value: node.value.substr(1, node.value.length - 2).replace(/\\'/g, '\''),
|
||||
syntax: node
|
||||
};
|
||||
|
||||
case 'Token':
|
||||
return {
|
||||
type: node.type,
|
||||
value: node.value,
|
||||
syntax: node
|
||||
};
|
||||
|
||||
case 'Comma':
|
||||
return {
|
||||
type: node.type,
|
||||
syntax: node
|
||||
};
|
||||
|
||||
default:
|
||||
throw new Error('Unknown node type:', node.type);
|
||||
}
|
||||
default:
|
||||
throw new Error('Unknown node type:', node.type);
|
||||
}
|
||||
}
|
||||
|
||||
function buildMatchGraph(syntaxTree, ref) {
|
||||
if (typeof syntaxTree === 'string') {
|
||||
syntaxTree = parse.parse(syntaxTree);
|
||||
}
|
||||
if (typeof syntaxTree === 'string') {
|
||||
syntaxTree = parse.parse(syntaxTree);
|
||||
}
|
||||
|
||||
return {
|
||||
type: 'MatchGraph',
|
||||
match: buildMatchGraphInternal(syntaxTree),
|
||||
syntax: ref || null,
|
||||
source: syntaxTree
|
||||
};
|
||||
return {
|
||||
type: 'MatchGraph',
|
||||
match: buildMatchGraphInternal(syntaxTree),
|
||||
syntax: ref || null,
|
||||
source: syntaxTree,
|
||||
};
|
||||
}
|
||||
|
||||
exports.DISALLOW_EMPTY = DISALLOW_EMPTY;
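
Reviewer note: the hunks above only re-indent the match-graph builder (createCondition, buildMultiplierMatchGraph, buildMatchGraphInternal, buildMatchGraph); the MATCH/MISMATCH semantics are untouched. As a reference point, a minimal sketch of how these graphs are exercised indirectly through the public lexer API (standard css-tree entry point assumed; the property and value are arbitrary examples):

// Sketch only: the graph built by buildMatchGraph() is what
// lexer.matchProperty() walks internally when validating a value.
const csstree = require('css-tree');

const value = csstree.parse('1px solid red', { context: 'value' });
const match = csstree.lexer.matchProperty('border', value);

if (match.error) {
  console.error(match.error.message);
} else {
  console.log('value matches the border property syntax');
}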
|
||||
|
1144
node_modules/css-tree/cjs/lexer/match.cjs
generated
vendored
1144
node_modules/css-tree/cjs/lexer/match.cjs
generated
vendored
File diff suppressed because it is too large
72
node_modules/css-tree/cjs/lexer/prepare-tokens.cjs
generated
vendored
72
node_modules/css-tree/cjs/lexer/prepare-tokens.cjs
generated
vendored
@ -3,52 +3,52 @@
|
||||
const index = require('../tokenizer/index.cjs');
|
||||
|
||||
const astToTokens = {
|
||||
decorator(handlers) {
|
||||
const tokens = [];
|
||||
let curNode = null;
|
||||
decorator(handlers) {
|
||||
const tokens = [];
|
||||
let curNode = null;
|
||||
|
||||
return {
|
||||
...handlers,
|
||||
node(node) {
|
||||
const tmp = curNode;
|
||||
curNode = node;
|
||||
handlers.node.call(this, node);
|
||||
curNode = tmp;
|
||||
},
|
||||
emit(value, type, auto) {
|
||||
tokens.push({
|
||||
type,
|
||||
value,
|
||||
node: auto ? null : curNode
|
||||
});
|
||||
},
|
||||
result() {
|
||||
return tokens;
|
||||
}
|
||||
};
|
||||
}
|
||||
return {
|
||||
...handlers,
|
||||
node(node) {
|
||||
const tmp = curNode;
|
||||
curNode = node;
|
||||
handlers.node.call(this, node);
|
||||
curNode = tmp;
|
||||
},
|
||||
emit(value, type, auto) {
|
||||
tokens.push({
|
||||
type,
|
||||
value,
|
||||
node: auto ? null : curNode,
|
||||
});
|
||||
},
|
||||
result() {
|
||||
return tokens;
|
||||
},
|
||||
};
|
||||
},
|
||||
};
|
||||
|
||||
function stringToTokens(str) {
|
||||
const tokens = [];
|
||||
const tokens = [];
|
||||
|
||||
index.tokenize(str, (type, start, end) =>
|
||||
tokens.push({
|
||||
type,
|
||||
value: str.slice(start, end),
|
||||
node: null
|
||||
})
|
||||
);
|
||||
index.tokenize(str, (type, start, end) =>
|
||||
tokens.push({
|
||||
type,
|
||||
value: str.slice(start, end),
|
||||
node: null,
|
||||
})
|
||||
);
|
||||
|
||||
return tokens;
|
||||
return tokens;
|
||||
}
|
||||
|
||||
function prepareTokens(value, syntax) {
|
||||
if (typeof value === 'string') {
|
||||
return stringToTokens(value);
|
||||
}
|
||||
if (typeof value === 'string') {
|
||||
return stringToTokens(value);
|
||||
}
|
||||
|
||||
return syntax.generate(value, astToTokens);
|
||||
return syntax.generate(value, astToTokens);
|
||||
}
|
||||
|
||||
module.exports = prepareTokens;
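
Reviewer note: prepare-tokens.cjs is reformatting only. For context, stringToTokens() above relies on the tokenizer's callback form; an equivalent sketch using the public tokenize() export (token types are numeric codes) looks like this:

// Sketch: collect { type, value, node } records the same way
// stringToTokens() does, via the public tokenize() export.
const { tokenize } = require('css-tree');

function toTokens(str) {
  const tokens = [];

  tokenize(str, (type, start, end) =>
    tokens.push({
      type, // numeric token type code
      value: str.slice(start, end),
      node: null,
    })
  );

  return tokens;
}

console.log(toTokens('color: red'));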
|
||||
|
80
node_modules/css-tree/cjs/lexer/search.cjs
generated
vendored
80
node_modules/css-tree/cjs/lexer/search.cjs
generated
vendored
@ -3,63 +3,65 @@
|
||||
const List = require('../utils/List.cjs');
|
||||
|
||||
function getFirstMatchNode(matchNode) {
|
||||
if ('node' in matchNode) {
|
||||
return matchNode.node;
|
||||
}
|
||||
if ('node' in matchNode) {
|
||||
return matchNode.node;
|
||||
}
|
||||
|
||||
return getFirstMatchNode(matchNode.match[0]);
|
||||
return getFirstMatchNode(matchNode.match[0]);
|
||||
}
|
||||
|
||||
function getLastMatchNode(matchNode) {
|
||||
if ('node' in matchNode) {
|
||||
return matchNode.node;
|
||||
}
|
||||
if ('node' in matchNode) {
|
||||
return matchNode.node;
|
||||
}
|
||||
|
||||
return getLastMatchNode(matchNode.match[matchNode.match.length - 1]);
|
||||
return getLastMatchNode(matchNode.match[matchNode.match.length - 1]);
|
||||
}
|
||||
|
||||
function matchFragments(lexer, ast, match, type, name) {
|
||||
function findFragments(matchNode) {
|
||||
if (matchNode.syntax !== null &&
|
||||
matchNode.syntax.type === type &&
|
||||
matchNode.syntax.name === name) {
|
||||
const start = getFirstMatchNode(matchNode);
|
||||
const end = getLastMatchNode(matchNode);
|
||||
function findFragments(matchNode) {
|
||||
if (
|
||||
matchNode.syntax !== null &&
|
||||
matchNode.syntax.type === type &&
|
||||
matchNode.syntax.name === name
|
||||
) {
|
||||
const start = getFirstMatchNode(matchNode);
|
||||
const end = getLastMatchNode(matchNode);
|
||||
|
||||
lexer.syntax.walk(ast, function(node, item, list) {
|
||||
if (node === start) {
|
||||
const nodes = new List.List();
|
||||
lexer.syntax.walk(ast, function (node, item, list) {
|
||||
if (node === start) {
|
||||
const nodes = new List.List();
|
||||
|
||||
do {
|
||||
nodes.appendData(item.data);
|
||||
do {
|
||||
nodes.appendData(item.data);
|
||||
|
||||
if (item.data === end) {
|
||||
break;
|
||||
}
|
||||
if (item.data === end) {
|
||||
break;
|
||||
}
|
||||
|
||||
item = item.next;
|
||||
} while (item !== null);
|
||||
item = item.next;
|
||||
} while (item !== null);
|
||||
|
||||
fragments.push({
|
||||
parent: list,
|
||||
nodes
|
||||
});
|
||||
}
|
||||
});
|
||||
}
|
||||
|
||||
if (Array.isArray(matchNode.match)) {
|
||||
matchNode.match.forEach(findFragments);
|
||||
fragments.push({
|
||||
parent: list,
|
||||
nodes,
|
||||
});
|
||||
}
|
||||
});
|
||||
}
|
||||
|
||||
const fragments = [];
|
||||
|
||||
if (match.matched !== null) {
|
||||
findFragments(match.matched);
|
||||
if (Array.isArray(matchNode.match)) {
|
||||
matchNode.match.forEach(findFragments);
|
||||
}
|
||||
}
|
||||
|
||||
return fragments;
|
||||
const fragments = [];
|
||||
|
||||
if (match.matched !== null) {
|
||||
findFragments(match.matched);
|
||||
}
|
||||
|
||||
return fragments;
|
||||
}
|
||||
|
||||
exports.matchFragments = matchFragments;
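
Reviewer note: matchFragments() is internal and is normally reached through the Lexer's fragment-search helpers. A hedged sketch, assuming the Lexer exposes findValueFragments(property, ast, type, name) as a thin wrapper over the function above:

// Sketch: locate fragments of a parsed value that matched a given type.
const csstree = require('css-tree');

const ast = csstree.parse('1px solid red', { context: 'value' });

// Assumption: findValueFragments() delegates to matchFragments() in search.cjs.
const fragments = csstree.lexer.findValueFragments('border', ast, 'Type', 'color');

for (const fragment of fragments) {
  // fragment.nodes is a List of consecutive AST nodes covered by the match
  console.log(fragment.nodes.toArray().map((node) => node.type));
}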
|
||||
|
274
node_modules/css-tree/cjs/lexer/structure.cjs
generated
vendored
274
node_modules/css-tree/cjs/lexer/structure.cjs
generated
vendored
@ -5,164 +5,180 @@ const List = require('../utils/List.cjs');
|
||||
const { hasOwnProperty } = Object.prototype;
|
||||
|
||||
function isValidNumber(value) {
|
||||
// Number.isInteger(value) && value >= 0
|
||||
return (
|
||||
typeof value === 'number' &&
|
||||
isFinite(value) &&
|
||||
Math.floor(value) === value &&
|
||||
value >= 0
|
||||
);
|
||||
// Number.isInteger(value) && value >= 0
|
||||
return (
|
||||
typeof value === 'number' &&
|
||||
isFinite(value) &&
|
||||
Math.floor(value) === value &&
|
||||
value >= 0
|
||||
);
|
||||
}
|
||||
|
||||
function isValidLocation(loc) {
|
||||
return (
|
||||
Boolean(loc) &&
|
||||
isValidNumber(loc.offset) &&
|
||||
isValidNumber(loc.line) &&
|
||||
isValidNumber(loc.column)
|
||||
);
|
||||
return (
|
||||
Boolean(loc) &&
|
||||
isValidNumber(loc.offset) &&
|
||||
isValidNumber(loc.line) &&
|
||||
isValidNumber(loc.column)
|
||||
);
|
||||
}
|
||||
|
||||
function createNodeStructureChecker(type, fields) {
|
||||
return function checkNode(node, warn) {
|
||||
if (!node || node.constructor !== Object) {
|
||||
return warn(node, 'Type of node should be an Object');
|
||||
return function checkNode(node, warn) {
|
||||
if (!node || node.constructor !== Object) {
|
||||
return warn(node, 'Type of node should be an Object');
|
||||
}
|
||||
|
||||
for (let key in node) {
|
||||
let valid = true;
|
||||
|
||||
if (hasOwnProperty.call(node, key) === false) {
|
||||
continue;
|
||||
}
|
||||
|
||||
if (key === 'type') {
|
||||
if (node.type !== type) {
|
||||
warn(
|
||||
node,
|
||||
'Wrong node type `' + node.type + '`, expected `' + type + '`'
|
||||
);
|
||||
}
|
||||
} else if (key === 'loc') {
|
||||
if (node.loc === null) {
|
||||
continue;
|
||||
} else if (node.loc && node.loc.constructor === Object) {
|
||||
if (typeof node.loc.source !== 'string') {
|
||||
key += '.source';
|
||||
} else if (!isValidLocation(node.loc.start)) {
|
||||
key += '.start';
|
||||
} else if (!isValidLocation(node.loc.end)) {
|
||||
key += '.end';
|
||||
} else {
|
||||
continue;
|
||||
}
|
||||
}
|
||||
|
||||
for (let key in node) {
|
||||
let valid = true;
|
||||
valid = false;
|
||||
} else if (fields.hasOwnProperty(key)) {
|
||||
valid = false;
|
||||
|
||||
if (hasOwnProperty.call(node, key) === false) {
|
||||
continue;
|
||||
}
|
||||
for (let i = 0; !valid && i < fields[key].length; i++) {
|
||||
const fieldType = fields[key][i];
|
||||
|
||||
if (key === 'type') {
|
||||
if (node.type !== type) {
|
||||
warn(node, 'Wrong node type `' + node.type + '`, expected `' + type + '`');
|
||||
}
|
||||
} else if (key === 'loc') {
|
||||
if (node.loc === null) {
|
||||
continue;
|
||||
} else if (node.loc && node.loc.constructor === Object) {
|
||||
if (typeof node.loc.source !== 'string') {
|
||||
key += '.source';
|
||||
} else if (!isValidLocation(node.loc.start)) {
|
||||
key += '.start';
|
||||
} else if (!isValidLocation(node.loc.end)) {
|
||||
key += '.end';
|
||||
} else {
|
||||
continue;
|
||||
}
|
||||
}
|
||||
switch (fieldType) {
|
||||
case String:
|
||||
valid = typeof node[key] === 'string';
|
||||
break;
|
||||
|
||||
valid = false;
|
||||
} else if (fields.hasOwnProperty(key)) {
|
||||
valid = false;
|
||||
case Boolean:
|
||||
valid = typeof node[key] === 'boolean';
|
||||
break;
|
||||
|
||||
for (let i = 0; !valid && i < fields[key].length; i++) {
|
||||
const fieldType = fields[key][i];
|
||||
case null:
|
||||
valid = node[key] === null;
|
||||
break;
|
||||
|
||||
switch (fieldType) {
|
||||
case String:
|
||||
valid = typeof node[key] === 'string';
|
||||
break;
|
||||
|
||||
case Boolean:
|
||||
valid = typeof node[key] === 'boolean';
|
||||
break;
|
||||
|
||||
case null:
|
||||
valid = node[key] === null;
|
||||
break;
|
||||
|
||||
default:
|
||||
if (typeof fieldType === 'string') {
|
||||
valid = node[key] && node[key].type === fieldType;
|
||||
} else if (Array.isArray(fieldType)) {
|
||||
valid = node[key] instanceof List.List;
|
||||
}
|
||||
}
|
||||
}
|
||||
} else {
|
||||
warn(node, 'Unknown field `' + key + '` for ' + type + ' node type');
|
||||
}
|
||||
|
||||
if (!valid) {
|
||||
warn(node, 'Bad value for `' + type + '.' + key + '`');
|
||||
}
|
||||
default:
|
||||
if (typeof fieldType === 'string') {
|
||||
valid = node[key] && node[key].type === fieldType;
|
||||
} else if (Array.isArray(fieldType)) {
|
||||
valid = node[key] instanceof List.List;
|
||||
}
|
||||
}
|
||||
}
|
||||
} else {
|
||||
warn(node, 'Unknown field `' + key + '` for ' + type + ' node type');
|
||||
}
|
||||
|
||||
for (const key in fields) {
|
||||
if (hasOwnProperty.call(fields, key) &&
|
||||
hasOwnProperty.call(node, key) === false) {
|
||||
warn(node, 'Field `' + type + '.' + key + '` is missed');
|
||||
}
|
||||
}
|
||||
};
|
||||
if (!valid) {
|
||||
warn(node, 'Bad value for `' + type + '.' + key + '`');
|
||||
}
|
||||
}
|
||||
|
||||
for (const key in fields) {
|
||||
if (
|
||||
hasOwnProperty.call(fields, key) &&
|
||||
hasOwnProperty.call(node, key) === false
|
||||
) {
|
||||
warn(node, 'Field `' + type + '.' + key + '` is missed');
|
||||
}
|
||||
}
|
||||
};
|
||||
}
|
||||
|
||||
function processStructure(name, nodeType) {
|
||||
const structure = nodeType.structure;
|
||||
const fields = {
|
||||
type: String,
|
||||
loc: true
|
||||
};
|
||||
const docs = {
|
||||
type: '"' + name + '"'
|
||||
};
|
||||
const structure = nodeType.structure;
|
||||
const fields = {
|
||||
type: String,
|
||||
loc: true,
|
||||
};
|
||||
const docs = {
|
||||
type: '"' + name + '"',
|
||||
};
|
||||
|
||||
for (const key in structure) {
|
||||
if (hasOwnProperty.call(structure, key) === false) {
|
||||
continue;
|
||||
}
|
||||
|
||||
const docsTypes = [];
|
||||
const fieldTypes = fields[key] = Array.isArray(structure[key])
|
||||
? structure[key].slice()
|
||||
: [structure[key]];
|
||||
|
||||
for (let i = 0; i < fieldTypes.length; i++) {
|
||||
const fieldType = fieldTypes[i];
|
||||
if (fieldType === String || fieldType === Boolean) {
|
||||
docsTypes.push(fieldType.name);
|
||||
} else if (fieldType === null) {
|
||||
docsTypes.push('null');
|
||||
} else if (typeof fieldType === 'string') {
|
||||
docsTypes.push('<' + fieldType + '>');
|
||||
} else if (Array.isArray(fieldType)) {
|
||||
docsTypes.push('List'); // TODO: use type enum
|
||||
} else {
|
||||
throw new Error('Wrong value `' + fieldType + '` in `' + name + '.' + key + '` structure definition');
|
||||
}
|
||||
}
|
||||
|
||||
docs[key] = docsTypes.join(' | ');
|
||||
for (const key in structure) {
|
||||
if (hasOwnProperty.call(structure, key) === false) {
|
||||
continue;
|
||||
}
|
||||
|
||||
return {
|
||||
docs,
|
||||
check: createNodeStructureChecker(name, fields)
|
||||
};
|
||||
const docsTypes = [];
|
||||
const fieldTypes = (fields[key] =
|
||||
Array.isArray(structure[key]) ?
|
||||
structure[key].slice()
|
||||
: [structure[key]]);
|
||||
|
||||
for (let i = 0; i < fieldTypes.length; i++) {
|
||||
const fieldType = fieldTypes[i];
|
||||
if (fieldType === String || fieldType === Boolean) {
|
||||
docsTypes.push(fieldType.name);
|
||||
} else if (fieldType === null) {
|
||||
docsTypes.push('null');
|
||||
} else if (typeof fieldType === 'string') {
|
||||
docsTypes.push('<' + fieldType + '>');
|
||||
} else if (Array.isArray(fieldType)) {
|
||||
docsTypes.push('List'); // TODO: use type enum
|
||||
} else {
|
||||
throw new Error(
|
||||
'Wrong value `' +
|
||||
fieldType +
|
||||
'` in `' +
|
||||
name +
|
||||
'.' +
|
||||
key +
|
||||
'` structure definition'
|
||||
);
|
||||
}
|
||||
}
|
||||
|
||||
docs[key] = docsTypes.join(' | ');
|
||||
}
|
||||
|
||||
return {
|
||||
docs,
|
||||
check: createNodeStructureChecker(name, fields),
|
||||
};
|
||||
}
|
||||
|
||||
function getStructureFromConfig(config) {
|
||||
const structure = {};
|
||||
const structure = {};
|
||||
|
||||
if (config.node) {
|
||||
for (const name in config.node) {
|
||||
if (hasOwnProperty.call(config.node, name)) {
|
||||
const nodeType = config.node[name];
|
||||
if (config.node) {
|
||||
for (const name in config.node) {
|
||||
if (hasOwnProperty.call(config.node, name)) {
|
||||
const nodeType = config.node[name];
|
||||
|
||||
if (nodeType.structure) {
|
||||
structure[name] = processStructure(name, nodeType);
|
||||
} else {
|
||||
throw new Error('Missed `structure` field in `' + name + '` node type definition');
|
||||
}
|
||||
}
|
||||
if (nodeType.structure) {
|
||||
structure[name] = processStructure(name, nodeType);
|
||||
} else {
|
||||
throw new Error(
|
||||
'Missed `structure` field in `' + name + '` node type definition'
|
||||
);
|
||||
}
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
return structure;
|
||||
return structure;
|
||||
}
|
||||
|
||||
exports.getStructureFromConfig = getStructureFromConfig;
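
Reviewer note: structure.cjs is formatting-only. For reviewers unfamiliar with the checker, the `structure` field it consumes supports a handful of field-type forms; a hypothetical node definition illustrating them (node name and fields are made up for illustration only):

// Hypothetical node type definition: each field lists the value forms
// createNodeStructureChecker() accepts. `type` and `loc` are added implicitly.
const ExampleNode = {
  name: 'Example',
  structure: {
    name: String,                          // typeof value === 'string'
    important: Boolean,                    // typeof value === 'boolean'
    unit: [String, null],                  // several alternatives allowed
    value: 'Value',                        // a child node of the given type
    children: [['Identifier', 'Operator']] // nested array means "a List instance"
  }
};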
|
||||
|
92
node_modules/css-tree/cjs/lexer/trace.cjs
generated
vendored
92
node_modules/css-tree/cjs/lexer/trace.cjs
generated
vendored
@ -1,70 +1,76 @@
|
||||
'use strict';
|
||||
|
||||
function getTrace(node) {
|
||||
function shouldPutToTrace(syntax) {
|
||||
if (syntax === null) {
|
||||
return false;
|
||||
function shouldPutToTrace(syntax) {
|
||||
if (syntax === null) {
|
||||
return false;
|
||||
}
|
||||
|
||||
return (
|
||||
syntax.type === 'Type' ||
|
||||
syntax.type === 'Property' ||
|
||||
syntax.type === 'Keyword'
|
||||
);
|
||||
}
|
||||
|
||||
function hasMatch(matchNode) {
|
||||
if (Array.isArray(matchNode.match)) {
|
||||
// use for-loop for better performance
|
||||
for (let i = 0; i < matchNode.match.length; i++) {
|
||||
if (hasMatch(matchNode.match[i])) {
|
||||
if (shouldPutToTrace(matchNode.syntax)) {
|
||||
result.unshift(matchNode.syntax);
|
||||
}
|
||||
|
||||
return true;
|
||||
}
|
||||
}
|
||||
} else if (matchNode.node === node) {
|
||||
result = shouldPutToTrace(matchNode.syntax) ? [matchNode.syntax] : [];
|
||||
|
||||
return (
|
||||
syntax.type === 'Type' ||
|
||||
syntax.type === 'Property' ||
|
||||
syntax.type === 'Keyword'
|
||||
);
|
||||
return true;
|
||||
}
|
||||
|
||||
function hasMatch(matchNode) {
|
||||
if (Array.isArray(matchNode.match)) {
|
||||
// use for-loop for better performance
|
||||
for (let i = 0; i < matchNode.match.length; i++) {
|
||||
if (hasMatch(matchNode.match[i])) {
|
||||
if (shouldPutToTrace(matchNode.syntax)) {
|
||||
result.unshift(matchNode.syntax);
|
||||
}
|
||||
return false;
|
||||
}
|
||||
|
||||
return true;
|
||||
}
|
||||
}
|
||||
} else if (matchNode.node === node) {
|
||||
result = shouldPutToTrace(matchNode.syntax)
|
||||
? [matchNode.syntax]
|
||||
: [];
|
||||
let result = null;
|
||||
|
||||
return true;
|
||||
}
|
||||
if (this.matched !== null) {
|
||||
hasMatch(this.matched);
|
||||
}
|
||||
|
||||
return false;
|
||||
}
|
||||
|
||||
let result = null;
|
||||
|
||||
if (this.matched !== null) {
|
||||
hasMatch(this.matched);
|
||||
}
|
||||
|
||||
return result;
|
||||
return result;
|
||||
}
|
||||
|
||||
function isType(node, type) {
|
||||
return testNode(this, node, match => match.type === 'Type' && match.name === type);
|
||||
return testNode(
|
||||
this,
|
||||
node,
|
||||
(match) => match.type === 'Type' && match.name === type
|
||||
);
|
||||
}
|
||||
|
||||
function isProperty(node, property) {
|
||||
return testNode(this, node, match => match.type === 'Property' && match.name === property);
|
||||
return testNode(
|
||||
this,
|
||||
node,
|
||||
(match) => match.type === 'Property' && match.name === property
|
||||
);
|
||||
}
|
||||
|
||||
function isKeyword(node) {
|
||||
return testNode(this, node, match => match.type === 'Keyword');
|
||||
return testNode(this, node, (match) => match.type === 'Keyword');
|
||||
}
|
||||
|
||||
function testNode(match, node, fn) {
|
||||
const trace = getTrace.call(match, node);
|
||||
const trace = getTrace.call(match, node);
|
||||
|
||||
if (trace === null) {
|
||||
return false;
|
||||
}
|
||||
if (trace === null) {
|
||||
return false;
|
||||
}
|
||||
|
||||
return trace.some(fn);
|
||||
return trace.some(fn);
|
||||
}
|
||||
|
||||
exports.getTrace = getTrace;
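
Reviewer note: trace.cjs only changes formatting. These helpers surface on a lexer match result; a small usage sketch, assuming the documented match-result methods:

// Sketch: ask a match result how a particular value node was classified.
const csstree = require('css-tree');

const value = csstree.parse('1px solid red', { context: 'value' });
const match = csstree.lexer.matchProperty('border', value);
const px = value.children.first;         // the 1px Dimension node

console.log(match.isType(px, 'length')); // expected: true (1px matched <length>)
console.log(match.isKeyword(px));        // expected: false (not a keyword)
console.log(match.getTrace(px));         // chain of matched Type/Property/Keyword syntaxes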
|
||||
|
114
node_modules/css-tree/cjs/parser/SyntaxError.cjs
generated
vendored
114
node_modules/css-tree/cjs/parser/SyntaxError.cjs
generated
vendored
@ -7,63 +7,75 @@ const OFFSET_CORRECTION = 60;
|
||||
const TAB_REPLACEMENT = ' ';
|
||||
|
||||
function sourceFragment({ source, line, column }, extraLines) {
|
||||
function processLines(start, end) {
|
||||
return lines
|
||||
.slice(start, end)
|
||||
.map((line, idx) =>
|
||||
String(start + idx + 1).padStart(maxNumLength) + ' |' + line
|
||||
).join('\n');
|
||||
function processLines(start, end) {
|
||||
return lines
|
||||
.slice(start, end)
|
||||
.map(
|
||||
(line, idx) =>
|
||||
String(start + idx + 1).padStart(maxNumLength) + ' |' + line
|
||||
)
|
||||
.join('\n');
|
||||
}
|
||||
|
||||
const lines = source.split(/\r\n?|\n|\f/);
|
||||
const startLine = Math.max(1, line - extraLines) - 1;
|
||||
const endLine = Math.min(line + extraLines, lines.length + 1);
|
||||
const maxNumLength = Math.max(4, String(endLine).length) + 1;
|
||||
let cutLeft = 0;
|
||||
|
||||
// column correction according to replaced tab before column
|
||||
column +=
|
||||
(TAB_REPLACEMENT.length - 1) *
|
||||
(lines[line - 1].substr(0, column - 1).match(/\t/g) || []).length;
|
||||
|
||||
if (column > MAX_LINE_LENGTH) {
|
||||
cutLeft = column - OFFSET_CORRECTION + 3;
|
||||
column = OFFSET_CORRECTION - 2;
|
||||
}
|
||||
|
||||
for (let i = startLine; i <= endLine; i++) {
|
||||
if (i >= 0 && i < lines.length) {
|
||||
lines[i] = lines[i].replace(/\t/g, TAB_REPLACEMENT);
|
||||
lines[i] =
|
||||
(cutLeft > 0 && lines[i].length > cutLeft ? '\u2026' : '') +
|
||||
lines[i].substr(cutLeft, MAX_LINE_LENGTH - 2) +
|
||||
(lines[i].length > cutLeft + MAX_LINE_LENGTH - 1 ? '\u2026' : '');
|
||||
}
|
||||
}
|
||||
|
||||
const lines = source.split(/\r\n?|\n|\f/);
|
||||
const startLine = Math.max(1, line - extraLines) - 1;
|
||||
const endLine = Math.min(line + extraLines, lines.length + 1);
|
||||
const maxNumLength = Math.max(4, String(endLine).length) + 1;
|
||||
let cutLeft = 0;
|
||||
|
||||
// column correction according to replaced tab before column
|
||||
column += (TAB_REPLACEMENT.length - 1) * (lines[line - 1].substr(0, column - 1).match(/\t/g) || []).length;
|
||||
|
||||
if (column > MAX_LINE_LENGTH) {
|
||||
cutLeft = column - OFFSET_CORRECTION + 3;
|
||||
column = OFFSET_CORRECTION - 2;
|
||||
}
|
||||
|
||||
for (let i = startLine; i <= endLine; i++) {
|
||||
if (i >= 0 && i < lines.length) {
|
||||
lines[i] = lines[i].replace(/\t/g, TAB_REPLACEMENT);
|
||||
lines[i] =
|
||||
(cutLeft > 0 && lines[i].length > cutLeft ? '\u2026' : '') +
|
||||
lines[i].substr(cutLeft, MAX_LINE_LENGTH - 2) +
|
||||
(lines[i].length > cutLeft + MAX_LINE_LENGTH - 1 ? '\u2026' : '');
|
||||
}
|
||||
}
|
||||
|
||||
return [
|
||||
processLines(startLine, line),
|
||||
new Array(column + maxNumLength + 2).join('-') + '^',
|
||||
processLines(line, endLine)
|
||||
].filter(Boolean).join('\n');
|
||||
return [
|
||||
processLines(startLine, line),
|
||||
new Array(column + maxNumLength + 2).join('-') + '^',
|
||||
processLines(line, endLine),
|
||||
]
|
||||
.filter(Boolean)
|
||||
.join('\n');
|
||||
}
|
||||
|
||||
function SyntaxError(message, source, offset, line, column) {
|
||||
const error = Object.assign(createCustomError.createCustomError('SyntaxError', message), {
|
||||
source,
|
||||
offset,
|
||||
line,
|
||||
column,
|
||||
sourceFragment(extraLines) {
|
||||
return sourceFragment({ source, line, column }, isNaN(extraLines) ? 0 : extraLines);
|
||||
},
|
||||
get formattedMessage() {
|
||||
return (
|
||||
`Parse error: ${message}\n` +
|
||||
sourceFragment({ source, line, column }, 2)
|
||||
);
|
||||
}
|
||||
});
|
||||
const error = Object.assign(
|
||||
createCustomError.createCustomError('SyntaxError', message),
|
||||
{
|
||||
source,
|
||||
offset,
|
||||
line,
|
||||
column,
|
||||
sourceFragment(extraLines) {
|
||||
return sourceFragment(
|
||||
{ source, line, column },
|
||||
isNaN(extraLines) ? 0 : extraLines
|
||||
);
|
||||
},
|
||||
get formattedMessage() {
|
||||
return (
|
||||
`Parse error: ${message}\n` +
|
||||
sourceFragment({ source, line, column }, 2)
|
||||
);
|
||||
},
|
||||
}
|
||||
);
|
||||
|
||||
return error;
|
||||
return error;
|
||||
}
|
||||
|
||||
exports.SyntaxError = SyntaxError;
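
Reviewer note: the SyntaxError factory keeps the same shape after reformatting. A short sketch of how the extra fields are typically consumed; by default parse errors are recovered as Raw nodes and reported through onParseError:

// Sketch: onParseError receives the SyntaxError instances built above.
const csstree = require('css-tree');

csstree.parse('example { foo; bar: 1! }', {
  onParseError(error) {
    console.log(error.formattedMessage); // message plus a framed source fragment
    console.log(error.line, error.column);
  },
});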
|
||||
|
580
node_modules/css-tree/cjs/parser/create.cjs
generated
vendored
580
node_modules/css-tree/cjs/parser/create.cjs
generated
vendored
@ -11,326 +11,354 @@ const types = require('../tokenizer/types.cjs');
|
||||
const names = require('../tokenizer/names.cjs');
|
||||
|
||||
const NOOP = () => {};
|
||||
const EXCLAMATIONMARK = 0x0021; // U+0021 EXCLAMATION MARK (!)
|
||||
const NUMBERSIGN = 0x0023; // U+0023 NUMBER SIGN (#)
|
||||
const SEMICOLON = 0x003B; // U+003B SEMICOLON (;)
|
||||
const LEFTCURLYBRACKET = 0x007B; // U+007B LEFT CURLY BRACKET ({)
|
||||
const EXCLAMATIONMARK = 0x0021; // U+0021 EXCLAMATION MARK (!)
|
||||
const NUMBERSIGN = 0x0023; // U+0023 NUMBER SIGN (#)
|
||||
const SEMICOLON = 0x003b; // U+003B SEMICOLON (;)
|
||||
const LEFTCURLYBRACKET = 0x007b; // U+007B LEFT CURLY BRACKET ({)
|
||||
const NULL = 0;
|
||||
|
||||
function createParseContext(name) {
|
||||
return function() {
|
||||
return this[name]();
|
||||
};
|
||||
return function () {
|
||||
return this[name]();
|
||||
};
|
||||
}
|
||||
|
||||
function fetchParseValues(dict) {
|
||||
const result = Object.create(null);
|
||||
const result = Object.create(null);
|
||||
|
||||
for (const name in dict) {
|
||||
const item = dict[name];
|
||||
const fn = item.parse || item;
|
||||
for (const name in dict) {
|
||||
const item = dict[name];
|
||||
const fn = item.parse || item;
|
||||
|
||||
if (fn) {
|
||||
result[name] = fn;
|
||||
}
|
||||
if (fn) {
|
||||
result[name] = fn;
|
||||
}
|
||||
}
|
||||
|
||||
return result;
|
||||
return result;
|
||||
}
|
||||
|
||||
function processConfig(config) {
|
||||
const parseConfig = {
|
||||
context: Object.create(null),
|
||||
scope: Object.assign(Object.create(null), config.scope),
|
||||
atrule: fetchParseValues(config.atrule),
|
||||
pseudo: fetchParseValues(config.pseudo),
|
||||
node: fetchParseValues(config.node)
|
||||
};
|
||||
const parseConfig = {
|
||||
context: Object.create(null),
|
||||
scope: Object.assign(Object.create(null), config.scope),
|
||||
atrule: fetchParseValues(config.atrule),
|
||||
pseudo: fetchParseValues(config.pseudo),
|
||||
node: fetchParseValues(config.node),
|
||||
};
|
||||
|
||||
for (const name in config.parseContext) {
|
||||
switch (typeof config.parseContext[name]) {
|
||||
case 'function':
|
||||
parseConfig.context[name] = config.parseContext[name];
|
||||
break;
|
||||
for (const name in config.parseContext) {
|
||||
switch (typeof config.parseContext[name]) {
|
||||
case 'function':
|
||||
parseConfig.context[name] = config.parseContext[name];
|
||||
break;
|
||||
|
||||
case 'string':
|
||||
parseConfig.context[name] = createParseContext(config.parseContext[name]);
|
||||
break;
|
||||
}
|
||||
case 'string':
|
||||
parseConfig.context[name] = createParseContext(
|
||||
config.parseContext[name]
|
||||
);
|
||||
break;
|
||||
}
|
||||
}
|
||||
|
||||
return {
|
||||
config: parseConfig,
|
||||
...parseConfig,
|
||||
...parseConfig.node
|
||||
};
|
||||
return {
|
||||
config: parseConfig,
|
||||
...parseConfig,
|
||||
...parseConfig.node,
|
||||
};
|
||||
}
|
||||
|
||||
function createParser(config) {
|
||||
let source = '';
|
||||
let filename = '<unknown>';
|
||||
let needPositions = false;
|
||||
let onParseError = NOOP;
|
||||
let onParseErrorThrow = false;
|
||||
let source = '';
|
||||
let filename = '<unknown>';
|
||||
let needPositions = false;
|
||||
let onParseError = NOOP;
|
||||
let onParseErrorThrow = false;
|
||||
|
||||
const locationMap = new OffsetToLocation.OffsetToLocation();
|
||||
const parser = Object.assign(new TokenStream.TokenStream(), processConfig(config || {}), {
|
||||
parseAtrulePrelude: true,
|
||||
parseRulePrelude: true,
|
||||
parseValue: true,
|
||||
parseCustomProperty: false,
|
||||
const locationMap = new OffsetToLocation.OffsetToLocation();
|
||||
const parser = Object.assign(
|
||||
new TokenStream.TokenStream(),
|
||||
processConfig(config || {}),
|
||||
{
|
||||
parseAtrulePrelude: true,
|
||||
parseRulePrelude: true,
|
||||
parseValue: true,
|
||||
parseCustomProperty: false,
|
||||
|
||||
readSequence: sequence.readSequence,
|
||||
readSequence: sequence.readSequence,
|
||||
|
||||
consumeUntilBalanceEnd: () => 0,
|
||||
consumeUntilLeftCurlyBracket(code) {
|
||||
return code === LEFTCURLYBRACKET ? 1 : 0;
|
||||
},
|
||||
consumeUntilLeftCurlyBracketOrSemicolon(code) {
|
||||
return code === LEFTCURLYBRACKET || code === SEMICOLON ? 1 : 0;
|
||||
},
|
||||
consumeUntilExclamationMarkOrSemicolon(code) {
|
||||
return code === EXCLAMATIONMARK || code === SEMICOLON ? 1 : 0;
|
||||
},
|
||||
consumeUntilSemicolonIncluded(code) {
|
||||
return code === SEMICOLON ? 2 : 0;
|
||||
},
|
||||
consumeUntilBalanceEnd: () => 0,
|
||||
consumeUntilLeftCurlyBracket(code) {
|
||||
return code === LEFTCURLYBRACKET ? 1 : 0;
|
||||
},
|
||||
consumeUntilLeftCurlyBracketOrSemicolon(code) {
|
||||
return code === LEFTCURLYBRACKET || code === SEMICOLON ? 1 : 0;
|
||||
},
|
||||
consumeUntilExclamationMarkOrSemicolon(code) {
|
||||
return code === EXCLAMATIONMARK || code === SEMICOLON ? 1 : 0;
|
||||
},
|
||||
consumeUntilSemicolonIncluded(code) {
|
||||
return code === SEMICOLON ? 2 : 0;
|
||||
},
|
||||
|
||||
createList() {
|
||||
return new List.List();
|
||||
},
|
||||
createSingleNodeList(node) {
|
||||
return new List.List().appendData(node);
|
||||
},
|
||||
getFirstListNode(list) {
|
||||
return list && list.first;
|
||||
},
|
||||
getLastListNode(list) {
|
||||
return list && list.last;
|
||||
},
|
||||
createList() {
|
||||
return new List.List();
|
||||
},
|
||||
createSingleNodeList(node) {
|
||||
return new List.List().appendData(node);
|
||||
},
|
||||
getFirstListNode(list) {
|
||||
return list && list.first;
|
||||
},
|
||||
getLastListNode(list) {
|
||||
return list && list.last;
|
||||
},
|
||||
|
||||
parseWithFallback(consumer, fallback) {
|
||||
const startToken = this.tokenIndex;
|
||||
parseWithFallback(consumer, fallback) {
|
||||
const startToken = this.tokenIndex;
|
||||
|
||||
try {
|
||||
return consumer.call(this);
|
||||
} catch (e) {
|
||||
if (onParseErrorThrow) {
|
||||
throw e;
|
||||
}
|
||||
try {
|
||||
return consumer.call(this);
|
||||
} catch (e) {
|
||||
if (onParseErrorThrow) {
|
||||
throw e;
|
||||
}
|
||||
|
||||
const fallbackNode = fallback.call(this, startToken);
|
||||
const fallbackNode = fallback.call(this, startToken);
|
||||
|
||||
onParseErrorThrow = true;
|
||||
onParseError(e, fallbackNode);
|
||||
onParseErrorThrow = false;
|
||||
onParseErrorThrow = true;
|
||||
onParseError(e, fallbackNode);
|
||||
onParseErrorThrow = false;
|
||||
|
||||
return fallbackNode;
|
||||
}
|
||||
},
|
||||
|
||||
lookupNonWSType(offset) {
|
||||
let type;
|
||||
|
||||
do {
|
||||
type = this.lookupType(offset++);
|
||||
if (type !== types.WhiteSpace) {
|
||||
return type;
|
||||
}
|
||||
} while (type !== NULL);
|
||||
|
||||
return NULL;
|
||||
},
|
||||
|
||||
charCodeAt(offset) {
|
||||
return offset >= 0 && offset < source.length ? source.charCodeAt(offset) : 0;
|
||||
},
|
||||
substring(offsetStart, offsetEnd) {
|
||||
return source.substring(offsetStart, offsetEnd);
|
||||
},
|
||||
substrToCursor(start) {
|
||||
return this.source.substring(start, this.tokenStart);
|
||||
},
|
||||
|
||||
cmpChar(offset, charCode) {
|
||||
return utils.cmpChar(source, offset, charCode);
|
||||
},
|
||||
cmpStr(offsetStart, offsetEnd, str) {
|
||||
return utils.cmpStr(source, offsetStart, offsetEnd, str);
|
||||
},
|
||||
|
||||
consume(tokenType) {
|
||||
const start = this.tokenStart;
|
||||
|
||||
this.eat(tokenType);
|
||||
|
||||
return this.substrToCursor(start);
|
||||
},
|
||||
consumeFunctionName() {
|
||||
const name = source.substring(this.tokenStart, this.tokenEnd - 1);
|
||||
|
||||
this.eat(types.Function);
|
||||
|
||||
return name;
|
||||
},
|
||||
consumeNumber(type) {
|
||||
const number = source.substring(this.tokenStart, utils.consumeNumber(source, this.tokenStart));
|
||||
|
||||
this.eat(type);
|
||||
|
||||
return number;
|
||||
},
|
||||
|
||||
eat(tokenType) {
|
||||
if (this.tokenType !== tokenType) {
|
||||
const tokenName = names[tokenType].slice(0, -6).replace(/-/g, ' ').replace(/^./, m => m.toUpperCase());
|
||||
let message = `${/[[\](){}]/.test(tokenName) ? `"${tokenName}"` : tokenName} is expected`;
|
||||
let offset = this.tokenStart;
|
||||
|
||||
// tweak message and offset
|
||||
switch (tokenType) {
|
||||
case types.Ident:
|
||||
// when identifier is expected but there is a function or url
|
||||
if (this.tokenType === types.Function || this.tokenType === types.Url) {
|
||||
offset = this.tokenEnd - 1;
|
||||
message = 'Identifier is expected but function found';
|
||||
} else {
|
||||
message = 'Identifier is expected';
|
||||
}
|
||||
break;
|
||||
|
||||
case types.Hash:
|
||||
if (this.isDelim(NUMBERSIGN)) {
|
||||
this.next();
|
||||
offset++;
|
||||
message = 'Name is expected';
|
||||
}
|
||||
break;
|
||||
|
||||
case types.Percentage:
|
||||
if (this.tokenType === types.Number) {
|
||||
offset = this.tokenEnd;
|
||||
message = 'Percent sign is expected';
|
||||
}
|
||||
break;
|
||||
}
|
||||
|
||||
this.error(message, offset);
|
||||
}
|
||||
|
||||
this.next();
|
||||
},
|
||||
eatIdent(name) {
|
||||
if (this.tokenType !== types.Ident || this.lookupValue(0, name) === false) {
|
||||
this.error(`Identifier "${name}" is expected`);
|
||||
}
|
||||
|
||||
this.next();
|
||||
},
|
||||
eatDelim(code) {
|
||||
if (!this.isDelim(code)) {
|
||||
this.error(`Delim "${String.fromCharCode(code)}" is expected`);
|
||||
}
|
||||
|
||||
this.next();
|
||||
},
|
||||
|
||||
getLocation(start, end) {
|
||||
if (needPositions) {
|
||||
return locationMap.getLocationRange(
|
||||
start,
|
||||
end,
|
||||
filename
|
||||
);
|
||||
}
|
||||
|
||||
return null;
|
||||
},
|
||||
getLocationFromList(list) {
|
||||
if (needPositions) {
|
||||
const head = this.getFirstListNode(list);
|
||||
const tail = this.getLastListNode(list);
|
||||
return locationMap.getLocationRange(
|
||||
head !== null ? head.loc.start.offset - locationMap.startOffset : this.tokenStart,
|
||||
tail !== null ? tail.loc.end.offset - locationMap.startOffset : this.tokenStart,
|
||||
filename
|
||||
);
|
||||
}
|
||||
|
||||
return null;
|
||||
},
|
||||
|
||||
error(message, offset) {
|
||||
const location = typeof offset !== 'undefined' && offset < source.length
|
||||
? locationMap.getLocation(offset)
|
||||
: this.eof
|
||||
? locationMap.getLocation(utils.findWhiteSpaceStart(source, source.length - 1))
|
||||
: locationMap.getLocation(this.tokenStart);
|
||||
|
||||
throw new SyntaxError.SyntaxError(
|
||||
message || 'Unexpected input',
|
||||
source,
|
||||
location.offset,
|
||||
location.line,
|
||||
location.column
|
||||
);
|
||||
return fallbackNode;
|
||||
}
|
||||
});
|
||||
},
|
||||
|
||||
const parse = function(source_, options) {
|
||||
source = source_;
|
||||
options = options || {};
|
||||
lookupNonWSType(offset) {
|
||||
let type;
|
||||
|
||||
parser.setSource(source, index.tokenize);
|
||||
locationMap.setSource(
|
||||
source,
|
||||
options.offset,
|
||||
options.line,
|
||||
options.column
|
||||
do {
|
||||
type = this.lookupType(offset++);
|
||||
if (type !== types.WhiteSpace) {
|
||||
return type;
|
||||
}
|
||||
} while (type !== NULL);
|
||||
|
||||
return NULL;
|
||||
},
|
||||
|
||||
charCodeAt(offset) {
|
||||
return offset >= 0 && offset < source.length ?
|
||||
source.charCodeAt(offset)
|
||||
: 0;
|
||||
},
|
||||
substring(offsetStart, offsetEnd) {
|
||||
return source.substring(offsetStart, offsetEnd);
|
||||
},
|
||||
substrToCursor(start) {
|
||||
return this.source.substring(start, this.tokenStart);
|
||||
},
|
||||
|
||||
cmpChar(offset, charCode) {
|
||||
return utils.cmpChar(source, offset, charCode);
|
||||
},
|
||||
cmpStr(offsetStart, offsetEnd, str) {
|
||||
return utils.cmpStr(source, offsetStart, offsetEnd, str);
|
||||
},
|
||||
|
||||
consume(tokenType) {
|
||||
const start = this.tokenStart;
|
||||
|
||||
this.eat(tokenType);
|
||||
|
||||
return this.substrToCursor(start);
|
||||
},
|
||||
consumeFunctionName() {
|
||||
const name = source.substring(this.tokenStart, this.tokenEnd - 1);
|
||||
|
||||
this.eat(types.Function);
|
||||
|
||||
return name;
|
||||
},
|
||||
consumeNumber(type) {
|
||||
const number = source.substring(
|
||||
this.tokenStart,
|
||||
utils.consumeNumber(source, this.tokenStart)
|
||||
);
|
||||
|
||||
filename = options.filename || '<unknown>';
|
||||
needPositions = Boolean(options.positions);
|
||||
onParseError = typeof options.onParseError === 'function' ? options.onParseError : NOOP;
|
||||
onParseErrorThrow = false;
|
||||
this.eat(type);
|
||||
|
||||
parser.parseAtrulePrelude = 'parseAtrulePrelude' in options ? Boolean(options.parseAtrulePrelude) : true;
|
||||
parser.parseRulePrelude = 'parseRulePrelude' in options ? Boolean(options.parseRulePrelude) : true;
|
||||
parser.parseValue = 'parseValue' in options ? Boolean(options.parseValue) : true;
|
||||
parser.parseCustomProperty = 'parseCustomProperty' in options ? Boolean(options.parseCustomProperty) : false;
|
||||
return number;
|
||||
},
|
||||
|
||||
const { context = 'default', onComment } = options;
|
||||
eat(tokenType) {
|
||||
if (this.tokenType !== tokenType) {
|
||||
const tokenName = names[tokenType]
|
||||
.slice(0, -6)
|
||||
.replace(/-/g, ' ')
|
||||
.replace(/^./, (m) => m.toUpperCase());
|
||||
let message = `${/[[\](){}]/.test(tokenName) ? `"${tokenName}"` : tokenName} is expected`;
|
||||
let offset = this.tokenStart;
|
||||
|
||||
if (context in parser.context === false) {
|
||||
throw new Error('Unknown context `' + context + '`');
|
||||
// tweak message and offset
|
||||
switch (tokenType) {
|
||||
case types.Ident:
|
||||
// when identifier is expected but there is a function or url
|
||||
if (
|
||||
this.tokenType === types.Function ||
|
||||
this.tokenType === types.Url
|
||||
) {
|
||||
offset = this.tokenEnd - 1;
|
||||
message = 'Identifier is expected but function found';
|
||||
} else {
|
||||
message = 'Identifier is expected';
|
||||
}
|
||||
break;
|
||||
|
||||
case types.Hash:
|
||||
if (this.isDelim(NUMBERSIGN)) {
|
||||
this.next();
|
||||
offset++;
|
||||
message = 'Name is expected';
|
||||
}
|
||||
break;
|
||||
|
||||
case types.Percentage:
|
||||
if (this.tokenType === types.Number) {
|
||||
offset = this.tokenEnd;
|
||||
message = 'Percent sign is expected';
|
||||
}
|
||||
break;
|
||||
}
|
||||
|
||||
this.error(message, offset);
|
||||
}
|
||||
|
||||
if (typeof onComment === 'function') {
|
||||
parser.forEachToken((type, start, end) => {
|
||||
if (type === types.Comment) {
|
||||
const loc = parser.getLocation(start, end);
|
||||
const value = utils.cmpStr(source, end - 2, end, '*/')
|
||||
? source.slice(start + 2, end - 2)
|
||||
: source.slice(start + 2, end);
|
||||
|
||||
onComment(value, loc);
|
||||
}
|
||||
});
|
||||
this.next();
|
||||
},
|
||||
eatIdent(name) {
|
||||
if (
|
||||
this.tokenType !== types.Ident ||
|
||||
this.lookupValue(0, name) === false
|
||||
) {
|
||||
this.error(`Identifier "${name}" is expected`);
|
||||
}
|
||||
|
||||
const ast = parser.context[context].call(parser, options);
|
||||
|
||||
if (!parser.eof) {
|
||||
parser.error();
|
||||
this.next();
|
||||
},
|
||||
eatDelim(code) {
|
||||
if (!this.isDelim(code)) {
|
||||
this.error(`Delim "${String.fromCharCode(code)}" is expected`);
|
||||
}
|
||||
|
||||
return ast;
|
||||
};
|
||||
this.next();
|
||||
},
|
||||
|
||||
return Object.assign(parse, {
|
||||
SyntaxError: SyntaxError.SyntaxError,
|
||||
config: parser.config
|
||||
});
|
||||
getLocation(start, end) {
|
||||
if (needPositions) {
|
||||
return locationMap.getLocationRange(start, end, filename);
|
||||
}
|
||||
|
||||
return null;
|
||||
},
|
||||
getLocationFromList(list) {
|
||||
if (needPositions) {
|
||||
const head = this.getFirstListNode(list);
|
||||
const tail = this.getLastListNode(list);
|
||||
return locationMap.getLocationRange(
|
||||
head !== null ?
|
||||
head.loc.start.offset - locationMap.startOffset
|
||||
: this.tokenStart,
|
||||
tail !== null ?
|
||||
tail.loc.end.offset - locationMap.startOffset
|
||||
: this.tokenStart,
|
||||
filename
|
||||
);
|
||||
}
|
||||
|
||||
return null;
|
||||
},
|
||||
|
||||
error(message, offset) {
|
||||
const location =
|
||||
typeof offset !== 'undefined' && offset < source.length ?
|
||||
locationMap.getLocation(offset)
|
||||
: this.eof ?
|
||||
locationMap.getLocation(
|
||||
utils.findWhiteSpaceStart(source, source.length - 1)
|
||||
)
|
||||
: locationMap.getLocation(this.tokenStart);
|
||||
|
||||
throw new SyntaxError.SyntaxError(
|
||||
message || 'Unexpected input',
|
||||
source,
|
||||
location.offset,
|
||||
location.line,
|
||||
location.column
|
||||
);
|
||||
},
|
||||
}
|
||||
);
|
||||
|
||||
const parse = function (source_, options) {
|
||||
source = source_;
|
||||
options = options || {};
|
||||
|
||||
parser.setSource(source, index.tokenize);
|
||||
locationMap.setSource(source, options.offset, options.line, options.column);
|
||||
|
||||
filename = options.filename || '<unknown>';
|
||||
needPositions = Boolean(options.positions);
|
||||
onParseError =
|
||||
typeof options.onParseError === 'function' ? options.onParseError : NOOP;
|
||||
onParseErrorThrow = false;
|
||||
|
||||
parser.parseAtrulePrelude =
|
||||
'parseAtrulePrelude' in options ?
|
||||
Boolean(options.parseAtrulePrelude)
|
||||
: true;
|
||||
parser.parseRulePrelude =
|
||||
'parseRulePrelude' in options ? Boolean(options.parseRulePrelude) : true;
|
||||
parser.parseValue =
|
||||
'parseValue' in options ? Boolean(options.parseValue) : true;
|
||||
parser.parseCustomProperty =
|
||||
'parseCustomProperty' in options ?
|
||||
Boolean(options.parseCustomProperty)
|
||||
: false;
|
||||
|
||||
const { context = 'default', onComment } = options;
|
||||
|
||||
if (context in parser.context === false) {
|
||||
throw new Error('Unknown context `' + context + '`');
|
||||
}
|
||||
|
||||
if (typeof onComment === 'function') {
|
||||
parser.forEachToken((type, start, end) => {
|
||||
if (type === types.Comment) {
|
||||
const loc = parser.getLocation(start, end);
|
||||
const value =
|
||||
utils.cmpStr(source, end - 2, end, '*/') ?
|
||||
source.slice(start + 2, end - 2)
|
||||
: source.slice(start + 2, end);
|
||||
|
||||
onComment(value, loc);
|
||||
}
|
||||
});
|
||||
}
|
||||
|
||||
const ast = parser.context[context].call(parser, options);
|
||||
|
||||
if (!parser.eof) {
|
||||
parser.error();
|
||||
}
|
||||
|
||||
return ast;
|
||||
};
|
||||
|
||||
return Object.assign(parse, {
|
||||
SyntaxError: SyntaxError.SyntaxError,
|
||||
config: parser.config,
|
||||
});
|
||||
}
|
||||
|
||||
exports.createParser = createParser;
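
Reviewer note: createParser() is reformatted but keeps the same option handling. A compact sketch of the options this hunk touches (values are illustrative, not new behavior):

// Sketch: the options normalized inside createParser()'s parse() wrapper.
const csstree = require('css-tree');

const ast = csstree.parse('.a { color: red } /* note */', {
  context: 'stylesheet',   // must exist in parser.context, otherwise throws
  positions: true,         // enables getLocation()/getLocationFromList()
  filename: 'demo.css',    // used as loc.source
  parseValue: true,
  parseAtrulePrelude: true,
  parseRulePrelude: true,
  parseCustomProperty: false,
  onComment(value, loc) {
    console.log('comment:', value.trim(), 'at line', loc.start.line);
  },
  onParseError(error) {
    console.error(error.formattedMessage);
  },
});

console.log(ast.type); // 'StyleSheet'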
|
||||
|
66
node_modules/css-tree/cjs/parser/sequence.cjs
generated
vendored
66
node_modules/css-tree/cjs/parser/sequence.cjs
generated
vendored
@ -3,45 +3,45 @@
|
||||
const types = require('../tokenizer/types.cjs');
|
||||
|
||||
function readSequence(recognizer) {
|
||||
const children = this.createList();
|
||||
let space = false;
|
||||
const context = {
|
||||
recognizer
|
||||
};
|
||||
const children = this.createList();
|
||||
let space = false;
|
||||
const context = {
|
||||
recognizer,
|
||||
};
|
||||
|
||||
while (!this.eof) {
|
||||
switch (this.tokenType) {
|
||||
case types.Comment:
|
||||
this.next();
|
||||
continue;
|
||||
while (!this.eof) {
|
||||
switch (this.tokenType) {
|
||||
case types.Comment:
|
||||
this.next();
|
||||
continue;
|
||||
|
||||
case types.WhiteSpace:
|
||||
space = true;
|
||||
this.next();
|
||||
continue;
|
||||
}
|
||||
|
||||
let child = recognizer.getNode.call(this, context);
|
||||
|
||||
if (child === undefined) {
|
||||
break;
|
||||
}
|
||||
|
||||
if (space) {
|
||||
if (recognizer.onWhiteSpace) {
|
||||
recognizer.onWhiteSpace.call(this, child, children, context);
|
||||
}
|
||||
space = false;
|
||||
}
|
||||
|
||||
children.push(child);
|
||||
case types.WhiteSpace:
|
||||
space = true;
|
||||
this.next();
|
||||
continue;
|
||||
}
|
||||
|
||||
if (space && recognizer.onWhiteSpace) {
|
||||
recognizer.onWhiteSpace.call(this, null, children, context);
|
||||
let child = recognizer.getNode.call(this, context);
|
||||
|
||||
if (child === undefined) {
|
||||
break;
|
||||
}
|
||||
|
||||
return children;
|
||||
if (space) {
|
||||
if (recognizer.onWhiteSpace) {
|
||||
recognizer.onWhiteSpace.call(this, child, children, context);
|
||||
}
|
||||
space = false;
|
||||
}
|
||||
|
||||
children.push(child);
|
||||
}
|
||||
|
||||
if (space && recognizer.onWhiteSpace) {
|
||||
recognizer.onWhiteSpace.call(this, null, children, context);
|
||||
}
|
||||
|
||||
return children;
|
||||
}
|
||||
|
||||
exports.readSequence = readSequence;
|
||||
|
12
node_modules/css-tree/cjs/syntax/atrule/font-face.cjs
generated
vendored
12
node_modules/css-tree/cjs/syntax/atrule/font-face.cjs
generated
vendored
@ -1,12 +1,12 @@
|
||||
'use strict';
|
||||
|
||||
const fontFace = {
|
||||
parse: {
|
||||
prelude: null,
|
||||
block() {
|
||||
return this.Block(true);
|
||||
}
|
||||
}
|
||||
parse: {
|
||||
prelude: null,
|
||||
block() {
|
||||
return this.Block(true);
|
||||
},
|
||||
},
|
||||
};
|
||||
|
||||
module.exports = fontFace;
|
||||
|
48
node_modules/css-tree/cjs/syntax/atrule/import.cjs
generated
vendored
48
node_modules/css-tree/cjs/syntax/atrule/import.cjs
generated
vendored
@ -3,35 +3,37 @@
|
||||
const types = require('../../tokenizer/types.cjs');
|
||||
|
||||
const importAtrule = {
|
||||
parse: {
|
||||
prelude() {
|
||||
const children = this.createList();
|
||||
parse: {
|
||||
prelude() {
|
||||
const children = this.createList();
|
||||
|
||||
this.skipSC();
|
||||
this.skipSC();
|
||||
|
||||
switch (this.tokenType) {
|
||||
case types.String:
|
||||
children.push(this.String());
|
||||
break;
|
||||
switch (this.tokenType) {
|
||||
case types.String:
|
||||
children.push(this.String());
|
||||
break;
|
||||
|
||||
case types.Url:
|
||||
case types.Function:
|
||||
children.push(this.Url());
|
||||
break;
|
||||
case types.Url:
|
||||
case types.Function:
|
||||
children.push(this.Url());
|
||||
break;
|
||||
|
||||
default:
|
||||
this.error('String or url() is expected');
|
||||
}
|
||||
default:
|
||||
this.error('String or url() is expected');
|
||||
}
|
||||
|
||||
if (this.lookupNonWSType(0) === types.Ident ||
|
||||
this.lookupNonWSType(0) === types.LeftParenthesis) {
|
||||
children.push(this.MediaQueryList());
|
||||
}
|
||||
if (
|
||||
this.lookupNonWSType(0) === types.Ident ||
|
||||
this.lookupNonWSType(0) === types.LeftParenthesis
|
||||
) {
|
||||
children.push(this.MediaQueryList());
|
||||
}
|
||||
|
||||
return children;
|
||||
},
|
||||
block: null
|
||||
}
|
||||
return children;
|
||||
},
|
||||
block: null,
|
||||
},
|
||||
};
|
||||
|
||||
module.exports = importAtrule;
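
Reviewer note: the @import hunk is formatting-only. For reference, a sketch of the prelude shape it produces (a String or Url, then an optional MediaQueryList):

// Sketch: inspect the prelude produced by the @import parser above.
const csstree = require('css-tree');

const ast = csstree.parse('@import url("print.css") print, (min-width: 25cm);');

csstree.walk(ast, {
  visit: 'Atrule',
  enter(node) {
    // prelude.children: Url (or String), then MediaQueryList when present
    node.prelude.children.forEach((child) => console.log(child.type));
  },
});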
|
||||
|
10
node_modules/css-tree/cjs/syntax/atrule/index.cjs
generated
vendored
10
node_modules/css-tree/cjs/syntax/atrule/index.cjs
generated
vendored
@ -7,11 +7,11 @@ const page = require('./page.cjs');
|
||||
const supports = require('./supports.cjs');
|
||||
|
||||
const atrule = {
|
||||
'font-face': fontFace,
|
||||
'import': _import,
|
||||
media,
|
||||
page,
|
||||
supports
|
||||
'font-face': fontFace,
|
||||
import: _import,
|
||||
media,
|
||||
page,
|
||||
supports,
|
||||
};
|
||||
|
||||
module.exports = atrule;
|
||||
|
18
node_modules/css-tree/cjs/syntax/atrule/media.cjs
generated
vendored
18
node_modules/css-tree/cjs/syntax/atrule/media.cjs
generated
vendored
@ -1,16 +1,14 @@
|
||||
'use strict';
|
||||
|
||||
const media = {
|
||||
parse: {
|
||||
prelude() {
|
||||
return this.createSingleNodeList(
|
||||
this.MediaQueryList()
|
||||
);
|
||||
},
|
||||
block() {
|
||||
return this.Block(false);
|
||||
}
|
||||
}
|
||||
parse: {
|
||||
prelude() {
|
||||
return this.createSingleNodeList(this.MediaQueryList());
|
||||
},
|
||||
block() {
|
||||
return this.Block(false);
|
||||
},
|
||||
},
|
||||
};
|
||||
|
||||
module.exports = media;
|
||||
|
18
node_modules/css-tree/cjs/syntax/atrule/page.cjs
generated
vendored
18
node_modules/css-tree/cjs/syntax/atrule/page.cjs
generated
vendored
@ -1,16 +1,14 @@
|
||||
'use strict';
|
||||
|
||||
const page = {
|
||||
parse: {
|
||||
prelude() {
|
||||
return this.createSingleNodeList(
|
||||
this.SelectorList()
|
||||
);
|
||||
},
|
||||
block() {
|
||||
return this.Block(true);
|
||||
}
|
||||
}
|
||||
parse: {
|
||||
prelude() {
|
||||
return this.createSingleNodeList(this.SelectorList());
|
||||
},
|
||||
block() {
|
||||
return this.Block(true);
|
||||
},
|
||||
},
|
||||
};
|
||||
|
||||
module.exports = page;
|
||||
|
93
node_modules/css-tree/cjs/syntax/atrule/supports.cjs
generated
vendored
93
node_modules/css-tree/cjs/syntax/atrule/supports.cjs
generated
vendored
@ -3,75 +3,72 @@
|
||||
const types = require('../../tokenizer/types.cjs');
|
||||
|
||||
function consumeRaw() {
|
||||
return this.createSingleNodeList(
|
||||
this.Raw(this.tokenIndex, null, false)
|
||||
);
|
||||
return this.createSingleNodeList(this.Raw(this.tokenIndex, null, false));
|
||||
}
|
||||
|
||||
function parentheses() {
|
||||
this.skipSC();
|
||||
this.skipSC();
|
||||
|
||||
if (this.tokenType === types.Ident &&
|
||||
this.lookupNonWSType(1) === types.Colon) {
|
||||
return this.createSingleNodeList(
|
||||
this.Declaration()
|
||||
);
|
||||
}
|
||||
if (
|
||||
this.tokenType === types.Ident &&
|
||||
this.lookupNonWSType(1) === types.Colon
|
||||
) {
|
||||
return this.createSingleNodeList(this.Declaration());
|
||||
}
|
||||
|
||||
return readSequence.call(this);
|
||||
return readSequence.call(this);
|
||||
}
|
||||
|
||||
function readSequence() {
|
||||
const children = this.createList();
|
||||
let child;
|
||||
const children = this.createList();
|
||||
let child;
|
||||
|
||||
this.skipSC();
|
||||
this.skipSC();
|
||||
|
||||
scan:
|
||||
while (!this.eof) {
|
||||
switch (this.tokenType) {
|
||||
case types.Comment:
|
||||
case types.WhiteSpace:
|
||||
this.next();
|
||||
continue;
|
||||
scan: while (!this.eof) {
|
||||
switch (this.tokenType) {
|
||||
case types.Comment:
|
||||
case types.WhiteSpace:
|
||||
this.next();
|
||||
continue;
|
||||
|
||||
case types.Function:
|
||||
child = this.Function(consumeRaw, this.scope.AtrulePrelude);
|
||||
break;
|
||||
case types.Function:
|
||||
child = this.Function(consumeRaw, this.scope.AtrulePrelude);
|
||||
break;
|
||||
|
||||
case types.Ident:
|
||||
child = this.Identifier();
|
||||
break;
|
||||
case types.Ident:
|
||||
child = this.Identifier();
|
||||
break;
|
||||
|
||||
case types.LeftParenthesis:
|
||||
child = this.Parentheses(parentheses, this.scope.AtrulePrelude);
|
||||
break;
|
||||
case types.LeftParenthesis:
|
||||
child = this.Parentheses(parentheses, this.scope.AtrulePrelude);
|
||||
break;
|
||||
|
||||
default:
|
||||
break scan;
|
||||
}
|
||||
|
||||
children.push(child);
|
||||
default:
|
||||
break scan;
|
||||
}
|
||||
|
||||
return children;
|
||||
children.push(child);
|
||||
}
|
||||
|
||||
return children;
|
||||
}
|
||||
|
||||
const supports = {
|
||||
parse: {
|
||||
prelude() {
|
||||
const children = readSequence.call(this);
|
||||
parse: {
|
||||
prelude() {
|
||||
const children = readSequence.call(this);
|
||||
|
||||
if (this.getFirstListNode(children) === null) {
|
||||
this.error('Condition is expected');
|
||||
}
|
||||
if (this.getFirstListNode(children) === null) {
|
||||
this.error('Condition is expected');
|
||||
}
|
||||
|
||||
return children;
|
||||
},
|
||||
block() {
|
||||
return this.Block(false);
|
||||
}
|
||||
}
|
||||
return children;
|
||||
},
|
||||
block() {
|
||||
return this.Block(false);
|
||||
},
|
||||
},
|
||||
};
|
||||
|
||||
module.exports = supports;
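
Reviewer note: again formatting-only for @supports. A quick sketch of what the prelude reader above yields for a typical condition:

// Sketch: the @supports prelude is a sequence of Identifier / Parentheses
// (and Function) nodes; declarations inside parentheses are parsed eagerly.
const csstree = require('css-tree');

const ast = csstree.parse('@supports (display: grid) and (gap: 1rem) { a { color: red } }');

csstree.walk(ast, {
  visit: 'Atrule',
  enter(node) {
    node.prelude.children.forEach((child) => console.log(child.type));
    // expected shape: Parentheses, Identifier ("and"), Parentheses
  },
});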
|
||||
|
2
node_modules/css-tree/cjs/syntax/config/generator.cjs
generated
vendored
2
node_modules/css-tree/cjs/syntax/config/generator.cjs
generated
vendored
@ -3,7 +3,7 @@
|
||||
const indexGenerate = require('../node/index-generate.cjs');
|
||||
|
||||
const config = {
|
||||
node: indexGenerate
|
||||
node: indexGenerate,
|
||||
};
|
||||
|
||||
module.exports = config;
|
||||
|
6
node_modules/css-tree/cjs/syntax/config/lexer.cjs
generated
vendored
6
node_modules/css-tree/cjs/syntax/config/lexer.cjs
generated
vendored
@ -4,9 +4,9 @@ const data = require('../../data.cjs');
|
||||
const index = require('../node/index.cjs');
|
||||
|
||||
const lexerConfig = {
|
||||
generic: true,
|
||||
...data,
|
||||
node: index
|
||||
generic: true,
|
||||
...data,
|
||||
node: index,
|
||||
};
|
||||
|
||||
module.exports = lexerConfig;
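
Reviewer note: config/lexer.cjs just gains trailing commas. The mix() logic in the next file is what powers syntax extension; a hedged sketch using the public fork() entry point (the custom property syntax is made up for illustration):

// Sketch: fork() merges a user config into the default one via mix().
const csstree = require('css-tree');

const custom = csstree.fork({
  properties: {
    // hypothetical property, for illustration only
    '-x-gap': '<length> | normal',
  },
});

const value = custom.parse('12px', { context: 'value' });
const match = custom.lexer.matchProperty('-x-gap', value);
console.log(match.error === null); // true: 12px matches <length>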
|
||||
|
199
node_modules/css-tree/cjs/syntax/config/mix.cjs
generated
vendored
199
node_modules/css-tree/cjs/syntax/config/mix.cjs
generated
vendored
@ -2,139 +2,136 @@
|
||||
|
||||
const { hasOwnProperty } = Object.prototype;
|
||||
const shape = {
|
||||
generic: true,
|
||||
types: appendOrAssign,
|
||||
atrules: {
|
||||
prelude: appendOrAssignOrNull,
|
||||
descriptors: appendOrAssignOrNull
|
||||
},
|
||||
properties: appendOrAssign,
|
||||
parseContext: assign,
|
||||
scope: deepAssign,
|
||||
atrule: ['parse'],
|
||||
pseudo: ['parse'],
|
||||
node: ['name', 'structure', 'parse', 'generate', 'walkContext']
|
||||
generic: true,
|
||||
types: appendOrAssign,
|
||||
atrules: {
|
||||
prelude: appendOrAssignOrNull,
|
||||
descriptors: appendOrAssignOrNull,
|
||||
},
|
||||
properties: appendOrAssign,
|
||||
parseContext: assign,
|
||||
scope: deepAssign,
|
||||
atrule: ['parse'],
|
||||
pseudo: ['parse'],
|
||||
node: ['name', 'structure', 'parse', 'generate', 'walkContext'],
|
||||
};
|
||||
|
||||
function isObject(value) {
|
||||
return value && value.constructor === Object;
|
||||
return value && value.constructor === Object;
|
||||
}
|
||||
|
||||
function copy(value) {
|
||||
return isObject(value)
|
||||
? { ...value }
|
||||
: value;
|
||||
return isObject(value) ? { ...value } : value;
|
||||
}
|
||||
|
||||
function assign(dest, src) {
|
||||
return Object.assign(dest, src);
|
||||
return Object.assign(dest, src);
|
||||
}
|
||||
|
||||
function deepAssign(dest, src) {
|
||||
for (const key in src) {
|
||||
if (hasOwnProperty.call(src, key)) {
|
||||
if (isObject(dest[key])) {
|
||||
deepAssign(dest[key], src[key]);
|
||||
} else {
|
||||
dest[key] = copy(src[key]);
|
||||
}
|
||||
}
|
||||
for (const key in src) {
|
||||
if (hasOwnProperty.call(src, key)) {
|
||||
if (isObject(dest[key])) {
|
||||
deepAssign(dest[key], src[key]);
|
||||
} else {
|
||||
dest[key] = copy(src[key]);
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
return dest;
|
||||
return dest;
|
||||
}
|
||||
|
||||
function append(a, b) {
|
||||
if (typeof b === 'string' && /^\s*\|/.test(b)) {
|
||||
return typeof a === 'string'
|
||||
? a + b
|
||||
: b.replace(/^\s*\|\s*/, '');
|
||||
}
|
||||
if (typeof b === 'string' && /^\s*\|/.test(b)) {
|
||||
return typeof a === 'string' ? a + b : b.replace(/^\s*\|\s*/, '');
|
||||
}
|
||||
|
||||
return b || null;
|
||||
return b || null;
|
||||
}
|
||||
|
||||
function appendOrAssign(a, b) {
|
||||
if (typeof b === 'string') {
|
||||
return append(a, b);
|
||||
}
|
||||
if (typeof b === 'string') {
|
||||
return append(a, b);
|
||||
}
|
||||
|
||||
const result = { ...a };
|
||||
for (let key in b) {
|
||||
if (hasOwnProperty.call(b, key)) {
|
||||
result[key] = append(hasOwnProperty.call(a, key) ? a[key] : undefined, b[key]);
|
||||
}
|
||||
const result = { ...a };
|
||||
for (let key in b) {
|
||||
if (hasOwnProperty.call(b, key)) {
|
||||
result[key] = append(
|
||||
hasOwnProperty.call(a, key) ? a[key] : undefined,
|
||||
b[key]
|
||||
);
|
||||
}
|
||||
}
|
||||
|
||||
return result;
|
||||
return result;
|
||||
}
|
||||
|
||||
function appendOrAssignOrNull(a, b) {
|
||||
const result = appendOrAssign(a, b);
|
||||
const result = appendOrAssign(a, b);
|
||||
|
||||
return !isObject(result) || Object.keys(result).length
|
||||
? result
|
||||
: null;
|
||||
return !isObject(result) || Object.keys(result).length ? result : null;
|
||||
}
|
||||
|
||||
function mix(dest, src, shape) {
|
||||
for (const key in shape) {
|
||||
if (hasOwnProperty.call(shape, key) === false) {
|
||||
continue;
|
||||
}
|
||||
|
||||
if (shape[key] === true) {
|
||||
if (hasOwnProperty.call(src, key)) {
|
||||
dest[key] = copy(src[key]);
|
||||
}
|
||||
} else if (shape[key]) {
|
||||
if (typeof shape[key] === 'function') {
|
||||
const fn = shape[key];
|
||||
dest[key] = fn({}, dest[key]);
|
||||
dest[key] = fn(dest[key] || {}, src[key]);
|
||||
} else if (isObject(shape[key])) {
|
||||
const result = {};
|
||||
|
||||
for (let name in dest[key]) {
|
||||
result[name] = mix({}, dest[key][name], shape[key]);
|
||||
}
|
||||
|
||||
for (let name in src[key]) {
|
||||
result[name] = mix(result[name] || {}, src[key][name], shape[key]);
|
||||
}
|
||||
|
||||
dest[key] = result;
|
||||
} else if (Array.isArray(shape[key])) {
|
||||
const res = {};
|
||||
const innerShape = shape[key].reduce(function(s, k) {
|
||||
s[k] = true;
|
||||
return s;
|
||||
}, {});
|
||||
|
||||
for (const [name, value] of Object.entries(dest[key] || {})) {
|
||||
res[name] = {};
|
||||
if (value) {
|
||||
mix(res[name], value, innerShape);
|
||||
}
|
||||
}
|
||||
|
||||
for (const name in src[key]) {
|
||||
if (hasOwnProperty.call(src[key], name)) {
|
||||
if (!res[name]) {
|
||||
res[name] = {};
|
||||
}
|
||||
|
||||
if (src[key] && src[key][name]) {
|
||||
mix(res[name], src[key][name], innerShape);
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
dest[key] = res;
|
||||
}
|
||||
}
|
||||
for (const key in shape) {
|
||||
if (hasOwnProperty.call(shape, key) === false) {
|
||||
continue;
|
||||
}
|
||||
return dest;
|
||||
|
||||
if (shape[key] === true) {
|
||||
if (hasOwnProperty.call(src, key)) {
|
||||
dest[key] = copy(src[key]);
|
||||
}
|
||||
} else if (shape[key]) {
|
||||
if (typeof shape[key] === 'function') {
|
||||
const fn = shape[key];
|
||||
dest[key] = fn({}, dest[key]);
|
||||
dest[key] = fn(dest[key] || {}, src[key]);
|
||||
} else if (isObject(shape[key])) {
|
||||
const result = {};
|
||||
|
||||
for (let name in dest[key]) {
|
||||
result[name] = mix({}, dest[key][name], shape[key]);
|
||||
}
|
||||
|
||||
for (let name in src[key]) {
|
||||
result[name] = mix(result[name] || {}, src[key][name], shape[key]);
|
||||
}
|
||||
|
||||
dest[key] = result;
|
||||
} else if (Array.isArray(shape[key])) {
|
||||
const res = {};
|
||||
const innerShape = shape[key].reduce(function (s, k) {
|
||||
s[k] = true;
|
||||
return s;
|
||||
}, {});
|
||||
|
||||
for (const [name, value] of Object.entries(dest[key] || {})) {
|
||||
res[name] = {};
|
||||
if (value) {
|
||||
mix(res[name], value, innerShape);
|
||||
}
|
||||
}
|
||||
|
||||
for (const name in src[key]) {
|
||||
if (hasOwnProperty.call(src[key], name)) {
|
||||
if (!res[name]) {
|
||||
res[name] = {};
|
||||
}
|
||||
|
||||
if (src[key] && src[key][name]) {
|
||||
mix(res[name], src[key][name], innerShape);
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
dest[key] = res;
|
||||
}
|
||||
}
|
||||
}
|
||||
return dest;
|
||||
}
|
||||
|
||||
const mix$1 = (dest, src) => mix(dest, src, shape);
|
||||
|
18
node_modules/css-tree/cjs/syntax/config/parser-selector.cjs
generated
vendored
@ -5,15 +5,15 @@ const indexParseSelector = require('../node/index-parse-selector.cjs');
const selector = require('../scope/selector.cjs');

const config = {
  parseContext: {
    default: 'SelectorList',
    selectorList: 'SelectorList',
    selector: 'Selector',
  },
  scope: { Selector: selector },
  atrule: {},
  pseudo: index,
  node: indexParseSelector,
};

module.exports = config;

44
node_modules/css-tree/cjs/syntax/config/parser.cjs
generated
vendored
@ -6,29 +6,29 @@ const index$2 = require('../pseudo/index.cjs');
const indexParse = require('../node/index-parse.cjs');

const config = {
  parseContext: {
    default: 'StyleSheet',
    stylesheet: 'StyleSheet',
    atrule: 'Atrule',
    atrulePrelude(options) {
      return this.AtrulePrelude(options.atrule ? String(options.atrule) : null);
    },
    mediaQueryList: 'MediaQueryList',
    mediaQuery: 'MediaQuery',
    rule: 'Rule',
    selectorList: 'SelectorList',
    selector: 'Selector',
    block() {
      return this.Block(true);
    },
    declarationList: 'DeclarationList',
    declaration: 'Declaration',
    value: 'Value',
  },
  scope: index,
  atrule: index$1,
  pseudo: index$2,
  node: indexParse,
};

module.exports = config;

2
node_modules/css-tree/cjs/syntax/config/walker.cjs
generated
vendored
@ -3,7 +3,7 @@
const index = require('../node/index.cjs');

const config = {
  node: index,
};

module.exports = config;

72
node_modules/css-tree/cjs/syntax/create.cjs
generated
vendored
@ -9,48 +9,52 @@ const Lexer = require('../lexer/Lexer.cjs');
const mix = require('./config/mix.cjs');

function createSyntax(config) {
  const parse = create.createParser(config);
  const walk = create$1.createWalker(config);
  const generate = create$2.createGenerator(config);
  const { fromPlainObject, toPlainObject } = create$3.createConvertor(walk);

  const syntax = {
    lexer: null,
    createLexer: (config) =>
      new Lexer.Lexer(config, syntax, syntax.lexer.structure),

    tokenize: index.tokenize,
    parse,
    generate,

    walk,
    find: walk.find,
    findLast: walk.findLast,
    findAll: walk.findAll,

    fromPlainObject,
    toPlainObject,

    fork(extension) {
      const base = mix({}, config); // copy of config

      return createSyntax(
        typeof extension === 'function' ?
          extension(base, Object.assign)
        : mix(base, extension)
      );
    },
  };

  syntax.lexer = new Lexer.Lexer(
    {
      generic: true,
      types: config.types,
      atrules: config.atrules,
      properties: config.properties,
      node: config.node,
    },
    syntax
  );

  return syntax;
}
const createSyntax$1 = (config) => createSyntax(mix({}, config));

module.exports = createSyntax$1;

4
node_modules/css-tree/cjs/syntax/function/expression.cjs
generated
vendored
@ -3,9 +3,7 @@
// legacy IE function
// expression( <any-value> )
function expressionFn() {
  return this.createSingleNodeList(this.Raw(this.tokenIndex, null, false));
}

module.exports = expressionFn;

55
node_modules/css-tree/cjs/syntax/function/var.cjs
generated
vendored
@ -4,40 +4,45 @@ const types = require('../../tokenizer/types.cjs');
// var( <ident> , <value>? )
function varFn() {
  const children = this.createList();

  this.skipSC();

  // NOTE: Don't check more than a first argument is an ident, rest checks are for lexer
  children.push(this.Identifier());

  this.skipSC();

  if (this.tokenType === types.Comma) {
    children.push(this.Operator());

    const startIndex = this.tokenIndex;
    const value =
      this.parseCustomProperty ?
        this.Value(null)
      : this.Raw(
          this.tokenIndex,
          this.consumeUntilExclamationMarkOrSemicolon,
          false
        );

    if (value.type === 'Value' && value.children.isEmpty) {
      for (let offset = startIndex - this.tokenIndex; offset <= 0; offset++) {
        if (this.lookupType(offset) === types.WhiteSpace) {
          value.children.appendData({
            type: 'WhiteSpace',
            loc: null,
            value: ' ',
          });
          break;
        }
      }
    }

    children.push(value);
  }

  return children;
}

module.exports = varFn;

6
node_modules/css-tree/cjs/syntax/index.cjs
generated
vendored
@ -6,9 +6,9 @@ const parser = require('./config/parser.cjs');
const walker = require('./config/walker.cjs');

const syntax = create({
  ...lexer,
  ...parser,
  ...walker,
});

module.exports = syntax;

437
node_modules/css-tree/cjs/syntax/node/AnPlusB.cjs
generated
vendored
@ -3,288 +3,293 @@
|
||||
const types = require('../../tokenizer/types.cjs');
|
||||
const charCodeDefinitions = require('../../tokenizer/char-code-definitions.cjs');
|
||||
|
||||
const PLUSSIGN = 0x002B; // U+002B PLUS SIGN (+)
|
||||
const HYPHENMINUS = 0x002D; // U+002D HYPHEN-MINUS (-)
|
||||
const N = 0x006E; // U+006E LATIN SMALL LETTER N (n)
|
||||
const PLUSSIGN = 0x002b; // U+002B PLUS SIGN (+)
|
||||
const HYPHENMINUS = 0x002d; // U+002D HYPHEN-MINUS (-)
|
||||
const N = 0x006e; // U+006E LATIN SMALL LETTER N (n)
|
||||
const DISALLOW_SIGN = true;
|
||||
const ALLOW_SIGN = false;
|
||||
|
||||
function checkInteger(offset, disallowSign) {
|
||||
let pos = this.tokenStart + offset;
|
||||
const code = this.charCodeAt(pos);
|
||||
let pos = this.tokenStart + offset;
|
||||
const code = this.charCodeAt(pos);
|
||||
|
||||
if (code === PLUSSIGN || code === HYPHENMINUS) {
|
||||
if (disallowSign) {
|
||||
this.error('Number sign is not allowed');
|
||||
}
|
||||
pos++;
|
||||
if (code === PLUSSIGN || code === HYPHENMINUS) {
|
||||
if (disallowSign) {
|
||||
this.error('Number sign is not allowed');
|
||||
}
|
||||
pos++;
|
||||
}
|
||||
|
||||
for (; pos < this.tokenEnd; pos++) {
|
||||
if (!charCodeDefinitions.isDigit(this.charCodeAt(pos))) {
|
||||
this.error('Integer is expected', pos);
|
||||
}
|
||||
for (; pos < this.tokenEnd; pos++) {
|
||||
if (!charCodeDefinitions.isDigit(this.charCodeAt(pos))) {
|
||||
this.error('Integer is expected', pos);
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
function checkTokenIsInteger(disallowSign) {
|
||||
return checkInteger.call(this, 0, disallowSign);
|
||||
return checkInteger.call(this, 0, disallowSign);
|
||||
}
|
||||
|
||||
function expectCharCode(offset, code) {
|
||||
if (!this.cmpChar(this.tokenStart + offset, code)) {
|
||||
let msg = '';
|
||||
if (!this.cmpChar(this.tokenStart + offset, code)) {
|
||||
let msg = '';
|
||||
|
||||
switch (code) {
|
||||
case N:
|
||||
msg = 'N is expected';
|
||||
break;
|
||||
case HYPHENMINUS:
|
||||
msg = 'HyphenMinus is expected';
|
||||
break;
|
||||
}
|
||||
|
||||
this.error(msg, this.tokenStart + offset);
|
||||
switch (code) {
|
||||
case N:
|
||||
msg = 'N is expected';
|
||||
break;
|
||||
case HYPHENMINUS:
|
||||
msg = 'HyphenMinus is expected';
|
||||
break;
|
||||
}
|
||||
|
||||
this.error(msg, this.tokenStart + offset);
|
||||
}
|
||||
}
|
||||
|
||||
// ... <signed-integer>
|
||||
// ... ['+' | '-'] <signless-integer>
|
||||
function consumeB() {
|
||||
let offset = 0;
|
||||
let sign = 0;
|
||||
let type = this.tokenType;
|
||||
let offset = 0;
|
||||
let sign = 0;
|
||||
let type = this.tokenType;
|
||||
|
||||
while (type === types.WhiteSpace || type === types.Comment) {
|
||||
while (type === types.WhiteSpace || type === types.Comment) {
|
||||
type = this.lookupType(++offset);
|
||||
}
|
||||
|
||||
if (type !== types.Number) {
|
||||
if (this.isDelim(PLUSSIGN, offset) || this.isDelim(HYPHENMINUS, offset)) {
|
||||
sign = this.isDelim(PLUSSIGN, offset) ? PLUSSIGN : HYPHENMINUS;
|
||||
|
||||
do {
|
||||
type = this.lookupType(++offset);
|
||||
}
|
||||
} while (type === types.WhiteSpace || type === types.Comment);
|
||||
|
||||
if (type !== types.Number) {
|
||||
if (this.isDelim(PLUSSIGN, offset) ||
|
||||
this.isDelim(HYPHENMINUS, offset)) {
|
||||
sign = this.isDelim(PLUSSIGN, offset) ? PLUSSIGN : HYPHENMINUS;
|
||||
|
||||
do {
|
||||
type = this.lookupType(++offset);
|
||||
} while (type === types.WhiteSpace || type === types.Comment);
|
||||
|
||||
if (type !== types.Number) {
|
||||
this.skip(offset);
|
||||
checkTokenIsInteger.call(this, DISALLOW_SIGN);
|
||||
}
|
||||
} else {
|
||||
return null;
|
||||
}
|
||||
}
|
||||
|
||||
if (offset > 0) {
|
||||
if (type !== types.Number) {
|
||||
this.skip(offset);
|
||||
checkTokenIsInteger.call(this, DISALLOW_SIGN);
|
||||
}
|
||||
} else {
|
||||
return null;
|
||||
}
|
||||
}
|
||||
|
||||
if (sign === 0) {
|
||||
type = this.charCodeAt(this.tokenStart);
|
||||
if (type !== PLUSSIGN && type !== HYPHENMINUS) {
|
||||
this.error('Number sign is expected');
|
||||
}
|
||||
if (offset > 0) {
|
||||
this.skip(offset);
|
||||
}
|
||||
|
||||
if (sign === 0) {
|
||||
type = this.charCodeAt(this.tokenStart);
|
||||
if (type !== PLUSSIGN && type !== HYPHENMINUS) {
|
||||
this.error('Number sign is expected');
|
||||
}
|
||||
}
|
||||
|
||||
checkTokenIsInteger.call(this, sign !== 0);
|
||||
return sign === HYPHENMINUS ? '-' + this.consume(types.Number) : this.consume(types.Number);
|
||||
checkTokenIsInteger.call(this, sign !== 0);
|
||||
return sign === HYPHENMINUS ?
|
||||
'-' + this.consume(types.Number)
|
||||
: this.consume(types.Number);
|
||||
}
|
||||
|
||||
// An+B microsyntax https://www.w3.org/TR/css-syntax-3/#anb
|
||||
const name = 'AnPlusB';
|
||||
const structure = {
|
||||
a: [String, null],
|
||||
b: [String, null]
|
||||
a: [String, null],
|
||||
b: [String, null],
|
||||
};
|
||||
|
||||
function parse() {
|
||||
/* eslint-disable brace-style*/
|
||||
const start = this.tokenStart;
|
||||
let a = null;
|
||||
let b = null;
|
||||
/* eslint-disable brace-style*/
|
||||
const start = this.tokenStart;
|
||||
let a = null;
|
||||
let b = null;
|
||||
|
||||
// <integer>
|
||||
if (this.tokenType === types.Number) {
|
||||
checkTokenIsInteger.call(this, ALLOW_SIGN);
|
||||
b = this.consume(types.Number);
|
||||
// <integer>
|
||||
if (this.tokenType === types.Number) {
|
||||
checkTokenIsInteger.call(this, ALLOW_SIGN);
|
||||
b = this.consume(types.Number);
|
||||
}
|
||||
|
||||
// -n
|
||||
// -n <signed-integer>
|
||||
// -n ['+' | '-'] <signless-integer>
|
||||
// -n- <signless-integer>
|
||||
// <dashndashdigit-ident>
|
||||
else if (
|
||||
this.tokenType === types.Ident &&
|
||||
this.cmpChar(this.tokenStart, HYPHENMINUS)
|
||||
) {
|
||||
a = '-1';
|
||||
|
||||
expectCharCode.call(this, 1, N);
|
||||
|
||||
switch (this.tokenEnd - this.tokenStart) {
|
||||
// -n
|
||||
// -n <signed-integer>
|
||||
// -n ['+' | '-'] <signless-integer>
|
||||
case 2:
|
||||
this.next();
|
||||
b = consumeB.call(this);
|
||||
break;
|
||||
|
||||
// -n- <signless-integer>
|
||||
case 3:
|
||||
expectCharCode.call(this, 2, HYPHENMINUS);
|
||||
|
||||
this.next();
|
||||
this.skipSC();
|
||||
|
||||
checkTokenIsInteger.call(this, DISALLOW_SIGN);
|
||||
|
||||
b = '-' + this.consume(types.Number);
|
||||
break;
|
||||
|
||||
// <dashndashdigit-ident>
|
||||
default:
|
||||
expectCharCode.call(this, 2, HYPHENMINUS);
|
||||
checkInteger.call(this, 3, DISALLOW_SIGN);
|
||||
this.next();
|
||||
|
||||
b = this.substrToCursor(start + 2);
|
||||
}
|
||||
}
|
||||
|
||||
// '+'? n
|
||||
// '+'? n <signed-integer>
|
||||
// '+'? n ['+' | '-'] <signless-integer>
|
||||
// '+'? n- <signless-integer>
|
||||
// '+'? <ndashdigit-ident>
|
||||
else if (
|
||||
this.tokenType === types.Ident ||
|
||||
(this.isDelim(PLUSSIGN) && this.lookupType(1) === types.Ident)
|
||||
) {
|
||||
let sign = 0;
|
||||
a = '1';
|
||||
|
||||
// just ignore a plus
|
||||
if (this.isDelim(PLUSSIGN)) {
|
||||
sign = 1;
|
||||
this.next();
|
||||
}
|
||||
|
||||
// -n
|
||||
// -n <signed-integer>
|
||||
// -n ['+' | '-'] <signless-integer>
|
||||
// -n- <signless-integer>
|
||||
// <dashndashdigit-ident>
|
||||
else if (this.tokenType === types.Ident && this.cmpChar(this.tokenStart, HYPHENMINUS)) {
|
||||
a = '-1';
|
||||
expectCharCode.call(this, 0, N);
|
||||
|
||||
expectCharCode.call(this, 1, N);
|
||||
switch (this.tokenEnd - this.tokenStart) {
|
||||
// '+'? n
|
||||
// '+'? n <signed-integer>
|
||||
// '+'? n ['+' | '-'] <signless-integer>
|
||||
case 1:
|
||||
this.next();
|
||||
b = consumeB.call(this);
|
||||
break;
|
||||
|
||||
switch (this.tokenEnd - this.tokenStart) {
|
||||
// -n
|
||||
// -n <signed-integer>
|
||||
// -n ['+' | '-'] <signless-integer>
|
||||
case 2:
|
||||
this.next();
|
||||
b = consumeB.call(this);
|
||||
break;
|
||||
// '+'? n- <signless-integer>
|
||||
case 2:
|
||||
expectCharCode.call(this, 1, HYPHENMINUS);
|
||||
|
||||
// -n- <signless-integer>
|
||||
case 3:
|
||||
expectCharCode.call(this, 2, HYPHENMINUS);
|
||||
this.next();
|
||||
this.skipSC();
|
||||
|
||||
this.next();
|
||||
this.skipSC();
|
||||
checkTokenIsInteger.call(this, DISALLOW_SIGN);
|
||||
|
||||
checkTokenIsInteger.call(this, DISALLOW_SIGN);
|
||||
b = '-' + this.consume(types.Number);
|
||||
break;
|
||||
|
||||
b = '-' + this.consume(types.Number);
|
||||
break;
|
||||
// '+'? <ndashdigit-ident>
|
||||
default:
|
||||
expectCharCode.call(this, 1, HYPHENMINUS);
|
||||
checkInteger.call(this, 2, DISALLOW_SIGN);
|
||||
this.next();
|
||||
|
||||
// <dashndashdigit-ident>
|
||||
default:
|
||||
expectCharCode.call(this, 2, HYPHENMINUS);
|
||||
checkInteger.call(this, 3, DISALLOW_SIGN);
|
||||
this.next();
|
||||
b = this.substrToCursor(start + sign + 1);
|
||||
}
|
||||
}
|
||||
|
||||
b = this.substrToCursor(start + 2);
|
||||
}
|
||||
// <ndashdigit-dimension>
|
||||
// <ndash-dimension> <signless-integer>
|
||||
// <n-dimension>
|
||||
// <n-dimension> <signed-integer>
|
||||
// <n-dimension> ['+' | '-'] <signless-integer>
|
||||
else if (this.tokenType === types.Dimension) {
|
||||
const code = this.charCodeAt(this.tokenStart);
|
||||
const sign = code === PLUSSIGN || code === HYPHENMINUS;
|
||||
let i = this.tokenStart + sign;
|
||||
|
||||
for (; i < this.tokenEnd; i++) {
|
||||
if (!charCodeDefinitions.isDigit(this.charCodeAt(i))) {
|
||||
break;
|
||||
}
|
||||
}
|
||||
|
||||
// '+'? n
|
||||
// '+'? n <signed-integer>
|
||||
// '+'? n ['+' | '-'] <signless-integer>
|
||||
// '+'? n- <signless-integer>
|
||||
// '+'? <ndashdigit-ident>
|
||||
else if (this.tokenType === types.Ident || (this.isDelim(PLUSSIGN) && this.lookupType(1) === types.Ident)) {
|
||||
let sign = 0;
|
||||
a = '1';
|
||||
|
||||
// just ignore a plus
|
||||
if (this.isDelim(PLUSSIGN)) {
|
||||
sign = 1;
|
||||
this.next();
|
||||
}
|
||||
|
||||
expectCharCode.call(this, 0, N);
|
||||
|
||||
switch (this.tokenEnd - this.tokenStart) {
|
||||
// '+'? n
|
||||
// '+'? n <signed-integer>
|
||||
// '+'? n ['+' | '-'] <signless-integer>
|
||||
case 1:
|
||||
this.next();
|
||||
b = consumeB.call(this);
|
||||
break;
|
||||
|
||||
// '+'? n- <signless-integer>
|
||||
case 2:
|
||||
expectCharCode.call(this, 1, HYPHENMINUS);
|
||||
|
||||
this.next();
|
||||
this.skipSC();
|
||||
|
||||
checkTokenIsInteger.call(this, DISALLOW_SIGN);
|
||||
|
||||
b = '-' + this.consume(types.Number);
|
||||
break;
|
||||
|
||||
// '+'? <ndashdigit-ident>
|
||||
default:
|
||||
expectCharCode.call(this, 1, HYPHENMINUS);
|
||||
checkInteger.call(this, 2, DISALLOW_SIGN);
|
||||
this.next();
|
||||
|
||||
b = this.substrToCursor(start + sign + 1);
|
||||
}
|
||||
if (i === this.tokenStart + sign) {
|
||||
this.error('Integer is expected', this.tokenStart + sign);
|
||||
}
|
||||
|
||||
// <ndashdigit-dimension>
|
||||
// <ndash-dimension> <signless-integer>
|
||||
expectCharCode.call(this, i - this.tokenStart, N);
|
||||
a = this.substring(start, i);
|
||||
|
||||
// <n-dimension>
|
||||
// <n-dimension> <signed-integer>
|
||||
// <n-dimension> ['+' | '-'] <signless-integer>
|
||||
else if (this.tokenType === types.Dimension) {
|
||||
const code = this.charCodeAt(this.tokenStart);
|
||||
const sign = code === PLUSSIGN || code === HYPHENMINUS;
|
||||
let i = this.tokenStart + sign;
|
||||
|
||||
for (; i < this.tokenEnd; i++) {
|
||||
if (!charCodeDefinitions.isDigit(this.charCodeAt(i))) {
|
||||
break;
|
||||
}
|
||||
}
|
||||
|
||||
if (i === this.tokenStart + sign) {
|
||||
this.error('Integer is expected', this.tokenStart + sign);
|
||||
}
|
||||
|
||||
expectCharCode.call(this, i - this.tokenStart, N);
|
||||
a = this.substring(start, i);
|
||||
|
||||
// <n-dimension>
|
||||
// <n-dimension> <signed-integer>
|
||||
// <n-dimension> ['+' | '-'] <signless-integer>
|
||||
if (i + 1 === this.tokenEnd) {
|
||||
this.next();
|
||||
b = consumeB.call(this);
|
||||
} else {
|
||||
expectCharCode.call(this, i - this.tokenStart + 1, HYPHENMINUS);
|
||||
|
||||
// <ndash-dimension> <signless-integer>
|
||||
if (i + 2 === this.tokenEnd) {
|
||||
this.next();
|
||||
this.skipSC();
|
||||
checkTokenIsInteger.call(this, DISALLOW_SIGN);
|
||||
b = '-' + this.consume(types.Number);
|
||||
}
|
||||
// <ndashdigit-dimension>
|
||||
else {
|
||||
checkInteger.call(this, i - this.tokenStart + 2, DISALLOW_SIGN);
|
||||
this.next();
|
||||
b = this.substrToCursor(i + 1);
|
||||
}
|
||||
}
|
||||
if (i + 1 === this.tokenEnd) {
|
||||
this.next();
|
||||
b = consumeB.call(this);
|
||||
} else {
|
||||
this.error();
|
||||
}
|
||||
expectCharCode.call(this, i - this.tokenStart + 1, HYPHENMINUS);
|
||||
|
||||
if (a !== null && a.charCodeAt(0) === PLUSSIGN) {
|
||||
a = a.substr(1);
|
||||
// <ndash-dimension> <signless-integer>
|
||||
if (i + 2 === this.tokenEnd) {
|
||||
this.next();
|
||||
this.skipSC();
|
||||
checkTokenIsInteger.call(this, DISALLOW_SIGN);
|
||||
b = '-' + this.consume(types.Number);
|
||||
}
|
||||
// <ndashdigit-dimension>
|
||||
else {
|
||||
checkInteger.call(this, i - this.tokenStart + 2, DISALLOW_SIGN);
|
||||
this.next();
|
||||
b = this.substrToCursor(i + 1);
|
||||
}
|
||||
}
|
||||
} else {
|
||||
this.error();
|
||||
}
|
||||
|
||||
if (b !== null && b.charCodeAt(0) === PLUSSIGN) {
|
||||
b = b.substr(1);
|
||||
}
|
||||
if (a !== null && a.charCodeAt(0) === PLUSSIGN) {
|
||||
a = a.substr(1);
|
||||
}
|
||||
|
||||
return {
|
||||
type: 'AnPlusB',
|
||||
loc: this.getLocation(start, this.tokenStart),
|
||||
a,
|
||||
b
|
||||
};
|
||||
if (b !== null && b.charCodeAt(0) === PLUSSIGN) {
|
||||
b = b.substr(1);
|
||||
}
|
||||
|
||||
return {
|
||||
type: 'AnPlusB',
|
||||
loc: this.getLocation(start, this.tokenStart),
|
||||
a,
|
||||
b,
|
||||
};
|
||||
}
|
||||
|
||||
function generate(node) {
|
||||
if (node.a) {
|
||||
const a =
|
||||
node.a === '+1' && 'n' ||
|
||||
node.a === '1' && 'n' ||
|
||||
node.a === '-1' && '-n' ||
|
||||
node.a + 'n';
|
||||
if (node.a) {
|
||||
const a =
|
||||
(node.a === '+1' && 'n') ||
|
||||
(node.a === '1' && 'n') ||
|
||||
(node.a === '-1' && '-n') ||
|
||||
node.a + 'n';
|
||||
|
||||
if (node.b) {
|
||||
const b = node.b[0] === '-' || node.b[0] === '+'
|
||||
? node.b
|
||||
: '+' + node.b;
|
||||
this.tokenize(a + b);
|
||||
} else {
|
||||
this.tokenize(a);
|
||||
}
|
||||
if (node.b) {
|
||||
const b = node.b[0] === '-' || node.b[0] === '+' ? node.b : '+' + node.b;
|
||||
this.tokenize(a + b);
|
||||
} else {
|
||||
this.tokenize(node.b);
|
||||
this.tokenize(a);
|
||||
}
|
||||
} else {
|
||||
this.tokenize(node.b);
|
||||
}
|
||||
}
|
||||
|
||||
exports.generate = generate;
|
||||
|
141
node_modules/css-tree/cjs/syntax/node/Atrule.cjs
generated
vendored
@ -3,97 +3,106 @@
|
||||
const types = require('../../tokenizer/types.cjs');
|
||||
|
||||
function consumeRaw(startToken) {
|
||||
return this.Raw(startToken, this.consumeUntilLeftCurlyBracketOrSemicolon, true);
|
||||
return this.Raw(
|
||||
startToken,
|
||||
this.consumeUntilLeftCurlyBracketOrSemicolon,
|
||||
true
|
||||
);
|
||||
}
|
||||
|
||||
function isDeclarationBlockAtrule() {
|
||||
for (let offset = 1, type; type = this.lookupType(offset); offset++) {
|
||||
if (type === types.RightCurlyBracket) {
|
||||
return true;
|
||||
}
|
||||
|
||||
if (type === types.LeftCurlyBracket ||
|
||||
type === types.AtKeyword) {
|
||||
return false;
|
||||
}
|
||||
for (let offset = 1, type; (type = this.lookupType(offset)); offset++) {
|
||||
if (type === types.RightCurlyBracket) {
|
||||
return true;
|
||||
}
|
||||
|
||||
return false;
|
||||
}
|
||||
if (type === types.LeftCurlyBracket || type === types.AtKeyword) {
|
||||
return false;
|
||||
}
|
||||
}
|
||||
|
||||
return false;
|
||||
}
|
||||
|
||||
const name = 'Atrule';
|
||||
const walkContext = 'atrule';
|
||||
const structure = {
|
||||
name: String,
|
||||
prelude: ['AtrulePrelude', 'Raw', null],
|
||||
block: ['Block', null]
|
||||
name: String,
|
||||
prelude: ['AtrulePrelude', 'Raw', null],
|
||||
block: ['Block', null],
|
||||
};
|
||||
|
||||
function parse() {
|
||||
const start = this.tokenStart;
|
||||
let name;
|
||||
let nameLowerCase;
|
||||
let prelude = null;
|
||||
let block = null;
|
||||
const start = this.tokenStart;
|
||||
let name;
|
||||
let nameLowerCase;
|
||||
let prelude = null;
|
||||
let block = null;
|
||||
|
||||
this.eat(types.AtKeyword);
|
||||
this.eat(types.AtKeyword);
|
||||
|
||||
name = this.substrToCursor(start + 1);
|
||||
nameLowerCase = name.toLowerCase();
|
||||
this.skipSC();
|
||||
|
||||
// parse prelude
|
||||
if (
|
||||
this.eof === false &&
|
||||
this.tokenType !== types.LeftCurlyBracket &&
|
||||
this.tokenType !== types.Semicolon
|
||||
) {
|
||||
if (this.parseAtrulePrelude) {
|
||||
prelude = this.parseWithFallback(
|
||||
this.AtrulePrelude.bind(this, name),
|
||||
consumeRaw
|
||||
);
|
||||
} else {
|
||||
prelude = consumeRaw.call(this, this.tokenIndex);
|
||||
}
|
||||
|
||||
name = this.substrToCursor(start + 1);
|
||||
nameLowerCase = name.toLowerCase();
|
||||
this.skipSC();
|
||||
}
|
||||
|
||||
// parse prelude
|
||||
if (this.eof === false &&
|
||||
this.tokenType !== types.LeftCurlyBracket &&
|
||||
this.tokenType !== types.Semicolon) {
|
||||
if (this.parseAtrulePrelude) {
|
||||
prelude = this.parseWithFallback(this.AtrulePrelude.bind(this, name), consumeRaw);
|
||||
} else {
|
||||
prelude = consumeRaw.call(this, this.tokenIndex);
|
||||
}
|
||||
switch (this.tokenType) {
|
||||
case types.Semicolon:
|
||||
this.next();
|
||||
break;
|
||||
|
||||
this.skipSC();
|
||||
}
|
||||
case types.LeftCurlyBracket:
|
||||
if (
|
||||
hasOwnProperty.call(this.atrule, nameLowerCase) &&
|
||||
typeof this.atrule[nameLowerCase].block === 'function'
|
||||
) {
|
||||
block = this.atrule[nameLowerCase].block.call(this);
|
||||
} else {
|
||||
// TODO: should consume block content as Raw?
|
||||
block = this.Block(isDeclarationBlockAtrule.call(this));
|
||||
}
|
||||
|
||||
switch (this.tokenType) {
|
||||
case types.Semicolon:
|
||||
this.next();
|
||||
break;
|
||||
break;
|
||||
}
|
||||
|
||||
case types.LeftCurlyBracket:
|
||||
if (hasOwnProperty.call(this.atrule, nameLowerCase) &&
|
||||
typeof this.atrule[nameLowerCase].block === 'function') {
|
||||
block = this.atrule[nameLowerCase].block.call(this);
|
||||
} else {
|
||||
// TODO: should consume block content as Raw?
|
||||
block = this.Block(isDeclarationBlockAtrule.call(this));
|
||||
}
|
||||
|
||||
break;
|
||||
}
|
||||
|
||||
return {
|
||||
type: 'Atrule',
|
||||
loc: this.getLocation(start, this.tokenStart),
|
||||
name,
|
||||
prelude,
|
||||
block
|
||||
};
|
||||
return {
|
||||
type: 'Atrule',
|
||||
loc: this.getLocation(start, this.tokenStart),
|
||||
name,
|
||||
prelude,
|
||||
block,
|
||||
};
|
||||
}
|
||||
|
||||
function generate(node) {
|
||||
this.token(types.AtKeyword, '@' + node.name);
|
||||
this.token(types.AtKeyword, '@' + node.name);
|
||||
|
||||
if (node.prelude !== null) {
|
||||
this.node(node.prelude);
|
||||
}
|
||||
if (node.prelude !== null) {
|
||||
this.node(node.prelude);
|
||||
}
|
||||
|
||||
if (node.block) {
|
||||
this.node(node.block);
|
||||
} else {
|
||||
this.token(types.Semicolon, ';');
|
||||
}
|
||||
if (node.block) {
|
||||
this.node(node.block);
|
||||
} else {
|
||||
this.token(types.Semicolon, ';');
|
||||
}
|
||||
}
|
||||
|
||||
exports.generate = generate;
|
||||
|
56
node_modules/css-tree/cjs/syntax/node/AtrulePrelude.cjs
generated
vendored
@ -5,44 +5,48 @@ const types = require('../../tokenizer/types.cjs');
const name = 'AtrulePrelude';
const walkContext = 'atrulePrelude';
const structure = {
  children: [[]],
};

function parse(name) {
  let children = null;

  if (name !== null) {
    name = name.toLowerCase();
  }

  this.skipSC();

  if (
    hasOwnProperty.call(this.atrule, name) &&
    typeof this.atrule[name].prelude === 'function'
  ) {
    // custom consumer
    children = this.atrule[name].prelude.call(this);
  } else {
    // default consumer
    children = this.readSequence(this.scope.AtrulePrelude);
  }

  this.skipSC();

  if (
    this.eof !== true &&
    this.tokenType !== types.LeftCurlyBracket &&
    this.tokenType !== types.Semicolon
  ) {
    this.error('Semicolon or block is expected');
  }

  return {
    type: 'AtrulePrelude',
    loc: this.getLocationFromList(children),
    children,
  };
}

function generate(node) {
  this.children(node);
}

exports.generate = generate;

196
node_modules/css-tree/cjs/syntax/node/AttributeSelector.cjs
generated
vendored
@ -2,144 +2,144 @@
|
||||
|
||||
const types = require('../../tokenizer/types.cjs');
|
||||
|
||||
const DOLLARSIGN = 0x0024; // U+0024 DOLLAR SIGN ($)
|
||||
const ASTERISK = 0x002A; // U+002A ASTERISK (*)
|
||||
const EQUALSSIGN = 0x003D; // U+003D EQUALS SIGN (=)
|
||||
const CIRCUMFLEXACCENT = 0x005E; // U+005E (^)
|
||||
const VERTICALLINE = 0x007C; // U+007C VERTICAL LINE (|)
|
||||
const TILDE = 0x007E; // U+007E TILDE (~)
|
||||
const DOLLARSIGN = 0x0024; // U+0024 DOLLAR SIGN ($)
|
||||
const ASTERISK = 0x002a; // U+002A ASTERISK (*)
|
||||
const EQUALSSIGN = 0x003d; // U+003D EQUALS SIGN (=)
|
||||
const CIRCUMFLEXACCENT = 0x005e; // U+005E (^)
|
||||
const VERTICALLINE = 0x007c; // U+007C VERTICAL LINE (|)
|
||||
const TILDE = 0x007e; // U+007E TILDE (~)
|
||||
|
||||
function getAttributeName() {
|
||||
if (this.eof) {
|
||||
this.error('Unexpected end of input');
|
||||
}
|
||||
if (this.eof) {
|
||||
this.error('Unexpected end of input');
|
||||
}
|
||||
|
||||
const start = this.tokenStart;
|
||||
let expectIdent = false;
|
||||
const start = this.tokenStart;
|
||||
let expectIdent = false;
|
||||
|
||||
if (this.isDelim(ASTERISK)) {
|
||||
expectIdent = true;
|
||||
this.next();
|
||||
} else if (!this.isDelim(VERTICALLINE)) {
|
||||
this.eat(types.Ident);
|
||||
}
|
||||
if (this.isDelim(ASTERISK)) {
|
||||
expectIdent = true;
|
||||
this.next();
|
||||
} else if (!this.isDelim(VERTICALLINE)) {
|
||||
this.eat(types.Ident);
|
||||
}
|
||||
|
||||
if (this.isDelim(VERTICALLINE)) {
|
||||
if (this.charCodeAt(this.tokenStart + 1) !== EQUALSSIGN) {
|
||||
this.next();
|
||||
this.eat(types.Ident);
|
||||
} else if (expectIdent) {
|
||||
this.error('Identifier is expected', this.tokenEnd);
|
||||
}
|
||||
if (this.isDelim(VERTICALLINE)) {
|
||||
if (this.charCodeAt(this.tokenStart + 1) !== EQUALSSIGN) {
|
||||
this.next();
|
||||
this.eat(types.Ident);
|
||||
} else if (expectIdent) {
|
||||
this.error('Vertical line is expected');
|
||||
this.error('Identifier is expected', this.tokenEnd);
|
||||
}
|
||||
} else if (expectIdent) {
|
||||
this.error('Vertical line is expected');
|
||||
}
|
||||
|
||||
return {
|
||||
type: 'Identifier',
|
||||
loc: this.getLocation(start, this.tokenStart),
|
||||
name: this.substrToCursor(start)
|
||||
};
|
||||
return {
|
||||
type: 'Identifier',
|
||||
loc: this.getLocation(start, this.tokenStart),
|
||||
name: this.substrToCursor(start),
|
||||
};
|
||||
}
|
||||
|
||||
function getOperator() {
|
||||
const start = this.tokenStart;
|
||||
const code = this.charCodeAt(start);
|
||||
const start = this.tokenStart;
|
||||
const code = this.charCodeAt(start);
|
||||
|
||||
if (code !== EQUALSSIGN && // =
|
||||
code !== TILDE && // ~=
|
||||
code !== CIRCUMFLEXACCENT && // ^=
|
||||
code !== DOLLARSIGN && // $=
|
||||
code !== ASTERISK && // *=
|
||||
code !== VERTICALLINE // |=
|
||||
) {
|
||||
this.error('Attribute selector (=, ~=, ^=, $=, *=, |=) is expected');
|
||||
if (
|
||||
code !== EQUALSSIGN && // =
|
||||
code !== TILDE && // ~=
|
||||
code !== CIRCUMFLEXACCENT && // ^=
|
||||
code !== DOLLARSIGN && // $=
|
||||
code !== ASTERISK && // *=
|
||||
code !== VERTICALLINE // |=
|
||||
) {
|
||||
this.error('Attribute selector (=, ~=, ^=, $=, *=, |=) is expected');
|
||||
}
|
||||
|
||||
this.next();
|
||||
|
||||
if (code !== EQUALSSIGN) {
|
||||
if (!this.isDelim(EQUALSSIGN)) {
|
||||
this.error('Equal sign is expected');
|
||||
}
|
||||
|
||||
this.next();
|
||||
}
|
||||
|
||||
if (code !== EQUALSSIGN) {
|
||||
if (!this.isDelim(EQUALSSIGN)) {
|
||||
this.error('Equal sign is expected');
|
||||
}
|
||||
|
||||
this.next();
|
||||
}
|
||||
|
||||
return this.substrToCursor(start);
|
||||
return this.substrToCursor(start);
|
||||
}
|
||||
|
||||
// '[' <wq-name> ']'
|
||||
// '[' <wq-name> <attr-matcher> [ <string-token> | <ident-token> ] <attr-modifier>? ']'
|
||||
const name = 'AttributeSelector';
|
||||
const structure = {
|
||||
name: 'Identifier',
|
||||
matcher: [String, null],
|
||||
value: ['String', 'Identifier', null],
|
||||
flags: [String, null]
|
||||
name: 'Identifier',
|
||||
matcher: [String, null],
|
||||
value: ['String', 'Identifier', null],
|
||||
flags: [String, null],
|
||||
};
|
||||
|
||||
function parse() {
|
||||
const start = this.tokenStart;
|
||||
let name;
|
||||
let matcher = null;
|
||||
let value = null;
|
||||
let flags = null;
|
||||
const start = this.tokenStart;
|
||||
let name;
|
||||
let matcher = null;
|
||||
let value = null;
|
||||
let flags = null;
|
||||
|
||||
this.eat(types.LeftSquareBracket);
|
||||
this.skipSC();
|
||||
this.eat(types.LeftSquareBracket);
|
||||
this.skipSC();
|
||||
|
||||
name = getAttributeName.call(this);
|
||||
this.skipSC();
|
||||
name = getAttributeName.call(this);
|
||||
this.skipSC();
|
||||
|
||||
if (this.tokenType !== types.RightSquareBracket) {
|
||||
// avoid case `[name i]`
|
||||
if (this.tokenType !== types.Ident) {
|
||||
matcher = getOperator.call(this);
|
||||
if (this.tokenType !== types.RightSquareBracket) {
|
||||
// avoid case `[name i]`
|
||||
if (this.tokenType !== types.Ident) {
|
||||
matcher = getOperator.call(this);
|
||||
|
||||
this.skipSC();
|
||||
this.skipSC();
|
||||
|
||||
value = this.tokenType === types.String
|
||||
? this.String()
|
||||
: this.Identifier();
|
||||
value =
|
||||
this.tokenType === types.String ? this.String() : this.Identifier();
|
||||
|
||||
this.skipSC();
|
||||
}
|
||||
|
||||
// attribute flags
|
||||
if (this.tokenType === types.Ident) {
|
||||
flags = this.consume(types.Ident);
|
||||
|
||||
this.skipSC();
|
||||
}
|
||||
this.skipSC();
|
||||
}
|
||||
|
||||
this.eat(types.RightSquareBracket);
|
||||
// attribute flags
|
||||
if (this.tokenType === types.Ident) {
|
||||
flags = this.consume(types.Ident);
|
||||
|
||||
return {
|
||||
type: 'AttributeSelector',
|
||||
loc: this.getLocation(start, this.tokenStart),
|
||||
name,
|
||||
matcher,
|
||||
value,
|
||||
flags
|
||||
};
|
||||
this.skipSC();
|
||||
}
|
||||
}
|
||||
|
||||
this.eat(types.RightSquareBracket);
|
||||
|
||||
return {
|
||||
type: 'AttributeSelector',
|
||||
loc: this.getLocation(start, this.tokenStart),
|
||||
name,
|
||||
matcher,
|
||||
value,
|
||||
flags,
|
||||
};
|
||||
}
|
||||
|
||||
function generate(node) {
|
||||
this.token(types.Delim, '[');
|
||||
this.node(node.name);
|
||||
this.token(types.Delim, '[');
|
||||
this.node(node.name);
|
||||
|
||||
if (node.matcher !== null) {
|
||||
this.tokenize(node.matcher);
|
||||
this.node(node.value);
|
||||
}
|
||||
if (node.matcher !== null) {
|
||||
this.tokenize(node.matcher);
|
||||
this.node(node.value);
|
||||
}
|
||||
|
||||
if (node.flags !== null) {
|
||||
this.token(types.Ident, node.flags);
|
||||
}
|
||||
if (node.flags !== null) {
|
||||
this.token(types.Ident, node.flags);
|
||||
}
|
||||
|
||||
this.token(types.Delim, ']');
|
||||
this.token(types.Delim, ']');
|
||||
}
|
||||
|
||||
exports.generate = generate;
|
||||
|
95
node_modules/css-tree/cjs/syntax/node/Block.cjs
generated
vendored
@ -3,84 +3,79 @@
|
||||
const types = require('../../tokenizer/types.cjs');
|
||||
|
||||
function consumeRaw(startToken) {
|
||||
return this.Raw(startToken, null, true);
|
||||
return this.Raw(startToken, null, true);
|
||||
}
|
||||
function consumeRule() {
|
||||
return this.parseWithFallback(this.Rule, consumeRaw);
|
||||
return this.parseWithFallback(this.Rule, consumeRaw);
|
||||
}
|
||||
function consumeRawDeclaration(startToken) {
|
||||
return this.Raw(startToken, this.consumeUntilSemicolonIncluded, true);
|
||||
return this.Raw(startToken, this.consumeUntilSemicolonIncluded, true);
|
||||
}
|
||||
function consumeDeclaration() {
|
||||
if (this.tokenType === types.Semicolon) {
|
||||
return consumeRawDeclaration.call(this, this.tokenIndex);
|
||||
}
|
||||
if (this.tokenType === types.Semicolon) {
|
||||
return consumeRawDeclaration.call(this, this.tokenIndex);
|
||||
}
|
||||
|
||||
const node = this.parseWithFallback(this.Declaration, consumeRawDeclaration);
|
||||
const node = this.parseWithFallback(this.Declaration, consumeRawDeclaration);
|
||||
|
||||
if (this.tokenType === types.Semicolon) {
|
||||
this.next();
|
||||
}
|
||||
if (this.tokenType === types.Semicolon) {
|
||||
this.next();
|
||||
}
|
||||
|
||||
return node;
|
||||
return node;
|
||||
}
|
||||
|
||||
const name = 'Block';
|
||||
const walkContext = 'block';
|
||||
const structure = {
|
||||
children: [[
|
||||
'Atrule',
|
||||
'Rule',
|
||||
'Declaration'
|
||||
]]
|
||||
children: [['Atrule', 'Rule', 'Declaration']],
|
||||
};
|
||||
|
||||
function parse(isDeclaration) {
|
||||
const consumer = isDeclaration ? consumeDeclaration : consumeRule;
|
||||
const start = this.tokenStart;
|
||||
let children = this.createList();
|
||||
const consumer = isDeclaration ? consumeDeclaration : consumeRule;
|
||||
const start = this.tokenStart;
|
||||
let children = this.createList();
|
||||
|
||||
this.eat(types.LeftCurlyBracket);
|
||||
this.eat(types.LeftCurlyBracket);
|
||||
|
||||
scan:
|
||||
while (!this.eof) {
|
||||
switch (this.tokenType) {
|
||||
case types.RightCurlyBracket:
|
||||
break scan;
|
||||
scan: while (!this.eof) {
|
||||
switch (this.tokenType) {
|
||||
case types.RightCurlyBracket:
|
||||
break scan;
|
||||
|
||||
case types.WhiteSpace:
|
||||
case types.Comment:
|
||||
this.next();
|
||||
break;
|
||||
case types.WhiteSpace:
|
||||
case types.Comment:
|
||||
this.next();
|
||||
break;
|
||||
|
||||
case types.AtKeyword:
|
||||
children.push(this.parseWithFallback(this.Atrule, consumeRaw));
|
||||
break;
|
||||
case types.AtKeyword:
|
||||
children.push(this.parseWithFallback(this.Atrule, consumeRaw));
|
||||
break;
|
||||
|
||||
default:
|
||||
children.push(consumer.call(this));
|
||||
}
|
||||
default:
|
||||
children.push(consumer.call(this));
|
||||
}
|
||||
}
|
||||
|
||||
if (!this.eof) {
|
||||
this.eat(types.RightCurlyBracket);
|
||||
}
|
||||
if (!this.eof) {
|
||||
this.eat(types.RightCurlyBracket);
|
||||
}
|
||||
|
||||
return {
|
||||
type: 'Block',
|
||||
loc: this.getLocation(start, this.tokenStart),
|
||||
children
|
||||
};
|
||||
return {
|
||||
type: 'Block',
|
||||
loc: this.getLocation(start, this.tokenStart),
|
||||
children,
|
||||
};
|
||||
}
|
||||
|
||||
function generate(node) {
|
||||
this.token(types.LeftCurlyBracket, '{');
|
||||
this.children(node, prev => {
|
||||
if (prev.type === 'Declaration') {
|
||||
this.token(types.Semicolon, ';');
|
||||
}
|
||||
});
|
||||
this.token(types.RightCurlyBracket, '}');
|
||||
this.token(types.LeftCurlyBracket, '{');
|
||||
this.children(node, (prev) => {
|
||||
if (prev.type === 'Declaration') {
|
||||
this.token(types.Semicolon, ';');
|
||||
}
|
||||
});
|
||||
this.token(types.RightCurlyBracket, '}');
|
||||
}
|
||||
|
||||
exports.generate = generate;
|
||||
|
32
node_modules/css-tree/cjs/syntax/node/Brackets.cjs
generated
vendored
@ -4,32 +4,32 @@ const types = require('../../tokenizer/types.cjs');
const name = 'Brackets';
const structure = {
  children: [[]],
};

function parse(readSequence, recognizer) {
  const start = this.tokenStart;
  let children = null;

  this.eat(types.LeftSquareBracket);

  children = readSequence.call(this, recognizer);

  if (!this.eof) {
    this.eat(types.RightSquareBracket);
  }

  return {
    type: 'Brackets',
    loc: this.getLocation(start, this.tokenStart),
    children,
  };
}

function generate(node) {
  this.token(types.Delim, '[');
  this.children(node);
  this.token(types.Delim, ']');
}

exports.generate = generate;

14
node_modules/css-tree/cjs/syntax/node/CDC.cjs
generated
vendored
@ -6,18 +6,18 @@ const name = 'CDC';
const structure = [];

function parse() {
  const start = this.tokenStart;

  this.eat(types.CDC); // -->

  return {
    type: 'CDC',
    loc: this.getLocation(start, this.tokenStart),
  };
}

function generate() {
  this.token(types.CDC, '-->');
}

exports.generate = generate;

14
node_modules/css-tree/cjs/syntax/node/CDO.cjs
generated
vendored
@ -6,18 +6,18 @@ const name = 'CDO';
const structure = [];

function parse() {
  const start = this.tokenStart;

  this.eat(types.CDO); // <!--

  return {
    type: 'CDO',
    loc: this.getLocation(start, this.tokenStart),
  };
}

function generate() {
  this.token(types.CDO, '<!--');
}

exports.generate = generate;

20
node_modules/css-tree/cjs/syntax/node/ClassSelector.cjs
generated
vendored
@ -2,27 +2,27 @@
const types = require('../../tokenizer/types.cjs');

const FULLSTOP = 0x002e; // U+002E FULL STOP (.)

// '.' ident
const name = 'ClassSelector';
const structure = {
  name: String,
};

function parse() {
  this.eatDelim(FULLSTOP);

  return {
    type: 'ClassSelector',
    loc: this.getLocation(this.tokenStart - 1, this.tokenEnd),
    name: this.consume(types.Ident),
  };
}

function generate(node) {
  this.token(types.Delim, '.');
  this.token(types.Ident, node.name);
}

exports.generate = generate;

70
node_modules/css-tree/cjs/syntax/node/Combinator.cjs
generated
vendored
@ -2,57 +2,57 @@
const types = require('../../tokenizer/types.cjs');

const PLUSSIGN = 0x002b; // U+002B PLUS SIGN (+)
const SOLIDUS = 0x002f; // U+002F SOLIDUS (/)
const GREATERTHANSIGN = 0x003e; // U+003E GREATER-THAN SIGN (>)
const TILDE = 0x007e; // U+007E TILDE (~)

const name = 'Combinator';
const structure = {
  name: String,
};

// + | > | ~ | /deep/
function parse() {
  const start = this.tokenStart;
  let name;

  switch (this.tokenType) {
    case types.WhiteSpace:
      name = ' ';
      break;

    case types.Delim:
      switch (this.charCodeAt(this.tokenStart)) {
        case GREATERTHANSIGN:
        case PLUSSIGN:
        case TILDE:
          this.next();
          break;

        case SOLIDUS:
          this.next();
          this.eatIdent('deep');
          this.eatDelim(SOLIDUS);
          break;

        default:
          this.error('Combinator is expected');
      }

      name = this.substrToCursor(start);
      break;
  }

  return {
    type: 'Combinator',
    loc: this.getLocation(start, this.tokenStart),
    name,
  };
}

function generate(node) {
  this.tokenize(node.name);
}

exports.generate = generate;

37
node_modules/css-tree/cjs/syntax/node/Comment.cjs
generated
vendored
@ -2,36 +2,37 @@
const types = require('../../tokenizer/types.cjs');

const ASTERISK = 0x002a; // U+002A ASTERISK (*)
const SOLIDUS = 0x002f; // U+002F SOLIDUS (/)

const name = 'Comment';
const structure = {
  value: String,
};

function parse() {
  const start = this.tokenStart;
  let end = this.tokenEnd;

  this.eat(types.Comment);

  if (
    end - start + 2 >= 2 &&
    this.charCodeAt(end - 2) === ASTERISK &&
    this.charCodeAt(end - 1) === SOLIDUS
  ) {
    end -= 2;
  }

  return {
    type: 'Comment',
    loc: this.getLocation(start, this.tokenStart),
    value: this.substring(start + 2, end),
  };
}

function generate(node) {
  this.token(types.Comment, '/*' + node.value + '*/');
}

exports.generate = generate;

231
node_modules/css-tree/cjs/syntax/node/Declaration.cjs
generated
vendored
@ -4,159 +4,176 @@ const names = require('../../utils/names.cjs');
|
||||
const types = require('../../tokenizer/types.cjs');
|
||||
|
||||
const EXCLAMATIONMARK = 0x0021; // U+0021 EXCLAMATION MARK (!)
|
||||
const NUMBERSIGN = 0x0023; // U+0023 NUMBER SIGN (#)
|
||||
const DOLLARSIGN = 0x0024; // U+0024 DOLLAR SIGN ($)
|
||||
const AMPERSAND = 0x0026; // U+0026 AMPERSAND (&)
|
||||
const ASTERISK = 0x002A; // U+002A ASTERISK (*)
|
||||
const PLUSSIGN = 0x002B; // U+002B PLUS SIGN (+)
|
||||
const SOLIDUS = 0x002F; // U+002F SOLIDUS (/)
|
||||
const NUMBERSIGN = 0x0023; // U+0023 NUMBER SIGN (#)
|
||||
const DOLLARSIGN = 0x0024; // U+0024 DOLLAR SIGN ($)
|
||||
const AMPERSAND = 0x0026; // U+0026 AMPERSAND (&)
|
||||
const ASTERISK = 0x002a; // U+002A ASTERISK (*)
|
||||
const PLUSSIGN = 0x002b; // U+002B PLUS SIGN (+)
|
||||
const SOLIDUS = 0x002f; // U+002F SOLIDUS (/)
|
||||
|
||||
function consumeValueRaw(startToken) {
|
||||
return this.Raw(startToken, this.consumeUntilExclamationMarkOrSemicolon, true);
|
||||
return this.Raw(
|
||||
startToken,
|
||||
this.consumeUntilExclamationMarkOrSemicolon,
|
||||
true
|
||||
);
|
||||
}
|
||||
|
||||
function consumeCustomPropertyRaw(startToken) {
|
||||
return this.Raw(startToken, this.consumeUntilExclamationMarkOrSemicolon, false);
|
||||
return this.Raw(
|
||||
startToken,
|
||||
this.consumeUntilExclamationMarkOrSemicolon,
|
||||
false
|
||||
);
|
||||
}
|
||||
|
||||
function consumeValue() {
|
||||
const startValueToken = this.tokenIndex;
|
||||
const value = this.Value();
|
||||
const startValueToken = this.tokenIndex;
|
||||
const value = this.Value();
|
||||
|
||||
if (value.type !== 'Raw' &&
|
||||
this.eof === false &&
|
||||
this.tokenType !== types.Semicolon &&
|
||||
this.isDelim(EXCLAMATIONMARK) === false &&
|
||||
this.isBalanceEdge(startValueToken) === false) {
|
||||
this.error();
|
||||
}
|
||||
if (
|
||||
value.type !== 'Raw' &&
|
||||
this.eof === false &&
|
||||
this.tokenType !== types.Semicolon &&
|
||||
this.isDelim(EXCLAMATIONMARK) === false &&
|
||||
this.isBalanceEdge(startValueToken) === false
|
||||
) {
|
||||
this.error();
|
||||
}
|
||||
|
||||
return value;
|
||||
return value;
|
||||
}
|
||||
|
||||
const name = 'Declaration';
|
||||
const walkContext = 'declaration';
|
||||
const structure = {
|
||||
important: [Boolean, String],
|
||||
property: String,
|
||||
value: ['Value', 'Raw']
|
||||
important: [Boolean, String],
|
||||
property: String,
|
||||
value: ['Value', 'Raw'],
|
||||
};
|
||||
|
||||
function parse() {
|
||||
const start = this.tokenStart;
|
||||
const startToken = this.tokenIndex;
|
||||
const property = readProperty.call(this);
|
||||
const customProperty = names.isCustomProperty(property);
|
||||
const parseValue = customProperty ? this.parseCustomProperty : this.parseValue;
|
||||
const consumeRaw = customProperty ? consumeCustomPropertyRaw : consumeValueRaw;
|
||||
let important = false;
|
||||
let value;
|
||||
const start = this.tokenStart;
|
||||
const startToken = this.tokenIndex;
|
||||
const property = readProperty.call(this);
|
||||
const customProperty = names.isCustomProperty(property);
|
||||
const parseValue =
|
||||
customProperty ? this.parseCustomProperty : this.parseValue;
|
||||
const consumeRaw =
|
||||
customProperty ? consumeCustomPropertyRaw : consumeValueRaw;
|
||||
let important = false;
|
||||
let value;
|
||||
|
||||
this.skipSC();
|
||||
this.eat(types.Colon);
|
||||
|
||||
const valueStart = this.tokenIndex;
|
||||
|
||||
if (!customProperty) {
|
||||
this.skipSC();
|
||||
this.eat(types.Colon);
|
||||
}
|
||||
|
||||
const valueStart = this.tokenIndex;
|
||||
if (parseValue) {
|
||||
value = this.parseWithFallback(consumeValue, consumeRaw);
|
||||
} else {
|
||||
value = consumeRaw.call(this, this.tokenIndex);
|
||||
}
|
||||
|
||||
if (!customProperty) {
|
||||
this.skipSC();
|
||||
if (customProperty && value.type === 'Value' && value.children.isEmpty) {
|
||||
for (let offset = valueStart - this.tokenIndex; offset <= 0; offset++) {
|
||||
if (this.lookupType(offset) === types.WhiteSpace) {
|
||||
value.children.appendData({
|
||||
type: 'WhiteSpace',
|
||||
loc: null,
|
||||
value: ' ',
|
||||
});
|
||||
break;
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
if (parseValue) {
|
||||
value = this.parseWithFallback(consumeValue, consumeRaw);
|
||||
} else {
|
||||
value = consumeRaw.call(this, this.tokenIndex);
|
||||
}
|
||||
if (this.isDelim(EXCLAMATIONMARK)) {
|
||||
important = getImportant.call(this);
|
||||
this.skipSC();
|
||||
}
|
||||
|
||||
if (customProperty && value.type === 'Value' && value.children.isEmpty) {
|
||||
for (let offset = valueStart - this.tokenIndex; offset <= 0; offset++) {
|
||||
if (this.lookupType(offset) === types.WhiteSpace) {
|
||||
value.children.appendData({
|
||||
type: 'WhiteSpace',
|
||||
loc: null,
|
||||
value: ' '
|
||||
});
|
||||
break;
|
||||
}
|
||||
}
|
||||
}
|
||||
// Do not include semicolon to range per spec
|
||||
// https://drafts.csswg.org/css-syntax/#declaration-diagram
|
||||
|
||||
if (this.isDelim(EXCLAMATIONMARK)) {
|
||||
important = getImportant.call(this);
|
||||
this.skipSC();
|
||||
}
|
||||
if (
|
||||
this.eof === false &&
|
||||
this.tokenType !== types.Semicolon &&
|
||||
this.isBalanceEdge(startToken) === false
|
||||
) {
|
||||
this.error();
|
||||
}
|
||||
|
||||
// Do not include semicolon to range per spec
|
||||
// https://drafts.csswg.org/css-syntax/#declaration-diagram
|
||||
|
||||
if (this.eof === false &&
|
||||
this.tokenType !== types.Semicolon &&
|
||||
this.isBalanceEdge(startToken) === false) {
|
||||
this.error();
|
||||
}
|
||||
|
||||
return {
|
||||
type: 'Declaration',
|
||||
loc: this.getLocation(start, this.tokenStart),
|
||||
important,
|
||||
property,
|
||||
value
|
||||
};
|
||||
return {
|
||||
type: 'Declaration',
|
||||
loc: this.getLocation(start, this.tokenStart),
|
||||
important,
|
||||
property,
|
||||
value,
|
||||
};
|
||||
}
|
||||
|
||||
function generate(node) {
|
||||
this.token(types.Ident, node.property);
|
||||
this.token(types.Colon, ':');
|
||||
this.node(node.value);
|
||||
this.token(types.Ident, node.property);
|
||||
this.token(types.Colon, ':');
|
||||
this.node(node.value);
|
||||
|
||||
if (node.important) {
|
||||
this.token(types.Delim, '!');
|
||||
this.token(types.Ident, node.important === true ? 'important' : node.important);
|
||||
}
|
||||
if (node.important) {
|
||||
this.token(types.Delim, '!');
|
||||
this.token(
|
||||
types.Ident,
|
||||
node.important === true ? 'important' : node.important
|
||||
);
|
||||
}
|
||||
}
|
||||
|
||||
function readProperty() {
|
||||
const start = this.tokenStart;
|
||||
const start = this.tokenStart;
|
||||
|
||||
// hacks
|
||||
if (this.tokenType === types.Delim) {
|
||||
switch (this.charCodeAt(this.tokenStart)) {
|
||||
case ASTERISK:
|
||||
case DOLLARSIGN:
|
||||
case PLUSSIGN:
|
||||
case NUMBERSIGN:
|
||||
case AMPERSAND:
|
||||
this.next();
|
||||
break;
|
||||
// hacks
|
||||
if (this.tokenType === types.Delim) {
|
||||
switch (this.charCodeAt(this.tokenStart)) {
|
||||
case ASTERISK:
|
||||
case DOLLARSIGN:
|
||||
case PLUSSIGN:
|
||||
case NUMBERSIGN:
|
||||
case AMPERSAND:
|
||||
this.next();
|
||||
break;
|
||||
|
||||
// TODO: not sure we should support this hack
|
||||
case SOLIDUS:
|
||||
this.next();
|
||||
if (this.isDelim(SOLIDUS)) {
|
||||
this.next();
|
||||
}
|
||||
break;
|
||||
// TODO: not sure we should support this hack
|
||||
case SOLIDUS:
|
||||
this.next();
|
||||
if (this.isDelim(SOLIDUS)) {
|
||||
this.next();
|
||||
}
|
||||
break;
|
||||
}
|
||||
}
|
||||
|
||||
if (this.tokenType === types.Hash) {
|
||||
this.eat(types.Hash);
|
||||
} else {
|
||||
this.eat(types.Ident);
|
||||
}
|
||||
if (this.tokenType === types.Hash) {
|
||||
this.eat(types.Hash);
|
||||
} else {
|
||||
this.eat(types.Ident);
|
||||
}
|
||||
|
||||
return this.substrToCursor(start);
|
||||
return this.substrToCursor(start);
|
||||
}
|
||||
|
||||
// ! ws* important
|
||||
function getImportant() {
|
||||
this.eat(types.Delim);
|
||||
this.skipSC();
|
||||
this.eat(types.Delim);
|
||||
this.skipSC();
|
||||
|
||||
const important = this.consume(types.Ident);
|
||||
const important = this.consume(types.Ident);
|
||||
|
||||
// store original value in case it differ from `important`
|
||||
// for better original source restoring and hacks like `!ie` support
|
||||
return important === 'important' ? true : important;
|
||||
// store original value in case it differ from `important`
|
||||
// for better original source restoring and hacks like `!ie` support
|
||||
return important === 'important' ? true : important;
|
||||
}
|
||||
|
||||
exports.generate = generate;
|
||||
|
48
node_modules/css-tree/cjs/syntax/node/DeclarationList.cjs
generated
vendored
48
node_modules/css-tree/cjs/syntax/node/DeclarationList.cjs
generated
vendored
@ -3,45 +3,43 @@
|
||||
const types = require('../../tokenizer/types.cjs');
|
||||
|
||||
function consumeRaw(startToken) {
|
||||
return this.Raw(startToken, this.consumeUntilSemicolonIncluded, true);
|
||||
return this.Raw(startToken, this.consumeUntilSemicolonIncluded, true);
|
||||
}
|
||||
|
||||
const name = 'DeclarationList';
|
||||
const structure = {
|
||||
children: [[
|
||||
'Declaration'
|
||||
]]
|
||||
children: [['Declaration']],
|
||||
};
|
||||
|
||||
function parse() {
|
||||
const children = this.createList();
|
||||
const children = this.createList();
|
||||
|
||||
while (!this.eof) {
|
||||
switch (this.tokenType) {
|
||||
case types.WhiteSpace:
|
||||
case types.Comment:
|
||||
case types.Semicolon:
|
||||
this.next();
|
||||
break;
|
||||
while (!this.eof) {
|
||||
switch (this.tokenType) {
|
||||
case types.WhiteSpace:
|
||||
case types.Comment:
|
||||
case types.Semicolon:
|
||||
this.next();
|
||||
break;
|
||||
|
||||
default:
|
||||
children.push(this.parseWithFallback(this.Declaration, consumeRaw));
|
||||
}
|
||||
default:
|
||||
children.push(this.parseWithFallback(this.Declaration, consumeRaw));
|
||||
}
|
||||
}
|
||||
|
||||
return {
|
||||
type: 'DeclarationList',
|
||||
loc: this.getLocationFromList(children),
|
||||
children
|
||||
};
|
||||
return {
|
||||
type: 'DeclarationList',
|
||||
loc: this.getLocationFromList(children),
|
||||
children,
|
||||
};
|
||||
}
|
||||
|
||||
function generate(node) {
|
||||
this.children(node, prev => {
|
||||
if (prev.type === 'Declaration') {
|
||||
this.token(types.Semicolon, ';');
|
||||
}
|
||||
});
|
||||
this.children(node, (prev) => {
|
||||
if (prev.type === 'Declaration') {
|
||||
this.token(types.Semicolon, ';');
|
||||
}
|
||||
});
|
||||
}
|
||||
|
||||
exports.generate = generate;
|
||||
|
22
node_modules/css-tree/cjs/syntax/node/Dimension.cjs
generated
vendored
22
node_modules/css-tree/cjs/syntax/node/Dimension.cjs
generated
vendored
@ -4,24 +4,24 @@ const types = require('../../tokenizer/types.cjs');
|
||||
|
||||
const name = 'Dimension';
|
||||
const structure = {
|
||||
value: String,
|
||||
unit: String
|
||||
value: String,
|
||||
unit: String,
|
||||
};
|
||||
|
||||
function parse() {
|
||||
const start = this.tokenStart;
|
||||
const value = this.consumeNumber(types.Dimension);
|
||||
const start = this.tokenStart;
|
||||
const value = this.consumeNumber(types.Dimension);
|
||||
|
||||
return {
|
||||
type: 'Dimension',
|
||||
loc: this.getLocation(start, this.tokenStart),
|
||||
value,
|
||||
unit: this.substring(start + value.length, this.tokenStart)
|
||||
};
|
||||
return {
|
||||
type: 'Dimension',
|
||||
loc: this.getLocation(start, this.tokenStart),
|
||||
value,
|
||||
unit: this.substring(start + value.length, this.tokenStart),
|
||||
};
|
||||
}
|
||||
|
||||
function generate(node) {
|
||||
this.token(types.Dimension, node.value + node.unit);
|
||||
this.token(types.Dimension, node.value + node.unit);
|
||||
}
|
||||
|
||||
exports.generate = generate;
|
||||
|
43
node_modules/css-tree/cjs/syntax/node/Function.cjs
generated
vendored
43
node_modules/css-tree/cjs/syntax/node/Function.cjs
generated
vendored
@ -5,37 +5,38 @@ const types = require('../../tokenizer/types.cjs');
|
||||
const name = 'Function';
|
||||
const walkContext = 'function';
|
||||
const structure = {
|
||||
name: String,
|
||||
children: [[]]
|
||||
name: String,
|
||||
children: [[]],
|
||||
};
|
||||
|
||||
// <function-token> <sequence> )
|
||||
function parse(readSequence, recognizer) {
|
||||
const start = this.tokenStart;
|
||||
const name = this.consumeFunctionName();
|
||||
const nameLowerCase = name.toLowerCase();
|
||||
let children;
|
||||
const start = this.tokenStart;
|
||||
const name = this.consumeFunctionName();
|
||||
const nameLowerCase = name.toLowerCase();
|
||||
let children;
|
||||
|
||||
children = recognizer.hasOwnProperty(nameLowerCase)
|
||||
? recognizer[nameLowerCase].call(this, recognizer)
|
||||
: readSequence.call(this, recognizer);
|
||||
children =
|
||||
recognizer.hasOwnProperty(nameLowerCase) ?
|
||||
recognizer[nameLowerCase].call(this, recognizer)
|
||||
: readSequence.call(this, recognizer);
|
||||
|
||||
if (!this.eof) {
|
||||
this.eat(types.RightParenthesis);
|
||||
}
|
||||
if (!this.eof) {
|
||||
this.eat(types.RightParenthesis);
|
||||
}
|
||||
|
||||
return {
|
||||
type: 'Function',
|
||||
loc: this.getLocation(start, this.tokenStart),
|
||||
name,
|
||||
children
|
||||
};
|
||||
return {
|
||||
type: 'Function',
|
||||
loc: this.getLocation(start, this.tokenStart),
|
||||
name,
|
||||
children,
|
||||
};
|
||||
}
|
||||
|
||||
function generate(node) {
|
||||
this.token(types.Function, node.name + '(');
|
||||
this.children(node);
|
||||
this.token(types.RightParenthesis, ')');
|
||||
this.token(types.Function, node.name + '(');
|
||||
this.children(node);
|
||||
this.token(types.RightParenthesis, ')');
|
||||
}
|
||||
|
||||
exports.generate = generate;
|
||||
|
18
node_modules/css-tree/cjs/syntax/node/Hash.cjs
generated
vendored
18
node_modules/css-tree/cjs/syntax/node/Hash.cjs
generated
vendored
@ -6,21 +6,21 @@ const types = require('../../tokenizer/types.cjs');
|
||||
const xxx = 'XXX';
|
||||
const name = 'Hash';
|
||||
const structure = {
|
||||
value: String
|
||||
value: String,
|
||||
};
|
||||
function parse() {
|
||||
const start = this.tokenStart;
|
||||
const start = this.tokenStart;
|
||||
|
||||
this.eat(types.Hash);
|
||||
this.eat(types.Hash);
|
||||
|
||||
return {
|
||||
type: 'Hash',
|
||||
loc: this.getLocation(start, this.tokenStart),
|
||||
value: this.substrToCursor(start + 1)
|
||||
};
|
||||
return {
|
||||
type: 'Hash',
|
||||
loc: this.getLocation(start, this.tokenStart),
|
||||
value: this.substrToCursor(start + 1),
|
||||
};
|
||||
}
|
||||
function generate(node) {
|
||||
this.token(types.Hash, '#' + node.value);
|
||||
this.token(types.Hash, '#' + node.value);
|
||||
}
|
||||
|
||||
exports.generate = generate;
|
||||
|
26
node_modules/css-tree/cjs/syntax/node/IdSelector.cjs
generated
vendored
26
node_modules/css-tree/cjs/syntax/node/IdSelector.cjs
generated
vendored
@ -4,27 +4,27 @@ const types = require('../../tokenizer/types.cjs');
|
||||
|
||||
const name = 'IdSelector';
|
||||
const structure = {
|
||||
name: String
|
||||
name: String,
|
||||
};
|
||||
|
||||
function parse() {
|
||||
const start = this.tokenStart;
|
||||
const start = this.tokenStart;
|
||||
|
||||
// TODO: check value is an ident
|
||||
this.eat(types.Hash);
|
||||
// TODO: check value is an ident
|
||||
this.eat(types.Hash);
|
||||
|
||||
return {
|
||||
type: 'IdSelector',
|
||||
loc: this.getLocation(start, this.tokenStart),
|
||||
name: this.substrToCursor(start + 1)
|
||||
};
|
||||
return {
|
||||
type: 'IdSelector',
|
||||
loc: this.getLocation(start, this.tokenStart),
|
||||
name: this.substrToCursor(start + 1),
|
||||
};
|
||||
}
|
||||
|
||||
function generate(node) {
|
||||
// Using Delim instead of Hash is a hack to avoid for a whitespace between ident and id-selector
|
||||
// in safe mode (e.g. "a#id"), because IE11 doesn't allow a sequence <ident-token> <hash-token>
|
||||
// without a whitespace in values (e.g. "1px solid#000")
|
||||
this.token(types.Delim, '#' + node.name);
|
||||
// Using Delim instead of Hash is a hack to avoid for a whitespace between ident and id-selector
|
||||
// in safe mode (e.g. "a#id"), because IE11 doesn't allow a sequence <ident-token> <hash-token>
|
||||
// without a whitespace in values (e.g. "1px solid#000")
|
||||
this.token(types.Delim, '#' + node.name);
|
||||
}
|
||||
|
||||
exports.generate = generate;
|
||||
|
14
node_modules/css-tree/cjs/syntax/node/Identifier.cjs
generated
vendored
14
node_modules/css-tree/cjs/syntax/node/Identifier.cjs
generated
vendored
@ -4,19 +4,19 @@ const types = require('../../tokenizer/types.cjs');
|
||||
|
||||
const name = 'Identifier';
|
||||
const structure = {
|
||||
name: String
|
||||
name: String,
|
||||
};
|
||||
|
||||
function parse() {
|
||||
return {
|
||||
type: 'Identifier',
|
||||
loc: this.getLocation(this.tokenStart, this.tokenEnd),
|
||||
name: this.consume(types.Ident)
|
||||
};
|
||||
return {
|
||||
type: 'Identifier',
|
||||
loc: this.getLocation(this.tokenStart, this.tokenEnd),
|
||||
name: this.consume(types.Ident),
|
||||
};
|
||||
}
|
||||
|
||||
function generate(node) {
|
||||
this.token(types.Ident, node.name);
|
||||
this.token(types.Ident, node.name);
|
||||
}
|
||||
|
||||
exports.generate = generate;
|
||||
|
96
node_modules/css-tree/cjs/syntax/node/MediaFeature.cjs
generated
vendored
96
node_modules/css-tree/cjs/syntax/node/MediaFeature.cjs
generated
vendored
@ -4,70 +4,70 @@ const types = require('../../tokenizer/types.cjs');
|
||||
|
||||
const name = 'MediaFeature';
|
||||
const structure = {
|
||||
name: String,
|
||||
value: ['Identifier', 'Number', 'Dimension', 'Ratio', null]
|
||||
name: String,
|
||||
value: ['Identifier', 'Number', 'Dimension', 'Ratio', null],
|
||||
};
|
||||
|
||||
function parse() {
|
||||
const start = this.tokenStart;
|
||||
let name;
|
||||
let value = null;
|
||||
const start = this.tokenStart;
|
||||
let name;
|
||||
let value = null;
|
||||
|
||||
this.eat(types.LeftParenthesis);
|
||||
this.eat(types.LeftParenthesis);
|
||||
this.skipSC();
|
||||
|
||||
name = this.consume(types.Ident);
|
||||
this.skipSC();
|
||||
|
||||
if (this.tokenType !== types.RightParenthesis) {
|
||||
this.eat(types.Colon);
|
||||
this.skipSC();
|
||||
|
||||
name = this.consume(types.Ident);
|
||||
this.skipSC();
|
||||
|
||||
if (this.tokenType !== types.RightParenthesis) {
|
||||
this.eat(types.Colon);
|
||||
this.skipSC();
|
||||
|
||||
switch (this.tokenType) {
|
||||
case types.Number:
|
||||
if (this.lookupNonWSType(1) === types.Delim) {
|
||||
value = this.Ratio();
|
||||
} else {
|
||||
value = this.Number();
|
||||
}
|
||||
|
||||
break;
|
||||
|
||||
case types.Dimension:
|
||||
value = this.Dimension();
|
||||
break;
|
||||
|
||||
case types.Ident:
|
||||
value = this.Identifier();
|
||||
break;
|
||||
|
||||
default:
|
||||
this.error('Number, dimension, ratio or identifier is expected');
|
||||
switch (this.tokenType) {
|
||||
case types.Number:
|
||||
if (this.lookupNonWSType(1) === types.Delim) {
|
||||
value = this.Ratio();
|
||||
} else {
|
||||
value = this.Number();
|
||||
}
|
||||
|
||||
this.skipSC();
|
||||
break;
|
||||
|
||||
case types.Dimension:
|
||||
value = this.Dimension();
|
||||
break;
|
||||
|
||||
case types.Ident:
|
||||
value = this.Identifier();
|
||||
break;
|
||||
|
||||
default:
|
||||
this.error('Number, dimension, ratio or identifier is expected');
|
||||
}
|
||||
|
||||
this.eat(types.RightParenthesis);
|
||||
this.skipSC();
|
||||
}
|
||||
|
||||
return {
|
||||
type: 'MediaFeature',
|
||||
loc: this.getLocation(start, this.tokenStart),
|
||||
name,
|
||||
value
|
||||
};
|
||||
this.eat(types.RightParenthesis);
|
||||
|
||||
return {
|
||||
type: 'MediaFeature',
|
||||
loc: this.getLocation(start, this.tokenStart),
|
||||
name,
|
||||
value,
|
||||
};
|
||||
}
|
||||
|
||||
function generate(node) {
|
||||
this.token(types.LeftParenthesis, '(');
|
||||
this.token(types.Ident, node.name);
|
||||
this.token(types.LeftParenthesis, '(');
|
||||
this.token(types.Ident, node.name);
|
||||
|
||||
if (node.value !== null) {
|
||||
this.token(types.Colon, ':');
|
||||
this.node(node.value);
|
||||
}
|
||||
if (node.value !== null) {
|
||||
this.token(types.Colon, ':');
|
||||
this.node(node.value);
|
||||
}
|
||||
|
||||
this.token(types.RightParenthesis, ')');
|
||||
this.token(types.RightParenthesis, ')');
|
||||
}
|
||||
|
||||
exports.generate = generate;
|
||||
|
65
node_modules/css-tree/cjs/syntax/node/MediaQuery.cjs
generated
vendored
65
node_modules/css-tree/cjs/syntax/node/MediaQuery.cjs
generated
vendored
@ -4,55 +4,50 @@ const types = require('../../tokenizer/types.cjs');
|
||||
|
||||
const name = 'MediaQuery';
|
||||
const structure = {
|
||||
children: [[
|
||||
'Identifier',
|
||||
'MediaFeature',
|
||||
'WhiteSpace'
|
||||
]]
|
||||
children: [['Identifier', 'MediaFeature', 'WhiteSpace']],
|
||||
};
|
||||
|
||||
function parse() {
|
||||
const children = this.createList();
|
||||
let child = null;
|
||||
const children = this.createList();
|
||||
let child = null;
|
||||
|
||||
this.skipSC();
|
||||
this.skipSC();
|
||||
|
||||
scan:
|
||||
while (!this.eof) {
|
||||
switch (this.tokenType) {
|
||||
case types.Comment:
|
||||
case types.WhiteSpace:
|
||||
this.next();
|
||||
continue;
|
||||
scan: while (!this.eof) {
|
||||
switch (this.tokenType) {
|
||||
case types.Comment:
|
||||
case types.WhiteSpace:
|
||||
this.next();
|
||||
continue;
|
||||
|
||||
case types.Ident:
|
||||
child = this.Identifier();
|
||||
break;
|
||||
case types.Ident:
|
||||
child = this.Identifier();
|
||||
break;
|
||||
|
||||
case types.LeftParenthesis:
|
||||
child = this.MediaFeature();
|
||||
break;
|
||||
case types.LeftParenthesis:
|
||||
child = this.MediaFeature();
|
||||
break;
|
||||
|
||||
default:
|
||||
break scan;
|
||||
}
|
||||
|
||||
children.push(child);
|
||||
default:
|
||||
break scan;
|
||||
}
|
||||
|
||||
if (child === null) {
|
||||
this.error('Identifier or parenthesis is expected');
|
||||
}
|
||||
children.push(child);
|
||||
}
|
||||
|
||||
return {
|
||||
type: 'MediaQuery',
|
||||
loc: this.getLocationFromList(children),
|
||||
children
|
||||
};
|
||||
if (child === null) {
|
||||
this.error('Identifier or parenthesis is expected');
|
||||
}
|
||||
|
||||
return {
|
||||
type: 'MediaQuery',
|
||||
loc: this.getLocationFromList(children),
|
||||
children,
|
||||
};
|
||||
}
|
||||
|
||||
function generate(node) {
|
||||
this.children(node);
|
||||
this.children(node);
|
||||
}
|
||||
|
||||
exports.generate = generate;
|
||||
|
34
node_modules/css-tree/cjs/syntax/node/MediaQueryList.cjs
generated
vendored
34
node_modules/css-tree/cjs/syntax/node/MediaQueryList.cjs
generated
vendored
@ -4,35 +4,33 @@ const types = require('../../tokenizer/types.cjs');
|
||||
|
||||
const name = 'MediaQueryList';
|
||||
const structure = {
|
||||
children: [[
|
||||
'MediaQuery'
|
||||
]]
|
||||
children: [['MediaQuery']],
|
||||
};
|
||||
|
||||
function parse() {
|
||||
const children = this.createList();
|
||||
const children = this.createList();
|
||||
|
||||
this.skipSC();
|
||||
this.skipSC();
|
||||
|
||||
while (!this.eof) {
|
||||
children.push(this.MediaQuery());
|
||||
while (!this.eof) {
|
||||
children.push(this.MediaQuery());
|
||||
|
||||
if (this.tokenType !== types.Comma) {
|
||||
break;
|
||||
}
|
||||
|
||||
this.next();
|
||||
if (this.tokenType !== types.Comma) {
|
||||
break;
|
||||
}
|
||||
|
||||
return {
|
||||
type: 'MediaQueryList',
|
||||
loc: this.getLocationFromList(children),
|
||||
children
|
||||
};
|
||||
this.next();
|
||||
}
|
||||
|
||||
return {
|
||||
type: 'MediaQueryList',
|
||||
loc: this.getLocationFromList(children),
|
||||
children,
|
||||
};
|
||||
}
|
||||
|
||||
function generate(node) {
|
||||
this.children(node, () => this.token(types.Comma, ','));
|
||||
this.children(node, () => this.token(types.Comma, ','));
|
||||
}
|
||||
|
||||
exports.generate = generate;
|
||||
|
62
node_modules/css-tree/cjs/syntax/node/Nth.cjs
generated
vendored
62
node_modules/css-tree/cjs/syntax/node/Nth.cjs
generated
vendored
@ -4,48 +4,48 @@ const types = require('../../tokenizer/types.cjs');
|
||||
|
||||
const name = 'Nth';
|
||||
const structure = {
|
||||
nth: ['AnPlusB', 'Identifier'],
|
||||
selector: ['SelectorList', null]
|
||||
nth: ['AnPlusB', 'Identifier'],
|
||||
selector: ['SelectorList', null],
|
||||
};
|
||||
|
||||
function parse() {
|
||||
this.skipSC();
|
||||
this.skipSC();
|
||||
|
||||
const start = this.tokenStart;
|
||||
let end = start;
|
||||
let selector = null;
|
||||
let nth;
|
||||
const start = this.tokenStart;
|
||||
let end = start;
|
||||
let selector = null;
|
||||
let nth;
|
||||
|
||||
if (this.lookupValue(0, 'odd') || this.lookupValue(0, 'even')) {
|
||||
nth = this.Identifier();
|
||||
} else {
|
||||
nth = this.AnPlusB();
|
||||
}
|
||||
if (this.lookupValue(0, 'odd') || this.lookupValue(0, 'even')) {
|
||||
nth = this.Identifier();
|
||||
} else {
|
||||
nth = this.AnPlusB();
|
||||
}
|
||||
|
||||
end = this.tokenStart;
|
||||
this.skipSC();
|
||||
|
||||
if (this.lookupValue(0, 'of')) {
|
||||
this.next();
|
||||
|
||||
selector = this.SelectorList();
|
||||
end = this.tokenStart;
|
||||
this.skipSC();
|
||||
}
|
||||
|
||||
if (this.lookupValue(0, 'of')) {
|
||||
this.next();
|
||||
|
||||
selector = this.SelectorList();
|
||||
end = this.tokenStart;
|
||||
}
|
||||
|
||||
return {
|
||||
type: 'Nth',
|
||||
loc: this.getLocation(start, end),
|
||||
nth,
|
||||
selector
|
||||
};
|
||||
return {
|
||||
type: 'Nth',
|
||||
loc: this.getLocation(start, end),
|
||||
nth,
|
||||
selector,
|
||||
};
|
||||
}
|
||||
|
||||
function generate(node) {
|
||||
this.node(node.nth);
|
||||
if (node.selector !== null) {
|
||||
this.token(types.Ident, 'of');
|
||||
this.node(node.selector);
|
||||
}
|
||||
this.node(node.nth);
|
||||
if (node.selector !== null) {
|
||||
this.token(types.Ident, 'of');
|
||||
this.node(node.selector);
|
||||
}
|
||||
}
|
||||
|
||||
exports.generate = generate;
|
||||
|
14
node_modules/css-tree/cjs/syntax/node/Number.cjs
generated
vendored
14
node_modules/css-tree/cjs/syntax/node/Number.cjs
generated
vendored
@ -4,19 +4,19 @@ const types = require('../../tokenizer/types.cjs');
|
||||
|
||||
const name = 'Number';
|
||||
const structure = {
|
||||
value: String
|
||||
value: String,
|
||||
};
|
||||
|
||||
function parse() {
|
||||
return {
|
||||
type: 'Number',
|
||||
loc: this.getLocation(this.tokenStart, this.tokenEnd),
|
||||
value: this.consume(types.Number)
|
||||
};
|
||||
return {
|
||||
type: 'Number',
|
||||
loc: this.getLocation(this.tokenStart, this.tokenEnd),
|
||||
value: this.consume(types.Number),
|
||||
};
|
||||
}
|
||||
|
||||
function generate(node) {
|
||||
this.token(types.Number, node.value);
|
||||
this.token(types.Number, node.value);
|
||||
}
|
||||
|
||||
exports.generate = generate;
|
||||
|
18
node_modules/css-tree/cjs/syntax/node/Operator.cjs
generated
vendored
18
node_modules/css-tree/cjs/syntax/node/Operator.cjs
generated
vendored
@ -3,23 +3,23 @@
|
||||
// '/' | '*' | ',' | ':' | '+' | '-'
|
||||
const name = 'Operator';
|
||||
const structure = {
|
||||
value: String
|
||||
value: String,
|
||||
};
|
||||
|
||||
function parse() {
|
||||
const start = this.tokenStart;
|
||||
const start = this.tokenStart;
|
||||
|
||||
this.next();
|
||||
this.next();
|
||||
|
||||
return {
|
||||
type: 'Operator',
|
||||
loc: this.getLocation(start, this.tokenStart),
|
||||
value: this.substrToCursor(start)
|
||||
};
|
||||
return {
|
||||
type: 'Operator',
|
||||
loc: this.getLocation(start, this.tokenStart),
|
||||
value: this.substrToCursor(start),
|
||||
};
|
||||
}
|
||||
|
||||
function generate(node) {
|
||||
this.tokenize(node.value);
|
||||
this.tokenize(node.value);
|
||||
}
|
||||
|
||||
exports.generate = generate;
|
||||
|
32
node_modules/css-tree/cjs/syntax/node/Parentheses.cjs
generated
vendored
32
node_modules/css-tree/cjs/syntax/node/Parentheses.cjs
generated
vendored
@ -4,32 +4,32 @@ const types = require('../../tokenizer/types.cjs');
|
||||
|
||||
const name = 'Parentheses';
|
||||
const structure = {
|
||||
children: [[]]
|
||||
children: [[]],
|
||||
};
|
||||
|
||||
function parse(readSequence, recognizer) {
|
||||
const start = this.tokenStart;
|
||||
let children = null;
|
||||
const start = this.tokenStart;
|
||||
let children = null;
|
||||
|
||||
this.eat(types.LeftParenthesis);
|
||||
this.eat(types.LeftParenthesis);
|
||||
|
||||
children = readSequence.call(this, recognizer);
|
||||
children = readSequence.call(this, recognizer);
|
||||
|
||||
if (!this.eof) {
|
||||
this.eat(types.RightParenthesis);
|
||||
}
|
||||
if (!this.eof) {
|
||||
this.eat(types.RightParenthesis);
|
||||
}
|
||||
|
||||
return {
|
||||
type: 'Parentheses',
|
||||
loc: this.getLocation(start, this.tokenStart),
|
||||
children
|
||||
};
|
||||
return {
|
||||
type: 'Parentheses',
|
||||
loc: this.getLocation(start, this.tokenStart),
|
||||
children,
|
||||
};
|
||||
}
|
||||
|
||||
function generate(node) {
|
||||
this.token(types.LeftParenthesis, '(');
|
||||
this.children(node);
|
||||
this.token(types.RightParenthesis, ')');
|
||||
this.token(types.LeftParenthesis, '(');
|
||||
this.children(node);
|
||||
this.token(types.RightParenthesis, ')');
|
||||
}
|
||||
|
||||
exports.generate = generate;
|
||||
|
14
node_modules/css-tree/cjs/syntax/node/Percentage.cjs
generated
vendored
14
node_modules/css-tree/cjs/syntax/node/Percentage.cjs
generated
vendored
@ -4,19 +4,19 @@ const types = require('../../tokenizer/types.cjs');
|
||||
|
||||
const name = 'Percentage';
|
||||
const structure = {
|
||||
value: String
|
||||
value: String,
|
||||
};
|
||||
|
||||
function parse() {
|
||||
return {
|
||||
type: 'Percentage',
|
||||
loc: this.getLocation(this.tokenStart, this.tokenEnd),
|
||||
value: this.consumeNumber(types.Percentage)
|
||||
};
|
||||
return {
|
||||
type: 'Percentage',
|
||||
loc: this.getLocation(this.tokenStart, this.tokenEnd),
|
||||
value: this.consumeNumber(types.Percentage),
|
||||
};
|
||||
}
|
||||
|
||||
function generate(node) {
|
||||
this.token(types.Percentage, node.value + '%');
|
||||
this.token(types.Percentage, node.value + '%');
|
||||
}
|
||||
|
||||
exports.generate = generate;
|
||||
|
72
node_modules/css-tree/cjs/syntax/node/PseudoClassSelector.cjs
generated
vendored
72
node_modules/css-tree/cjs/syntax/node/PseudoClassSelector.cjs
generated
vendored
@ -5,57 +5,55 @@ const types = require('../../tokenizer/types.cjs');
|
||||
const name = 'PseudoClassSelector';
|
||||
const walkContext = 'function';
|
||||
const structure = {
|
||||
name: String,
|
||||
children: [['Raw'], null]
|
||||
name: String,
|
||||
children: [['Raw'], null],
|
||||
};
|
||||
|
||||
// : [ <ident> | <function-token> <any-value>? ) ]
|
||||
function parse() {
|
||||
const start = this.tokenStart;
|
||||
let children = null;
|
||||
let name;
|
||||
let nameLowerCase;
|
||||
const start = this.tokenStart;
|
||||
let children = null;
|
||||
let name;
|
||||
let nameLowerCase;
|
||||
|
||||
this.eat(types.Colon);
|
||||
this.eat(types.Colon);
|
||||
|
||||
if (this.tokenType === types.Function) {
|
||||
name = this.consumeFunctionName();
|
||||
nameLowerCase = name.toLowerCase();
|
||||
if (this.tokenType === types.Function) {
|
||||
name = this.consumeFunctionName();
|
||||
nameLowerCase = name.toLowerCase();
|
||||
|
||||
if (hasOwnProperty.call(this.pseudo, nameLowerCase)) {
|
||||
this.skipSC();
|
||||
children = this.pseudo[nameLowerCase].call(this);
|
||||
this.skipSC();
|
||||
} else {
|
||||
children = this.createList();
|
||||
children.push(
|
||||
this.Raw(this.tokenIndex, null, false)
|
||||
);
|
||||
}
|
||||
|
||||
this.eat(types.RightParenthesis);
|
||||
if (hasOwnProperty.call(this.pseudo, nameLowerCase)) {
|
||||
this.skipSC();
|
||||
children = this.pseudo[nameLowerCase].call(this);
|
||||
this.skipSC();
|
||||
} else {
|
||||
name = this.consume(types.Ident);
|
||||
children = this.createList();
|
||||
children.push(this.Raw(this.tokenIndex, null, false));
|
||||
}
|
||||
|
||||
return {
|
||||
type: 'PseudoClassSelector',
|
||||
loc: this.getLocation(start, this.tokenStart),
|
||||
name,
|
||||
children
|
||||
};
|
||||
this.eat(types.RightParenthesis);
|
||||
} else {
|
||||
name = this.consume(types.Ident);
|
||||
}
|
||||
|
||||
return {
|
||||
type: 'PseudoClassSelector',
|
||||
loc: this.getLocation(start, this.tokenStart),
|
||||
name,
|
||||
children,
|
||||
};
|
||||
}
|
||||
|
||||
function generate(node) {
|
||||
this.token(types.Colon, ':');
|
||||
this.token(types.Colon, ':');
|
||||
|
||||
if (node.children === null) {
|
||||
this.token(types.Ident, node.name);
|
||||
} else {
|
||||
this.token(types.Function, node.name + '(');
|
||||
this.children(node);
|
||||
this.token(types.RightParenthesis, ')');
|
||||
}
|
||||
if (node.children === null) {
|
||||
this.token(types.Ident, node.name);
|
||||
} else {
|
||||
this.token(types.Function, node.name + '(');
|
||||
this.children(node);
|
||||
this.token(types.RightParenthesis, ')');
|
||||
}
|
||||
}
|
||||
|
||||
exports.generate = generate;
|
||||
|
76
node_modules/css-tree/cjs/syntax/node/PseudoElementSelector.cjs
generated
vendored
76
node_modules/css-tree/cjs/syntax/node/PseudoElementSelector.cjs
generated
vendored
@ -5,59 +5,57 @@ const types = require('../../tokenizer/types.cjs');
|
||||
const name = 'PseudoElementSelector';
|
||||
const walkContext = 'function';
|
||||
const structure = {
|
||||
name: String,
|
||||
children: [['Raw'], null]
|
||||
name: String,
|
||||
children: [['Raw'], null],
|
||||
};
|
||||
|
||||
// :: [ <ident> | <function-token> <any-value>? ) ]
|
||||
function parse() {
|
||||
const start = this.tokenStart;
|
||||
let children = null;
|
||||
let name;
|
||||
let nameLowerCase;
|
||||
const start = this.tokenStart;
|
||||
let children = null;
|
||||
let name;
|
||||
let nameLowerCase;
|
||||
|
||||
this.eat(types.Colon);
|
||||
this.eat(types.Colon);
|
||||
this.eat(types.Colon);
|
||||
this.eat(types.Colon);
|
||||
|
||||
if (this.tokenType === types.Function) {
|
||||
name = this.consumeFunctionName();
|
||||
nameLowerCase = name.toLowerCase();
|
||||
if (this.tokenType === types.Function) {
|
||||
name = this.consumeFunctionName();
|
||||
nameLowerCase = name.toLowerCase();
|
||||
|
||||
if (hasOwnProperty.call(this.pseudo, nameLowerCase)) {
|
||||
this.skipSC();
|
||||
children = this.pseudo[nameLowerCase].call(this);
|
||||
this.skipSC();
|
||||
} else {
|
||||
children = this.createList();
|
||||
children.push(
|
||||
this.Raw(this.tokenIndex, null, false)
|
||||
);
|
||||
}
|
||||
|
||||
this.eat(types.RightParenthesis);
|
||||
if (hasOwnProperty.call(this.pseudo, nameLowerCase)) {
|
||||
this.skipSC();
|
||||
children = this.pseudo[nameLowerCase].call(this);
|
||||
this.skipSC();
|
||||
} else {
|
||||
name = this.consume(types.Ident);
|
||||
children = this.createList();
|
||||
children.push(this.Raw(this.tokenIndex, null, false));
|
||||
}
|
||||
|
||||
return {
|
||||
type: 'PseudoElementSelector',
|
||||
loc: this.getLocation(start, this.tokenStart),
|
||||
name,
|
||||
children
|
||||
};
|
||||
this.eat(types.RightParenthesis);
|
||||
} else {
|
||||
name = this.consume(types.Ident);
|
||||
}
|
||||
|
||||
return {
|
||||
type: 'PseudoElementSelector',
|
||||
loc: this.getLocation(start, this.tokenStart),
|
||||
name,
|
||||
children,
|
||||
};
|
||||
}
|
||||
|
||||
function generate(node) {
|
||||
this.token(types.Colon, ':');
|
||||
this.token(types.Colon, ':');
|
||||
this.token(types.Colon, ':');
|
||||
this.token(types.Colon, ':');
|
||||
|
||||
if (node.children === null) {
|
||||
this.token(types.Ident, node.name);
|
||||
} else {
|
||||
this.token(types.Function, node.name + '(');
|
||||
this.children(node);
|
||||
this.token(types.RightParenthesis, ')');
|
||||
}
|
||||
if (node.children === null) {
|
||||
this.token(types.Ident, node.name);
|
||||
} else {
|
||||
this.token(types.Function, node.name + '(');
|
||||
this.children(node);
|
||||
this.token(types.RightParenthesis, ')');
|
||||
}
|
||||
}
|
||||
|
||||
exports.generate = generate;
|
||||
|
63
node_modules/css-tree/cjs/syntax/node/Ratio.cjs
generated
vendored
63
node_modules/css-tree/cjs/syntax/node/Ratio.cjs
generated
vendored
@ -3,8 +3,8 @@
|
||||
const types = require('../../tokenizer/types.cjs');
|
||||
const charCodeDefinitions = require('../../tokenizer/char-code-definitions.cjs');
|
||||
|
||||
const SOLIDUS = 0x002F; // U+002F SOLIDUS (/)
|
||||
const FULLSTOP = 0x002E; // U+002E FULL STOP (.)
|
||||
const SOLIDUS = 0x002f; // U+002F SOLIDUS (/)
|
||||
const FULLSTOP = 0x002e; // U+002E FULL STOP (.)
|
||||
|
||||
// Terms of <ratio> should be a positive numbers (not zero or negative)
|
||||
// (see https://drafts.csswg.org/mediaqueries-3/#values)
|
||||
@ -13,52 +13,55 @@ const FULLSTOP = 0x002E; // U+002E FULL STOP (.)
|
||||
// to test a term is unsigned number without an exponent part.
|
||||
// Additional checking may be applied on lexer validation.
|
||||
function consumeNumber() {
|
||||
this.skipSC();
|
||||
this.skipSC();
|
||||
|
||||
const value = this.consume(types.Number);
|
||||
const value = this.consume(types.Number);
|
||||
|
||||
for (let i = 0; i < value.length; i++) {
|
||||
const code = value.charCodeAt(i);
|
||||
if (!charCodeDefinitions.isDigit(code) && code !== FULLSTOP) {
|
||||
this.error('Unsigned number is expected', this.tokenStart - value.length + i);
|
||||
}
|
||||
for (let i = 0; i < value.length; i++) {
|
||||
const code = value.charCodeAt(i);
|
||||
if (!charCodeDefinitions.isDigit(code) && code !== FULLSTOP) {
|
||||
this.error(
|
||||
'Unsigned number is expected',
|
||||
this.tokenStart - value.length + i
|
||||
);
|
||||
}
|
||||
}
|
||||
|
||||
if (Number(value) === 0) {
|
||||
this.error('Zero number is not allowed', this.tokenStart - value.length);
|
||||
}
|
||||
if (Number(value) === 0) {
|
||||
this.error('Zero number is not allowed', this.tokenStart - value.length);
|
||||
}
|
||||
|
||||
return value;
|
||||
return value;
|
||||
}
|
||||
|
||||
const name = 'Ratio';
|
||||
const structure = {
|
||||
left: String,
|
||||
right: String
|
||||
left: String,
|
||||
right: String,
|
||||
};
|
||||
|
||||
// <positive-integer> S* '/' S* <positive-integer>
|
||||
function parse() {
|
||||
const start = this.tokenStart;
|
||||
const left = consumeNumber.call(this);
|
||||
let right;
|
||||
const start = this.tokenStart;
|
||||
const left = consumeNumber.call(this);
|
||||
let right;
|
||||
|
||||
this.skipSC();
|
||||
this.eatDelim(SOLIDUS);
|
||||
right = consumeNumber.call(this);
|
||||
this.skipSC();
|
||||
this.eatDelim(SOLIDUS);
|
||||
right = consumeNumber.call(this);
|
||||
|
||||
return {
|
||||
type: 'Ratio',
|
||||
loc: this.getLocation(start, this.tokenStart),
|
||||
left,
|
||||
right
|
||||
};
|
||||
return {
|
||||
type: 'Ratio',
|
||||
loc: this.getLocation(start, this.tokenStart),
|
||||
left,
|
||||
right,
|
||||
};
|
||||
}
|
||||
|
||||
function generate(node) {
|
||||
this.token(types.Number, node.left);
|
||||
this.token(types.Delim, '/');
|
||||
this.token(types.Number, node.right);
|
||||
this.token(types.Number, node.left);
|
||||
this.token(types.Delim, '/');
|
||||
this.token(types.Number, node.right);
|
||||
}
|
||||
|
||||
exports.generate = generate;
|
||||
|
47
node_modules/css-tree/cjs/syntax/node/Raw.cjs
generated
vendored
47
node_modules/css-tree/cjs/syntax/node/Raw.cjs
generated
vendored
@ -3,43 +3,46 @@
|
||||
const types = require('../../tokenizer/types.cjs');
|
||||
|
||||
function getOffsetExcludeWS() {
|
||||
if (this.tokenIndex > 0) {
|
||||
if (this.lookupType(-1) === types.WhiteSpace) {
|
||||
return this.tokenIndex > 1
|
||||
? this.getTokenStart(this.tokenIndex - 1)
|
||||
: this.firstCharOffset;
|
||||
}
|
||||
if (this.tokenIndex > 0) {
|
||||
if (this.lookupType(-1) === types.WhiteSpace) {
|
||||
return this.tokenIndex > 1 ?
|
||||
this.getTokenStart(this.tokenIndex - 1)
|
||||
: this.firstCharOffset;
|
||||
}
|
||||
}
|
||||
|
||||
return this.tokenStart;
|
||||
return this.tokenStart;
|
||||
}
|
||||
|
||||
const name = 'Raw';
|
||||
const structure = {
|
||||
value: String
|
||||
value: String,
|
||||
};
|
||||
|
||||
function parse(startToken, consumeUntil, excludeWhiteSpace) {
|
||||
const startOffset = this.getTokenStart(startToken);
|
||||
let endOffset;
|
||||
const startOffset = this.getTokenStart(startToken);
|
||||
let endOffset;
|
||||
|
||||
this.skipUntilBalanced(startToken, consumeUntil || this.consumeUntilBalanceEnd);
|
||||
this.skipUntilBalanced(
|
||||
startToken,
|
||||
consumeUntil || this.consumeUntilBalanceEnd
|
||||
);
|
||||
|
||||
if (excludeWhiteSpace && this.tokenStart > startOffset) {
|
||||
endOffset = getOffsetExcludeWS.call(this);
|
||||
} else {
|
||||
endOffset = this.tokenStart;
|
||||
}
|
||||
if (excludeWhiteSpace && this.tokenStart > startOffset) {
|
||||
endOffset = getOffsetExcludeWS.call(this);
|
||||
} else {
|
||||
endOffset = this.tokenStart;
|
||||
}
|
||||
|
||||
return {
|
||||
type: 'Raw',
|
||||
loc: this.getLocation(startOffset, endOffset),
|
||||
value: this.substring(startOffset, endOffset)
|
||||
};
|
||||
return {
|
||||
type: 'Raw',
|
||||
loc: this.getLocation(startOffset, endOffset),
|
||||
value: this.substring(startOffset, endOffset),
|
||||
};
|
||||
}
|
||||
|
||||
function generate(node) {
|
||||
this.tokenize(node.value);
|
||||
this.tokenize(node.value);
|
||||
}
|
||||
|
||||
exports.generate = generate;
|
||||
|
58
node_modules/css-tree/cjs/syntax/node/Rule.cjs
generated
vendored
58
node_modules/css-tree/cjs/syntax/node/Rule.cjs
generated
vendored
@ -3,52 +3,54 @@
|
||||
const types = require('../../tokenizer/types.cjs');
|
||||
|
||||
function consumeRaw(startToken) {
|
||||
return this.Raw(startToken, this.consumeUntilLeftCurlyBracket, true);
|
||||
return this.Raw(startToken, this.consumeUntilLeftCurlyBracket, true);
|
||||
}
|
||||
|
||||
function consumePrelude() {
|
||||
const prelude = this.SelectorList();
|
||||
const prelude = this.SelectorList();
|
||||
|
||||
if (prelude.type !== 'Raw' &&
|
||||
this.eof === false &&
|
||||
this.tokenType !== types.LeftCurlyBracket) {
|
||||
this.error();
|
||||
}
|
||||
if (
|
||||
prelude.type !== 'Raw' &&
|
||||
this.eof === false &&
|
||||
this.tokenType !== types.LeftCurlyBracket
|
||||
) {
|
||||
this.error();
|
||||
}
|
||||
|
||||
return prelude;
|
||||
return prelude;
|
||||
}
|
||||
|
||||
const name = 'Rule';
|
||||
const walkContext = 'rule';
|
||||
const structure = {
|
||||
prelude: ['SelectorList', 'Raw'],
|
||||
block: ['Block']
|
||||
prelude: ['SelectorList', 'Raw'],
|
||||
block: ['Block'],
|
||||
};
|
||||
|
||||
function parse() {
|
||||
const startToken = this.tokenIndex;
|
||||
const startOffset = this.tokenStart;
|
||||
let prelude;
|
||||
let block;
|
||||
const startToken = this.tokenIndex;
|
||||
const startOffset = this.tokenStart;
|
||||
let prelude;
|
||||
let block;
|
||||
|
||||
if (this.parseRulePrelude) {
|
||||
prelude = this.parseWithFallback(consumePrelude, consumeRaw);
|
||||
} else {
|
||||
prelude = consumeRaw.call(this, startToken);
|
||||
}
|
||||
if (this.parseRulePrelude) {
|
||||
prelude = this.parseWithFallback(consumePrelude, consumeRaw);
|
||||
} else {
|
||||
prelude = consumeRaw.call(this, startToken);
|
||||
}
|
||||
|
||||
block = this.Block(true);
|
||||
block = this.Block(true);
|
||||
|
||||
return {
|
||||
type: 'Rule',
|
||||
loc: this.getLocation(startOffset, this.tokenStart),
|
||||
prelude,
|
||||
block
|
||||
};
|
||||
return {
|
||||
type: 'Rule',
|
||||
loc: this.getLocation(startOffset, this.tokenStart),
|
||||
prelude,
|
||||
block,
|
||||
};
|
||||
}
|
||||
function generate(node) {
|
||||
this.node(node.prelude);
|
||||
this.node(node.block);
|
||||
this.node(node.prelude);
|
||||
this.node(node.block);
|
||||
}
|
||||
|
||||
exports.generate = generate;
|
||||
|
44
node_modules/css-tree/cjs/syntax/node/Selector.cjs
generated
vendored
44
node_modules/css-tree/cjs/syntax/node/Selector.cjs
generated
vendored
@ -2,35 +2,37 @@
|
||||
|
||||
const name = 'Selector';
|
||||
const structure = {
|
||||
children: [[
|
||||
'TypeSelector',
|
||||
'IdSelector',
|
||||
'ClassSelector',
|
||||
'AttributeSelector',
|
||||
'PseudoClassSelector',
|
||||
'PseudoElementSelector',
|
||||
'Combinator',
|
||||
'WhiteSpace'
|
||||
]]
|
||||
children: [
|
||||
[
|
||||
'TypeSelector',
|
||||
'IdSelector',
|
||||
'ClassSelector',
|
||||
'AttributeSelector',
|
||||
'PseudoClassSelector',
|
||||
'PseudoElementSelector',
|
||||
'Combinator',
|
||||
'WhiteSpace',
|
||||
],
|
||||
],
|
||||
};
|
||||
|
||||
function parse() {
|
||||
const children = this.readSequence(this.scope.Selector);
|
||||
const children = this.readSequence(this.scope.Selector);
|
||||
|
||||
// nothing were consumed
|
||||
if (this.getFirstListNode(children) === null) {
|
||||
this.error('Selector is expected');
|
||||
}
|
||||
// nothing were consumed
|
||||
if (this.getFirstListNode(children) === null) {
|
||||
this.error('Selector is expected');
|
||||
}
|
||||
|
||||
return {
|
||||
type: 'Selector',
|
||||
loc: this.getLocationFromList(children),
|
||||
children
|
||||
};
|
||||
return {
|
||||
type: 'Selector',
|
||||
loc: this.getLocationFromList(children),
|
||||
children,
|
||||
};
|
||||
}
|
||||
|
||||
function generate(node) {
|
||||
this.children(node);
|
||||
this.children(node);
|
||||
}
|
||||
|
||||
exports.generate = generate;
|
||||
|
35
node_modules/css-tree/cjs/syntax/node/SelectorList.cjs
generated
vendored
35
node_modules/css-tree/cjs/syntax/node/SelectorList.cjs
generated
vendored
@ -5,35 +5,32 @@ const types = require('../../tokenizer/types.cjs');
|
||||
const name = 'SelectorList';
|
||||
const walkContext = 'selector';
|
||||
const structure = {
|
||||
children: [[
|
||||
'Selector',
|
||||
'Raw'
|
||||
]]
|
||||
children: [['Selector', 'Raw']],
|
||||
};
|
||||
|
||||
function parse() {
|
||||
const children = this.createList();
|
||||
const children = this.createList();
|
||||
|
||||
while (!this.eof) {
|
||||
children.push(this.Selector());
|
||||
while (!this.eof) {
|
||||
children.push(this.Selector());
|
||||
|
||||
if (this.tokenType === types.Comma) {
|
||||
this.next();
|
||||
continue;
|
||||
}
|
||||
|
||||
break;
|
||||
if (this.tokenType === types.Comma) {
|
||||
this.next();
|
||||
continue;
|
||||
}
|
||||
|
||||
return {
|
||||
type: 'SelectorList',
|
||||
loc: this.getLocationFromList(children),
|
||||
children
|
||||
};
|
||||
break;
|
||||
}
|
||||
|
||||
return {
|
||||
type: 'SelectorList',
|
||||
loc: this.getLocationFromList(children),
|
||||
children,
|
||||
};
|
||||
}
|
||||
|
||||
function generate(node) {
|
||||
this.children(node, () => this.token(types.Comma, ','));
|
||||
this.children(node, () => this.token(types.Comma, ','));
|
||||
}
|
||||
|
||||
exports.generate = generate;
|
||||
|
14
node_modules/css-tree/cjs/syntax/node/String.cjs
generated
vendored
14
node_modules/css-tree/cjs/syntax/node/String.cjs
generated
vendored
@ -5,19 +5,19 @@ const types = require('../../tokenizer/types.cjs');
|
||||
|
||||
const name = 'String';
|
||||
const structure = {
|
||||
value: String
|
||||
value: String,
|
||||
};
|
||||
|
||||
function parse() {
|
||||
return {
|
||||
type: 'String',
|
||||
loc: this.getLocation(this.tokenStart, this.tokenEnd),
|
||||
value: string.decode(this.consume(types.String))
|
||||
};
|
||||
return {
|
||||
type: 'String',
|
||||
loc: this.getLocation(this.tokenStart, this.tokenEnd),
|
||||
value: string.decode(this.consume(types.String)),
|
||||
};
|
||||
}
|
||||
|
||||
function generate(node) {
|
||||
this.token(types.String, string.encode(node.value));
|
||||
this.token(types.String, string.encode(node.value));
|
||||
}
|
||||
|
||||
exports.generate = generate;
|
||||
|
97
node_modules/css-tree/cjs/syntax/node/StyleSheet.cjs
generated
vendored
97
node_modules/css-tree/cjs/syntax/node/StyleSheet.cjs
generated
vendored
@ -5,75 +5,68 @@ const types = require('../../tokenizer/types.cjs');
|
||||
const EXCLAMATIONMARK = 0x0021; // U+0021 EXCLAMATION MARK (!)
|
||||
|
||||
function consumeRaw(startToken) {
|
||||
return this.Raw(startToken, null, false);
|
||||
return this.Raw(startToken, null, false);
|
||||
}
|
||||
|
||||
const name = 'StyleSheet';
|
||||
const walkContext = 'stylesheet';
|
||||
const structure = {
|
||||
children: [[
|
||||
'Comment',
|
||||
'CDO',
|
||||
'CDC',
|
||||
'Atrule',
|
||||
'Rule',
|
||||
'Raw'
|
||||
]]
|
||||
children: [['Comment', 'CDO', 'CDC', 'Atrule', 'Rule', 'Raw']],
|
||||
};
|
||||
|
||||
function parse() {
|
||||
const start = this.tokenStart;
|
||||
const children = this.createList();
|
||||
let child;
|
||||
const start = this.tokenStart;
|
||||
const children = this.createList();
|
||||
let child;
|
||||
|
||||
while (!this.eof) {
|
||||
switch (this.tokenType) {
|
||||
case types.WhiteSpace:
|
||||
this.next();
|
||||
continue;
|
||||
while (!this.eof) {
|
||||
switch (this.tokenType) {
|
||||
case types.WhiteSpace:
|
||||
this.next();
|
||||
continue;
|
||||
|
||||
case types.Comment:
|
||||
// ignore comments except exclamation comments (i.e. /*! .. */) on top level
|
||||
if (this.charCodeAt(this.tokenStart + 2) !== EXCLAMATIONMARK) {
|
||||
this.next();
|
||||
continue;
|
||||
}
|
||||
|
||||
child = this.Comment();
|
||||
break;
|
||||
|
||||
case types.CDO: // <!--
|
||||
child = this.CDO();
|
||||
break;
|
||||
|
||||
case types.CDC: // -->
|
||||
child = this.CDC();
|
||||
break;
|
||||
|
||||
// CSS Syntax Module Level 3
|
||||
// §2.2 Error handling
|
||||
// At the "top level" of a stylesheet, an <at-keyword-token> starts an at-rule.
|
||||
case types.AtKeyword:
|
||||
child = this.parseWithFallback(this.Atrule, consumeRaw);
|
||||
break;
|
||||
|
||||
// Anything else starts a qualified rule ...
|
||||
default:
|
||||
child = this.parseWithFallback(this.Rule, consumeRaw);
|
||||
case types.Comment:
|
||||
// ignore comments except exclamation comments (i.e. /*! .. */) on top level
|
||||
if (this.charCodeAt(this.tokenStart + 2) !== EXCLAMATIONMARK) {
|
||||
this.next();
|
||||
continue;
|
||||
}
|
||||
|
||||
children.push(child);
|
||||
child = this.Comment();
|
||||
break;
|
||||
|
||||
case types.CDO: // <!--
|
||||
child = this.CDO();
|
||||
break;
|
||||
|
||||
case types.CDC: // -->
|
||||
child = this.CDC();
|
||||
break;
|
||||
|
||||
// CSS Syntax Module Level 3
|
||||
// §2.2 Error handling
|
||||
// At the "top level" of a stylesheet, an <at-keyword-token> starts an at-rule.
|
||||
case types.AtKeyword:
|
||||
child = this.parseWithFallback(this.Atrule, consumeRaw);
|
||||
break;
|
||||
|
||||
// Anything else starts a qualified rule ...
|
||||
default:
|
||||
child = this.parseWithFallback(this.Rule, consumeRaw);
|
||||
}
|
||||
|
||||
return {
|
||||
type: 'StyleSheet',
|
||||
loc: this.getLocation(start, this.tokenStart),
|
||||
children
|
||||
};
|
||||
children.push(child);
|
||||
}
|
||||
|
||||
return {
|
||||
type: 'StyleSheet',
|
||||
loc: this.getLocation(start, this.tokenStart),
|
||||
children,
|
||||
};
|
||||
}
|
||||
|
||||
function generate(node) {
|
||||
this.children(node);
|
||||
this.children(node);
|
||||
}
|
||||
|
||||
exports.generate = generate;
|
||||
|
47
node_modules/css-tree/cjs/syntax/node/TypeSelector.cjs
generated
vendored
47
node_modules/css-tree/cjs/syntax/node/TypeSelector.cjs
generated
vendored
@ -2,21 +2,20 @@
|
||||
|
||||
const types = require('../../tokenizer/types.cjs');
|
||||
|
||||
const ASTERISK = 0x002A; // U+002A ASTERISK (*)
|
||||
const VERTICALLINE = 0x007C; // U+007C VERTICAL LINE (|)
|
||||
const ASTERISK = 0x002a; // U+002A ASTERISK (*)
|
||||
const VERTICALLINE = 0x007c; // U+007C VERTICAL LINE (|)
|
||||
|
||||
function eatIdentifierOrAsterisk() {
|
||||
if (this.tokenType !== types.Ident &&
|
||||
this.isDelim(ASTERISK) === false) {
|
||||
this.error('Identifier or asterisk is expected');
|
||||
}
|
||||
if (this.tokenType !== types.Ident && this.isDelim(ASTERISK) === false) {
|
||||
this.error('Identifier or asterisk is expected');
|
||||
}
|
||||
|
||||
this.next();
|
||||
this.next();
|
||||
}
|
||||
|
||||
const name = 'TypeSelector';
|
||||
const structure = {
|
||||
name: String
|
||||
name: String,
|
||||
};
|
||||
|
||||
// ident
|
||||
@ -28,29 +27,29 @@ const structure = {
|
||||
// |ident
|
||||
// |*
|
||||
function parse() {
|
||||
const start = this.tokenStart;
|
||||
const start = this.tokenStart;
|
||||
|
||||
if (this.isDelim(VERTICALLINE)) {
|
||||
this.next();
|
||||
eatIdentifierOrAsterisk.call(this);
|
||||
} else {
|
||||
eatIdentifierOrAsterisk.call(this);
|
||||
|
||||
if (this.isDelim(VERTICALLINE)) {
|
||||
this.next();
|
||||
eatIdentifierOrAsterisk.call(this);
|
||||
} else {
|
||||
eatIdentifierOrAsterisk.call(this);
|
||||
|
||||
if (this.isDelim(VERTICALLINE)) {
|
||||
this.next();
|
||||
eatIdentifierOrAsterisk.call(this);
|
||||
}
|
||||
this.next();
|
||||
eatIdentifierOrAsterisk.call(this);
|
||||
}
|
||||
}
|
||||
|
||||
return {
|
||||
type: 'TypeSelector',
|
||||
loc: this.getLocation(start, this.tokenStart),
|
||||
name: this.substrToCursor(start)
|
||||
};
|
||||
return {
|
||||
type: 'TypeSelector',
|
||||
loc: this.getLocation(start, this.tokenStart),
|
||||
name: this.substrToCursor(start),
|
||||
};
|
||||
}
|
||||
|
||||
function generate(node) {
|
||||
this.tokenize(node.name);
|
||||
this.tokenize(node.name);
|
||||
}
|
||||
|
||||
exports.generate = generate;
|
||||
|
180
node_modules/css-tree/cjs/syntax/node/UnicodeRange.cjs
generated
vendored
180
node_modules/css-tree/cjs/syntax/node/UnicodeRange.cjs
generated
vendored
@ -3,54 +3,58 @@
|
||||
const types = require('../../tokenizer/types.cjs');
|
||||
const charCodeDefinitions = require('../../tokenizer/char-code-definitions.cjs');
|
||||
|
||||
const PLUSSIGN = 0x002B; // U+002B PLUS SIGN (+)
|
||||
const HYPHENMINUS = 0x002D; // U+002D HYPHEN-MINUS (-)
|
||||
const QUESTIONMARK = 0x003F; // U+003F QUESTION MARK (?)
|
||||
const PLUSSIGN = 0x002b; // U+002B PLUS SIGN (+)
|
||||
const HYPHENMINUS = 0x002d; // U+002D HYPHEN-MINUS (-)
|
||||
const QUESTIONMARK = 0x003f; // U+003F QUESTION MARK (?)
|
||||
|
||||
function eatHexSequence(offset, allowDash) {
|
||||
let len = 0;
|
||||
let len = 0;
|
||||
|
||||
for (let pos = this.tokenStart + offset; pos < this.tokenEnd; pos++) {
|
||||
const code = this.charCodeAt(pos);
|
||||
for (let pos = this.tokenStart + offset; pos < this.tokenEnd; pos++) {
|
||||
const code = this.charCodeAt(pos);
|
||||
|
||||
if (code === HYPHENMINUS && allowDash && len !== 0) {
|
||||
eatHexSequence.call(this, offset + len + 1, false);
|
||||
return -1;
|
||||
}
|
||||
if (code === HYPHENMINUS && allowDash && len !== 0) {
|
||||
eatHexSequence.call(this, offset + len + 1, false);
|
||||
return -1;
|
||||
}
|
||||
|
||||
if (!charCodeDefinitions.isHexDigit(code)) {
|
||||
this.error(
|
||||
allowDash && len !== 0
|
||||
? 'Hyphen minus' + (len < 6 ? ' or hex digit' : '') + ' is expected'
|
||||
: (len < 6 ? 'Hex digit is expected' : 'Unexpected input'),
|
||||
pos
|
||||
);
|
||||
}
|
||||
if (!charCodeDefinitions.isHexDigit(code)) {
|
||||
this.error(
|
||||
allowDash && len !== 0 ?
|
||||
'Hyphen minus' + (len < 6 ? ' or hex digit' : '') + ' is expected'
|
||||
: len < 6 ? 'Hex digit is expected'
|
||||
: 'Unexpected input',
|
||||
pos
|
||||
);
|
||||
}
|
||||
|
||||
if (++len > 6) {
|
||||
this.error('Too many hex digits', pos);
|
||||
} }
|
||||
if (++len > 6) {
|
||||
this.error('Too many hex digits', pos);
|
||||
}
|
||||
}
|
||||
|
||||
this.next();
|
||||
return len;
|
||||
this.next();
|
||||
return len;
|
||||
}
|
||||
|
||||
function eatQuestionMarkSequence(max) {
|
||||
let count = 0;
|
||||
let count = 0;
|
||||
|
||||
while (this.isDelim(QUESTIONMARK)) {
|
||||
if (++count > max) {
|
||||
this.error('Too many question marks');
|
||||
}
|
||||
|
||||
this.next();
|
||||
while (this.isDelim(QUESTIONMARK)) {
if (++count > max) {
this.error('Too many question marks');
}

this.next();
}
}

function startsWith(code) {
if (this.charCodeAt(this.tokenStart) !== code) {
this.error((code === PLUSSIGN ? 'Plus sign' : 'Hyphen minus') + ' is expected');
}
if (this.charCodeAt(this.tokenStart) !== code) {
this.error(
(code === PLUSSIGN ? 'Plus sign' : 'Hyphen minus') + ' is expected'
);
}
}

// https://drafts.csswg.org/css-syntax/#urange
@ -73,83 +77,85 @@ function startsWith(code) {
// u <number-token> <number-token> |
// u '+' '?'+
function scanUnicodeRange() {
let hexLength = 0;
let hexLength = 0;

switch (this.tokenType) {
case types.Number:
// u <number-token> '?'*
// u <number-token> <dimension-token>
// u <number-token> <number-token>
hexLength = eatHexSequence.call(this, 1, true);
switch (this.tokenType) {
case types.Number:
// u <number-token> '?'*
// u <number-token> <dimension-token>
// u <number-token> <number-token>
hexLength = eatHexSequence.call(this, 1, true);

if (this.isDelim(QUESTIONMARK)) {
eatQuestionMarkSequence.call(this, 6 - hexLength);
break;
}
if (this.isDelim(QUESTIONMARK)) {
eatQuestionMarkSequence.call(this, 6 - hexLength);
break;
}

if (this.tokenType === types.Dimension ||
this.tokenType === types.Number) {
startsWith.call(this, HYPHENMINUS);
eatHexSequence.call(this, 1, false);
break;
}
if (
this.tokenType === types.Dimension ||
this.tokenType === types.Number
) {
startsWith.call(this, HYPHENMINUS);
eatHexSequence.call(this, 1, false);
break;
}

break;
break;

case types.Dimension:
// u <dimension-token> '?'*
hexLength = eatHexSequence.call(this, 1, true);
case types.Dimension:
// u <dimension-token> '?'*
hexLength = eatHexSequence.call(this, 1, true);

if (hexLength > 0) {
eatQuestionMarkSequence.call(this, 6 - hexLength);
}
if (hexLength > 0) {
eatQuestionMarkSequence.call(this, 6 - hexLength);
}

break;
break;

default:
// u '+' <ident-token> '?'*
// u '+' '?'+
this.eatDelim(PLUSSIGN);
default:
// u '+' <ident-token> '?'*
// u '+' '?'+
this.eatDelim(PLUSSIGN);

if (this.tokenType === types.Ident) {
hexLength = eatHexSequence.call(this, 0, true);
if (hexLength > 0) {
eatQuestionMarkSequence.call(this, 6 - hexLength);
}
break;
}
if (this.tokenType === types.Ident) {
hexLength = eatHexSequence.call(this, 0, true);
if (hexLength > 0) {
eatQuestionMarkSequence.call(this, 6 - hexLength);
}
break;
}

if (this.isDelim(QUESTIONMARK)) {
this.next();
eatQuestionMarkSequence.call(this, 5);
break;
}
if (this.isDelim(QUESTIONMARK)) {
this.next();
eatQuestionMarkSequence.call(this, 5);
break;
}

this.error('Hex digit or question mark is expected');
}
this.error('Hex digit or question mark is expected');
}
}

const name = 'UnicodeRange';
const structure = {
value: String
value: String,
};

function parse() {
const start = this.tokenStart;
const start = this.tokenStart;

// U or u
this.eatIdent('u');
scanUnicodeRange.call(this);
// U or u
this.eatIdent('u');
scanUnicodeRange.call(this);

return {
type: 'UnicodeRange',
loc: this.getLocation(start, this.tokenStart),
value: this.substrToCursor(start)
};
return {
type: 'UnicodeRange',
loc: this.getLocation(start, this.tokenStart),
value: this.substrToCursor(start),
};
}

function generate(node) {
this.tokenize(node.value);
this.tokenize(node.value);
}

exports.generate = generate;
56
node_modules/css-tree/cjs/syntax/node/Url.cjs
generated
vendored
56
node_modules/css-tree/cjs/syntax/node/Url.cjs
generated
vendored
@ -6,46 +6,46 @@ const types = require('../../tokenizer/types.cjs');

const name = 'Url';
const structure = {
value: String
value: String,
};

// <url-token> | <function-token> <string> )
function parse() {
const start = this.tokenStart;
let value;
const start = this.tokenStart;
let value;

switch (this.tokenType) {
case types.Url:
value = url.decode(this.consume(types.Url));
break;
switch (this.tokenType) {
case types.Url:
value = url.decode(this.consume(types.Url));
break;

case types.Function:
if (!this.cmpStr(this.tokenStart, this.tokenEnd, 'url(')) {
this.error('Function name must be `url`');
}
case types.Function:
if (!this.cmpStr(this.tokenStart, this.tokenEnd, 'url(')) {
this.error('Function name must be `url`');
}

this.eat(types.Function);
this.skipSC();
value = string.decode(this.consume(types.String));
this.skipSC();
if (!this.eof) {
this.eat(types.RightParenthesis);
}
break;
this.eat(types.Function);
this.skipSC();
value = string.decode(this.consume(types.String));
this.skipSC();
if (!this.eof) {
this.eat(types.RightParenthesis);
}
break;

default:
this.error('Url or Function is expected');
}
default:
this.error('Url or Function is expected');
}

return {
type: 'Url',
loc: this.getLocation(start, this.tokenStart),
value
};
return {
type: 'Url',
loc: this.getLocation(start, this.tokenStart),
value,
};
}

function generate(node) {
this.token(types.Url, url.encode(node.value));
this.token(types.Url, url.encode(node.value));
}

exports.generate = generate;
18
node_modules/css-tree/cjs/syntax/node/Value.cjs
generated
vendored
18
node_modules/css-tree/cjs/syntax/node/Value.cjs
generated
vendored
@ -2,22 +2,22 @@

const name = 'Value';
const structure = {
children: [[]]
children: [[]],
};

function parse() {
const start = this.tokenStart;
const children = this.readSequence(this.scope.Value);
const start = this.tokenStart;
const children = this.readSequence(this.scope.Value);

return {
type: 'Value',
loc: this.getLocation(start, this.tokenStart),
children
};
return {
type: 'Value',
loc: this.getLocation(start, this.tokenStart),
children,
};
}

function generate(node) {
this.children(node);
this.children(node);
}

exports.generate = generate;
24
node_modules/css-tree/cjs/syntax/node/WhiteSpace.cjs
generated
vendored
24
node_modules/css-tree/cjs/syntax/node/WhiteSpace.cjs
generated
vendored
@ -3,29 +3,29 @@
const types = require('../../tokenizer/types.cjs');

const SPACE = Object.freeze({
type: 'WhiteSpace',
loc: null,
value: ' '
type: 'WhiteSpace',
loc: null,
value: ' ',
});

const name = 'WhiteSpace';
const structure = {
value: String
value: String,
};

function parse() {
this.eat(types.WhiteSpace);
return SPACE;
this.eat(types.WhiteSpace);
return SPACE;

// return {
// type: 'WhiteSpace',
// loc: this.getLocation(this.tokenStart, this.tokenEnd),
// value: this.consume(WHITESPACE)
// };
// return {
// type: 'WhiteSpace',
// loc: this.getLocation(this.tokenStart, this.tokenEnd),
// value: this.consume(WHITESPACE)
// };
}

function generate(node) {
this.token(types.WhiteSpace, node.value);
this.token(types.WhiteSpace, node.value);
}

exports.generate = generate;
2
node_modules/css-tree/cjs/syntax/node/index-generate.cjs
generated
vendored
2
node_modules/css-tree/cjs/syntax/node/index-generate.cjs
generated
vendored
@ -41,8 +41,6 @@ const Url = require('./Url.cjs');
const Value = require('./Value.cjs');
const WhiteSpace = require('./WhiteSpace.cjs');


exports.AnPlusB = AnPlusB.generate;
exports.Atrule = Atrule.generate;
exports.AtrulePrelude = AtrulePrelude.generate;
2
node_modules/css-tree/cjs/syntax/node/index-parse-selector.cjs
generated
vendored
2
node_modules/css-tree/cjs/syntax/node/index-parse-selector.cjs
generated
vendored
@ -16,8 +16,6 @@ const SelectorList = require('./SelectorList.cjs');
const String = require('./String.cjs');
const TypeSelector = require('./TypeSelector.cjs');


exports.AnPlusB = AnPlusB.parse;
exports.AttributeSelector = AttributeSelector.parse;
exports.ClassSelector = ClassSelector.parse;
2
node_modules/css-tree/cjs/syntax/node/index-parse.cjs
generated
vendored
2
node_modules/css-tree/cjs/syntax/node/index-parse.cjs
generated
vendored
@ -41,8 +41,6 @@ const Url = require('./Url.cjs');
const Value = require('./Value.cjs');
const WhiteSpace = require('./WhiteSpace.cjs');


exports.AnPlusB = AnPlusB.parse;
exports.Atrule = Atrule.parse;
exports.AtrulePrelude = AtrulePrelude.parse;
2
node_modules/css-tree/cjs/syntax/node/index.cjs
generated
vendored
2
node_modules/css-tree/cjs/syntax/node/index.cjs
generated
vendored
@ -41,8 +41,6 @@ const Url = require('./Url.cjs');
const Value = require('./Value.cjs');
const WhiteSpace = require('./WhiteSpace.cjs');


exports.AnPlusB = AnPlusB;
exports.Atrule = Atrule;
exports.AtrulePrelude = AtrulePrelude;
60
node_modules/css-tree/cjs/syntax/pseudo/index.cjs
generated
vendored
60
node_modules/css-tree/cjs/syntax/pseudo/index.cjs
generated
vendored
@ -1,52 +1,44 @@
'use strict';

const selectorList = {
parse() {
return this.createSingleNodeList(
this.SelectorList()
);
}
parse() {
return this.createSingleNodeList(this.SelectorList());
},
};

const selector = {
parse() {
return this.createSingleNodeList(
this.Selector()
);
}
parse() {
return this.createSingleNodeList(this.Selector());
},
};

const identList = {
parse() {
return this.createSingleNodeList(
this.Identifier()
);
}
parse() {
return this.createSingleNodeList(this.Identifier());
},
};

const nth = {
parse() {
return this.createSingleNodeList(
this.Nth()
);
}
parse() {
return this.createSingleNodeList(this.Nth());
},
};

const pseudo = {
'dir': identList,
'has': selectorList,
'lang': identList,
'matches': selectorList,
'is': selectorList,
'-moz-any': selectorList,
'-webkit-any': selectorList,
'where': selectorList,
'not': selectorList,
'nth-child': nth,
'nth-last-child': nth,
'nth-last-of-type': nth,
'nth-of-type': nth,
'slotted': selector
dir: identList,
has: selectorList,
lang: identList,
matches: selectorList,
is: selectorList,
'-moz-any': selectorList,
'-webkit-any': selectorList,
where: selectorList,
not: selectorList,
'nth-child': nth,
'nth-last-child': nth,
'nth-last-of-type': nth,
'nth-of-type': nth,
slotted: selector,
};

module.exports = pseudo;
2
node_modules/css-tree/cjs/syntax/scope/atrulePrelude.cjs
generated
vendored
2
node_modules/css-tree/cjs/syntax/scope/atrulePrelude.cjs
generated
vendored
@ -3,7 +3,7 @@
const _default = require('./default.cjs');

const atrulePrelude = {
getNode: _default
getNode: _default,
};

module.exports = atrulePrelude;
106
node_modules/css-tree/cjs/syntax/scope/default.cjs
generated
vendored
106
node_modules/css-tree/cjs/syntax/scope/default.cjs
generated
vendored
@ -2,75 +2,79 @@

const types = require('../../tokenizer/types.cjs');

const NUMBERSIGN = 0x0023; // U+0023 NUMBER SIGN (#)
const ASTERISK = 0x002A; // U+002A ASTERISK (*)
const PLUSSIGN = 0x002B; // U+002B PLUS SIGN (+)
const HYPHENMINUS = 0x002D; // U+002D HYPHEN-MINUS (-)
const SOLIDUS = 0x002F; // U+002F SOLIDUS (/)
const U = 0x0075; // U+0075 LATIN SMALL LETTER U (u)
const NUMBERSIGN = 0x0023; // U+0023 NUMBER SIGN (#)
const ASTERISK = 0x002a; // U+002A ASTERISK (*)
const PLUSSIGN = 0x002b; // U+002B PLUS SIGN (+)
const HYPHENMINUS = 0x002d; // U+002D HYPHEN-MINUS (-)
const SOLIDUS = 0x002f; // U+002F SOLIDUS (/)
const U = 0x0075; // U+0075 LATIN SMALL LETTER U (u)

function defaultRecognizer(context) {
switch (this.tokenType) {
case types.Hash:
return this.Hash();
switch (this.tokenType) {
case types.Hash:
return this.Hash();

case types.Comma:
return this.Operator();
case types.Comma:
return this.Operator();

case types.LeftParenthesis:
return this.Parentheses(this.readSequence, context.recognizer);
case types.LeftParenthesis:
return this.Parentheses(this.readSequence, context.recognizer);

case types.LeftSquareBracket:
return this.Brackets(this.readSequence, context.recognizer);
case types.LeftSquareBracket:
return this.Brackets(this.readSequence, context.recognizer);

case types.String:
return this.String();
case types.String:
return this.String();

case types.Dimension:
return this.Dimension();
case types.Dimension:
return this.Dimension();

case types.Percentage:
return this.Percentage();
case types.Percentage:
return this.Percentage();

case types.Number:
return this.Number();
case types.Number:
return this.Number();

case types.Function:
return this.cmpStr(this.tokenStart, this.tokenEnd, 'url(')
? this.Url()
: this.Function(this.readSequence, context.recognizer);
case types.Function:
return this.cmpStr(this.tokenStart, this.tokenEnd, 'url(') ?
this.Url()
: this.Function(this.readSequence, context.recognizer);

case types.Url:
return this.Url();
case types.Url:
return this.Url();

case types.Ident:
// check for unicode range, it should start with u+ or U+
if (this.cmpChar(this.tokenStart, U) &&
this.cmpChar(this.tokenStart + 1, PLUSSIGN)) {
return this.UnicodeRange();
} else {
return this.Identifier();
}
case types.Ident:
// check for unicode range, it should start with u+ or U+
if (
this.cmpChar(this.tokenStart, U) &&
this.cmpChar(this.tokenStart + 1, PLUSSIGN)
) {
return this.UnicodeRange();
} else {
return this.Identifier();
}

case types.Delim: {
const code = this.charCodeAt(this.tokenStart);
case types.Delim: {
const code = this.charCodeAt(this.tokenStart);

if (code === SOLIDUS ||
code === ASTERISK ||
code === PLUSSIGN ||
code === HYPHENMINUS) {
return this.Operator(); // TODO: replace with Delim
}
if (
code === SOLIDUS ||
code === ASTERISK ||
code === PLUSSIGN ||
code === HYPHENMINUS
) {
return this.Operator(); // TODO: replace with Delim
}

// TODO: produce a node with Delim node type
// TODO: produce a node with Delim node type

if (code === NUMBERSIGN) {
this.error('Hex or identifier is expected', this.tokenStart + 1);
}
if (code === NUMBERSIGN) {
this.error('Hex or identifier is expected', this.tokenStart + 1);
}

break;
}
break;
}
}
}

module.exports = defaultRecognizer;
2
node_modules/css-tree/cjs/syntax/scope/index.cjs
generated
vendored
2
node_modules/css-tree/cjs/syntax/scope/index.cjs
generated
vendored
@ -4,8 +4,6 @@ const atrulePrelude = require('./atrulePrelude.cjs');
const selector = require('./selector.cjs');
const value = require('./value.cjs');


exports.AtrulePrelude = atrulePrelude;
exports.Selector = selector;
exports.Value = value;
121
node_modules/css-tree/cjs/syntax/scope/selector.cjs
generated
vendored
121
node_modules/css-tree/cjs/syntax/scope/selector.cjs
generated
vendored
@ -2,83 +2,88 @@
|
||||
|
||||
const types = require('../../tokenizer/types.cjs');
|
||||
|
||||
const NUMBERSIGN = 0x0023; // U+0023 NUMBER SIGN (#)
|
||||
const ASTERISK = 0x002A; // U+002A ASTERISK (*)
|
||||
const PLUSSIGN = 0x002B; // U+002B PLUS SIGN (+)
|
||||
const SOLIDUS = 0x002F; // U+002F SOLIDUS (/)
|
||||
const FULLSTOP = 0x002E; // U+002E FULL STOP (.)
|
||||
const GREATERTHANSIGN = 0x003E; // U+003E GREATER-THAN SIGN (>)
|
||||
const VERTICALLINE = 0x007C; // U+007C VERTICAL LINE (|)
|
||||
const TILDE = 0x007E; // U+007E TILDE (~)
|
||||
const NUMBERSIGN = 0x0023; // U+0023 NUMBER SIGN (#)
|
||||
const ASTERISK = 0x002a; // U+002A ASTERISK (*)
|
||||
const PLUSSIGN = 0x002b; // U+002B PLUS SIGN (+)
|
||||
const SOLIDUS = 0x002f; // U+002F SOLIDUS (/)
|
||||
const FULLSTOP = 0x002e; // U+002E FULL STOP (.)
|
||||
const GREATERTHANSIGN = 0x003e; // U+003E GREATER-THAN SIGN (>)
|
||||
const VERTICALLINE = 0x007c; // U+007C VERTICAL LINE (|)
|
||||
const TILDE = 0x007e; // U+007E TILDE (~)
|
||||
|
||||
function onWhiteSpace(next, children) {
|
||||
if (children.last !== null && children.last.type !== 'Combinator' &&
|
||||
next !== null && next.type !== 'Combinator') {
|
||||
children.push({ // FIXME: this.Combinator() should be used instead
|
||||
type: 'Combinator',
|
||||
loc: null,
|
||||
name: ' '
|
||||
});
|
||||
}
|
||||
if (
|
||||
children.last !== null &&
|
||||
children.last.type !== 'Combinator' &&
|
||||
next !== null &&
|
||||
next.type !== 'Combinator'
|
||||
) {
|
||||
children.push({
|
||||
// FIXME: this.Combinator() should be used instead
|
||||
type: 'Combinator',
|
||||
loc: null,
|
||||
name: ' ',
|
||||
});
|
||||
}
|
||||
}
|
||||
|
||||
function getNode() {
|
||||
switch (this.tokenType) {
|
||||
case types.LeftSquareBracket:
|
||||
return this.AttributeSelector();
|
||||
switch (this.tokenType) {
|
||||
case types.LeftSquareBracket:
|
||||
return this.AttributeSelector();
|
||||
|
||||
case types.Hash:
|
||||
return this.IdSelector();
|
||||
case types.Hash:
|
||||
return this.IdSelector();
|
||||
|
||||
case types.Colon:
|
||||
if (this.lookupType(1) === types.Colon) {
|
||||
return this.PseudoElementSelector();
|
||||
} else {
|
||||
return this.PseudoClassSelector();
|
||||
}
|
||||
case types.Colon:
|
||||
if (this.lookupType(1) === types.Colon) {
|
||||
return this.PseudoElementSelector();
|
||||
} else {
|
||||
return this.PseudoClassSelector();
|
||||
}
|
||||
|
||||
case types.Ident:
|
||||
return this.TypeSelector();
|
||||
case types.Ident:
|
||||
return this.TypeSelector();
|
||||
|
||||
case types.Number:
|
||||
case types.Percentage:
|
||||
return this.Percentage();
|
||||
case types.Number:
|
||||
case types.Percentage:
|
||||
return this.Percentage();
|
||||
|
||||
case types.Dimension:
|
||||
// throws when .123ident
|
||||
if (this.charCodeAt(this.tokenStart) === FULLSTOP) {
|
||||
this.error('Identifier is expected', this.tokenStart + 1);
|
||||
}
|
||||
break;
|
||||
case types.Dimension:
|
||||
// throws when .123ident
|
||||
if (this.charCodeAt(this.tokenStart) === FULLSTOP) {
|
||||
this.error('Identifier is expected', this.tokenStart + 1);
|
||||
}
|
||||
break;
|
||||
|
||||
case types.Delim: {
|
||||
const code = this.charCodeAt(this.tokenStart);
|
||||
case types.Delim: {
|
||||
const code = this.charCodeAt(this.tokenStart);
|
||||
|
||||
switch (code) {
|
||||
case PLUSSIGN:
|
||||
case GREATERTHANSIGN:
|
||||
case TILDE:
|
||||
case SOLIDUS: // /deep/
|
||||
return this.Combinator();
|
||||
switch (code) {
|
||||
case PLUSSIGN:
|
||||
case GREATERTHANSIGN:
|
||||
case TILDE:
|
||||
case SOLIDUS: // /deep/
|
||||
return this.Combinator();
|
||||
|
||||
case FULLSTOP:
|
||||
return this.ClassSelector();
|
||||
case FULLSTOP:
|
||||
return this.ClassSelector();
|
||||
|
||||
case ASTERISK:
|
||||
case VERTICALLINE:
|
||||
return this.TypeSelector();
|
||||
case ASTERISK:
|
||||
case VERTICALLINE:
|
||||
return this.TypeSelector();
|
||||
|
||||
case NUMBERSIGN:
|
||||
return this.IdSelector();
|
||||
}
|
||||
case NUMBERSIGN:
|
||||
return this.IdSelector();
|
||||
}
|
||||
|
||||
break;
|
||||
}
|
||||
break;
|
||||
}
|
||||
}
|
||||
}
|
||||
const Selector = {
|
||||
onWhiteSpace,
|
||||
getNode
|
||||
onWhiteSpace,
|
||||
getNode,
|
||||
};
|
||||
|
||||
module.exports = Selector;
|
||||
|
33
node_modules/css-tree/cjs/syntax/scope/value.cjs
generated
vendored
33
node_modules/css-tree/cjs/syntax/scope/value.cjs
generated
vendored
@ -5,25 +5,26 @@ const expression = require('../function/expression.cjs');
const _var = require('../function/var.cjs');

function isPlusMinusOperator(node) {
return (
node !== null &&
node.type === 'Operator' &&
(node.value[node.value.length - 1] === '-' || node.value[node.value.length - 1] === '+')
);
return (
node !== null &&
node.type === 'Operator' &&
(node.value[node.value.length - 1] === '-' ||
node.value[node.value.length - 1] === '+')
);
}

const value = {
getNode: _default,
onWhiteSpace(next, children) {
if (isPlusMinusOperator(next)) {
next.value = ' ' + next.value;
}
if (isPlusMinusOperator(children.last)) {
children.last.value += ' ';
}
},
'expression': expression,
'var': _var
getNode: _default,
onWhiteSpace(next, children) {
if (isPlusMinusOperator(next)) {
next.value = ' ' + next.value;
}
if (isPlusMinusOperator(children.last)) {
children.last.value += ' ';
}
},
expression: expression,
var: _var,
};

module.exports = value;
133
node_modules/css-tree/cjs/tokenizer/OffsetToLocation.cjs
generated
vendored
133
node_modules/css-tree/cjs/tokenizer/OffsetToLocation.cjs
generated
vendored
@ -8,84 +8,89 @@ const F = 12;
|
||||
const R = 13;
|
||||
|
||||
function computeLinesAndColumns(host) {
|
||||
const source = host.source;
|
||||
const sourceLength = source.length;
|
||||
const startOffset = source.length > 0 ? charCodeDefinitions.isBOM(source.charCodeAt(0)) : 0;
|
||||
const lines = adoptBuffer.adoptBuffer(host.lines, sourceLength);
|
||||
const columns = adoptBuffer.adoptBuffer(host.columns, sourceLength);
|
||||
let line = host.startLine;
|
||||
let column = host.startColumn;
|
||||
const source = host.source;
|
||||
const sourceLength = source.length;
|
||||
const startOffset =
|
||||
source.length > 0 ? charCodeDefinitions.isBOM(source.charCodeAt(0)) : 0;
|
||||
const lines = adoptBuffer.adoptBuffer(host.lines, sourceLength);
|
||||
const columns = adoptBuffer.adoptBuffer(host.columns, sourceLength);
|
||||
let line = host.startLine;
|
||||
let column = host.startColumn;
|
||||
|
||||
for (let i = startOffset; i < sourceLength; i++) {
|
||||
const code = source.charCodeAt(i);
|
||||
for (let i = startOffset; i < sourceLength; i++) {
|
||||
const code = source.charCodeAt(i);
|
||||
|
||||
lines[i] = line;
|
||||
columns[i] = column++;
|
||||
|
||||
if (code === N || code === R || code === F) {
|
||||
if (
|
||||
code === R &&
|
||||
i + 1 < sourceLength &&
|
||||
source.charCodeAt(i + 1) === N
|
||||
) {
|
||||
i++;
|
||||
lines[i] = line;
|
||||
columns[i] = column++;
|
||||
columns[i] = column;
|
||||
}
|
||||
|
||||
if (code === N || code === R || code === F) {
|
||||
if (code === R && i + 1 < sourceLength && source.charCodeAt(i + 1) === N) {
|
||||
i++;
|
||||
lines[i] = line;
|
||||
columns[i] = column;
|
||||
}
|
||||
|
||||
line++;
|
||||
column = 1;
|
||||
}
|
||||
line++;
|
||||
column = 1;
|
||||
}
|
||||
}
|
||||
|
||||
lines[sourceLength] = line;
|
||||
columns[sourceLength] = column;
|
||||
lines[sourceLength] = line;
|
||||
columns[sourceLength] = column;
|
||||
|
||||
host.lines = lines;
|
||||
host.columns = columns;
|
||||
host.computed = true;
|
||||
host.lines = lines;
|
||||
host.columns = columns;
|
||||
host.computed = true;
|
||||
}
|
||||
|
||||
class OffsetToLocation {
|
||||
constructor() {
|
||||
this.lines = null;
|
||||
this.columns = null;
|
||||
this.computed = false;
|
||||
constructor() {
|
||||
this.lines = null;
|
||||
this.columns = null;
|
||||
this.computed = false;
|
||||
}
|
||||
setSource(source, startOffset = 0, startLine = 1, startColumn = 1) {
|
||||
this.source = source;
|
||||
this.startOffset = startOffset;
|
||||
this.startLine = startLine;
|
||||
this.startColumn = startColumn;
|
||||
this.computed = false;
|
||||
}
|
||||
getLocation(offset, filename) {
|
||||
if (!this.computed) {
|
||||
computeLinesAndColumns(this);
|
||||
}
|
||||
setSource(source, startOffset = 0, startLine = 1, startColumn = 1) {
|
||||
this.source = source;
|
||||
this.startOffset = startOffset;
|
||||
this.startLine = startLine;
|
||||
this.startColumn = startColumn;
|
||||
this.computed = false;
|
||||
}
|
||||
getLocation(offset, filename) {
|
||||
if (!this.computed) {
|
||||
computeLinesAndColumns(this);
|
||||
}
|
||||
|
||||
return {
|
||||
source: filename,
|
||||
offset: this.startOffset + offset,
|
||||
line: this.lines[offset],
|
||||
column: this.columns[offset]
|
||||
};
|
||||
return {
|
||||
source: filename,
|
||||
offset: this.startOffset + offset,
|
||||
line: this.lines[offset],
|
||||
column: this.columns[offset],
|
||||
};
|
||||
}
|
||||
getLocationRange(start, end, filename) {
|
||||
if (!this.computed) {
|
||||
computeLinesAndColumns(this);
|
||||
}
|
||||
getLocationRange(start, end, filename) {
|
||||
if (!this.computed) {
|
||||
computeLinesAndColumns(this);
|
||||
}
|
||||
|
||||
return {
|
||||
source: filename,
|
||||
start: {
|
||||
offset: this.startOffset + start,
|
||||
line: this.lines[start],
|
||||
column: this.columns[start]
|
||||
},
|
||||
end: {
|
||||
offset: this.startOffset + end,
|
||||
line: this.lines[end],
|
||||
column: this.columns[end]
|
||||
}
|
||||
};
|
||||
}
|
||||
return {
|
||||
source: filename,
|
||||
start: {
|
||||
offset: this.startOffset + start,
|
||||
line: this.lines[start],
|
||||
column: this.columns[start],
|
||||
},
|
||||
end: {
|
||||
offset: this.startOffset + end,
|
||||
line: this.lines[end],
|
||||
column: this.columns[end],
|
||||
},
|
||||
};
|
||||
}
|
||||
}
|
||||
|
||||
exports.OffsetToLocation = OffsetToLocation;
|
||||
|
458
node_modules/css-tree/cjs/tokenizer/TokenStream.cjs
generated
vendored
458
node_modules/css-tree/cjs/tokenizer/TokenStream.cjs
generated
vendored
@ -5,260 +5,268 @@ const utils = require('./utils.cjs');
|
||||
const names = require('./names.cjs');
|
||||
const types = require('./types.cjs');
|
||||
|
||||
const OFFSET_MASK = 0x00FFFFFF;
|
||||
const OFFSET_MASK = 0x00ffffff;
|
||||
const TYPE_SHIFT = 24;
|
||||
const balancePair = new Map([
|
||||
[types.Function, types.RightParenthesis],
|
||||
[types.LeftParenthesis, types.RightParenthesis],
|
||||
[types.LeftSquareBracket, types.RightSquareBracket],
|
||||
[types.LeftCurlyBracket, types.RightCurlyBracket]
|
||||
[types.Function, types.RightParenthesis],
|
||||
[types.LeftParenthesis, types.RightParenthesis],
|
||||
[types.LeftSquareBracket, types.RightSquareBracket],
|
||||
[types.LeftCurlyBracket, types.RightCurlyBracket],
|
||||
]);
|
||||
|
||||
class TokenStream {
|
||||
constructor(source, tokenize) {
|
||||
this.setSource(source, tokenize);
|
||||
}
|
||||
reset() {
|
||||
this.eof = false;
|
||||
this.tokenIndex = -1;
|
||||
this.tokenType = 0;
|
||||
this.tokenStart = this.firstCharOffset;
|
||||
this.tokenEnd = this.firstCharOffset;
|
||||
}
|
||||
setSource(source = '', tokenize = () => {}) {
|
||||
source = String(source || '');
|
||||
constructor(source, tokenize) {
|
||||
this.setSource(source, tokenize);
|
||||
}
|
||||
reset() {
|
||||
this.eof = false;
|
||||
this.tokenIndex = -1;
|
||||
this.tokenType = 0;
|
||||
this.tokenStart = this.firstCharOffset;
|
||||
this.tokenEnd = this.firstCharOffset;
|
||||
}
|
||||
setSource(source = '', tokenize = () => {}) {
|
||||
source = String(source || '');
|
||||
|
||||
const sourceLength = source.length;
|
||||
const offsetAndType = adoptBuffer.adoptBuffer(this.offsetAndType, source.length + 1); // +1 because of eof-token
|
||||
const balance = adoptBuffer.adoptBuffer(this.balance, source.length + 1);
|
||||
let tokenCount = 0;
|
||||
let balanceCloseType = 0;
|
||||
let balanceStart = 0;
|
||||
let firstCharOffset = -1;
|
||||
const sourceLength = source.length;
|
||||
const offsetAndType = adoptBuffer.adoptBuffer(
|
||||
this.offsetAndType,
|
||||
source.length + 1
|
||||
); // +1 because of eof-token
|
||||
const balance = adoptBuffer.adoptBuffer(this.balance, source.length + 1);
|
||||
let tokenCount = 0;
|
||||
let balanceCloseType = 0;
|
||||
let balanceStart = 0;
|
||||
let firstCharOffset = -1;
|
||||
|
||||
// capture buffers
|
||||
this.offsetAndType = null;
|
||||
this.balance = null;
|
||||
// capture buffers
|
||||
this.offsetAndType = null;
|
||||
this.balance = null;
|
||||
|
||||
tokenize(source, (type, start, end) => {
|
||||
switch (type) {
|
||||
default:
|
||||
balance[tokenCount] = sourceLength;
|
||||
break;
|
||||
tokenize(source, (type, start, end) => {
|
||||
switch (type) {
|
||||
default:
|
||||
balance[tokenCount] = sourceLength;
|
||||
break;
|
||||
|
||||
case balanceCloseType: {
|
||||
let balancePrev = balanceStart & OFFSET_MASK;
|
||||
balanceStart = balance[balancePrev];
|
||||
balanceCloseType = balanceStart >> TYPE_SHIFT;
|
||||
balance[tokenCount] = balancePrev;
|
||||
balance[balancePrev++] = tokenCount;
|
||||
for (; balancePrev < tokenCount; balancePrev++) {
|
||||
if (balance[balancePrev] === sourceLength) {
|
||||
balance[balancePrev] = tokenCount;
|
||||
}
|
||||
}
|
||||
break;
|
||||
}
|
||||
|
||||
case types.LeftParenthesis:
|
||||
case types.Function:
|
||||
case types.LeftSquareBracket:
|
||||
case types.LeftCurlyBracket:
|
||||
balance[tokenCount] = balanceStart;
|
||||
balanceCloseType = balancePair.get(type);
|
||||
balanceStart = (balanceCloseType << TYPE_SHIFT) | tokenCount;
|
||||
break;
|
||||
case balanceCloseType: {
|
||||
let balancePrev = balanceStart & OFFSET_MASK;
|
||||
balanceStart = balance[balancePrev];
|
||||
balanceCloseType = balanceStart >> TYPE_SHIFT;
|
||||
balance[tokenCount] = balancePrev;
|
||||
balance[balancePrev++] = tokenCount;
|
||||
for (; balancePrev < tokenCount; balancePrev++) {
|
||||
if (balance[balancePrev] === sourceLength) {
|
||||
balance[balancePrev] = tokenCount;
|
||||
}
|
||||
|
||||
offsetAndType[tokenCount++] = (type << TYPE_SHIFT) | end;
|
||||
if (firstCharOffset === -1) {
|
||||
firstCharOffset = start;
|
||||
}
|
||||
});
|
||||
|
||||
// finalize buffers
|
||||
offsetAndType[tokenCount] = (types.EOF << TYPE_SHIFT) | sourceLength; // <EOF-token>
|
||||
balance[tokenCount] = sourceLength;
|
||||
balance[sourceLength] = sourceLength; // prevents false positive balance match with any token
|
||||
while (balanceStart !== 0) {
|
||||
const balancePrev = balanceStart & OFFSET_MASK;
|
||||
balanceStart = balance[balancePrev];
|
||||
balance[balancePrev] = sourceLength;
|
||||
}
|
||||
break;
|
||||
}
|
||||
|
||||
this.source = source;
|
||||
this.firstCharOffset = firstCharOffset === -1 ? 0 : firstCharOffset;
|
||||
this.tokenCount = tokenCount;
|
||||
this.offsetAndType = offsetAndType;
|
||||
this.balance = balance;
|
||||
case types.LeftParenthesis:
|
||||
case types.Function:
|
||||
case types.LeftSquareBracket:
|
||||
case types.LeftCurlyBracket:
|
||||
balance[tokenCount] = balanceStart;
|
||||
balanceCloseType = balancePair.get(type);
|
||||
balanceStart = (balanceCloseType << TYPE_SHIFT) | tokenCount;
|
||||
break;
|
||||
}
|
||||
|
||||
this.reset();
|
||||
this.next();
|
||||
offsetAndType[tokenCount++] = (type << TYPE_SHIFT) | end;
|
||||
if (firstCharOffset === -1) {
|
||||
firstCharOffset = start;
|
||||
}
|
||||
});
|
||||
|
||||
// finalize buffers
|
||||
offsetAndType[tokenCount] = (types.EOF << TYPE_SHIFT) | sourceLength; // <EOF-token>
|
||||
balance[tokenCount] = sourceLength;
|
||||
balance[sourceLength] = sourceLength; // prevents false positive balance match with any token
|
||||
while (balanceStart !== 0) {
|
||||
const balancePrev = balanceStart & OFFSET_MASK;
|
||||
balanceStart = balance[balancePrev];
|
||||
balance[balancePrev] = sourceLength;
|
||||
}
|
||||
|
||||
lookupType(offset) {
|
||||
offset += this.tokenIndex;
|
||||
this.source = source;
|
||||
this.firstCharOffset = firstCharOffset === -1 ? 0 : firstCharOffset;
|
||||
this.tokenCount = tokenCount;
|
||||
this.offsetAndType = offsetAndType;
|
||||
this.balance = balance;
|
||||
|
||||
if (offset < this.tokenCount) {
|
||||
return this.offsetAndType[offset] >> TYPE_SHIFT;
|
||||
}
|
||||
this.reset();
|
||||
this.next();
|
||||
}
|
||||
|
||||
return types.EOF;
|
||||
}
|
||||
lookupOffset(offset) {
|
||||
offset += this.tokenIndex;
|
||||
lookupType(offset) {
|
||||
offset += this.tokenIndex;
|
||||
|
||||
if (offset < this.tokenCount) {
|
||||
return this.offsetAndType[offset - 1] & OFFSET_MASK;
|
||||
}
|
||||
|
||||
return this.source.length;
|
||||
}
|
||||
lookupValue(offset, referenceStr) {
|
||||
offset += this.tokenIndex;
|
||||
|
||||
if (offset < this.tokenCount) {
|
||||
return utils.cmpStr(
|
||||
this.source,
|
||||
this.offsetAndType[offset - 1] & OFFSET_MASK,
|
||||
this.offsetAndType[offset] & OFFSET_MASK,
|
||||
referenceStr
|
||||
);
|
||||
}
|
||||
|
||||
return false;
|
||||
}
|
||||
getTokenStart(tokenIndex) {
|
||||
if (tokenIndex === this.tokenIndex) {
|
||||
return this.tokenStart;
|
||||
}
|
||||
|
||||
if (tokenIndex > 0) {
|
||||
return tokenIndex < this.tokenCount
|
||||
? this.offsetAndType[tokenIndex - 1] & OFFSET_MASK
|
||||
: this.offsetAndType[this.tokenCount] & OFFSET_MASK;
|
||||
}
|
||||
|
||||
return this.firstCharOffset;
|
||||
}
|
||||
substrToCursor(start) {
|
||||
return this.source.substring(start, this.tokenStart);
|
||||
if (offset < this.tokenCount) {
|
||||
return this.offsetAndType[offset] >> TYPE_SHIFT;
|
||||
}
|
||||
|
||||
isBalanceEdge(pos) {
|
||||
return this.balance[this.tokenIndex] < pos;
|
||||
}
|
||||
isDelim(code, offset) {
|
||||
if (offset) {
|
||||
return (
|
||||
this.lookupType(offset) === types.Delim &&
|
||||
this.source.charCodeAt(this.lookupOffset(offset)) === code
|
||||
);
|
||||
}
|
||||
return types.EOF;
|
||||
}
|
||||
lookupOffset(offset) {
|
||||
offset += this.tokenIndex;
|
||||
|
||||
return (
|
||||
this.tokenType === types.Delim &&
|
||||
this.source.charCodeAt(this.tokenStart) === code
|
||||
);
|
||||
if (offset < this.tokenCount) {
|
||||
return this.offsetAndType[offset - 1] & OFFSET_MASK;
|
||||
}
|
||||
|
||||
skip(tokenCount) {
|
||||
let next = this.tokenIndex + tokenCount;
|
||||
return this.source.length;
|
||||
}
|
||||
lookupValue(offset, referenceStr) {
|
||||
offset += this.tokenIndex;
|
||||
|
||||
if (next < this.tokenCount) {
|
||||
this.tokenIndex = next;
|
||||
this.tokenStart = this.offsetAndType[next - 1] & OFFSET_MASK;
|
||||
next = this.offsetAndType[next];
|
||||
this.tokenType = next >> TYPE_SHIFT;
|
||||
this.tokenEnd = next & OFFSET_MASK;
|
||||
} else {
|
||||
this.tokenIndex = this.tokenCount;
|
||||
this.next();
|
||||
}
|
||||
}
|
||||
next() {
|
||||
let next = this.tokenIndex + 1;
|
||||
|
||||
if (next < this.tokenCount) {
|
||||
this.tokenIndex = next;
|
||||
this.tokenStart = this.tokenEnd;
|
||||
next = this.offsetAndType[next];
|
||||
this.tokenType = next >> TYPE_SHIFT;
|
||||
this.tokenEnd = next & OFFSET_MASK;
|
||||
} else {
|
||||
this.eof = true;
|
||||
this.tokenIndex = this.tokenCount;
|
||||
this.tokenType = types.EOF;
|
||||
this.tokenStart = this.tokenEnd = this.source.length;
|
||||
}
|
||||
}
|
||||
skipSC() {
|
||||
while (this.tokenType === types.WhiteSpace || this.tokenType === types.Comment) {
|
||||
this.next();
|
||||
}
|
||||
}
|
||||
skipUntilBalanced(startToken, stopConsume) {
|
||||
let cursor = startToken;
|
||||
let balanceEnd;
|
||||
let offset;
|
||||
|
||||
loop:
|
||||
for (; cursor < this.tokenCount; cursor++) {
|
||||
balanceEnd = this.balance[cursor];
|
||||
|
||||
// stop scanning on balance edge that points to offset before start token
|
||||
if (balanceEnd < startToken) {
|
||||
break loop;
|
||||
}
|
||||
|
||||
offset = cursor > 0 ? this.offsetAndType[cursor - 1] & OFFSET_MASK : this.firstCharOffset;
|
||||
|
||||
// check stop condition
|
||||
switch (stopConsume(this.source.charCodeAt(offset))) {
|
||||
case 1: // just stop
|
||||
break loop;
|
||||
|
||||
case 2: // stop & included
|
||||
cursor++;
|
||||
break loop;
|
||||
|
||||
default:
|
||||
// fast forward to the end of balanced block
|
||||
if (this.balance[balanceEnd] === cursor) {
|
||||
cursor = balanceEnd;
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
this.skip(cursor - this.tokenIndex);
|
||||
if (offset < this.tokenCount) {
|
||||
return utils.cmpStr(
|
||||
this.source,
|
||||
this.offsetAndType[offset - 1] & OFFSET_MASK,
|
||||
this.offsetAndType[offset] & OFFSET_MASK,
|
||||
referenceStr
|
||||
);
|
||||
}
|
||||
|
||||
forEachToken(fn) {
|
||||
for (let i = 0, offset = this.firstCharOffset; i < this.tokenCount; i++) {
|
||||
const start = offset;
|
||||
const item = this.offsetAndType[i];
|
||||
const end = item & OFFSET_MASK;
|
||||
const type = item >> TYPE_SHIFT;
|
||||
|
||||
offset = end;
|
||||
|
||||
fn(type, start, end, i);
|
||||
}
|
||||
return false;
|
||||
}
|
||||
getTokenStart(tokenIndex) {
|
||||
if (tokenIndex === this.tokenIndex) {
|
||||
return this.tokenStart;
|
||||
}
|
||||
dump() {
|
||||
const tokens = new Array(this.tokenCount);
|
||||
|
||||
this.forEachToken((type, start, end, index) => {
|
||||
tokens[index] = {
|
||||
idx: index,
|
||||
type: names[type],
|
||||
chunk: this.source.substring(start, end),
|
||||
balance: this.balance[index]
|
||||
};
|
||||
});
|
||||
|
||||
return tokens;
|
||||
if (tokenIndex > 0) {
|
||||
return tokenIndex < this.tokenCount ?
|
||||
this.offsetAndType[tokenIndex - 1] & OFFSET_MASK
|
||||
: this.offsetAndType[this.tokenCount] & OFFSET_MASK;
|
||||
}
|
||||
|
||||
return this.firstCharOffset;
|
||||
}
|
||||
substrToCursor(start) {
|
||||
return this.source.substring(start, this.tokenStart);
|
||||
}
|
||||
|
||||
isBalanceEdge(pos) {
|
||||
return this.balance[this.tokenIndex] < pos;
|
||||
}
|
||||
isDelim(code, offset) {
|
||||
if (offset) {
|
||||
return (
|
||||
this.lookupType(offset) === types.Delim &&
|
||||
this.source.charCodeAt(this.lookupOffset(offset)) === code
|
||||
);
|
||||
}
|
||||
|
||||
return (
|
||||
this.tokenType === types.Delim &&
|
||||
this.source.charCodeAt(this.tokenStart) === code
|
||||
);
|
||||
}
|
||||
|
||||
skip(tokenCount) {
|
||||
let next = this.tokenIndex + tokenCount;
|
||||
|
||||
if (next < this.tokenCount) {
|
||||
this.tokenIndex = next;
|
||||
this.tokenStart = this.offsetAndType[next - 1] & OFFSET_MASK;
|
||||
next = this.offsetAndType[next];
|
||||
this.tokenType = next >> TYPE_SHIFT;
|
||||
this.tokenEnd = next & OFFSET_MASK;
|
||||
} else {
|
||||
this.tokenIndex = this.tokenCount;
|
||||
this.next();
|
||||
}
|
||||
}
|
||||
next() {
|
||||
let next = this.tokenIndex + 1;
|
||||
|
||||
if (next < this.tokenCount) {
|
||||
this.tokenIndex = next;
|
||||
this.tokenStart = this.tokenEnd;
|
||||
next = this.offsetAndType[next];
|
||||
this.tokenType = next >> TYPE_SHIFT;
|
||||
this.tokenEnd = next & OFFSET_MASK;
|
||||
} else {
|
||||
this.eof = true;
|
||||
this.tokenIndex = this.tokenCount;
|
||||
this.tokenType = types.EOF;
|
||||
this.tokenStart = this.tokenEnd = this.source.length;
|
||||
}
|
||||
}
|
||||
skipSC() {
|
||||
while (
|
||||
this.tokenType === types.WhiteSpace ||
|
||||
this.tokenType === types.Comment
|
||||
) {
|
||||
this.next();
|
||||
}
|
||||
}
|
||||
skipUntilBalanced(startToken, stopConsume) {
|
||||
let cursor = startToken;
|
||||
let balanceEnd;
|
||||
let offset;
|
||||
|
||||
loop: for (; cursor < this.tokenCount; cursor++) {
|
||||
balanceEnd = this.balance[cursor];
|
||||
|
||||
// stop scanning on balance edge that points to offset before start token
|
||||
if (balanceEnd < startToken) {
|
||||
break loop;
|
||||
}
|
||||
|
||||
offset =
|
||||
cursor > 0 ?
|
||||
this.offsetAndType[cursor - 1] & OFFSET_MASK
|
||||
: this.firstCharOffset;
|
||||
|
||||
// check stop condition
|
||||
switch (stopConsume(this.source.charCodeAt(offset))) {
|
||||
case 1: // just stop
|
||||
break loop;
|
||||
|
||||
case 2: // stop & included
|
||||
cursor++;
|
||||
break loop;
|
||||
|
||||
default:
|
||||
// fast forward to the end of balanced block
|
||||
if (this.balance[balanceEnd] === cursor) {
|
||||
cursor = balanceEnd;
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
this.skip(cursor - this.tokenIndex);
|
||||
}
|
||||
|
||||
forEachToken(fn) {
|
||||
for (let i = 0, offset = this.firstCharOffset; i < this.tokenCount; i++) {
|
||||
const start = offset;
|
||||
const item = this.offsetAndType[i];
|
||||
const end = item & OFFSET_MASK;
|
||||
const type = item >> TYPE_SHIFT;
|
||||
|
||||
offset = end;
|
||||
|
||||
fn(type, start, end, i);
|
||||
}
|
||||
}
|
||||
dump() {
|
||||
const tokens = new Array(this.tokenCount);
|
||||
|
||||
this.forEachToken((type, start, end, index) => {
|
||||
tokens[index] = {
|
||||
idx: index,
|
||||
type: names[type],
|
||||
chunk: this.source.substring(start, end),
|
||||
balance: this.balance[index],
|
||||
};
|
||||
});
|
||||
|
||||
return tokens;
|
||||
}
|
||||
}
|
||||
|
||||
exports.TokenStream = TokenStream;
|
||||
|
8
node_modules/css-tree/cjs/tokenizer/adopt-buffer.cjs
generated
vendored
8
node_modules/css-tree/cjs/tokenizer/adopt-buffer.cjs
generated
vendored
@ -3,11 +3,11 @@
const MIN_SIZE = 16 * 1024;

function adoptBuffer(buffer = null, size) {
if (buffer === null || buffer.length < size) {
return new Uint32Array(Math.max(size + 1024, MIN_SIZE));
}
if (buffer === null || buffer.length < size) {
return new Uint32Array(Math.max(size + 1024, MIN_SIZE));
}

return buffer;
return buffer;
}

exports.adoptBuffer = adoptBuffer;
191
node_modules/css-tree/cjs/tokenizer/char-code-definitions.cjs
generated
vendored
191
node_modules/css-tree/cjs/tokenizer/char-code-definitions.cjs
generated
vendored
@ -8,66 +8,66 @@ const EOF = 0;
|
||||
// digit
|
||||
// A code point between U+0030 DIGIT ZERO (0) and U+0039 DIGIT NINE (9).
|
||||
function isDigit(code) {
|
||||
return code >= 0x0030 && code <= 0x0039;
|
||||
return code >= 0x0030 && code <= 0x0039;
|
||||
}
|
||||
|
||||
// hex digit
|
||||
// A digit, or a code point between U+0041 LATIN CAPITAL LETTER A (A) and U+0046 LATIN CAPITAL LETTER F (F),
|
||||
// or a code point between U+0061 LATIN SMALL LETTER A (a) and U+0066 LATIN SMALL LETTER F (f).
|
||||
function isHexDigit(code) {
|
||||
return (
|
||||
isDigit(code) || // 0 .. 9
|
||||
(code >= 0x0041 && code <= 0x0046) || // A .. F
|
||||
(code >= 0x0061 && code <= 0x0066) // a .. f
|
||||
);
|
||||
return (
|
||||
isDigit(code) || // 0 .. 9
|
||||
(code >= 0x0041 && code <= 0x0046) || // A .. F
|
||||
(code >= 0x0061 && code <= 0x0066) // a .. f
|
||||
);
|
||||
}
|
||||
|
||||
// uppercase letter
|
||||
// A code point between U+0041 LATIN CAPITAL LETTER A (A) and U+005A LATIN CAPITAL LETTER Z (Z).
|
||||
function isUppercaseLetter(code) {
|
||||
return code >= 0x0041 && code <= 0x005A;
|
||||
return code >= 0x0041 && code <= 0x005a;
|
||||
}
|
||||
|
||||
// lowercase letter
|
||||
// A code point between U+0061 LATIN SMALL LETTER A (a) and U+007A LATIN SMALL LETTER Z (z).
|
||||
function isLowercaseLetter(code) {
|
||||
return code >= 0x0061 && code <= 0x007A;
|
||||
return code >= 0x0061 && code <= 0x007a;
|
||||
}
|
||||
|
||||
// letter
|
||||
// An uppercase letter or a lowercase letter.
|
||||
function isLetter(code) {
|
||||
return isUppercaseLetter(code) || isLowercaseLetter(code);
|
||||
return isUppercaseLetter(code) || isLowercaseLetter(code);
|
||||
}
|
||||
|
||||
// non-ASCII code point
|
||||
// A code point with a value equal to or greater than U+0080 <control>.
|
||||
function isNonAscii(code) {
|
||||
return code >= 0x0080;
|
||||
return code >= 0x0080;
|
||||
}
|
||||
|
||||
// name-start code point
|
||||
// A letter, a non-ASCII code point, or U+005F LOW LINE (_).
|
||||
function isNameStart(code) {
|
||||
return isLetter(code) || isNonAscii(code) || code === 0x005F;
|
||||
return isLetter(code) || isNonAscii(code) || code === 0x005f;
|
||||
}
|
||||
|
||||
// name code point
|
||||
// A name-start code point, a digit, or U+002D HYPHEN-MINUS (-).
|
||||
function isName(code) {
|
||||
return isNameStart(code) || isDigit(code) || code === 0x002D;
|
||||
return isNameStart(code) || isDigit(code) || code === 0x002d;
|
||||
}
|
||||
|
||||
// non-printable code point
|
||||
// A code point between U+0000 NULL and U+0008 BACKSPACE, or U+000B LINE TABULATION,
|
||||
// or a code point between U+000E SHIFT OUT and U+001F INFORMATION SEPARATOR ONE, or U+007F DELETE.
|
||||
function isNonPrintable(code) {
|
||||
return (
|
||||
(code >= 0x0000 && code <= 0x0008) ||
|
||||
(code === 0x000B) ||
|
||||
(code >= 0x000E && code <= 0x001F) ||
|
||||
(code === 0x007F)
|
||||
);
|
||||
return (
|
||||
(code >= 0x0000 && code <= 0x0008) ||
|
||||
code === 0x000b ||
|
||||
(code >= 0x000e && code <= 0x001f) ||
|
||||
code === 0x007f
|
||||
);
|
||||
}
|
||||
|
||||
// newline
|
||||
@ -75,96 +75,94 @@ function isNonPrintable(code) {
|
||||
// as they are converted to U+000A LINE FEED during preprocessing.
|
||||
// TODO: we doesn't do a preprocessing, so check a code point for U+000D CARRIAGE RETURN and U+000C FORM FEED
|
||||
function isNewline(code) {
|
||||
return code === 0x000A || code === 0x000D || code === 0x000C;
|
||||
return code === 0x000a || code === 0x000d || code === 0x000c;
|
||||
}
|
||||
|
||||
// whitespace
|
||||
// A newline, U+0009 CHARACTER TABULATION, or U+0020 SPACE.
|
||||
function isWhiteSpace(code) {
|
||||
return isNewline(code) || code === 0x0020 || code === 0x0009;
|
||||
return isNewline(code) || code === 0x0020 || code === 0x0009;
|
||||
}
|
||||
|
||||
// § 4.3.8. Check if two code points are a valid escape
|
||||
function isValidEscape(first, second) {
|
||||
// If the first code point is not U+005C REVERSE SOLIDUS (\), return false.
|
||||
if (first !== 0x005C) {
|
||||
return false;
|
||||
}
|
||||
// If the first code point is not U+005C REVERSE SOLIDUS (\), return false.
|
||||
if (first !== 0x005c) {
|
||||
return false;
|
||||
}
|
||||
|
||||
// Otherwise, if the second code point is a newline or EOF, return false.
|
||||
if (isNewline(second) || second === EOF) {
|
||||
return false;
|
||||
}
|
||||
// Otherwise, if the second code point is a newline or EOF, return false.
|
||||
if (isNewline(second) || second === EOF) {
|
||||
return false;
|
||||
}
|
||||
|
||||
// Otherwise, return true.
|
||||
return true;
|
||||
// Otherwise, return true.
|
||||
return true;
|
||||
}
|
||||
|
||||
// § 4.3.9. Check if three code points would start an identifier
|
||||
function isIdentifierStart(first, second, third) {
|
||||
// Look at the first code point:
|
||||
// Look at the first code point:
|
||||
|
||||
// U+002D HYPHEN-MINUS
|
||||
if (first === 0x002D) {
|
||||
// If the second code point is a name-start code point or a U+002D HYPHEN-MINUS,
|
||||
// or the second and third code points are a valid escape, return true. Otherwise, return false.
|
||||
return (
|
||||
isNameStart(second) ||
|
||||
second === 0x002D ||
|
||||
isValidEscape(second, third)
|
||||
);
|
||||
}
|
||||
// U+002D HYPHEN-MINUS
|
||||
if (first === 0x002d) {
|
||||
// If the second code point is a name-start code point or a U+002D HYPHEN-MINUS,
|
||||
// or the second and third code points are a valid escape, return true. Otherwise, return false.
|
||||
return (
|
||||
isNameStart(second) || second === 0x002d || isValidEscape(second, third)
|
||||
);
|
||||
}
|
||||
|
||||
// name-start code point
|
||||
if (isNameStart(first)) {
|
||||
// Return true.
|
||||
return true;
|
||||
}
|
||||
// name-start code point
|
||||
if (isNameStart(first)) {
|
||||
// Return true.
|
||||
return true;
|
||||
}
|
||||
|
||||
// U+005C REVERSE SOLIDUS (\)
|
||||
if (first === 0x005C) {
|
||||
// If the first and second code points are a valid escape, return true. Otherwise, return false.
|
||||
return isValidEscape(first, second);
|
||||
}
|
||||
// U+005C REVERSE SOLIDUS (\)
|
||||
if (first === 0x005c) {
|
||||
// If the first and second code points are a valid escape, return true. Otherwise, return false.
|
||||
return isValidEscape(first, second);
|
||||
}
|
||||
|
||||
// anything else
|
||||
// Return false.
|
||||
return false;
|
||||
// anything else
|
||||
// Return false.
|
||||
return false;
|
||||
}
|
||||
|
||||
// § 4.3.10. Check if three code points would start a number
|
||||
function isNumberStart(first, second, third) {
|
||||
// Look at the first code point:
|
||||
// Look at the first code point:
|
||||
|
||||
// U+002B PLUS SIGN (+)
|
||||
// U+002D HYPHEN-MINUS (-)
|
||||
if (first === 0x002B || first === 0x002D) {
|
||||
// If the second code point is a digit, return true.
|
||||
if (isDigit(second)) {
|
||||
return 2;
|
||||
}
|
||||
|
||||
// Otherwise, if the second code point is a U+002E FULL STOP (.)
|
||||
// and the third code point is a digit, return true.
|
||||
// Otherwise, return false.
|
||||
return second === 0x002E && isDigit(third) ? 3 : 0;
|
||||
// U+002B PLUS SIGN (+)
|
||||
// U+002D HYPHEN-MINUS (-)
|
||||
if (first === 0x002b || first === 0x002d) {
|
||||
// If the second code point is a digit, return true.
|
||||
if (isDigit(second)) {
|
||||
return 2;
|
||||
}
|
||||
|
||||
// U+002E FULL STOP (.)
|
||||
if (first === 0x002E) {
|
||||
// If the second code point is a digit, return true. Otherwise, return false.
|
||||
return isDigit(second) ? 2 : 0;
|
||||
}
|
||||
// Otherwise, if the second code point is a U+002E FULL STOP (.)
|
||||
// and the third code point is a digit, return true.
|
||||
// Otherwise, return false.
|
||||
return second === 0x002e && isDigit(third) ? 3 : 0;
|
||||
}
|
||||
|
||||
// digit
|
||||
if (isDigit(first)) {
|
||||
// Return true.
|
||||
return 1;
|
||||
}
|
||||
// U+002E FULL STOP (.)
|
||||
if (first === 0x002e) {
|
||||
// If the second code point is a digit, return true. Otherwise, return false.
|
||||
return isDigit(second) ? 2 : 0;
|
||||
}
|
||||
|
||||
// anything else
|
||||
// Return false.
|
||||
return 0;
|
||||
// digit
|
||||
if (isDigit(first)) {
|
||||
// Return true.
|
||||
return 1;
|
||||
}
|
||||
|
||||
// anything else
|
||||
// Return false.
|
||||
return 0;
|
||||
}
|
||||
|
||||
//
|
||||
@ -173,17 +171,17 @@ function isNumberStart(first, second, third) {
|
||||
|
||||
// detect BOM (https://en.wikipedia.org/wiki/Byte_order_mark)
|
||||
function isBOM(code) {
|
||||
// UTF-16BE
|
||||
if (code === 0xFEFF) {
|
||||
return 1;
|
||||
}
|
||||
// UTF-16BE
|
||||
if (code === 0xfeff) {
|
||||
return 1;
|
||||
}
|
||||
|
||||
// UTF-16LE
|
||||
if (code === 0xFFFE) {
|
||||
return 1;
|
||||
}
|
||||
// UTF-16LE
|
||||
if (code === 0xfffe) {
|
||||
return 1;
|
||||
}
|
||||
|
||||
return 0;
|
||||
return 0;
|
||||
}
|
||||
|
||||
// Fast code category
|
||||
@ -196,16 +194,17 @@ const NameStartCategory = 0x84;
|
||||
const NonPrintableCategory = 0x85;
|
||||
|
||||
for (let i = 0; i < CATEGORY.length; i++) {
|
||||
CATEGORY[i] =
|
||||
isWhiteSpace(i) && WhiteSpaceCategory ||
|
||||
isDigit(i) && DigitCategory ||
|
||||
isNameStart(i) && NameStartCategory ||
|
||||
isNonPrintable(i) && NonPrintableCategory ||
|
||||
i || EofCategory;
|
||||
CATEGORY[i] =
|
||||
(isWhiteSpace(i) && WhiteSpaceCategory) ||
|
||||
(isDigit(i) && DigitCategory) ||
|
||||
(isNameStart(i) && NameStartCategory) ||
|
||||
(isNonPrintable(i) && NonPrintableCategory) ||
|
||||
i ||
|
||||
EofCategory;
|
||||
}
|
||||
|
||||
function charCodeCategory(code) {
|
||||
return code < 0x80 ? CATEGORY[code] : NameStartCategory;
|
||||
return code < 0x80 ? CATEGORY[code] : NameStartCategory;
|
||||
}
|
||||
|
||||
exports.DigitCategory = DigitCategory;
|
||||
|
954
node_modules/css-tree/cjs/tokenizer/index.cjs
generated
vendored
954
node_modules/css-tree/cjs/tokenizer/index.cjs
generated
vendored
@ -8,485 +8,535 @@ const OffsetToLocation = require('./OffsetToLocation.cjs');
|
||||
const TokenStream = require('./TokenStream.cjs');
|
||||
|
||||
function tokenize(source, onToken) {
|
||||
function getCharCode(offset) {
|
||||
return offset < sourceLength ? source.charCodeAt(offset) : 0;
|
||||
function getCharCode(offset) {
|
||||
return offset < sourceLength ? source.charCodeAt(offset) : 0;
|
||||
}
|
||||
|
||||
// § 4.3.3. Consume a numeric token
|
||||
function consumeNumericToken() {
|
||||
// Consume a number and let number be the result.
|
||||
offset = utils.consumeNumber(source, offset);
|
||||
|
||||
// If the next 3 input code points would start an identifier, then:
|
||||
if (
|
||||
charCodeDefinitions.isIdentifierStart(
|
||||
getCharCode(offset),
|
||||
getCharCode(offset + 1),
|
||||
getCharCode(offset + 2)
|
||||
)
|
||||
) {
|
||||
// Create a <dimension-token> with the same value and type flag as number, and a unit set initially to the empty string.
|
||||
// Consume a name. Set the <dimension-token>’s unit to the returned value.
|
||||
// Return the <dimension-token>.
|
||||
type = types.Dimension;
|
||||
offset = utils.consumeName(source, offset);
|
||||
return;
|
||||
}
|
||||
|
||||
// § 4.3.3. Consume a numeric token
|
||||
function consumeNumericToken() {
|
||||
// Consume a number and let number be the result.
|
||||
offset = utils.consumeNumber(source, offset);
|
||||
|
||||
// If the next 3 input code points would start an identifier, then:
|
||||
if (charCodeDefinitions.isIdentifierStart(getCharCode(offset), getCharCode(offset + 1), getCharCode(offset + 2))) {
|
||||
// Create a <dimension-token> with the same value and type flag as number, and a unit set initially to the empty string.
|
||||
// Consume a name. Set the <dimension-token>’s unit to the returned value.
|
||||
// Return the <dimension-token>.
|
||||
type = types.Dimension;
|
||||
offset = utils.consumeName(source, offset);
|
||||
return;
|
||||
}
|
||||
|
||||
// Otherwise, if the next input code point is U+0025 PERCENTAGE SIGN (%), consume it.
|
||||
if (getCharCode(offset) === 0x0025) {
|
||||
// Create a <percentage-token> with the same value as number, and return it.
|
||||
type = types.Percentage;
|
||||
offset++;
|
||||
return;
|
||||
}
|
||||
|
||||
// Otherwise, create a <number-token> with the same value and type flag as number, and return it.
|
||||
type = types.Number;
|
||||
// Otherwise, if the next input code point is U+0025 PERCENTAGE SIGN (%), consume it.
|
||||
if (getCharCode(offset) === 0x0025) {
|
||||
// Create a <percentage-token> with the same value as number, and return it.
|
||||
type = types.Percentage;
|
||||
offset++;
|
||||
return;
|
||||
}
|
||||
|
||||
// § 4.3.4. Consume an ident-like token
|
||||
function consumeIdentLikeToken() {
|
||||
const nameStartOffset = offset;
|
||||
// Otherwise, create a <number-token> with the same value and type flag as number, and return it.
|
||||
type = types.Number;
|
||||
}
|
||||
|
||||
// Consume a name, and let string be the result.
|
||||
offset = utils.consumeName(source, offset);
|
||||
// § 4.3.4. Consume an ident-like token
|
||||
function consumeIdentLikeToken() {
|
||||
const nameStartOffset = offset;
|
||||
|
||||
// If string’s value is an ASCII case-insensitive match for "url",
|
||||
// and the next input code point is U+0028 LEFT PARENTHESIS ((), consume it.
|
||||
if (utils.cmpStr(source, nameStartOffset, offset, 'url') && getCharCode(offset) === 0x0028) {
|
||||
// While the next two input code points are whitespace, consume the next input code point.
|
||||
offset = utils.findWhiteSpaceEnd(source, offset + 1);
|
||||
// Consume a name, and let string be the result.
|
||||
offset = utils.consumeName(source, offset);
|
||||
|
||||
// If the next one or two input code points are U+0022 QUOTATION MARK ("), U+0027 APOSTROPHE ('),
|
||||
// or whitespace followed by U+0022 QUOTATION MARK (") or U+0027 APOSTROPHE ('),
|
||||
// then create a <function-token> with its value set to string and return it.
|
||||
if (getCharCode(offset) === 0x0022 ||
|
||||
getCharCode(offset) === 0x0027) {
|
||||
type = types.Function;
|
||||
offset = nameStartOffset + 4;
|
||||
return;
|
||||
// If string’s value is an ASCII case-insensitive match for "url",
|
||||
// and the next input code point is U+0028 LEFT PARENTHESIS ((), consume it.
|
||||
if (
|
||||
utils.cmpStr(source, nameStartOffset, offset, 'url') &&
|
||||
getCharCode(offset) === 0x0028
|
||||
) {
|
||||
// While the next two input code points are whitespace, consume the next input code point.
|
||||
offset = utils.findWhiteSpaceEnd(source, offset + 1);
|
||||
|
||||
// If the next one or two input code points are U+0022 QUOTATION MARK ("), U+0027 APOSTROPHE ('),
|
||||
// or whitespace followed by U+0022 QUOTATION MARK (") or U+0027 APOSTROPHE ('),
|
||||
// then create a <function-token> with its value set to string and return it.
|
||||
if (getCharCode(offset) === 0x0022 || getCharCode(offset) === 0x0027) {
|
||||
type = types.Function;
|
||||
offset = nameStartOffset + 4;
|
||||
return;
|
||||
}
|
||||
|
||||
// Otherwise, consume a url token, and return it.
|
||||
consumeUrlToken();
|
||||
return;
|
||||
}
|
||||
|
||||
// Otherwise, if the next input code point is U+0028 LEFT PARENTHESIS ((), consume it.
|
||||
// Create a <function-token> with its value set to string and return it.
|
||||
if (getCharCode(offset) === 0x0028) {
|
||||
type = types.Function;
|
||||
offset++;
|
||||
return;
|
||||
}
|
||||
|
||||
// Otherwise, create an <ident-token> with its value set to string and return it.
|
||||
type = types.Ident;
|
||||
}
|
||||
    // § 4.3.5. Consume a string token
    function consumeStringToken(endingCodePoint) {
        // This algorithm may be called with an ending code point, which denotes the code point
        // that ends the string. If an ending code point is not specified,
        // the current input code point is used.
        if (!endingCodePoint) {
            endingCodePoint = getCharCode(offset++);
        }

        // Initially create a <string-token> with its value set to the empty string.
        type = types.String;

        // Repeatedly consume the next input code point from the stream:
        for (; offset < source.length; offset++) {
            const code = source.charCodeAt(offset);

            switch (charCodeDefinitions.charCodeCategory(code)) {
                // ending code point
                case endingCodePoint:
                    // Return the <string-token>.
                    offset++;
                    return;

                // EOF
                // case EofCategory:
                // This is a parse error. Return the <string-token>.
                // return;

                // newline
                case charCodeDefinitions.WhiteSpaceCategory:
                    if (charCodeDefinitions.isNewline(code)) {
                        // This is a parse error. Reconsume the current input code point,
                        // create a <bad-string-token>, and return it.
                        offset += utils.getNewlineLength(source, offset, code);
                        type = types.BadString;
                        return;
                    }
                    break;

                // U+005C REVERSE SOLIDUS (\)
                case 0x005c:
                    // If the next input code point is EOF, do nothing.
                    if (offset === source.length - 1) {
                        break;
                    }

                    const nextCode = getCharCode(offset + 1);

                    // Otherwise, if the next input code point is a newline, consume it.
                    if (charCodeDefinitions.isNewline(nextCode)) {
                        offset += utils.getNewlineLength(source, offset + 1, nextCode);
                    } else if (charCodeDefinitions.isValidEscape(code, nextCode)) {
                        // Otherwise, (the stream starts with a valid escape) consume
                        // an escaped code point and append the returned code point to
                        // the <string-token>’s value.
                        offset = utils.consumeEscaped(source, offset) - 1;
                    }
                    break;

                // anything else
                // Append the current input code point to the <string-token>’s value.
            }
        }
    }
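Another small sketch (same assumption about a top-level tokenize() export; outputs are described, not taken from this diff): the newline branch above is what turns an unterminated quoted literal into a bad-string-token instead of letting it swallow the rest of the input.

const { tokenize } = require('css-tree');

for (const css of ['"one two"', '"one\ntwo"']) {
    const tokens = [];
    tokenize(css, (type, start, end) => tokens.push(css.slice(start, end)));
    // First input: a single string-token. Second input: the unescaped newline
    // ends the literal early as a bad-string-token and tokenization continues
    // with the rest of the line.
    console.log(tokens);
}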
    // § 4.3.6. Consume a url token
    // Note: This algorithm assumes that the initial "url(" has already been consumed.
    // This algorithm also assumes that it’s being called to consume an "unquoted" value, like url(foo).
    // A quoted value, like url("foo"), is parsed as a <function-token>. Consume an ident-like token
    // automatically handles this distinction; this algorithm shouldn’t be called directly otherwise.
    function consumeUrlToken() {
        // Initially create a <url-token> with its value set to the empty string.
        type = types.Url;

        // Consume as much whitespace as possible.
        offset = utils.findWhiteSpaceEnd(source, offset);

        // Repeatedly consume the next input code point from the stream:
        for (; offset < source.length; offset++) {
            const code = source.charCodeAt(offset);

            switch (charCodeDefinitions.charCodeCategory(code)) {
                // U+0029 RIGHT PARENTHESIS ())
                case 0x0029:
                    // Return the <url-token>.
                    offset++;
                    return;

                // EOF
                // case EofCategory:
                // This is a parse error. Return the <url-token>.
                // return;

                // whitespace
                case charCodeDefinitions.WhiteSpaceCategory:
                    // Consume as much whitespace as possible.
                    offset = utils.findWhiteSpaceEnd(source, offset);

                    // If the next input code point is U+0029 RIGHT PARENTHESIS ()) or EOF,
                    // consume it and return the <url-token>
                    // (if EOF was encountered, this is a parse error);
                    if (getCharCode(offset) === 0x0029 || offset >= source.length) {
                        if (offset < source.length) {
                            offset++;
                        }
                        return;
                    }

                    // otherwise, consume the remnants of a bad url, create a <bad-url-token>,
                    // and return it.
                    offset = utils.consumeBadUrlRemnants(source, offset);
                    type = types.BadUrl;
                    return;

                // U+0022 QUOTATION MARK (")
                // U+0027 APOSTROPHE (')
                // U+0028 LEFT PARENTHESIS (()
                // non-printable code point
                case 0x0022:
                case 0x0027:
                case 0x0028:
                case charCodeDefinitions.NonPrintableCategory:
                    // This is a parse error. Consume the remnants of a bad url,
                    // create a <bad-url-token>, and return it.
                    offset = utils.consumeBadUrlRemnants(source, offset);
                    type = types.BadUrl;
                    return;

                // U+005C REVERSE SOLIDUS (\)
                case 0x005c:
                    // If the stream starts with a valid escape, consume an escaped code point and
                    // append the returned code point to the <url-token>’s value.
                    if (charCodeDefinitions.isValidEscape(code, getCharCode(offset + 1))) {
                        offset = utils.consumeEscaped(source, offset) - 1;
                        break;
                    }

                    // Otherwise, this is a parse error. Consume the remnants of a bad url,
                    // create a <bad-url-token>, and return it.
                    offset = utils.consumeBadUrlRemnants(source, offset);
                    type = types.BadUrl;
                    return;

                // anything else
                // Append the current input code point to the <url-token>’s value.
            }
        }
    }
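Sketch of the whitespace handling above (same assumptions as the previous notes; the results are described in comments, not verified against this exact build):

const { tokenize } = require('css-tree');

function firstToken(css) {
    let text = null;
    tokenize(css, (type, start, end) => {
        if (text === null) text = css.slice(start, end);
    });
    return text;
}

// Whitespace before the closing ")" is consumed and the construct is still one url-token.
console.log(firstToken('url(  image.png  )'));
// Whitespace in the middle falls through to consumeBadUrlRemnants(), producing a bad-url-token.
console.log(firstToken('url(image one.png)'));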
    // ensure source is a string
    source = String(source || '');

    const sourceLength = source.length;
    let start = charCodeDefinitions.isBOM(getCharCode(0));
    let offset = start;
    let type;

    // https://drafts.csswg.org/css-syntax-3/#consume-token
    // § 4.3.1. Consume a token
    while (offset < sourceLength) {
        const code = source.charCodeAt(offset);

        switch (charCodeDefinitions.charCodeCategory(code)) {
            // whitespace
            case charCodeDefinitions.WhiteSpaceCategory:
                // Consume as much whitespace as possible. Return a <whitespace-token>.
                type = types.WhiteSpace;
                offset = utils.findWhiteSpaceEnd(source, offset + 1);
                break;

            // U+0022 QUOTATION MARK (")
            case 0x0022:
                // Consume a string token and return it.
                consumeStringToken();
                break;

            // U+0023 NUMBER SIGN (#)
            case 0x0023:
                // If the next input code point is a name code point or the next two input code points are a valid escape, then:
                if (charCodeDefinitions.isName(getCharCode(offset + 1)) || charCodeDefinitions.isValidEscape(getCharCode(offset + 1), getCharCode(offset + 2))) {
                    // Create a <hash-token>.
                    type = types.Hash;

                    // If the next 3 input code points would start an identifier, set the <hash-token>’s type flag to "id".
                    // if (isIdentifierStart(getCharCode(offset + 1), getCharCode(offset + 2), getCharCode(offset + 3))) {
                    //     // TODO: set id flag
                    // }

                    // Consume a name, and set the <hash-token>’s value to the returned string.
                    offset = utils.consumeName(source, offset + 1);

                    // Return the <hash-token>.
                } else {
                    // Otherwise, return a <delim-token> with its value set to the current input code point.
                    type = types.Delim;
                    offset++;
                }

                break;

            // U+0027 APOSTROPHE (')
            case 0x0027:
                // Consume a string token and return it.
                consumeStringToken();
                break;

            // U+0028 LEFT PARENTHESIS (()
            case 0x0028:
                // Return a <(-token>.
                type = types.LeftParenthesis;
                offset++;
                break;

            // U+0029 RIGHT PARENTHESIS ())
            case 0x0029:
                // Return a <)-token>.
                type = types.RightParenthesis;
                offset++;
                break;

            // U+002B PLUS SIGN (+)
            case 0x002B:
                // If the input stream starts with a number, ...
                if (charCodeDefinitions.isNumberStart(code, getCharCode(offset + 1), getCharCode(offset + 2))) {
                    // ... reconsume the current input code point, consume a numeric token, and return it.
                    consumeNumericToken();
                } else {
                    // Otherwise, return a <delim-token> with its value set to the current input code point.
                    type = types.Delim;
                    offset++;
                }
                break;

            // U+002C COMMA (,)
            case 0x002C:
                // Return a <comma-token>.
                type = types.Comma;
                offset++;
                break;

            // U+002D HYPHEN-MINUS (-)
            case 0x002D:
                // If the input stream starts with a number, reconsume the current input code point, consume a numeric token, and return it.
                if (charCodeDefinitions.isNumberStart(code, getCharCode(offset + 1), getCharCode(offset + 2))) {
                    consumeNumericToken();
                } else {
                    // Otherwise, if the next 2 input code points are U+002D HYPHEN-MINUS U+003E GREATER-THAN SIGN (->), consume them and return a <CDC-token>.
                    if (getCharCode(offset + 1) === 0x002D &&
                        getCharCode(offset + 2) === 0x003E) {
                        type = types.CDC;
                        offset = offset + 3;
                    } else {
                        // Otherwise, if the input stream starts with an identifier, ...
                        if (charCodeDefinitions.isIdentifierStart(code, getCharCode(offset + 1), getCharCode(offset + 2))) {
                            // ... reconsume the current input code point, consume an ident-like token, and return it.
                            consumeIdentLikeToken();
                        } else {
                            // Otherwise, return a <delim-token> with its value set to the current input code point.
                            type = types.Delim;
                            offset++;
                        }
                    }
                }
                break;

            // U+002E FULL STOP (.)
            case 0x002E:
                // If the input stream starts with a number, ...
                if (charCodeDefinitions.isNumberStart(code, getCharCode(offset + 1), getCharCode(offset + 2))) {
                    // ... reconsume the current input code point, consume a numeric token, and return it.
                    consumeNumericToken();
                } else {
                    // Otherwise, return a <delim-token> with its value set to the current input code point.
                    type = types.Delim;
                    offset++;
                }

                break;

            // U+002F SOLIDUS (/)
            case 0x002F:
                // If the next two input code point are U+002F SOLIDUS (/) followed by a U+002A ASTERISK (*),
                if (getCharCode(offset + 1) === 0x002A) {
                    // ... consume them and all following code points up to and including the first U+002A ASTERISK (*)
                    // followed by a U+002F SOLIDUS (/), or up to an EOF code point.
                    type = types.Comment;
                    offset = source.indexOf('*/', offset + 2);
                    offset = offset === -1 ? source.length : offset + 2;
                } else {
                    type = types.Delim;
                    offset++;
                }
                break;

            // U+003A COLON (:)
            case 0x003A:
                // Return a <colon-token>.
                type = types.Colon;
                offset++;
                break;

            // U+003B SEMICOLON (;)
            case 0x003B:
                // Return a <semicolon-token>.
                type = types.Semicolon;
                offset++;
                break;

            // U+003C LESS-THAN SIGN (<)
            case 0x003C:
                // If the next 3 input code points are U+0021 EXCLAMATION MARK U+002D HYPHEN-MINUS U+002D HYPHEN-MINUS (!--), ...
                if (getCharCode(offset + 1) === 0x0021 &&
                    getCharCode(offset + 2) === 0x002D &&
                    getCharCode(offset + 3) === 0x002D) {
                    // ... consume them and return a <CDO-token>.
                    type = types.CDO;
                    offset = offset + 4;
                } else {
                    // Otherwise, return a <delim-token> with its value set to the current input code point.
                    type = types.Delim;
                    offset++;
                }

                break;

            // U+0040 COMMERCIAL AT (@)
            case 0x0040:
                // If the next 3 input code points would start an identifier, ...
                if (charCodeDefinitions.isIdentifierStart(getCharCode(offset + 1), getCharCode(offset + 2), getCharCode(offset + 3))) {
                    // ... consume a name, create an <at-keyword-token> with its value set to the returned value, and return it.
                    type = types.AtKeyword;
                    offset = utils.consumeName(source, offset + 1);
                } else {
                    // Otherwise, return a <delim-token> with its value set to the current input code point.
                    type = types.Delim;
                    offset++;
                }

                break;

            // U+005B LEFT SQUARE BRACKET ([)
            case 0x005B:
                // Return a <[-token>.
                type = types.LeftSquareBracket;
                offset++;
                break;

            // U+005C REVERSE SOLIDUS (\)
            case 0x005C:
                // If the input stream starts with a valid escape, ...
                if (charCodeDefinitions.isValidEscape(code, getCharCode(offset + 1))) {
                    // ... reconsume the current input code point, consume an ident-like token, and return it.
                    consumeIdentLikeToken();
                } else {
                    // Otherwise, this is a parse error. Return a <delim-token> with its value set to the current input code point.
                    type = types.Delim;
                    offset++;
                }
                break;

            // U+005D RIGHT SQUARE BRACKET (])
            case 0x005D:
                // Return a <]-token>.
                type = types.RightSquareBracket;
                offset++;
                break;

            // U+007B LEFT CURLY BRACKET ({)
            case 0x007B:
                // Return a <{-token>.
                type = types.LeftCurlyBracket;
                offset++;
                break;

            // U+007D RIGHT CURLY BRACKET (})
            case 0x007D:
                // Return a <}-token>.
                type = types.RightCurlyBracket;
                offset++;
                break;

            // digit
            case charCodeDefinitions.DigitCategory:
                // Reconsume the current input code point, consume a numeric token, and return it.
                consumeNumericToken();
                break;

            // name-start code point
            case charCodeDefinitions.NameStartCategory:
                // Reconsume the current input code point, consume an ident-like token, and return it.
                consumeIdentLikeToken();
                break;

            // EOF
            // case EofCategory:
            // Return an <EOF-token>.
            // break;

            // anything else
            default:
                // Return a <delim-token> with its value set to the current input code point.
                type = types.Delim;
                offset++;
        }

        // put token to stream
        onToken(type, start, start = offset);
    }
}

exports.AtKeyword = types.AtKeyword;
50
node_modules/css-tree/cjs/tokenizer/names.cjs
generated
vendored
50
node_modules/css-tree/cjs/tokenizer/names.cjs
generated
vendored
@ -1,31 +1,31 @@
'use strict';

const tokenNames = [
    'EOF-token',
    'ident-token',
    'function-token',
    'at-keyword-token',
    'hash-token',
    'string-token',
    'bad-string-token',
    'url-token',
    'bad-url-token',
    'delim-token',
    'number-token',
    'percentage-token',
    'dimension-token',
    'whitespace-token',
    'CDO-token',
    'CDC-token',
    'colon-token',
    'semicolon-token',
    'comma-token',
    '[-token',
    ']-token',
    '(-token',
    ')-token',
    '{-token',
    '}-token',
];

module.exports = tokenNames;
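The array above maps the numeric token types passed to onToken(type, start, end) back to readable names. A hedged sketch of how the two fit together, assuming tokenize() is available from the package root and that this names module can be required alongside it (the relative path is illustrative only):

const { tokenize } = require('css-tree');
const tokenNames = require('./names.cjs'); // the module shown above; adjust the path for your setup

const css = '.a { color: #fff }';
tokenize(css, (type, start, end) => {
    console.log(tokenNames[type], JSON.stringify(css.slice(start, end)));
});
// Expected shape of the output: delim-token ".", ident-token "a",
// whitespace-token " ", {-token "{", ..., hash-token "#fff", ..., }-token "}".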