chore: update deps

Rim
2025-03-29 15:51:03 -04:00
parent e1fab5e7ef
commit a9cac1605b
2120 changed files with 369757 additions and 880 deletions

28
node_modules/css-tree/lib/convertor/create.js generated vendored Normal file

@ -0,0 +1,28 @@
import { List } from '../utils/List.js';
export function createConvertor(walk) {
return {
fromPlainObject(ast) {
walk(ast, {
enter(node) {
if (node.children && node.children instanceof List === false) {
node.children = new List().fromArray(node.children);
}
}
});
return ast;
},
toPlainObject(ast) {
walk(ast, {
leave(node) {
if (node.children && node.children instanceof List) {
node.children = node.children.toArray();
}
}
});
return ast;
}
};
};
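
A minimal round-trip sketch of the two convertor methods above, assuming the `toPlainObject`/`fromPlainObject` bindings re-exported from lib/index.js (shown later in this diff) are imported from the `css-tree` package:

import { parse, generate, toPlainObject, fromPlainObject } from 'css-tree';

const ast = parse('.a { color: red }');
const plain = toPlainObject(ast);        // children Lists become plain arrays, in place (JSON-friendly)
const restored = fromPlainObject(plain); // arrays are wrapped back into List instances
console.log(generate(restored));         // '.a{color:red}'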

4
node_modules/css-tree/lib/convertor/index.js generated vendored Normal file

@ -0,0 +1,4 @@
import { createConvertor } from './create.js';
import walker from '../walker/index.js';
export default createConvertor(walker);

6
node_modules/css-tree/lib/data-patch.js generated vendored Normal file

@ -0,0 +1,6 @@
import { createRequire } from 'module';
const require = createRequire(import.meta.url);
const patch = require('../data/patch.json');
export default patch;

95
node_modules/css-tree/lib/data.js generated vendored Normal file

@ -0,0 +1,95 @@
import { createRequire } from 'module';
import patch from './data-patch.js';
const require = createRequire(import.meta.url);
const mdnAtrules = require('mdn-data/css/at-rules.json');
const mdnProperties = require('mdn-data/css/properties.json');
const mdnSyntaxes = require('mdn-data/css/syntaxes.json');
const extendSyntax = /^\s*\|\s*/;
function preprocessAtrules(dict) {
const result = Object.create(null);
for (const atruleName in dict) {
const atrule = dict[atruleName];
let descriptors = null;
if (atrule.descriptors) {
descriptors = Object.create(null);
for (const descriptor in atrule.descriptors) {
descriptors[descriptor] = atrule.descriptors[descriptor].syntax;
}
}
result[atruleName.substr(1)] = {
prelude: atrule.syntax.trim().replace(/\{(.|\s)+\}/, '').match(/^@\S+\s+([^;\{]*)/)[1].trim() || null,
descriptors
};
}
return result;
}
function patchDictionary(dict, patchDict) {
const result = {};
// copy all syntaxes for an original dict
for (const key in dict) {
result[key] = dict[key].syntax || dict[key];
}
// apply a patch
for (const key in patchDict) {
if (key in dict) {
if (patchDict[key].syntax) {
result[key] = extendSyntax.test(patchDict[key].syntax)
? result[key] + ' ' + patchDict[key].syntax.trim()
: patchDict[key].syntax;
} else {
delete result[key];
}
} else {
if (patchDict[key].syntax) {
result[key] = patchDict[key].syntax.replace(extendSyntax, '');
}
}
}
return result;
}
function patchAtrules(dict, patchDict) {
const result = {};
// copy all syntaxes for an original dict
for (const key in dict) {
const patchDescriptors = (patchDict[key] && patchDict[key].descriptors) || null;
result[key] = {
prelude: key in patchDict && 'prelude' in patchDict[key]
? patchDict[key].prelude
: dict[key].prelude || null,
descriptors: patchDictionary(dict[key].descriptors || {}, patchDescriptors || {})
};
}
// apply a patch
for (const key in patchDict) {
if (!hasOwnProperty.call(dict, key)) {
result[key] = {
prelude: patchDict[key].prelude || null,
descriptors: patchDict[key].descriptors && patchDictionary({}, patchDict[key].descriptors)
};
}
}
return result;
}
export default {
types: patchDictionary(mdnSyntaxes, patch.types),
atrules: patchAtrules(preprocessAtrules(mdnAtrules), patch.atrules),
properties: patchDictionary(mdnProperties, patch.properties)
};
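
For illustration, a self-contained sketch (with made-up patch entries) of the `extendSyntax` rule used by `patchDictionary` above: a patched syntax starting with `|` is appended to the original syntax instead of replacing it, and the prefix is stripped for brand-new keys.

const extendSyntax = /^\s*\|\s*/;
const dict = { width: 'auto | <length>' };                                               // hypothetical originals
const patchDict = { width: { syntax: '| <percentage>' }, gap: { syntax: '<length>' } }; // hypothetical patch
const result = { ...dict };
for (const key of Object.keys(patchDict)) {
  if (key in dict) {
    result[key] = extendSyntax.test(patchDict[key].syntax)
      ? result[key] + ' ' + patchDict[key].syntax.trim() // extend the existing syntax
      : patchDict[key].syntax;                           // replace it
  } else {
    result[key] = patchDict[key].syntax.replace(extendSyntax, ''); // new entry, strip the '|' prefix
  }
}
console.log(result); // { width: 'auto | <length> | <percentage>', gap: '<length>' }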

12
node_modules/css-tree/lib/definition-syntax/SyntaxError.js generated vendored Normal file

@ -0,0 +1,12 @@
import { createCustomError } from '../utils/create-custom-error.js';
export function SyntaxError(message, input, offset) {
return Object.assign(createCustomError('SyntaxError', message), {
input,
offset,
rawMessage: message,
message: message + '\n' +
' ' + input + '\n' +
'--' + new Array((offset || input.length) + 1).join('-') + '^'
});
};
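
A quick sketch of the message layout the factory above produces, using the `definitionSyntax` namespace that lib/index.js (later in this diff) re-exports; the input values here are made up:

import { definitionSyntax } from 'css-tree';

const err = definitionSyntax.SyntaxError('Expected a number', '<length>{1,x}', 11);
console.log(err.rawMessage); // 'Expected a number'
console.log(err.message);
// Expected a number
//   <length>{1,x}
// -------------^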

131
node_modules/css-tree/lib/definition-syntax/generate.js generated vendored Normal file

@ -0,0 +1,131 @@
function noop(value) {
return value;
}
function generateMultiplier(multiplier) {
const { min, max, comma } = multiplier;
if (min === 0 && max === 0) {
return comma ? '#?' : '*';
}
if (min === 0 && max === 1) {
return '?';
}
if (min === 1 && max === 0) {
return comma ? '#' : '+';
}
if (min === 1 && max === 1) {
return '';
}
return (
(comma ? '#' : '') +
(min === max
? '{' + min + '}'
: '{' + min + ',' + (max !== 0 ? max : '') + '}'
)
);
}
function generateTypeOpts(node) {
switch (node.type) {
case 'Range':
return (
' [' +
(node.min === null ? '-∞' : node.min) +
',' +
(node.max === null ? '∞' : node.max) +
']'
);
default:
throw new Error('Unknown node type `' + node.type + '`');
}
}
function generateSequence(node, decorate, forceBraces, compact) {
const combinator = node.combinator === ' ' || compact ? node.combinator : ' ' + node.combinator + ' ';
const result = node.terms
.map(term => internalGenerate(term, decorate, forceBraces, compact))
.join(combinator);
if (node.explicit || forceBraces) {
return (compact || result[0] === ',' ? '[' : '[ ') + result + (compact ? ']' : ' ]');
}
return result;
}
function internalGenerate(node, decorate, forceBraces, compact) {
let result;
switch (node.type) {
case 'Group':
result =
generateSequence(node, decorate, forceBraces, compact) +
(node.disallowEmpty ? '!' : '');
break;
case 'Multiplier':
// return since node is a composition
return (
internalGenerate(node.term, decorate, forceBraces, compact) +
decorate(generateMultiplier(node), node)
);
case 'Type':
result = '<' + node.name + (node.opts ? decorate(generateTypeOpts(node.opts), node.opts) : '') + '>';
break;
case 'Property':
result = '<\'' + node.name + '\'>';
break;
case 'Keyword':
result = node.name;
break;
case 'AtKeyword':
result = '@' + node.name;
break;
case 'Function':
result = node.name + '(';
break;
case 'String':
case 'Token':
result = node.value;
break;
case 'Comma':
result = ',';
break;
default:
throw new Error('Unknown node type `' + node.type + '`');
}
return decorate(result, node);
}
export function generate(node, options) {
let decorate = noop;
let forceBraces = false;
let compact = false;
if (typeof options === 'function') {
decorate = options;
} else if (options) {
forceBraces = Boolean(options.forceBraces);
compact = Boolean(options.compact);
if (typeof options.decorate === 'function') {
decorate = options.decorate;
}
}
return internalGenerate(node, decorate, forceBraces, compact);
};
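
A small sketch of the options handled above, assuming `parse` and `generate` are used through the `definitionSyntax` namespace from the package entry:

import { definitionSyntax } from 'css-tree';
const { parse, generate } = definitionSyntax;

const ast = parse('<length> && <color>?');
console.log(generate(ast));                    // '<length> && <color>?'
console.log(generate(ast, { compact: true })); // '<length>&&<color>?'
console.log(generate(ast, (str, node) => node.type === 'Type' ? '*' + str + '*' : str));
// '*<length>* && *<color>*?'  (a decorate function wraps every node's output)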

4
node_modules/css-tree/lib/definition-syntax/index.js generated vendored Normal file

@ -0,0 +1,4 @@
export { SyntaxError } from './SyntaxError.js';
export { generate } from './generate.js';
export { parse } from './parse.js';
export { walk } from './walk.js';

584
node_modules/css-tree/lib/definition-syntax/parse.js generated vendored Normal file

@ -0,0 +1,584 @@
import { Tokenizer } from './tokenizer.js';
const TAB = 9;
const N = 10;
const F = 12;
const R = 13;
const SPACE = 32;
const EXCLAMATIONMARK = 33; // !
const NUMBERSIGN = 35; // #
const AMPERSAND = 38; // &
const APOSTROPHE = 39; // '
const LEFTPARENTHESIS = 40; // (
const RIGHTPARENTHESIS = 41; // )
const ASTERISK = 42; // *
const PLUSSIGN = 43; // +
const COMMA = 44; // ,
const HYPERMINUS = 45; // -
const LESSTHANSIGN = 60; // <
const GREATERTHANSIGN = 62; // >
const QUESTIONMARK = 63; // ?
const COMMERCIALAT = 64; // @
const LEFTSQUAREBRACKET = 91; // [
const RIGHTSQUAREBRACKET = 93; // ]
const LEFTCURLYBRACKET = 123; // {
const VERTICALLINE = 124; // |
const RIGHTCURLYBRACKET = 125; // }
const INFINITY = 8734; // ∞
const NAME_CHAR = new Uint8Array(128).map((_, idx) =>
/[a-zA-Z0-9\-]/.test(String.fromCharCode(idx)) ? 1 : 0
);
const COMBINATOR_PRECEDENCE = {
' ': 1,
'&&': 2,
'||': 3,
'|': 4
};
function scanSpaces(tokenizer) {
return tokenizer.substringToPos(
tokenizer.findWsEnd(tokenizer.pos)
);
}
function scanWord(tokenizer) {
let end = tokenizer.pos;
for (; end < tokenizer.str.length; end++) {
const code = tokenizer.str.charCodeAt(end);
if (code >= 128 || NAME_CHAR[code] === 0) {
break;
}
}
if (tokenizer.pos === end) {
tokenizer.error('Expect a keyword');
}
return tokenizer.substringToPos(end);
}
function scanNumber(tokenizer) {
let end = tokenizer.pos;
for (; end < tokenizer.str.length; end++) {
const code = tokenizer.str.charCodeAt(end);
if (code < 48 || code > 57) {
break;
}
}
if (tokenizer.pos === end) {
tokenizer.error('Expect a number');
}
return tokenizer.substringToPos(end);
}
function scanString(tokenizer) {
const end = tokenizer.str.indexOf('\'', tokenizer.pos + 1);
if (end === -1) {
tokenizer.pos = tokenizer.str.length;
tokenizer.error('Expect an apostrophe');
}
return tokenizer.substringToPos(end + 1);
}
function readMultiplierRange(tokenizer) {
let min = null;
let max = null;
tokenizer.eat(LEFTCURLYBRACKET);
min = scanNumber(tokenizer);
if (tokenizer.charCode() === COMMA) {
tokenizer.pos++;
if (tokenizer.charCode() !== RIGHTCURLYBRACKET) {
max = scanNumber(tokenizer);
}
} else {
max = min;
}
tokenizer.eat(RIGHTCURLYBRACKET);
return {
min: Number(min),
max: max ? Number(max) : 0
};
}
function readMultiplier(tokenizer) {
let range = null;
let comma = false;
switch (tokenizer.charCode()) {
case ASTERISK:
tokenizer.pos++;
range = {
min: 0,
max: 0
};
break;
case PLUSSIGN:
tokenizer.pos++;
range = {
min: 1,
max: 0
};
break;
case QUESTIONMARK:
tokenizer.pos++;
range = {
min: 0,
max: 1
};
break;
case NUMBERSIGN:
tokenizer.pos++;
comma = true;
if (tokenizer.charCode() === LEFTCURLYBRACKET) {
range = readMultiplierRange(tokenizer);
} else if (tokenizer.charCode() === QUESTIONMARK) {
// https://www.w3.org/TR/css-values-4/#component-multipliers
// > the # and ? multipliers may be stacked as #?
// In this case just treat "#?" as a single multiplier
// { min: 0, max: 0, comma: true }
tokenizer.pos++;
range = {
min: 0,
max: 0
};
} else {
range = {
min: 1,
max: 0
};
}
break;
case LEFTCURLYBRACKET:
range = readMultiplierRange(tokenizer);
break;
default:
return null;
}
return {
type: 'Multiplier',
comma,
min: range.min,
max: range.max,
term: null
};
}
function maybeMultiplied(tokenizer, node) {
const multiplier = readMultiplier(tokenizer);
if (multiplier !== null) {
multiplier.term = node;
// https://www.w3.org/TR/css-values-4/#component-multipliers
// > The + and # multipliers may be stacked as +#;
// Represent "+#" as nested multipliers:
// { ...<multiplier #>,
// term: {
// ...<multipler +>,
// term: node
// }
// }
if (tokenizer.charCode() === NUMBERSIGN &&
tokenizer.charCodeAt(tokenizer.pos - 1) === PLUSSIGN) {
return maybeMultiplied(tokenizer, multiplier);
}
return multiplier;
}
return node;
}
function maybeToken(tokenizer) {
const ch = tokenizer.peek();
if (ch === '') {
return null;
}
return {
type: 'Token',
value: ch
};
}
function readProperty(tokenizer) {
let name;
tokenizer.eat(LESSTHANSIGN);
tokenizer.eat(APOSTROPHE);
name = scanWord(tokenizer);
tokenizer.eat(APOSTROPHE);
tokenizer.eat(GREATERTHANSIGN);
return maybeMultiplied(tokenizer, {
type: 'Property',
name
});
}
// https://drafts.csswg.org/css-values-3/#numeric-ranges
// 4.1. Range Restrictions and Range Definition Notation
//
// Range restrictions can be annotated in the numeric type notation using CSS bracketed
// range notation—[min,max]—within the angle brackets, after the identifying keyword,
// indicating a closed range between (and including) min and max.
// For example, <integer [0, 10]> indicates an integer between 0 and 10, inclusive.
function readTypeRange(tokenizer) {
// use null for Infinity to make AST format JSON serializable/deserializable
let min = null; // -Infinity
let max = null; // Infinity
let sign = 1;
tokenizer.eat(LEFTSQUAREBRACKET);
if (tokenizer.charCode() === HYPERMINUS) {
tokenizer.peek();
sign = -1;
}
if (sign == -1 && tokenizer.charCode() === INFINITY) {
tokenizer.peek();
} else {
min = sign * Number(scanNumber(tokenizer));
if (NAME_CHAR[tokenizer.charCode()] !== 0) {
min += scanWord(tokenizer);
}
}
scanSpaces(tokenizer);
tokenizer.eat(COMMA);
scanSpaces(tokenizer);
if (tokenizer.charCode() === INFINITY) {
tokenizer.peek();
} else {
sign = 1;
if (tokenizer.charCode() === HYPERMINUS) {
tokenizer.peek();
sign = -1;
}
max = sign * Number(scanNumber(tokenizer));
if (NAME_CHAR[tokenizer.charCode()] !== 0) {
max += scanWord(tokenizer);
}
}
tokenizer.eat(RIGHTSQUAREBRACKET);
return {
type: 'Range',
min,
max
};
}
function readType(tokenizer) {
let name;
let opts = null;
tokenizer.eat(LESSTHANSIGN);
name = scanWord(tokenizer);
if (tokenizer.charCode() === LEFTPARENTHESIS &&
tokenizer.nextCharCode() === RIGHTPARENTHESIS) {
tokenizer.pos += 2;
name += '()';
}
if (tokenizer.charCodeAt(tokenizer.findWsEnd(tokenizer.pos)) === LEFTSQUAREBRACKET) {
scanSpaces(tokenizer);
opts = readTypeRange(tokenizer);
}
tokenizer.eat(GREATERTHANSIGN);
return maybeMultiplied(tokenizer, {
type: 'Type',
name,
opts
});
}
function readKeywordOrFunction(tokenizer) {
const name = scanWord(tokenizer);
if (tokenizer.charCode() === LEFTPARENTHESIS) {
tokenizer.pos++;
return {
type: 'Function',
name
};
}
return maybeMultiplied(tokenizer, {
type: 'Keyword',
name
});
}
function regroupTerms(terms, combinators) {
function createGroup(terms, combinator) {
return {
type: 'Group',
terms,
combinator,
disallowEmpty: false,
explicit: false
};
}
let combinator;
combinators = Object.keys(combinators)
.sort((a, b) => COMBINATOR_PRECEDENCE[a] - COMBINATOR_PRECEDENCE[b]);
while (combinators.length > 0) {
combinator = combinators.shift();
let i = 0;
let subgroupStart = 0;
for (; i < terms.length; i++) {
const term = terms[i];
if (term.type === 'Combinator') {
if (term.value === combinator) {
if (subgroupStart === -1) {
subgroupStart = i - 1;
}
terms.splice(i, 1);
i--;
} else {
if (subgroupStart !== -1 && i - subgroupStart > 1) {
terms.splice(
subgroupStart,
i - subgroupStart,
createGroup(terms.slice(subgroupStart, i), combinator)
);
i = subgroupStart + 1;
}
subgroupStart = -1;
}
}
}
if (subgroupStart !== -1 && combinators.length) {
terms.splice(
subgroupStart,
i - subgroupStart,
createGroup(terms.slice(subgroupStart, i), combinator)
);
}
}
return combinator;
}
function readImplicitGroup(tokenizer) {
const terms = [];
const combinators = {};
let token;
let prevToken = null;
let prevTokenPos = tokenizer.pos;
while (token = peek(tokenizer)) {
if (token.type !== 'Spaces') {
if (token.type === 'Combinator') {
// check for combinator in group beginning and double combinator sequence
if (prevToken === null || prevToken.type === 'Combinator') {
tokenizer.pos = prevTokenPos;
tokenizer.error('Unexpected combinator');
}
combinators[token.value] = true;
} else if (prevToken !== null && prevToken.type !== 'Combinator') {
combinators[' '] = true; // a b
terms.push({
type: 'Combinator',
value: ' '
});
}
terms.push(token);
prevToken = token;
prevTokenPos = tokenizer.pos;
}
}
// check for combinator in group ending
if (prevToken !== null && prevToken.type === 'Combinator') {
tokenizer.pos -= prevTokenPos;
tokenizer.error('Unexpected combinator');
}
return {
type: 'Group',
terms,
combinator: regroupTerms(terms, combinators) || ' ',
disallowEmpty: false,
explicit: false
};
}
function readGroup(tokenizer) {
let result;
tokenizer.eat(LEFTSQUAREBRACKET);
result = readImplicitGroup(tokenizer);
tokenizer.eat(RIGHTSQUAREBRACKET);
result.explicit = true;
if (tokenizer.charCode() === EXCLAMATIONMARK) {
tokenizer.pos++;
result.disallowEmpty = true;
}
return result;
}
function peek(tokenizer) {
let code = tokenizer.charCode();
if (code < 128 && NAME_CHAR[code] === 1) {
return readKeywordOrFunction(tokenizer);
}
switch (code) {
case RIGHTSQUAREBRACKET:
// don't eat, stop scan a group
break;
case LEFTSQUAREBRACKET:
return maybeMultiplied(tokenizer, readGroup(tokenizer));
case LESSTHANSIGN:
return tokenizer.nextCharCode() === APOSTROPHE
? readProperty(tokenizer)
: readType(tokenizer);
case VERTICALLINE:
return {
type: 'Combinator',
value: tokenizer.substringToPos(
tokenizer.pos + (tokenizer.nextCharCode() === VERTICALLINE ? 2 : 1)
)
};
case AMPERSAND:
tokenizer.pos++;
tokenizer.eat(AMPERSAND);
return {
type: 'Combinator',
value: '&&'
};
case COMMA:
tokenizer.pos++;
return {
type: 'Comma'
};
case APOSTROPHE:
return maybeMultiplied(tokenizer, {
type: 'String',
value: scanString(tokenizer)
});
case SPACE:
case TAB:
case N:
case R:
case F:
return {
type: 'Spaces',
value: scanSpaces(tokenizer)
};
case COMMERCIALAT:
code = tokenizer.nextCharCode();
if (code < 128 && NAME_CHAR[code] === 1) {
tokenizer.pos++;
return {
type: 'AtKeyword',
name: scanWord(tokenizer)
};
}
return maybeToken(tokenizer);
case ASTERISK:
case PLUSSIGN:
case QUESTIONMARK:
case NUMBERSIGN:
case EXCLAMATIONMARK:
// prohibited tokens (used as a multiplier start)
break;
case LEFTCURLYBRACKET:
// LEFTCURLYBRACKET is allowed since mdn/data uses it w/o quoting
// check next char isn't a number, because it's likely a disjoined multiplier
code = tokenizer.nextCharCode();
if (code < 48 || code > 57) {
return maybeToken(tokenizer);
}
break;
default:
return maybeToken(tokenizer);
}
}
export function parse(source) {
const tokenizer = new Tokenizer(source);
const result = readImplicitGroup(tokenizer);
if (tokenizer.pos !== source.length) {
tokenizer.error('Unexpected input');
}
// reduce redundant groups with single group term
if (result.terms.length === 1 && result.terms[0].type === 'Group') {
return result.terms[0];
}
return result;
};
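
A brief sketch of the AST shape `parse()` returns for a small definition, again via the `definitionSyntax` namespace from the package entry:

import { definitionSyntax } from 'css-tree';

const ast = definitionSyntax.parse('none | <color>#');
console.log(ast.type);                                               // 'Group'
console.log(ast.combinator);                                         // '|'
console.log(ast.terms.map(term => term.type));                       // [ 'Keyword', 'Multiplier' ]
console.log(ast.terms[1].comma, ast.terms[1].min, ast.terms[1].max); // true 1 0  (the '#' multiplier)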

52
node_modules/css-tree/lib/definition-syntax/tokenizer.js generated vendored Normal file

@ -0,0 +1,52 @@
import { SyntaxError } from './SyntaxError.js';
const TAB = 9;
const N = 10;
const F = 12;
const R = 13;
const SPACE = 32;
export class Tokenizer {
constructor(str) {
this.str = str;
this.pos = 0;
}
charCodeAt(pos) {
return pos < this.str.length ? this.str.charCodeAt(pos) : 0;
}
charCode() {
return this.charCodeAt(this.pos);
}
nextCharCode() {
return this.charCodeAt(this.pos + 1);
}
nextNonWsCode(pos) {
return this.charCodeAt(this.findWsEnd(pos));
}
findWsEnd(pos) {
for (; pos < this.str.length; pos++) {
const code = this.str.charCodeAt(pos);
if (code !== R && code !== N && code !== F && code !== SPACE && code !== TAB) {
break;
}
}
return pos;
}
substringToPos(end) {
return this.str.substring(this.pos, this.pos = end);
}
eat(code) {
if (this.charCode() !== code) {
this.error('Expect `' + String.fromCharCode(code) + '`');
}
this.pos++;
}
peek() {
return this.pos < this.str.length ? this.str.charAt(this.pos++) : '';
}
error(message) {
throw new SyntaxError(message, this.str, this.pos);
}
};

52
node_modules/css-tree/lib/definition-syntax/walk.js generated vendored Normal file

@ -0,0 +1,52 @@
const noop = function() {};
function ensureFunction(value) {
return typeof value === 'function' ? value : noop;
}
export function walk(node, options, context) {
function walk(node) {
enter.call(context, node);
switch (node.type) {
case 'Group':
node.terms.forEach(walk);
break;
case 'Multiplier':
walk(node.term);
break;
case 'Type':
case 'Property':
case 'Keyword':
case 'AtKeyword':
case 'Function':
case 'String':
case 'Token':
case 'Comma':
break;
default:
throw new Error('Unknown type: ' + node.type);
}
leave.call(context, node);
}
let enter = noop;
let leave = noop;
if (typeof options === 'function') {
enter = options;
} else if (options) {
enter = ensureFunction(options.enter);
leave = ensureFunction(options.leave);
}
if (enter === noop && leave === noop) {
throw new Error('Neither `enter` nor `leave` walker handler is set or both aren\'t a function');
}
walk(node, context);
};
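
A short sketch of `walk()` with an `enter` handler, collecting every `<type>` reference from a parsed definition (both functions taken from the `definitionSyntax` namespace):

import { definitionSyntax } from 'css-tree';
const { parse, walk } = definitionSyntax;

const typeNames = [];
walk(parse('<length> [ / <integer> ]?'), {
  enter(node) {
    if (node.type === 'Type') {
      typeNames.push(node.name);
    }
  }
});
console.log(typeNames); // [ 'length', 'integer' ]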

98
node_modules/css-tree/lib/generator/create.js generated vendored Normal file

@ -0,0 +1,98 @@
import { tokenize, Delim, WhiteSpace } from '../tokenizer/index.js';
import { generateSourceMap } from './sourceMap.js';
import * as tokenBefore from './token-before.js';
const REVERSESOLIDUS = 0x005c; // U+005C REVERSE SOLIDUS (\)
function processChildren(node, delimeter) {
if (typeof delimeter === 'function') {
let prev = null;
node.children.forEach(node => {
if (prev !== null) {
delimeter.call(this, prev);
}
this.node(node);
prev = node;
});
return;
}
node.children.forEach(this.node, this);
}
function processChunk(chunk) {
tokenize(chunk, (type, start, end) => {
this.token(type, chunk.slice(start, end));
});
}
export function createGenerator(config) {
const types = new Map();
for (let name in config.node) {
const item = config.node[name];
const fn = item.generate || item;
if (typeof fn === 'function') {
types.set(name, item.generate || item);
}
}
return function(node, options) {
let buffer = '';
let prevCode = 0;
let handlers = {
node(node) {
if (types.has(node.type)) {
types.get(node.type).call(publicApi, node);
} else {
throw new Error('Unknown node type: ' + node.type);
}
},
tokenBefore: tokenBefore.safe,
token(type, value) {
prevCode = this.tokenBefore(prevCode, type, value);
this.emit(value, type, false);
if (type === Delim && value.charCodeAt(0) === REVERSESOLIDUS) {
this.emit('\n', WhiteSpace, true);
}
},
emit(value) {
buffer += value;
},
result() {
return buffer;
}
};
if (options) {
if (typeof options.decorator === 'function') {
handlers = options.decorator(handlers);
}
if (options.sourceMap) {
handlers = generateSourceMap(handlers);
}
if (options.mode in tokenBefore) {
handlers.tokenBefore = tokenBefore[options.mode];
}
}
const publicApi = {
node: (node) => handlers.node(node),
children: processChildren,
token: (type, value) => handlers.token(type, value),
tokenize: processChunk
};
handlers.node(node);
return handlers.result();
};
};
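
For reference, a minimal sketch of the `generate()` function this factory produces for the default syntax config (re-exported from lib/index.js): it serialises an AST back to CSS with no formatting whitespace, inserting spaces only where the `tokenBefore` table requires them.

import { parse, generate } from 'css-tree';

const ast = parse('.a { color: red; margin: 0 auto }');
console.log(generate(ast)); // '.a{color:red;margin:0 auto}'
// generate(ast, { mode: 'spec' }) switches the tokenBefore table to the spec pair list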

4
node_modules/css-tree/lib/generator/index.js generated vendored Normal file

@ -0,0 +1,4 @@
import { createGenerator } from './create.js';
import config from '../syntax/config/generator.js';
export default createGenerator(config);

92
node_modules/css-tree/lib/generator/sourceMap.js generated vendored Normal file

@ -0,0 +1,92 @@
import { SourceMapGenerator } from 'source-map-js/lib/source-map-generator.js';
const trackNodes = new Set(['Atrule', 'Selector', 'Declaration']);
export function generateSourceMap(handlers) {
const map = new SourceMapGenerator();
const generated = {
line: 1,
column: 0
};
const original = {
line: 0, // should be zero to add first mapping
column: 0
};
const activatedGenerated = {
line: 1,
column: 0
};
const activatedMapping = {
generated: activatedGenerated
};
let line = 1;
let column = 0;
let sourceMappingActive = false;
const origHandlersNode = handlers.node;
handlers.node = function(node) {
if (node.loc && node.loc.start && trackNodes.has(node.type)) {
const nodeLine = node.loc.start.line;
const nodeColumn = node.loc.start.column - 1;
if (original.line !== nodeLine ||
original.column !== nodeColumn) {
original.line = nodeLine;
original.column = nodeColumn;
generated.line = line;
generated.column = column;
if (sourceMappingActive) {
sourceMappingActive = false;
if (generated.line !== activatedGenerated.line ||
generated.column !== activatedGenerated.column) {
map.addMapping(activatedMapping);
}
}
sourceMappingActive = true;
map.addMapping({
source: node.loc.source,
original,
generated
});
}
}
origHandlersNode.call(this, node);
if (sourceMappingActive && trackNodes.has(node.type)) {
activatedGenerated.line = line;
activatedGenerated.column = column;
}
};
const origHandlersEmit = handlers.emit;
handlers.emit = function(value, type, auto) {
for (let i = 0; i < value.length; i++) {
if (value.charCodeAt(i) === 10) { // \n
line++;
column = 0;
} else {
column++;
}
}
origHandlersEmit(value, type, auto);
};
const origHandlersResult = handlers.result;
handlers.result = function() {
if (sourceMappingActive) {
map.addMapping(activatedMapping);
}
return {
css: origHandlersResult(),
map
};
};
return handlers;
};
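
A hedged sketch of the `sourceMap: true` path wired up in create.js above: the decorated `result()` returns `{ css, map }`, where `map` is the SourceMapGenerator instance. The `positions`/`filename` parse options are assumed here so nodes carry `loc` info:

import { parse, generate } from 'css-tree';

const ast = parse('.a { color: red }', { positions: true, filename: 'a.css' });
const { css, map } = generate(ast, { sourceMap: true });
console.log(css);            // '.a{color:red}'
console.log(map.toString()); // the source map as a JSON string, with 'a.css' in "sources"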

182
node_modules/css-tree/lib/generator/token-before.js generated vendored Normal file

@ -0,0 +1,182 @@
import {
WhiteSpace,
Delim,
Ident,
Function as FunctionToken,
Url,
BadUrl,
AtKeyword,
Hash,
Percentage,
Dimension,
Number as NumberToken,
String as StringToken,
Colon,
LeftParenthesis,
RightParenthesis,
CDC
} from '../tokenizer/index.js';
const PLUSSIGN = 0x002B; // U+002B PLUS SIGN (+)
const HYPHENMINUS = 0x002D; // U+002D HYPHEN-MINUS (-)
const code = (type, value) => {
if (type === Delim) {
type = value;
}
if (typeof type === 'string') {
const charCode = type.charCodeAt(0);
return charCode > 0x7F ? 0x8000 : charCode << 8;
}
return type;
};
// https://www.w3.org/TR/css-syntax-3/#serialization
// The only requirement for serialization is that it must "round-trip" with parsing,
// that is, parsing the stylesheet must produce the same data structures as parsing,
// serializing, and parsing again, except for consecutive <whitespace-token>s,
// which may be collapsed into a single token.
const specPairs = [
[Ident, Ident],
[Ident, FunctionToken],
[Ident, Url],
[Ident, BadUrl],
[Ident, '-'],
[Ident, NumberToken],
[Ident, Percentage],
[Ident, Dimension],
[Ident, CDC],
[Ident, LeftParenthesis],
[AtKeyword, Ident],
[AtKeyword, FunctionToken],
[AtKeyword, Url],
[AtKeyword, BadUrl],
[AtKeyword, '-'],
[AtKeyword, NumberToken],
[AtKeyword, Percentage],
[AtKeyword, Dimension],
[AtKeyword, CDC],
[Hash, Ident],
[Hash, FunctionToken],
[Hash, Url],
[Hash, BadUrl],
[Hash, '-'],
[Hash, NumberToken],
[Hash, Percentage],
[Hash, Dimension],
[Hash, CDC],
[Dimension, Ident],
[Dimension, FunctionToken],
[Dimension, Url],
[Dimension, BadUrl],
[Dimension, '-'],
[Dimension, NumberToken],
[Dimension, Percentage],
[Dimension, Dimension],
[Dimension, CDC],
['#', Ident],
['#', FunctionToken],
['#', Url],
['#', BadUrl],
['#', '-'],
['#', NumberToken],
['#', Percentage],
['#', Dimension],
['#', CDC], // https://github.com/w3c/csswg-drafts/pull/6874
['-', Ident],
['-', FunctionToken],
['-', Url],
['-', BadUrl],
['-', '-'],
['-', NumberToken],
['-', Percentage],
['-', Dimension],
['-', CDC], // https://github.com/w3c/csswg-drafts/pull/6874
[NumberToken, Ident],
[NumberToken, FunctionToken],
[NumberToken, Url],
[NumberToken, BadUrl],
[NumberToken, NumberToken],
[NumberToken, Percentage],
[NumberToken, Dimension],
[NumberToken, '%'],
[NumberToken, CDC], // https://github.com/w3c/csswg-drafts/pull/6874
['@', Ident],
['@', FunctionToken],
['@', Url],
['@', BadUrl],
['@', '-'],
['@', CDC], // https://github.com/w3c/csswg-drafts/pull/6874
['.', NumberToken],
['.', Percentage],
['.', Dimension],
['+', NumberToken],
['+', Percentage],
['+', Dimension],
['/', '*']
];
// validate with scripts/generate-safe
const safePairs = specPairs.concat([
[Ident, Hash],
[Dimension, Hash],
[Hash, Hash],
[AtKeyword, LeftParenthesis],
[AtKeyword, StringToken],
[AtKeyword, Colon],
[Percentage, Percentage],
[Percentage, Dimension],
[Percentage, FunctionToken],
[Percentage, '-'],
[RightParenthesis, Ident],
[RightParenthesis, FunctionToken],
[RightParenthesis, Percentage],
[RightParenthesis, Dimension],
[RightParenthesis, Hash],
[RightParenthesis, '-']
]);
function createMap(pairs) {
const isWhiteSpaceRequired = new Set(
pairs.map(([prev, next]) => (code(prev) << 16 | code(next)))
);
return function(prevCode, type, value) {
const nextCode = code(type, value);
const nextCharCode = value.charCodeAt(0);
const emitWs =
(nextCharCode === HYPHENMINUS &&
type !== Ident &&
type !== FunctionToken &&
type !== CDC) ||
(nextCharCode === PLUSSIGN)
? isWhiteSpaceRequired.has(prevCode << 16 | nextCharCode << 8)
: isWhiteSpaceRequired.has(prevCode << 16 | nextCode);
if (emitWs) {
this.emit(' ', WhiteSpace, true);
}
return nextCode;
};
}
export const spec = createMap(specPairs);
export const safe = createMap(safePairs);
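
A small sketch of the difference between the two exported tables, calling them directly with a stub emitter (the relative import paths assume the sketch sits next to this file): `[Ident, Hash]` is a safe-only pair, so `safe` inserts a space where `spec` would not.

import * as tokenBefore from './token-before.js';
import { Ident, Hash } from '../tokenizer/index.js';

const out = [];
const ctx = { emit: value => out.push(value) };

let prevCode = 0;
for (const [type, value] of [[Ident, 'solid'], [Hash, '#fff']]) {
  prevCode = tokenBefore.safe.call(ctx, prevCode, type, value); // may emit ' '
  ctx.emit(value);
}
console.log(out.join('')); // 'solid #fff' (the spec table would produce 'solid#fff')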

30
node_modules/css-tree/lib/index.js generated vendored Normal file

@ -0,0 +1,30 @@
import syntax from './syntax/index.js';
export * from './version.js';
export { default as createSyntax } from './syntax/create.js';
export { List } from './utils/List.js';
export { Lexer } from './lexer/Lexer.js';
export { tokenTypes, tokenNames, TokenStream } from './tokenizer/index.js';
export * as definitionSyntax from './definition-syntax/index.js';
export { clone } from './utils/clone.js';
export * from './utils/names.js';
export * as ident from './utils/ident.js';
export * as string from './utils/string.js';
export * as url from './utils/url.js';
export const {
tokenize,
parse,
generate,
lexer,
createLexer,
walk,
find,
findLast,
findAll,
toPlainObject,
fromPlainObject,
fork
} = syntax;
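
A minimal end-to-end sketch of the public API assembled above: parse a stylesheet, walk it with a `visit` filter, and serialise it back.

import { parse, walk, generate } from 'css-tree';

const ast = parse('.a { color: red } .b { color: green }');
const classNames = [];
walk(ast, {
  visit: 'ClassSelector',
  enter(node) {
    classNames.push(node.name);
  }
});
console.log(classNames);    // [ 'a', 'b' ]
console.log(generate(ast)); // '.a{color:red}.b{color:green}'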

453
node_modules/css-tree/lib/lexer/Lexer.js generated vendored Normal file

@ -0,0 +1,453 @@
import { SyntaxReferenceError, SyntaxMatchError } from './error.js';
import * as names from '../utils/names.js';
import { cssWideKeywords } from './generic-const.js';
import generic from './generic.js';
import { parse, generate, walk } from '../definition-syntax/index.js';
import prepareTokens from './prepare-tokens.js';
import { buildMatchGraph } from './match-graph.js';
import { matchAsTree } from './match.js';
import * as trace from './trace.js';
import { matchFragments } from './search.js';
import { getStructureFromConfig } from './structure.js';
const cssWideKeywordsSyntax = buildMatchGraph(cssWideKeywords.join(' | '));
function dumpMapSyntax(map, compact, syntaxAsAst) {
const result = {};
for (const name in map) {
if (map[name].syntax) {
result[name] = syntaxAsAst
? map[name].syntax
: generate(map[name].syntax, { compact });
}
}
return result;
}
function dumpAtruleMapSyntax(map, compact, syntaxAsAst) {
const result = {};
for (const [name, atrule] of Object.entries(map)) {
result[name] = {
prelude: atrule.prelude && (
syntaxAsAst
? atrule.prelude.syntax
: generate(atrule.prelude.syntax, { compact })
),
descriptors: atrule.descriptors && dumpMapSyntax(atrule.descriptors, compact, syntaxAsAst)
};
}
return result;
}
function valueHasVar(tokens) {
for (let i = 0; i < tokens.length; i++) {
if (tokens[i].value.toLowerCase() === 'var(') {
return true;
}
}
return false;
}
function buildMatchResult(matched, error, iterations) {
return {
matched,
iterations,
error,
...trace
};
}
function matchSyntax(lexer, syntax, value, useCssWideKeywords) {
const tokens = prepareTokens(value, lexer.syntax);
let result;
if (valueHasVar(tokens)) {
return buildMatchResult(null, new Error('Matching for a tree with var() is not supported'));
}
if (useCssWideKeywords) {
result = matchAsTree(tokens, lexer.cssWideKeywordsSyntax, lexer);
}
if (!useCssWideKeywords || !result.match) {
result = matchAsTree(tokens, syntax.match, lexer);
if (!result.match) {
return buildMatchResult(
null,
new SyntaxMatchError(result.reason, syntax.syntax, value, result),
result.iterations
);
}
}
return buildMatchResult(result.match, null, result.iterations);
}
export class Lexer {
constructor(config, syntax, structure) {
this.cssWideKeywordsSyntax = cssWideKeywordsSyntax;
this.syntax = syntax;
this.generic = false;
this.atrules = Object.create(null);
this.properties = Object.create(null);
this.types = Object.create(null);
this.structure = structure || getStructureFromConfig(config);
if (config) {
if (config.types) {
for (const name in config.types) {
this.addType_(name, config.types[name]);
}
}
if (config.generic) {
this.generic = true;
for (const name in generic) {
this.addType_(name, generic[name]);
}
}
if (config.atrules) {
for (const name in config.atrules) {
this.addAtrule_(name, config.atrules[name]);
}
}
if (config.properties) {
for (const name in config.properties) {
this.addProperty_(name, config.properties[name]);
}
}
}
}
checkStructure(ast) {
function collectWarning(node, message) {
warns.push({ node, message });
}
const structure = this.structure;
const warns = [];
this.syntax.walk(ast, function(node) {
if (structure.hasOwnProperty(node.type)) {
structure[node.type].check(node, collectWarning);
} else {
collectWarning(node, 'Unknown node type `' + node.type + '`');
}
});
return warns.length ? warns : false;
}
createDescriptor(syntax, type, name, parent = null) {
const ref = {
type,
name
};
const descriptor = {
type,
name,
parent,
serializable: typeof syntax === 'string' || (syntax && typeof syntax.type === 'string'),
syntax: null,
match: null
};
if (typeof syntax === 'function') {
descriptor.match = buildMatchGraph(syntax, ref);
} else {
if (typeof syntax === 'string') {
// lazy parsing on first access
Object.defineProperty(descriptor, 'syntax', {
get() {
Object.defineProperty(descriptor, 'syntax', {
value: parse(syntax)
});
return descriptor.syntax;
}
});
} else {
descriptor.syntax = syntax;
}
// lazy graph build on first access
Object.defineProperty(descriptor, 'match', {
get() {
Object.defineProperty(descriptor, 'match', {
value: buildMatchGraph(descriptor.syntax, ref)
});
return descriptor.match;
}
});
}
return descriptor;
}
addAtrule_(name, syntax) {
if (!syntax) {
return;
}
this.atrules[name] = {
type: 'Atrule',
name: name,
prelude: syntax.prelude ? this.createDescriptor(syntax.prelude, 'AtrulePrelude', name) : null,
descriptors: syntax.descriptors
? Object.keys(syntax.descriptors).reduce(
(map, descName) => {
map[descName] = this.createDescriptor(syntax.descriptors[descName], 'AtruleDescriptor', descName, name);
return map;
},
Object.create(null)
)
: null
};
}
addProperty_(name, syntax) {
if (!syntax) {
return;
}
this.properties[name] = this.createDescriptor(syntax, 'Property', name);
}
addType_(name, syntax) {
if (!syntax) {
return;
}
this.types[name] = this.createDescriptor(syntax, 'Type', name);
}
checkAtruleName(atruleName) {
if (!this.getAtrule(atruleName)) {
return new SyntaxReferenceError('Unknown at-rule', '@' + atruleName);
}
}
checkAtrulePrelude(atruleName, prelude) {
const error = this.checkAtruleName(atruleName);
if (error) {
return error;
}
const atrule = this.getAtrule(atruleName);
if (!atrule.prelude && prelude) {
return new SyntaxError('At-rule `@' + atruleName + '` should not contain a prelude');
}
if (atrule.prelude && !prelude) {
if (!matchSyntax(this, atrule.prelude, '', false).matched) {
return new SyntaxError('At-rule `@' + atruleName + '` should contain a prelude');
}
}
}
checkAtruleDescriptorName(atruleName, descriptorName) {
const error = this.checkAtruleName(atruleName);
if (error) {
return error;
}
const atrule = this.getAtrule(atruleName);
const descriptor = names.keyword(descriptorName);
if (!atrule.descriptors) {
return new SyntaxError('At-rule `@' + atruleName + '` has no known descriptors');
}
if (!atrule.descriptors[descriptor.name] &&
!atrule.descriptors[descriptor.basename]) {
return new SyntaxReferenceError('Unknown at-rule descriptor', descriptorName);
}
}
checkPropertyName(propertyName) {
if (!this.getProperty(propertyName)) {
return new SyntaxReferenceError('Unknown property', propertyName);
}
}
matchAtrulePrelude(atruleName, prelude) {
const error = this.checkAtrulePrelude(atruleName, prelude);
if (error) {
return buildMatchResult(null, error);
}
const atrule = this.getAtrule(atruleName);
if (!atrule.prelude) {
return buildMatchResult(null, null);
}
return matchSyntax(this, atrule.prelude, prelude || '', false);
}
matchAtruleDescriptor(atruleName, descriptorName, value) {
const error = this.checkAtruleDescriptorName(atruleName, descriptorName);
if (error) {
return buildMatchResult(null, error);
}
const atrule = this.getAtrule(atruleName);
const descriptor = names.keyword(descriptorName);
return matchSyntax(this, atrule.descriptors[descriptor.name] || atrule.descriptors[descriptor.basename], value, false);
}
matchDeclaration(node) {
if (node.type !== 'Declaration') {
return buildMatchResult(null, new Error('Not a Declaration node'));
}
return this.matchProperty(node.property, node.value);
}
matchProperty(propertyName, value) {
// don't match syntax for a custom property at the moment
if (names.property(propertyName).custom) {
return buildMatchResult(null, new Error('Lexer matching doesn\'t applicable for custom properties'));
}
const error = this.checkPropertyName(propertyName);
if (error) {
return buildMatchResult(null, error);
}
return matchSyntax(this, this.getProperty(propertyName), value, true);
}
matchType(typeName, value) {
const typeSyntax = this.getType(typeName);
if (!typeSyntax) {
return buildMatchResult(null, new SyntaxReferenceError('Unknown type', typeName));
}
return matchSyntax(this, typeSyntax, value, false);
}
match(syntax, value) {
if (typeof syntax !== 'string' && (!syntax || !syntax.type)) {
return buildMatchResult(null, new SyntaxReferenceError('Bad syntax'));
}
if (typeof syntax === 'string' || !syntax.match) {
syntax = this.createDescriptor(syntax, 'Type', 'anonymous');
}
return matchSyntax(this, syntax, value, false);
}
findValueFragments(propertyName, value, type, name) {
return matchFragments(this, value, this.matchProperty(propertyName, value), type, name);
}
findDeclarationValueFragments(declaration, type, name) {
return matchFragments(this, declaration.value, this.matchDeclaration(declaration), type, name);
}
findAllFragments(ast, type, name) {
const result = [];
this.syntax.walk(ast, {
visit: 'Declaration',
enter: (declaration) => {
result.push.apply(result, this.findDeclarationValueFragments(declaration, type, name));
}
});
return result;
}
getAtrule(atruleName, fallbackBasename = true) {
const atrule = names.keyword(atruleName);
const atruleEntry = atrule.vendor && fallbackBasename
? this.atrules[atrule.name] || this.atrules[atrule.basename]
: this.atrules[atrule.name];
return atruleEntry || null;
}
getAtrulePrelude(atruleName, fallbackBasename = true) {
const atrule = this.getAtrule(atruleName, fallbackBasename);
return atrule && atrule.prelude || null;
}
getAtruleDescriptor(atruleName, name) {
return this.atrules.hasOwnProperty(atruleName) && this.atrules.declarators
? this.atrules[atruleName].declarators[name] || null
: null;
}
getProperty(propertyName, fallbackBasename = true) {
const property = names.property(propertyName);
const propertyEntry = property.vendor && fallbackBasename
? this.properties[property.name] || this.properties[property.basename]
: this.properties[property.name];
return propertyEntry || null;
}
getType(name) {
return hasOwnProperty.call(this.types, name) ? this.types[name] : null;
}
validate() {
function validate(syntax, name, broken, descriptor) {
if (broken.has(name)) {
return broken.get(name);
}
broken.set(name, false);
if (descriptor.syntax !== null) {
walk(descriptor.syntax, function(node) {
if (node.type !== 'Type' && node.type !== 'Property') {
return;
}
const map = node.type === 'Type' ? syntax.types : syntax.properties;
const brokenMap = node.type === 'Type' ? brokenTypes : brokenProperties;
if (!hasOwnProperty.call(map, node.name) || validate(syntax, node.name, brokenMap, map[node.name])) {
broken.set(name, true);
}
}, this);
}
}
let brokenTypes = new Map();
let brokenProperties = new Map();
for (const key in this.types) {
validate(this, key, brokenTypes, this.types[key]);
}
for (const key in this.properties) {
validate(this, key, brokenProperties, this.properties[key]);
}
brokenTypes = [...brokenTypes.keys()].filter(name => brokenTypes.get(name));
brokenProperties = [...brokenProperties.keys()].filter(name => brokenProperties.get(name));
if (brokenTypes.length || brokenProperties.length) {
return {
types: brokenTypes,
properties: brokenProperties
};
}
return null;
}
dump(syntaxAsAst, pretty) {
return {
generic: this.generic,
types: dumpMapSyntax(this.types, !pretty, syntaxAsAst),
properties: dumpMapSyntax(this.properties, !pretty, syntaxAsAst),
atrules: dumpAtruleMapSyntax(this.atrules, !pretty, syntaxAsAst)
};
}
toString() {
return JSON.stringify(this.dump());
}
};
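
A short sketch of the matching API defined above, using the default `lexer` instance re-exported from lib/index.js; the result is the `{ matched, error, iterations }` object produced by `buildMatchResult()`:

import { parse, lexer } from 'css-tree';

const value = parse('1px solid red', { context: 'value' });
const result = lexer.matchProperty('border', value);
console.log(result.matched !== null); // true: the value conforms to the border syntax
console.log(result.error);            // null on success, a SyntaxMatchError/SyntaxReferenceError otherwise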

123
node_modules/css-tree/lib/lexer/error.js generated vendored Normal file

@ -0,0 +1,123 @@
import { createCustomError } from '../utils/create-custom-error.js';
import { generate } from '../definition-syntax/generate.js';
const defaultLoc = { offset: 0, line: 1, column: 1 };
function locateMismatch(matchResult, node) {
const tokens = matchResult.tokens;
const longestMatch = matchResult.longestMatch;
const mismatchNode = longestMatch < tokens.length ? tokens[longestMatch].node || null : null;
const badNode = mismatchNode !== node ? mismatchNode : null;
let mismatchOffset = 0;
let mismatchLength = 0;
let entries = 0;
let css = '';
let start;
let end;
for (let i = 0; i < tokens.length; i++) {
const token = tokens[i].value;
if (i === longestMatch) {
mismatchLength = token.length;
mismatchOffset = css.length;
}
if (badNode !== null && tokens[i].node === badNode) {
if (i <= longestMatch) {
entries++;
} else {
entries = 0;
}
}
css += token;
}
if (longestMatch === tokens.length || entries > 1) { // last
start = fromLoc(badNode || node, 'end') || buildLoc(defaultLoc, css);
end = buildLoc(start);
} else {
start = fromLoc(badNode, 'start') ||
buildLoc(fromLoc(node, 'start') || defaultLoc, css.slice(0, mismatchOffset));
end = fromLoc(badNode, 'end') ||
buildLoc(start, css.substr(mismatchOffset, mismatchLength));
}
return {
css,
mismatchOffset,
mismatchLength,
start,
end
};
}
function fromLoc(node, point) {
const value = node && node.loc && node.loc[point];
if (value) {
return 'line' in value ? buildLoc(value) : value;
}
return null;
}
function buildLoc({ offset, line, column }, extra) {
const loc = {
offset,
line,
column
};
if (extra) {
const lines = extra.split(/\n|\r\n?|\f/);
loc.offset += extra.length;
loc.line += lines.length - 1;
loc.column = lines.length === 1 ? loc.column + extra.length : lines.pop().length + 1;
}
return loc;
}
export const SyntaxReferenceError = function(type, referenceName) {
const error = createCustomError(
'SyntaxReferenceError',
type + (referenceName ? ' `' + referenceName + '`' : '')
);
error.reference = referenceName;
return error;
};
export const SyntaxMatchError = function(message, syntax, node, matchResult) {
const error = createCustomError('SyntaxMatchError', message);
const {
css,
mismatchOffset,
mismatchLength,
start,
end
} = locateMismatch(matchResult, node);
error.rawMessage = message;
error.syntax = syntax ? generate(syntax) : '<generic>';
error.css = css;
error.mismatchOffset = mismatchOffset;
error.mismatchLength = mismatchLength;
error.message = message + '\n' +
' syntax: ' + error.syntax + '\n' +
' value: ' + (css || '<empty string>') + '\n' +
' --------' + new Array(error.mismatchOffset + 1).join('-') + '^';
Object.assign(error, start);
error.loc = {
source: (node && node.loc && node.loc.source) || '<unknown>',
start,
end
};
return error;
};

238
node_modules/css-tree/lib/lexer/generic-an-plus-b.js generated vendored Normal file

@ -0,0 +1,238 @@
import {
isDigit,
cmpChar,
Delim,
WhiteSpace,
Comment,
Ident,
Number as NumberToken,
Dimension
} from '../tokenizer/index.js';
const PLUSSIGN = 0x002B; // U+002B PLUS SIGN (+)
const HYPHENMINUS = 0x002D; // U+002D HYPHEN-MINUS (-)
const N = 0x006E; // U+006E LATIN SMALL LETTER N (n)
const DISALLOW_SIGN = true;
const ALLOW_SIGN = false;
function isDelim(token, code) {
return token !== null && token.type === Delim && token.value.charCodeAt(0) === code;
}
function skipSC(token, offset, getNextToken) {
while (token !== null && (token.type === WhiteSpace || token.type === Comment)) {
token = getNextToken(++offset);
}
return offset;
}
function checkInteger(token, valueOffset, disallowSign, offset) {
if (!token) {
return 0;
}
const code = token.value.charCodeAt(valueOffset);
if (code === PLUSSIGN || code === HYPHENMINUS) {
if (disallowSign) {
// Number sign is not allowed
return 0;
}
valueOffset++;
}
for (; valueOffset < token.value.length; valueOffset++) {
if (!isDigit(token.value.charCodeAt(valueOffset))) {
// Integer is expected
return 0;
}
}
return offset + 1;
}
// ... <signed-integer>
// ... ['+' | '-'] <signless-integer>
function consumeB(token, offset_, getNextToken) {
let sign = false;
let offset = skipSC(token, offset_, getNextToken);
token = getNextToken(offset);
if (token === null) {
return offset_;
}
if (token.type !== NumberToken) {
if (isDelim(token, PLUSSIGN) || isDelim(token, HYPHENMINUS)) {
sign = true;
offset = skipSC(getNextToken(++offset), offset, getNextToken);
token = getNextToken(offset);
if (token === null || token.type !== NumberToken) {
return 0;
}
} else {
return offset_;
}
}
if (!sign) {
const code = token.value.charCodeAt(0);
if (code !== PLUSSIGN && code !== HYPHENMINUS) {
// Number sign is expected
return 0;
}
}
return checkInteger(token, sign ? 0 : 1, sign, offset);
}
// An+B microsyntax https://www.w3.org/TR/css-syntax-3/#anb
export default function anPlusB(token, getNextToken) {
/* eslint-disable brace-style*/
let offset = 0;
if (!token) {
return 0;
}
// <integer>
if (token.type === NumberToken) {
return checkInteger(token, 0, ALLOW_SIGN, offset); // b
}
// -n
// -n <signed-integer>
// -n ['+' | '-'] <signless-integer>
// -n- <signless-integer>
// <dashndashdigit-ident>
else if (token.type === Ident && token.value.charCodeAt(0) === HYPHENMINUS) {
// expect 1st char is N
if (!cmpChar(token.value, 1, N)) {
return 0;
}
switch (token.value.length) {
// -n
// -n <signed-integer>
// -n ['+' | '-'] <signless-integer>
case 2:
return consumeB(getNextToken(++offset), offset, getNextToken);
// -n- <signless-integer>
case 3:
if (token.value.charCodeAt(2) !== HYPHENMINUS) {
return 0;
}
offset = skipSC(getNextToken(++offset), offset, getNextToken);
token = getNextToken(offset);
return checkInteger(token, 0, DISALLOW_SIGN, offset);
// <dashndashdigit-ident>
default:
if (token.value.charCodeAt(2) !== HYPHENMINUS) {
return 0;
}
return checkInteger(token, 3, DISALLOW_SIGN, offset);
}
}
// '+'? n
// '+'? n <signed-integer>
// '+'? n ['+' | '-'] <signless-integer>
// '+'? n- <signless-integer>
// '+'? <ndashdigit-ident>
else if (token.type === Ident || (isDelim(token, PLUSSIGN) && getNextToken(offset + 1).type === Ident)) {
// just ignore a plus
if (token.type !== Ident) {
token = getNextToken(++offset);
}
if (token === null || !cmpChar(token.value, 0, N)) {
return 0;
}
switch (token.value.length) {
// '+'? n
// '+'? n <signed-integer>
// '+'? n ['+' | '-'] <signless-integer>
case 1:
return consumeB(getNextToken(++offset), offset, getNextToken);
// '+'? n- <signless-integer>
case 2:
if (token.value.charCodeAt(1) !== HYPHENMINUS) {
return 0;
}
offset = skipSC(getNextToken(++offset), offset, getNextToken);
token = getNextToken(offset);
return checkInteger(token, 0, DISALLOW_SIGN, offset);
// '+'? <ndashdigit-ident>
default:
if (token.value.charCodeAt(1) !== HYPHENMINUS) {
return 0;
}
return checkInteger(token, 2, DISALLOW_SIGN, offset);
}
}
// <ndashdigit-dimension>
// <ndash-dimension> <signless-integer>
// <n-dimension>
// <n-dimension> <signed-integer>
// <n-dimension> ['+' | '-'] <signless-integer>
else if (token.type === Dimension) {
let code = token.value.charCodeAt(0);
let sign = code === PLUSSIGN || code === HYPHENMINUS ? 1 : 0;
let i = sign;
for (; i < token.value.length; i++) {
if (!isDigit(token.value.charCodeAt(i))) {
break;
}
}
if (i === sign) {
// Integer is expected
return 0;
}
if (!cmpChar(token.value, i, N)) {
return 0;
}
// <n-dimension>
// <n-dimension> <signed-integer>
// <n-dimension> ['+' | '-'] <signless-integer>
if (i + 1 === token.value.length) {
return consumeB(getNextToken(++offset), offset, getNextToken);
} else {
if (token.value.charCodeAt(i + 1) !== HYPHENMINUS) {
return 0;
}
// <ndash-dimension> <signless-integer>
if (i + 2 === token.value.length) {
offset = skipSC(getNextToken(++offset), offset, getNextToken);
token = getNextToken(offset);
return checkInteger(token, 0, DISALLOW_SIGN, offset);
}
// <ndashdigit-dimension>
else {
return checkInteger(token, i + 2, DISALLOW_SIGN, offset);
}
}
}
return 0;
};

8
node_modules/css-tree/lib/lexer/generic-const.js generated vendored Normal file

@ -0,0 +1,8 @@
// https://drafts.csswg.org/css-cascade-5/
export const cssWideKeywords = [
'initial',
'inherit',
'unset',
'revert',
'revert-layer'
];

151
node_modules/css-tree/lib/lexer/generic-urange.js generated vendored Normal file

@ -0,0 +1,151 @@
import {
isHexDigit,
cmpChar,
Ident,
Delim,
Number as NumberToken,
Dimension
} from '../tokenizer/index.js';
const PLUSSIGN = 0x002B; // U+002B PLUS SIGN (+)
const HYPHENMINUS = 0x002D; // U+002D HYPHEN-MINUS (-)
const QUESTIONMARK = 0x003F; // U+003F QUESTION MARK (?)
const U = 0x0075; // U+0075 LATIN SMALL LETTER U (u)
function isDelim(token, code) {
return token !== null && token.type === Delim && token.value.charCodeAt(0) === code;
}
function startsWith(token, code) {
return token.value.charCodeAt(0) === code;
}
function hexSequence(token, offset, allowDash) {
let hexlen = 0;
for (let pos = offset; pos < token.value.length; pos++) {
const code = token.value.charCodeAt(pos);
if (code === HYPHENMINUS && allowDash && hexlen !== 0) {
hexSequence(token, offset + hexlen + 1, false);
return 6; // disallow following question marks
}
if (!isHexDigit(code)) {
return 0; // not a hex digit
}
if (++hexlen > 6) {
return 0; // too many hex digits
};
}
return hexlen;
}
function withQuestionMarkSequence(consumed, length, getNextToken) {
if (!consumed) {
return 0; // nothing consumed
}
while (isDelim(getNextToken(length), QUESTIONMARK)) {
if (++consumed > 6) {
return 0; // too many question marks
}
length++;
}
return length;
}
// https://drafts.csswg.org/css-syntax/#urange
// Informally, the <urange> production has three forms:
// U+0001
// Defines a range consisting of a single code point, in this case the code point "1".
// U+0001-00ff
// Defines a range of codepoints between the first and the second value, in this case
// the range between "1" and "ff" (255 in decimal) inclusive.
// U+00??
// Defines a range of codepoints where the "?" characters range over all hex digits,
// in this case defining the same as the value U+0000-00ff.
// In each form, a maximum of 6 digits is allowed for each hexadecimal number (if you treat "?" as a hexadecimal digit).
//
// <urange> =
// u '+' <ident-token> '?'* |
// u <dimension-token> '?'* |
// u <number-token> '?'* |
// u <number-token> <dimension-token> |
// u <number-token> <number-token> |
// u '+' '?'+
export default function urange(token, getNextToken) {
let length = 0;
// should start with `u` or `U`
if (token === null || token.type !== Ident || !cmpChar(token.value, 0, U)) {
return 0;
}
token = getNextToken(++length);
if (token === null) {
return 0;
}
// u '+' <ident-token> '?'*
// u '+' '?'+
if (isDelim(token, PLUSSIGN)) {
token = getNextToken(++length);
if (token === null) {
return 0;
}
if (token.type === Ident) {
// u '+' <ident-token> '?'*
return withQuestionMarkSequence(hexSequence(token, 0, true), ++length, getNextToken);
}
if (isDelim(token, QUESTIONMARK)) {
// u '+' '?'+
return withQuestionMarkSequence(1, ++length, getNextToken);
}
// Hex digit or question mark is expected
return 0;
}
// u <number-token> '?'*
// u <number-token> <dimension-token>
// u <number-token> <number-token>
if (token.type === NumberToken) {
const consumedHexLength = hexSequence(token, 1, true);
if (consumedHexLength === 0) {
return 0;
}
token = getNextToken(++length);
if (token === null) {
// u <number-token> <eof>
return length;
}
if (token.type === Dimension || token.type === NumberToken) {
// u <number-token> <dimension-token>
// u <number-token> <number-token>
if (!startsWith(token, HYPHENMINUS) || !hexSequence(token, 1, false)) {
return 0;
}
return length + 1;
}
// u <number-token> '?'*
return withQuestionMarkSequence(consumedHexLength, length, getNextToken);
}
// u <dimension-token> '?'*
if (token.type === Dimension) {
return withQuestionMarkSequence(hexSequence(token, 1, true), ++length, getNextToken);
}
return 0;
};

598
node_modules/css-tree/lib/lexer/generic.js generated vendored Normal file

@ -0,0 +1,598 @@
import { cssWideKeywords } from './generic-const.js';
import anPlusB from './generic-an-plus-b.js';
import urange from './generic-urange.js';
import {
isIdentifierStart,
isHexDigit,
isDigit,
cmpStr,
consumeNumber,
Ident,
Function as FunctionToken,
AtKeyword,
Hash,
String as StringToken,
BadString,
Url,
BadUrl,
Delim,
Number as NumberToken,
Percentage,
Dimension,
WhiteSpace,
CDO,
CDC,
Colon,
Semicolon,
Comma,
LeftSquareBracket,
RightSquareBracket,
LeftParenthesis,
RightParenthesis,
LeftCurlyBracket,
RightCurlyBracket
} from '../tokenizer/index.js';
const calcFunctionNames = ['calc(', '-moz-calc(', '-webkit-calc('];
const balancePair = new Map([
[FunctionToken, RightParenthesis],
[LeftParenthesis, RightParenthesis],
[LeftSquareBracket, RightSquareBracket],
[LeftCurlyBracket, RightCurlyBracket]
]);
// units
const LENGTH = [
// absolute length units https://www.w3.org/TR/css-values-3/#lengths
'cm', 'mm', 'q', 'in', 'pt', 'pc', 'px',
// font-relative length units https://drafts.csswg.org/css-values-4/#font-relative-lengths
'em', 'rem',
'ex', 'rex',
'cap', 'rcap',
'ch', 'rch',
'ic', 'ric',
'lh', 'rlh',
// viewport-percentage lengths https://drafts.csswg.org/css-values-4/#viewport-relative-lengths
'vw', 'svw', 'lvw', 'dvw',
'vh', 'svh', 'lvh', 'dvh',
'vi', 'svi', 'lvi', 'dvi',
'vb', 'svb', 'lvb', 'dvb',
'vmin', 'svmin', 'lvmin', 'dvmin',
'vmax', 'svmax', 'lvmax', 'dvmax',
// container relative lengths https://drafts.csswg.org/css-contain-3/#container-lengths
'cqw', 'cqh', 'cqi', 'cqb', 'cqmin', 'cqmax'
];
const ANGLE = ['deg', 'grad', 'rad', 'turn']; // https://www.w3.org/TR/css-values-3/#angles
const TIME = ['s', 'ms']; // https://www.w3.org/TR/css-values-3/#time
const FREQUENCY = ['hz', 'khz']; // https://www.w3.org/TR/css-values-3/#frequency
const RESOLUTION = ['dpi', 'dpcm', 'dppx', 'x']; // https://www.w3.org/TR/css-values-3/#resolution
const FLEX = ['fr']; // https://drafts.csswg.org/css-grid/#fr-unit
const DECIBEL = ['db']; // https://www.w3.org/TR/css3-speech/#mixing-props-voice-volume
const SEMITONES = ['st']; // https://www.w3.org/TR/css3-speech/#voice-props-voice-pitch
// safe char code getter
function charCodeAt(str, index) {
return index < str.length ? str.charCodeAt(index) : 0;
}
function eqStr(actual, expected) {
return cmpStr(actual, 0, actual.length, expected);
}
function eqStrAny(actual, expected) {
for (let i = 0; i < expected.length; i++) {
if (eqStr(actual, expected[i])) {
return true;
}
}
return false;
}
// IE postfix hack, i.e. 123\0 or 123px\9
function isPostfixIeHack(str, offset) {
if (offset !== str.length - 2) {
return false;
}
return (
charCodeAt(str, offset) === 0x005C && // U+005C REVERSE SOLIDUS (\)
isDigit(charCodeAt(str, offset + 1))
);
}
function outOfRange(opts, value, numEnd) {
if (opts && opts.type === 'Range') {
const num = Number(
numEnd !== undefined && numEnd !== value.length
? value.substr(0, numEnd)
: value
);
if (isNaN(num)) {
return true;
}
// FIXME: when opts.min is a string it's a dimension, skip a range validation
// for now since it requires a type conversion which is not implemented yet
if (opts.min !== null && num < opts.min && typeof opts.min !== 'string') {
return true;
}
// FIXME: when opts.max is a string it's a dimension, skip a range validation
// for now since it requires a type conversion which is not implemented yet
if (opts.max !== null && num > opts.max && typeof opts.max !== 'string') {
return true;
}
}
return false;
}
function consumeFunction(token, getNextToken) {
let balanceCloseType = 0;
let balanceStash = [];
let length = 0;
// balanced token consuming
scan:
do {
switch (token.type) {
case RightCurlyBracket:
case RightParenthesis:
case RightSquareBracket:
if (token.type !== balanceCloseType) {
break scan;
}
balanceCloseType = balanceStash.pop();
if (balanceStash.length === 0) {
length++;
break scan;
}
break;
case FunctionToken:
case LeftParenthesis:
case LeftSquareBracket:
case LeftCurlyBracket:
balanceStash.push(balanceCloseType);
balanceCloseType = balancePair.get(token.type);
break;
}
length++;
} while (token = getNextToken(length));
return length;
}
// TODO: implement
// can be used wherever <length>, <frequency>, <angle>, <time>, <percentage>, <number>, or <integer> values are allowed
// https://drafts.csswg.org/css-values/#calc-notation
function calc(next) {
return function(token, getNextToken, opts) {
if (token === null) {
return 0;
}
if (token.type === FunctionToken && eqStrAny(token.value, calcFunctionNames)) {
return consumeFunction(token, getNextToken);
}
return next(token, getNextToken, opts);
};
}
function tokenType(expectedTokenType) {
return function(token) {
if (token === null || token.type !== expectedTokenType) {
return 0;
}
return 1;
};
}
// =========================
// Complex types
//
// https://drafts.csswg.org/css-values-4/#custom-idents
// 4.2. Author-defined Identifiers: the <custom-ident> type
// Some properties accept arbitrary author-defined identifiers as a component value.
// This generic data type is denoted by <custom-ident>, and represents any valid CSS identifier
// that would not be misinterpreted as a pre-defined keyword in that property's value definition.
//
// See also: https://developer.mozilla.org/en-US/docs/Web/CSS/custom-ident
function customIdent(token) {
if (token === null || token.type !== Ident) {
return 0;
}
const name = token.value.toLowerCase();
// The CSS-wide keywords are not valid <custom-ident>s
if (eqStrAny(name, cssWideKeywords)) {
return 0;
}
// The default keyword is reserved and is also not a valid <custom-ident>
if (eqStr(name, 'default')) {
return 0;
}
// TODO: ignore property specific keywords (as described https://developer.mozilla.org/en-US/docs/Web/CSS/custom-ident)
// Specifications using <custom-ident> must specify clearly what other keywords
// are excluded from <custom-ident>, if any—for example by saying that any pre-defined keywords
// in that property's value definition are excluded. Excluded keywords are excluded
// in all ASCII case permutations.
return 1;
}
// https://drafts.csswg.org/css-variables/#typedef-custom-property-name
// A custom property is any property whose name starts with two dashes (U+002D HYPHEN-MINUS), like --foo.
// The <custom-property-name> production corresponds to this: it's defined as any valid identifier
// that starts with two dashes, except -- itself, which is reserved for future use by CSS.
// NOTE: The current implementation treats `--` as a valid name since most (all?) major browsers treat it as valid.
function customPropertyName(token) {
// ... defined as any valid identifier
if (token === null || token.type !== Ident) {
return 0;
}
// ... that starts with two dashes (U+002D HYPHEN-MINUS)
if (charCodeAt(token.value, 0) !== 0x002D || charCodeAt(token.value, 1) !== 0x002D) {
return 0;
}
return 1;
}
// https://drafts.csswg.org/css-color-4/#hex-notation
// The syntax of a <hex-color> is a <hash-token> token whose value consists of 3, 4, 6, or 8 hexadecimal digits.
// In other words, a hex color is written as a hash character, "#", followed by some number of digits 0-9 or
// letters a-f (the case of the letters doesn't matter - #00ff00 is identical to #00FF00).
function hexColor(token) {
if (token === null || token.type !== Hash) {
return 0;
}
const length = token.value.length;
// valid values (length): #rgb (4), #rgba (5), #rrggbb (7), #rrggbbaa (9)
if (length !== 4 && length !== 5 && length !== 7 && length !== 9) {
return 0;
}
for (let i = 1; i < length; i++) {
if (!isHexDigit(charCodeAt(token.value, i))) {
return 0;
}
}
return 1;
}
function idSelector(token) {
if (token === null || token.type !== Hash) {
return 0;
}
if (!isIdentifierStart(charCodeAt(token.value, 1), charCodeAt(token.value, 2), charCodeAt(token.value, 3))) {
return 0;
}
return 1;
}
// https://drafts.csswg.org/css-syntax/#any-value
// It represents the entirety of what a valid declaration can have as its value.
function declarationValue(token, getNextToken) {
if (!token) {
return 0;
}
let balanceCloseType = 0;
let balanceStash = [];
let length = 0;
// The <declaration-value> production matches any sequence of one or more tokens,
// so long as the sequence does not contain ...
scan:
do {
switch (token.type) {
// ... <bad-string-token>, <bad-url-token>,
case BadString:
case BadUrl:
break scan;
// ... unmatched <)-token>, <]-token>, or <}-token>,
case RightCurlyBracket:
case RightParenthesis:
case RightSquareBracket:
if (token.type !== balanceCloseType) {
break scan;
}
balanceCloseType = balanceStash.pop();
break;
// ... or top-level <semicolon-token> tokens
case Semicolon:
if (balanceCloseType === 0) {
break scan;
}
break;
// ... or <delim-token> tokens with a value of "!"
case Delim:
if (balanceCloseType === 0 && token.value === '!') {
break scan;
}
break;
case FunctionToken:
case LeftParenthesis:
case LeftSquareBracket:
case LeftCurlyBracket:
balanceStash.push(balanceCloseType);
balanceCloseType = balancePair.get(token.type);
break;
}
length++;
} while (token = getNextToken(length));
return length;
}
// https://drafts.csswg.org/css-syntax/#any-value
// The <any-value> production is identical to <declaration-value>, but also
// allows top-level <semicolon-token> tokens and <delim-token> tokens
// with a value of "!". It represents the entirety of what valid CSS can be in any context.
function anyValue(token, getNextToken) {
if (!token) {
return 0;
}
let balanceCloseType = 0;
let balanceStash = [];
let length = 0;
// The <any-value> production matches any sequence of one or more tokens,
// so long as the sequence ...
scan:
do {
switch (token.type) {
// ... does not contain <bad-string-token>, <bad-url-token>,
case BadString:
case BadUrl:
break scan;
// ... unmatched <)-token>, <]-token>, or <}-token>,
case RightCurlyBracket:
case RightParenthesis:
case RightSquareBracket:
if (token.type !== balanceCloseType) {
break scan;
}
balanceCloseType = balanceStash.pop();
break;
case FunctionToken:
case LeftParenthesis:
case LeftSquareBracket:
case LeftCurlyBracket:
balanceStash.push(balanceCloseType);
balanceCloseType = balancePair.get(token.type);
break;
}
length++;
} while (token = getNextToken(length));
return length;
}
// =========================
// Dimensions
//
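// Create a checker for <dimension> values restricted to a set of units (any unit when
// `type` is null), e.g. dimension(LENGTH) accepts "10px" or "1.5em"; the IE postfix hack
// ("10px\9") is tolerated when extracting the unit.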
function dimension(type) {
if (type) {
type = new Set(type);
}
return function(token, getNextToken, opts) {
if (token === null || token.type !== Dimension) {
return 0;
}
const numberEnd = consumeNumber(token.value, 0);
// check unit
if (type !== null) {
// check for IE postfix hack, i.e. 123px\0 or 123px\9
const reverseSolidusOffset = token.value.indexOf('\\', numberEnd);
const unit = reverseSolidusOffset === -1 || !isPostfixIeHack(token.value, reverseSolidusOffset)
? token.value.substr(numberEnd)
: token.value.substring(numberEnd, reverseSolidusOffset);
if (type.has(unit.toLowerCase()) === false) {
return 0;
}
}
// check range if specified
if (outOfRange(opts, token.value, numberEnd)) {
return 0;
}
return 1;
};
}
// =========================
// Percentage
//
// §5.5. Percentages: the <percentage> type
// https://drafts.csswg.org/css-values-4/#percentages
function percentage(token, getNextToken, opts) {
// ... corresponds to the <percentage-token> production
if (token === null || token.type !== Percentage) {
return 0;
}
// check range if specified
if (outOfRange(opts, token.value, token.value.length - 1)) {
return 0;
}
return 1;
}
// =========================
// Numeric
//
// https://drafts.csswg.org/css-values-4/#numbers
// The value <zero> represents a literal number with the value 0. Expressions that merely
// evaluate to a <number> with the value 0 (for example, calc(0)) do not match <zero>;
// only literal <number-token>s do.
function zero(next) {
if (typeof next !== 'function') {
next = function() {
return 0;
};
}
return function(token, getNextToken, opts) {
if (token !== null && token.type === NumberToken) {
if (Number(token.value) === 0) {
return 1;
}
}
return next(token, getNextToken, opts);
};
}
// § 5.3. Real Numbers: the <number> type
// https://drafts.csswg.org/css-values-4/#numbers
// Number values are denoted by <number>, and represent real numbers, possibly with a fractional component.
// ... It corresponds to the <number-token> production
function number(token, getNextToken, opts) {
if (token === null) {
return 0;
}
const numberEnd = consumeNumber(token.value, 0);
const isNumber = numberEnd === token.value.length;
if (!isNumber && !isPostfixIeHack(token.value, numberEnd)) {
return 0;
}
// check range if specified
if (outOfRange(opts, token.value, numberEnd)) {
return 0;
}
return 1;
}
// §5.2. Integers: the <integer> type
// https://drafts.csswg.org/css-values-4/#integers
function integer(token, getNextToken, opts) {
// ... corresponds to a subset of the <number-token> production
if (token === null || token.type !== NumberToken) {
return 0;
}
// The first digit of an integer may be immediately preceded by `-` or `+` to indicate the integer's sign.
let i = charCodeAt(token.value, 0) === 0x002B || // U+002B PLUS SIGN (+)
charCodeAt(token.value, 0) === 0x002D ? 1 : 0; // U+002D HYPHEN-MINUS (-)
// When written literally, an integer is one or more decimal digits 0 through 9 ...
for (; i < token.value.length; i++) {
if (!isDigit(charCodeAt(token.value, i))) {
return 0;
}
}
// check range if specified
if (outOfRange(opts, token.value, i)) {
return 0;
}
return 1;
}
export default {
// token types
'ident-token': tokenType(Ident),
'function-token': tokenType(FunctionToken),
'at-keyword-token': tokenType(AtKeyword),
'hash-token': tokenType(Hash),
'string-token': tokenType(StringToken),
'bad-string-token': tokenType(BadString),
'url-token': tokenType(Url),
'bad-url-token': tokenType(BadUrl),
'delim-token': tokenType(Delim),
'number-token': tokenType(NumberToken),
'percentage-token': tokenType(Percentage),
'dimension-token': tokenType(Dimension),
'whitespace-token': tokenType(WhiteSpace),
'CDO-token': tokenType(CDO),
'CDC-token': tokenType(CDC),
'colon-token': tokenType(Colon),
'semicolon-token': tokenType(Semicolon),
'comma-token': tokenType(Comma),
'[-token': tokenType(LeftSquareBracket),
']-token': tokenType(RightSquareBracket),
'(-token': tokenType(LeftParenthesis),
')-token': tokenType(RightParenthesis),
'{-token': tokenType(LeftCurlyBracket),
'}-token': tokenType(RightCurlyBracket),
// token type aliases
'string': tokenType(StringToken),
'ident': tokenType(Ident),
// complex types
'custom-ident': customIdent,
'custom-property-name': customPropertyName,
'hex-color': hexColor,
'id-selector': idSelector, // element( <id-selector> )
'an-plus-b': anPlusB,
'urange': urange,
'declaration-value': declarationValue,
'any-value': anyValue,
// dimensions
'dimension': calc(dimension(null)),
'angle': calc(dimension(ANGLE)),
'decibel': calc(dimension(DECIBEL)),
'frequency': calc(dimension(FREQUENCY)),
'flex': calc(dimension(FLEX)),
'length': calc(zero(dimension(LENGTH))),
'resolution': calc(dimension(RESOLUTION)),
'semitones': calc(dimension(SEMITONES)),
'time': calc(dimension(TIME)),
// percentage
'percentage': calc(percentage),
// numeric
'zero': zero(),
'number': calc(number),
'integer': calc(integer)
};

1
node_modules/css-tree/lib/lexer/index.js generated vendored Normal file
View File

@ -0,0 +1 @@
export { Lexer } from './Lexer.js';

456
node_modules/css-tree/lib/lexer/match-graph.js generated vendored Normal file
View File

@ -0,0 +1,456 @@
import { parse } from '../definition-syntax/parse.js';
export const MATCH = { type: 'Match' };
export const MISMATCH = { type: 'Mismatch' };
export const DISALLOW_EMPTY = { type: 'DisallowEmpty' };
const LEFTPARENTHESIS = 40; // (
const RIGHTPARENTHESIS = 41; // )
function createCondition(match, thenBranch, elseBranch) {
// reduce node count
if (thenBranch === MATCH && elseBranch === MISMATCH) {
return match;
}
if (match === MATCH && thenBranch === MATCH && elseBranch === MATCH) {
return match;
}
if (match.type === 'If' && match.else === MISMATCH && thenBranch === MATCH) {
thenBranch = match.then;
match = match.match;
}
return {
type: 'If',
match,
then: thenBranch,
else: elseBranch
};
}
function isFunctionType(name) {
return (
name.length > 2 &&
name.charCodeAt(name.length - 2) === LEFTPARENTHESIS &&
name.charCodeAt(name.length - 1) === RIGHTPARENTHESIS
);
}
function isEnumCapatible(term) {
return (
term.type === 'Keyword' ||
term.type === 'AtKeyword' ||
term.type === 'Function' ||
term.type === 'Type' && isFunctionType(term.name)
);
}
function buildGroupMatchGraph(combinator, terms, atLeastOneTermMatched) {
switch (combinator) {
case ' ': {
// Juxtaposing components means that all of them must occur, in the given order.
//
// a b c
// =
// match a
// then match b
// then match c
// then MATCH
// else MISMATCH
// else MISMATCH
// else MISMATCH
let result = MATCH;
for (let i = terms.length - 1; i >= 0; i--) {
const term = terms[i];
result = createCondition(
term,
result,
MISMATCH
);
};
return result;
}
case '|': {
// A bar (|) separates two or more alternatives: exactly one of them must occur.
//
// a | b | c
// =
// match a
// then MATCH
// else match b
// then MATCH
// else match c
// then MATCH
// else MISMATCH
let result = MISMATCH;
let map = null;
for (let i = terms.length - 1; i >= 0; i--) {
let term = terms[i];
// reduce a sequence of keywords into an Enum
if (isEnumCapatible(term)) {
if (map === null && i > 0 && isEnumCapatible(terms[i - 1])) {
map = Object.create(null);
result = createCondition(
{
type: 'Enum',
map
},
MATCH,
result
);
}
if (map !== null) {
const key = (isFunctionType(term.name) ? term.name.slice(0, -1) : term.name).toLowerCase();
if (key in map === false) {
map[key] = term;
continue;
}
}
}
map = null;
// create a new conditional node
result = createCondition(
term,
MATCH,
result
);
};
return result;
}
case '&&': {
// A double ampersand (&&) separates two or more components,
// all of which must occur, in any order.
// Use MatchOnce for groups with a large number of terms,
// since &&-groups produce at least N!-node trees
if (terms.length > 5) {
return {
type: 'MatchOnce',
terms,
all: true
};
}
// Use a combination tree for groups with a small number of terms
//
// a && b && c
// =
// match a
// then [b && c]
// else match b
// then [a && c]
// else match c
// then [a && b]
// else MISMATCH
//
// a && b
// =
// match a
// then match b
// then MATCH
// else MISMATCH
// else match b
// then match a
// then MATCH
// else MISMATCH
// else MISMATCH
let result = MISMATCH;
for (let i = terms.length - 1; i >= 0; i--) {
const term = terms[i];
let thenClause;
if (terms.length > 1) {
thenClause = buildGroupMatchGraph(
combinator,
terms.filter(function(newGroupTerm) {
return newGroupTerm !== term;
}),
false
);
} else {
thenClause = MATCH;
}
result = createCondition(
term,
thenClause,
result
);
};
return result;
}
case '||': {
// A double bar (||) separates two or more options:
// one or more of them must occur, in any order.
// Use MatchOnce for groups with a large number of terms,
// since ||-groups produce at least N!-node trees
if (terms.length > 5) {
return {
type: 'MatchOnce',
terms,
all: false
};
}
// Use a combination tree for groups with a small number of terms
//
// a || b || c
// =
// match a
// then [b || c]
// else match b
// then [a || c]
// else match c
// then [a || b]
// else MISMATCH
//
// a || b
// =
// match a
// then match b
// then MATCH
// else MATCH
// else match b
// then match a
// then MATCH
// else MATCH
// else MISMATCH
let result = atLeastOneTermMatched ? MATCH : MISMATCH;
for (let i = terms.length - 1; i >= 0; i--) {
const term = terms[i];
let thenClause;
if (terms.length > 1) {
thenClause = buildGroupMatchGraph(
combinator,
terms.filter(function(newGroupTerm) {
return newGroupTerm !== term;
}),
true
);
} else {
thenClause = MATCH;
}
result = createCondition(
term,
thenClause,
result
);
};
return result;
}
}
}
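// Build a match graph for a multiplied term (e.g. a?, a+, a#, a{1,4}); node.min/node.max
// come from the definition syntax multiplier, with max === 0 meaning an unbounded repetition.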
function buildMultiplierMatchGraph(node) {
let result = MATCH;
let matchTerm = buildMatchGraphInternal(node.term);
if (node.max === 0) {
// disable repeating of empty match to prevent infinite loop
matchTerm = createCondition(
matchTerm,
DISALLOW_EMPTY,
MISMATCH
);
// the occurrence count is not limited, so make a cycle
// to keep collecting terms until a mismatch
result = createCondition(
matchTerm,
null, // will be a loop
MISMATCH
);
result.then = createCondition(
MATCH,
MATCH,
result // make a loop
);
if (node.comma) {
result.then.else = createCondition(
{ type: 'Comma', syntax: node },
result,
MISMATCH
);
}
} else {
// create a match node chain for [min .. max] interval with optional matches
for (let i = node.min || 1; i <= node.max; i++) {
if (node.comma && result !== MATCH) {
result = createCondition(
{ type: 'Comma', syntax: node },
result,
MISMATCH
);
}
result = createCondition(
matchTerm,
createCondition(
MATCH,
MATCH,
result
),
MISMATCH
);
}
}
if (node.min === 0) {
// allow zero match
result = createCondition(
MATCH,
MATCH,
result
);
} else {
// create a match node chain to collect [0 ... min - 1] required matches
for (let i = 0; i < node.min - 1; i++) {
if (node.comma && result !== MATCH) {
result = createCondition(
{ type: 'Comma', syntax: node },
result,
MISMATCH
);
}
result = createCondition(
matchTerm,
result,
MISMATCH
);
}
}
return result;
}
function buildMatchGraphInternal(node) {
if (typeof node === 'function') {
return {
type: 'Generic',
fn: node
};
}
switch (node.type) {
case 'Group': {
let result = buildGroupMatchGraph(
node.combinator,
node.terms.map(buildMatchGraphInternal),
false
);
if (node.disallowEmpty) {
result = createCondition(
result,
DISALLOW_EMPTY,
MISMATCH
);
}
return result;
}
case 'Multiplier':
return buildMultiplierMatchGraph(node);
case 'Type':
case 'Property':
return {
type: node.type,
name: node.name,
syntax: node
};
case 'Keyword':
return {
type: node.type,
name: node.name.toLowerCase(),
syntax: node
};
case 'AtKeyword':
return {
type: node.type,
name: '@' + node.name.toLowerCase(),
syntax: node
};
case 'Function':
return {
type: node.type,
name: node.name.toLowerCase() + '(',
syntax: node
};
case 'String':
// convert a single-character String to a Token
if (node.value.length === 3) {
return {
type: 'Token',
value: node.value.charAt(1),
syntax: node
};
}
// otherwise use it as is
return {
type: node.type,
value: node.value.substr(1, node.value.length - 2).replace(/\\'/g, '\''),
syntax: node
};
case 'Token':
return {
type: node.type,
value: node.value,
syntax: node
};
case 'Comma':
return {
type: node.type,
syntax: node
};
default:
throw new Error('Unknown node type: ' + node.type);
}
}
export function buildMatchGraph(syntaxTree, ref) {
if (typeof syntaxTree === 'string') {
syntaxTree = parse(syntaxTree);
}
return {
type: 'MatchGraph',
match: buildMatchGraphInternal(syntaxTree),
syntax: ref || null,
source: syntaxTree
};
}

630
node_modules/css-tree/lib/lexer/match.js generated vendored Normal file
View File

@ -0,0 +1,630 @@
import { MATCH, MISMATCH, DISALLOW_EMPTY } from './match-graph.js';
import * as TYPE from '../tokenizer/types.js';
const { hasOwnProperty } = Object.prototype;
const STUB = 0;
const TOKEN = 1;
const OPEN_SYNTAX = 2;
const CLOSE_SYNTAX = 3;
const EXIT_REASON_MATCH = 'Match';
const EXIT_REASON_MISMATCH = 'Mismatch';
const EXIT_REASON_ITERATION_LIMIT = 'Maximum iteration number exceeded (please fill an issue on https://github.com/csstree/csstree/issues)';
const ITERATION_LIMIT = 15000;
export let totalIterationCount = 0;
function reverseList(list) {
let prev = null;
let next = null;
let item = list;
while (item !== null) {
next = item.prev;
item.prev = prev;
prev = item;
item = next;
}
return prev;
}
function areStringsEqualCaseInsensitive(testStr, referenceStr) {
if (testStr.length !== referenceStr.length) {
return false;
}
for (let i = 0; i < testStr.length; i++) {
const referenceCode = referenceStr.charCodeAt(i);
let testCode = testStr.charCodeAt(i);
// testCode.toLowerCase() for U+0041 LATIN CAPITAL LETTER A (A) .. U+005A LATIN CAPITAL LETTER Z (Z).
if (testCode >= 0x0041 && testCode <= 0x005A) {
testCode = testCode | 32;
}
if (testCode !== referenceCode) {
return false;
}
}
return true;
}
function isContextEdgeDelim(token) {
if (token.type !== TYPE.Delim) {
return false;
}
// Fix matching for unicode-range: U+30??, U+FF00-FF9F
// Probably we need to check the previous match instead
return token.value !== '?';
}
function isCommaContextStart(token) {
if (token === null) {
return true;
}
return (
token.type === TYPE.Comma ||
token.type === TYPE.Function ||
token.type === TYPE.LeftParenthesis ||
token.type === TYPE.LeftSquareBracket ||
token.type === TYPE.LeftCurlyBracket ||
isContextEdgeDelim(token)
);
}
function isCommaContextEnd(token) {
if (token === null) {
return true;
}
return (
token.type === TYPE.RightParenthesis ||
token.type === TYPE.RightSquareBracket ||
token.type === TYPE.RightCurlyBracket ||
(token.type === TYPE.Delim && token.value === '/')
);
}
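// Core backtracking matcher: walks the match graph against a token list, using linked-list
// stacks of pending `then`/`else` branches so that a mismatch can roll the state back.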
function internalMatch(tokens, state, syntaxes) {
function moveToNextToken() {
do {
tokenIndex++;
token = tokenIndex < tokens.length ? tokens[tokenIndex] : null;
} while (token !== null && (token.type === TYPE.WhiteSpace || token.type === TYPE.Comment));
}
function getNextToken(offset) {
const nextIndex = tokenIndex + offset;
return nextIndex < tokens.length ? tokens[nextIndex] : null;
}
function stateSnapshotFromSyntax(nextState, prev) {
return {
nextState,
matchStack,
syntaxStack,
thenStack,
tokenIndex,
prev
};
}
function pushThenStack(nextState) {
thenStack = {
nextState,
matchStack,
syntaxStack,
prev: thenStack
};
}
function pushElseStack(nextState) {
elseStack = stateSnapshotFromSyntax(nextState, elseStack);
}
function addTokenToMatch() {
matchStack = {
type: TOKEN,
syntax: state.syntax,
token,
prev: matchStack
};
moveToNextToken();
syntaxStash = null;
if (tokenIndex > longestMatch) {
longestMatch = tokenIndex;
}
}
function openSyntax() {
syntaxStack = {
syntax: state.syntax,
opts: state.syntax.opts || (syntaxStack !== null && syntaxStack.opts) || null,
prev: syntaxStack
};
matchStack = {
type: OPEN_SYNTAX,
syntax: state.syntax,
token: matchStack.token,
prev: matchStack
};
}
function closeSyntax() {
if (matchStack.type === OPEN_SYNTAX) {
matchStack = matchStack.prev;
} else {
matchStack = {
type: CLOSE_SYNTAX,
syntax: syntaxStack.syntax,
token: matchStack.token,
prev: matchStack
};
}
syntaxStack = syntaxStack.prev;
}
let syntaxStack = null;
let thenStack = null;
let elseStack = null;
// null - stashing allowed, nothing stashed
// false - stashing disabled, nothing stashed
// anything else - some stashable syntax was stashed
let syntaxStash = null;
let iterationCount = 0; // count iterations and prevent infinite loop
let exitReason = null;
let token = null;
let tokenIndex = -1;
let longestMatch = 0;
let matchStack = {
type: STUB,
syntax: null,
token: null,
prev: null
};
moveToNextToken();
while (exitReason === null && ++iterationCount < ITERATION_LIMIT) {
// function mapList(list, fn) {
// const result = [];
// while (list) {
// result.unshift(fn(list));
// list = list.prev;
// }
// return result;
// }
// console.log('--\n',
// '#' + iterationCount,
// require('util').inspect({
// match: mapList(matchStack, x => x.type === TOKEN ? x.token && x.token.value : x.syntax ? ({ [OPEN_SYNTAX]: '<', [CLOSE_SYNTAX]: '</' }[x.type] || x.type) + '!' + x.syntax.name : null),
// token: token && token.value,
// tokenIndex,
// syntax: syntax.type + (syntax.id ? ' #' + syntax.id : '')
// }, { depth: null })
// );
switch (state.type) {
case 'Match':
if (thenStack === null) {
// turn to MISMATCH when some tokens left unmatched
if (token !== null) {
// doesn't mismatch if just one token left and it's an IE hack
if (tokenIndex !== tokens.length - 1 || (token.value !== '\\0' && token.value !== '\\9')) {
state = MISMATCH;
break;
}
}
// break the main loop, return a result - MATCH
exitReason = EXIT_REASON_MATCH;
break;
}
// go to next syntax (`then` branch)
state = thenStack.nextState;
// check match is not empty
if (state === DISALLOW_EMPTY) {
if (thenStack.matchStack === matchStack) {
state = MISMATCH;
break;
} else {
state = MATCH;
}
}
// close syntax if needed
while (thenStack.syntaxStack !== syntaxStack) {
closeSyntax();
}
// pop stack
thenStack = thenStack.prev;
break;
case 'Mismatch':
// when some syntax is stashed
if (syntaxStash !== null && syntaxStash !== false) {
// there are no else branches, or the else branch would reduce the match stack
if (elseStack === null || tokenIndex > elseStack.tokenIndex) {
// restore state from the stash
elseStack = syntaxStash;
syntaxStash = false; // disable stashing
}
} else if (elseStack === null) {
// no else branches -> break the main loop
// return a result - MISMATCH
exitReason = EXIT_REASON_MISMATCH;
break;
}
// go to next syntax (`else` branch)
state = elseStack.nextState;
// restore all the rest stack states
thenStack = elseStack.thenStack;
syntaxStack = elseStack.syntaxStack;
matchStack = elseStack.matchStack;
tokenIndex = elseStack.tokenIndex;
token = tokenIndex < tokens.length ? tokens[tokenIndex] : null;
// pop stack
elseStack = elseStack.prev;
break;
case 'MatchGraph':
state = state.match;
break;
case 'If':
// IMPORTANT: else stack push must go first,
// since it stores the state of thenStack before changes
if (state.else !== MISMATCH) {
pushElseStack(state.else);
}
if (state.then !== MATCH) {
pushThenStack(state.then);
}
state = state.match;
break;
case 'MatchOnce':
state = {
type: 'MatchOnceBuffer',
syntax: state,
index: 0,
mask: 0
};
break;
case 'MatchOnceBuffer': {
const terms = state.syntax.terms;
if (state.index === terms.length) {
// no matches at all, or all terms were required to match
if (state.mask === 0 || state.syntax.all) {
state = MISMATCH;
break;
}
// a partial match is ok
state = MATCH;
break;
}
// all terms are matched
if (state.mask === (1 << terms.length) - 1) {
state = MATCH;
break;
}
for (; state.index < terms.length; state.index++) {
const matchFlag = 1 << state.index;
if ((state.mask & matchFlag) === 0) {
// IMPORTANT: else stack push must go first,
// since it stores the state of thenStack before changes
pushElseStack(state);
pushThenStack({
type: 'AddMatchOnce',
syntax: state.syntax,
mask: state.mask | matchFlag
});
// match
state = terms[state.index++];
break;
}
}
break;
}
case 'AddMatchOnce':
state = {
type: 'MatchOnceBuffer',
syntax: state.syntax,
index: 0,
mask: state.mask
};
break;
case 'Enum':
if (token !== null) {
let name = token.value.toLowerCase();
// drop \0 and \9 hack from keyword name
if (name.indexOf('\\') !== -1) {
name = name.replace(/\\[09].*$/, '');
}
if (hasOwnProperty.call(state.map, name)) {
state = state.map[name];
break;
}
}
state = MISMATCH;
break;
case 'Generic': {
const opts = syntaxStack !== null ? syntaxStack.opts : null;
const lastTokenIndex = tokenIndex + Math.floor(state.fn(token, getNextToken, opts));
if (!isNaN(lastTokenIndex) && lastTokenIndex > tokenIndex) {
while (tokenIndex < lastTokenIndex) {
addTokenToMatch();
}
state = MATCH;
} else {
state = MISMATCH;
}
break;
}
case 'Type':
case 'Property': {
const syntaxDict = state.type === 'Type' ? 'types' : 'properties';
const dictSyntax = hasOwnProperty.call(syntaxes, syntaxDict) ? syntaxes[syntaxDict][state.name] : null;
if (!dictSyntax || !dictSyntax.match) {
throw new Error(
'Bad syntax reference: ' +
(state.type === 'Type'
? '<' + state.name + '>'
: '<\'' + state.name + '\'>')
);
}
// stash a syntax for types with low priority
if (syntaxStash !== false && token !== null && state.type === 'Type') {
const lowPriorityMatching =
// https://drafts.csswg.org/css-values-4/#custom-idents
// When parsing positionally-ambiguous keywords in a property value, a <custom-ident> production
// can only claim the keyword if no other unfulfilled production can claim it.
(state.name === 'custom-ident' && token.type === TYPE.Ident) ||
// https://drafts.csswg.org/css-values-4/#lengths
// ... if a `0` could be parsed as either a <number> or a <length> in a property (such as line-height),
// it must parse as a <number>
(state.name === 'length' && token.value === '0');
if (lowPriorityMatching) {
if (syntaxStash === null) {
syntaxStash = stateSnapshotFromSyntax(state, elseStack);
}
state = MISMATCH;
break;
}
}
openSyntax();
state = dictSyntax.match;
break;
}
case 'Keyword': {
const name = state.name;
if (token !== null) {
let keywordName = token.value;
// drop \0 and \9 hack from keyword name
if (keywordName.indexOf('\\') !== -1) {
keywordName = keywordName.replace(/\\[09].*$/, '');
}
if (areStringsEqualCaseInsensitive(keywordName, name)) {
addTokenToMatch();
state = MATCH;
break;
}
}
state = MISMATCH;
break;
}
case 'AtKeyword':
case 'Function':
if (token !== null && areStringsEqualCaseInsensitive(token.value, state.name)) {
addTokenToMatch();
state = MATCH;
break;
}
state = MISMATCH;
break;
case 'Token':
if (token !== null && token.value === state.value) {
addTokenToMatch();
state = MATCH;
break;
}
state = MISMATCH;
break;
case 'Comma':
if (token !== null && token.type === TYPE.Comma) {
if (isCommaContextStart(matchStack.token)) {
state = MISMATCH;
} else {
addTokenToMatch();
state = isCommaContextEnd(token) ? MISMATCH : MATCH;
}
} else {
state = isCommaContextStart(matchStack.token) || isCommaContextEnd(token) ? MATCH : MISMATCH;
}
break;
case 'String':
let string = '';
let lastTokenIndex = tokenIndex;
for (; lastTokenIndex < tokens.length && string.length < state.value.length; lastTokenIndex++) {
string += tokens[lastTokenIndex].value;
}
if (areStringsEqualCaseInsensitive(string, state.value)) {
while (tokenIndex < lastTokenIndex) {
addTokenToMatch();
}
state = MATCH;
} else {
state = MISMATCH;
}
break;
default:
throw new Error('Unknown node type: ' + state.type);
}
}
totalIterationCount += iterationCount;
switch (exitReason) {
case null:
console.warn('[csstree-match] BREAK after ' + ITERATION_LIMIT + ' iterations');
exitReason = EXIT_REASON_ITERATION_LIMIT;
matchStack = null;
break;
case EXIT_REASON_MATCH:
while (syntaxStack !== null) {
closeSyntax();
}
break;
default:
matchStack = null;
}
return {
tokens,
reason: exitReason,
iterations: iterationCount,
match: matchStack,
longestMatch
};
}
export function matchAsList(tokens, matchGraph, syntaxes) {
const matchResult = internalMatch(tokens, matchGraph, syntaxes || {});
if (matchResult.match !== null) {
let item = reverseList(matchResult.match).prev;
matchResult.match = [];
while (item !== null) {
switch (item.type) {
case OPEN_SYNTAX:
case CLOSE_SYNTAX:
matchResult.match.push({
type: item.type,
syntax: item.syntax
});
break;
default:
matchResult.match.push({
token: item.token.value,
node: item.token.node
});
break;
}
item = item.prev;
}
}
return matchResult;
}
export function matchAsTree(tokens, matchGraph, syntaxes) {
const matchResult = internalMatch(tokens, matchGraph, syntaxes || {});
if (matchResult.match === null) {
return matchResult;
}
let item = matchResult.match;
let host = matchResult.match = {
syntax: matchGraph.syntax || null,
match: []
};
const hostStack = [host];
// reverse the list and start with the 2nd item since the 1st is a stub item
item = reverseList(item).prev;
// build a tree
while (item !== null) {
switch (item.type) {
case OPEN_SYNTAX:
host.match.push(host = {
syntax: item.syntax,
match: []
});
hostStack.push(host);
break;
case CLOSE_SYNTAX:
hostStack.pop();
host = hostStack[hostStack.length - 1];
break;
default:
host.match.push({
syntax: item.syntax || null,
token: item.token.value,
node: item.token.node
});
}
item = item.prev;
}
return matchResult;
}

50
node_modules/css-tree/lib/lexer/prepare-tokens.js generated vendored Normal file
View File

@ -0,0 +1,50 @@
import { tokenize } from '../tokenizer/index.js';
const astToTokens = {
decorator(handlers) {
const tokens = [];
let curNode = null;
return {
...handlers,
node(node) {
const tmp = curNode;
curNode = node;
handlers.node.call(this, node);
curNode = tmp;
},
emit(value, type, auto) {
tokens.push({
type,
value,
node: auto ? null : curNode
});
},
result() {
return tokens;
}
};
}
};
function stringToTokens(str) {
const tokens = [];
tokenize(str, (type, start, end) =>
tokens.push({
type,
value: str.slice(start, end),
node: null
})
);
return tokens;
}
export default function(value, syntax) {
if (typeof value === 'string') {
return stringToTokens(value);
}
return syntax.generate(value, astToTokens);
};

61
node_modules/css-tree/lib/lexer/search.js generated vendored Normal file
View File

@ -0,0 +1,61 @@
import { List } from '../utils/List.js';
function getFirstMatchNode(matchNode) {
if ('node' in matchNode) {
return matchNode.node;
}
return getFirstMatchNode(matchNode.match[0]);
}
function getLastMatchNode(matchNode) {
if ('node' in matchNode) {
return matchNode.node;
}
return getLastMatchNode(matchNode.match[matchNode.match.length - 1]);
}
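// Find sub-lists of AST nodes matched by a syntax node of the given type and name
// (e.g. type 'Type', name 'color'); each hit is returned as a { parent, nodes } pair.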
export function matchFragments(lexer, ast, match, type, name) {
function findFragments(matchNode) {
if (matchNode.syntax !== null &&
matchNode.syntax.type === type &&
matchNode.syntax.name === name) {
const start = getFirstMatchNode(matchNode);
const end = getLastMatchNode(matchNode);
lexer.syntax.walk(ast, function(node, item, list) {
if (node === start) {
const nodes = new List();
do {
nodes.appendData(item.data);
if (item.data === end) {
break;
}
item = item.next;
} while (item !== null);
fragments.push({
parent: list,
nodes
});
}
});
}
if (Array.isArray(matchNode.match)) {
matchNode.match.forEach(findFragments);
}
}
const fragments = [];
if (match.matched !== null) {
findFragments(match.matched);
}
return fragments;
}

164
node_modules/css-tree/lib/lexer/structure.js generated vendored Normal file
View File

@ -0,0 +1,164 @@
import { List } from '../utils/List.js';
const { hasOwnProperty } = Object.prototype;
function isValidNumber(value) {
// Number.isInteger(value) && value >= 0
return (
typeof value === 'number' &&
isFinite(value) &&
Math.floor(value) === value &&
value >= 0
);
}
function isValidLocation(loc) {
return (
Boolean(loc) &&
isValidNumber(loc.offset) &&
isValidNumber(loc.line) &&
isValidNumber(loc.column)
);
}
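// Build a checker that validates a node of the given type against its declared field
// structure, reporting wrong node types, bad field values, unknown and missing fields via `warn`.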
function createNodeStructureChecker(type, fields) {
return function checkNode(node, warn) {
if (!node || node.constructor !== Object) {
return warn(node, 'Type of node should be an Object');
}
for (let key in node) {
let valid = true;
if (hasOwnProperty.call(node, key) === false) {
continue;
}
if (key === 'type') {
if (node.type !== type) {
warn(node, 'Wrong node type `' + node.type + '`, expected `' + type + '`');
}
} else if (key === 'loc') {
if (node.loc === null) {
continue;
} else if (node.loc && node.loc.constructor === Object) {
if (typeof node.loc.source !== 'string') {
key += '.source';
} else if (!isValidLocation(node.loc.start)) {
key += '.start';
} else if (!isValidLocation(node.loc.end)) {
key += '.end';
} else {
continue;
}
}
valid = false;
} else if (fields.hasOwnProperty(key)) {
valid = false;
for (let i = 0; !valid && i < fields[key].length; i++) {
const fieldType = fields[key][i];
switch (fieldType) {
case String:
valid = typeof node[key] === 'string';
break;
case Boolean:
valid = typeof node[key] === 'boolean';
break;
case null:
valid = node[key] === null;
break;
default:
if (typeof fieldType === 'string') {
valid = node[key] && node[key].type === fieldType;
} else if (Array.isArray(fieldType)) {
valid = node[key] instanceof List;
}
}
}
} else {
warn(node, 'Unknown field `' + key + '` for ' + type + ' node type');
}
if (!valid) {
warn(node, 'Bad value for `' + type + '.' + key + '`');
}
}
for (const key in fields) {
if (hasOwnProperty.call(fields, key) &&
hasOwnProperty.call(node, key) === false) {
warn(node, 'Field `' + type + '.' + key + '` is missed');
}
}
};
}
function processStructure(name, nodeType) {
const structure = nodeType.structure;
const fields = {
type: String,
loc: true
};
const docs = {
type: '"' + name + '"'
};
for (const key in structure) {
if (hasOwnProperty.call(structure, key) === false) {
continue;
}
const docsTypes = [];
const fieldTypes = fields[key] = Array.isArray(structure[key])
? structure[key].slice()
: [structure[key]];
for (let i = 0; i < fieldTypes.length; i++) {
const fieldType = fieldTypes[i];
if (fieldType === String || fieldType === Boolean) {
docsTypes.push(fieldType.name);
} else if (fieldType === null) {
docsTypes.push('null');
} else if (typeof fieldType === 'string') {
docsTypes.push('<' + fieldType + '>');
} else if (Array.isArray(fieldType)) {
docsTypes.push('List'); // TODO: use type enum
} else {
throw new Error('Wrong value `' + fieldType + '` in `' + name + '.' + key + '` structure definition');
}
}
docs[key] = docsTypes.join(' | ');
}
return {
docs,
check: createNodeStructureChecker(name, fields)
};
}
export function getStructureFromConfig(config) {
const structure = {};
if (config.node) {
for (const name in config.node) {
if (hasOwnProperty.call(config.node, name)) {
const nodeType = config.node[name];
if (nodeType.structure) {
structure[name] = processStructure(name, nodeType);
} else {
throw new Error('Missed `structure` field in `' + name + '` node type definition');
}
}
}
}
return structure;
};

66
node_modules/css-tree/lib/lexer/trace.js generated vendored Normal file
View File

@ -0,0 +1,66 @@
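// Collect the chain of Type/Property/Keyword syntax nodes that matched the given AST node,
// ordered from the outermost syntax down to the one that directly produced the node.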
export function getTrace(node) {
function shouldPutToTrace(syntax) {
if (syntax === null) {
return false;
}
return (
syntax.type === 'Type' ||
syntax.type === 'Property' ||
syntax.type === 'Keyword'
);
}
function hasMatch(matchNode) {
if (Array.isArray(matchNode.match)) {
// use for-loop for better performance
for (let i = 0; i < matchNode.match.length; i++) {
if (hasMatch(matchNode.match[i])) {
if (shouldPutToTrace(matchNode.syntax)) {
result.unshift(matchNode.syntax);
}
return true;
}
}
} else if (matchNode.node === node) {
result = shouldPutToTrace(matchNode.syntax)
? [matchNode.syntax]
: [];
return true;
}
return false;
}
let result = null;
if (this.matched !== null) {
hasMatch(this.matched);
}
return result;
}
export function isType(node, type) {
return testNode(this, node, match => match.type === 'Type' && match.name === type);
}
export function isProperty(node, property) {
return testNode(this, node, match => match.type === 'Property' && match.name === property);
}
export function isKeyword(node) {
return testNode(this, node, match => match.type === 'Keyword');
}
function testNode(match, node, fn) {
const trace = getTrace.call(match, node);
if (trace === null) {
return false;
}
return trace.some(fn);
}

65
node_modules/css-tree/lib/parser/SyntaxError.js generated vendored Normal file
View File

@ -0,0 +1,65 @@
import { createCustomError } from '../utils/create-custom-error.js';
const MAX_LINE_LENGTH = 100;
const OFFSET_CORRECTION = 60;
const TAB_REPLACEMENT = ' ';
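// Render a few source lines around the error location with a caret under the offending
// column; tabs are expanded first and long lines are truncated around the offset.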
function sourceFragment({ source, line, column }, extraLines) {
function processLines(start, end) {
return lines
.slice(start, end)
.map((line, idx) =>
String(start + idx + 1).padStart(maxNumLength) + ' |' + line
).join('\n');
}
const lines = source.split(/\r\n?|\n|\f/);
const startLine = Math.max(1, line - extraLines) - 1;
const endLine = Math.min(line + extraLines, lines.length + 1);
const maxNumLength = Math.max(4, String(endLine).length) + 1;
let cutLeft = 0;
// column correction according to replaced tab before column
column += (TAB_REPLACEMENT.length - 1) * (lines[line - 1].substr(0, column - 1).match(/\t/g) || []).length;
if (column > MAX_LINE_LENGTH) {
cutLeft = column - OFFSET_CORRECTION + 3;
column = OFFSET_CORRECTION - 2;
}
for (let i = startLine; i <= endLine; i++) {
if (i >= 0 && i < lines.length) {
lines[i] = lines[i].replace(/\t/g, TAB_REPLACEMENT);
lines[i] =
(cutLeft > 0 && lines[i].length > cutLeft ? '\u2026' : '') +
lines[i].substr(cutLeft, MAX_LINE_LENGTH - 2) +
(lines[i].length > cutLeft + MAX_LINE_LENGTH - 1 ? '\u2026' : '');
}
}
return [
processLines(startLine, line),
new Array(column + maxNumLength + 2).join('-') + '^',
processLines(line, endLine)
].filter(Boolean).join('\n');
}
export function SyntaxError(message, source, offset, line, column) {
const error = Object.assign(createCustomError('SyntaxError', message), {
source,
offset,
line,
column,
sourceFragment(extraLines) {
return sourceFragment({ source, line, column }, isNaN(extraLines) ? 0 : extraLines);
},
get formattedMessage() {
return (
`Parse error: ${message}\n` +
sourceFragment({ source, line, column }, 2)
);
}
});
return error;
}

346
node_modules/css-tree/lib/parser/create.js generated vendored Normal file
View File

@ -0,0 +1,346 @@
import { List } from '../utils/List.js';
import { SyntaxError } from './SyntaxError.js';
import {
tokenize,
OffsetToLocation,
TokenStream,
tokenNames,
consumeNumber,
findWhiteSpaceStart,
cmpChar,
cmpStr,
WhiteSpace,
Comment,
Ident,
Function as FunctionToken,
Url,
Hash,
Percentage,
Number as NumberToken
} from '../tokenizer/index.js';
import { readSequence } from './sequence.js';
const NOOP = () => {};
const EXCLAMATIONMARK = 0x0021; // U+0021 EXCLAMATION MARK (!)
const NUMBERSIGN = 0x0023; // U+0023 NUMBER SIGN (#)
const SEMICOLON = 0x003B; // U+003B SEMICOLON (;)
const LEFTCURLYBRACKET = 0x007B; // U+007B LEFT CURLY BRACKET ({)
const NULL = 0;
function createParseContext(name) {
return function() {
return this[name]();
};
}
function fetchParseValues(dict) {
const result = Object.create(null);
for (const name in dict) {
const item = dict[name];
const fn = item.parse || item;
if (fn) {
result[name] = fn;
}
}
return result;
}
function processConfig(config) {
const parseConfig = {
context: Object.create(null),
scope: Object.assign(Object.create(null), config.scope),
atrule: fetchParseValues(config.atrule),
pseudo: fetchParseValues(config.pseudo),
node: fetchParseValues(config.node)
};
for (const name in config.parseContext) {
switch (typeof config.parseContext[name]) {
case 'function':
parseConfig.context[name] = config.parseContext[name];
break;
case 'string':
parseConfig.context[name] = createParseContext(config.parseContext[name]);
break;
}
}
return {
config: parseConfig,
...parseConfig,
...parseConfig.node
};
}
export function createParser(config) {
let source = '';
let filename = '<unknown>';
let needPositions = false;
let onParseError = NOOP;
let onParseErrorThrow = false;
const locationMap = new OffsetToLocation();
const parser = Object.assign(new TokenStream(), processConfig(config || {}), {
parseAtrulePrelude: true,
parseRulePrelude: true,
parseValue: true,
parseCustomProperty: false,
readSequence,
consumeUntilBalanceEnd: () => 0,
consumeUntilLeftCurlyBracket(code) {
return code === LEFTCURLYBRACKET ? 1 : 0;
},
consumeUntilLeftCurlyBracketOrSemicolon(code) {
return code === LEFTCURLYBRACKET || code === SEMICOLON ? 1 : 0;
},
consumeUntilExclamationMarkOrSemicolon(code) {
return code === EXCLAMATIONMARK || code === SEMICOLON ? 1 : 0;
},
consumeUntilSemicolonIncluded(code) {
return code === SEMICOLON ? 2 : 0;
},
createList() {
return new List();
},
createSingleNodeList(node) {
return new List().appendData(node);
},
getFirstListNode(list) {
return list && list.first;
},
getLastListNode(list) {
return list && list.last;
},
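// Try a consumer; on a parse error build a fallback node (typically a Raw node) starting
// from the token where the consumer began and report the error via onParseError. The
// onParseErrorThrow flag re-throws errors raised from inside the onParseError callback.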
parseWithFallback(consumer, fallback) {
const startToken = this.tokenIndex;
try {
return consumer.call(this);
} catch (e) {
if (onParseErrorThrow) {
throw e;
}
const fallbackNode = fallback.call(this, startToken);
onParseErrorThrow = true;
onParseError(e, fallbackNode);
onParseErrorThrow = false;
return fallbackNode;
}
},
lookupNonWSType(offset) {
let type;
do {
type = this.lookupType(offset++);
if (type !== WhiteSpace) {
return type;
}
} while (type !== NULL);
return NULL;
},
charCodeAt(offset) {
return offset >= 0 && offset < source.length ? source.charCodeAt(offset) : 0;
},
substring(offsetStart, offsetEnd) {
return source.substring(offsetStart, offsetEnd);
},
substrToCursor(start) {
return this.source.substring(start, this.tokenStart);
},
cmpChar(offset, charCode) {
return cmpChar(source, offset, charCode);
},
cmpStr(offsetStart, offsetEnd, str) {
return cmpStr(source, offsetStart, offsetEnd, str);
},
consume(tokenType) {
const start = this.tokenStart;
this.eat(tokenType);
return this.substrToCursor(start);
},
consumeFunctionName() {
const name = source.substring(this.tokenStart, this.tokenEnd - 1);
this.eat(FunctionToken);
return name;
},
consumeNumber(type) {
const number = source.substring(this.tokenStart, consumeNumber(source, this.tokenStart));
this.eat(type);
return number;
},
eat(tokenType) {
if (this.tokenType !== tokenType) {
const tokenName = tokenNames[tokenType].slice(0, -6).replace(/-/g, ' ').replace(/^./, m => m.toUpperCase());
let message = `${/[[\](){}]/.test(tokenName) ? `"${tokenName}"` : tokenName} is expected`;
let offset = this.tokenStart;
// tweak message and offset
switch (tokenType) {
case Ident:
// when identifier is expected but there is a function or url
if (this.tokenType === FunctionToken || this.tokenType === Url) {
offset = this.tokenEnd - 1;
message = 'Identifier is expected but function found';
} else {
message = 'Identifier is expected';
}
break;
case Hash:
if (this.isDelim(NUMBERSIGN)) {
this.next();
offset++;
message = 'Name is expected';
}
break;
case Percentage:
if (this.tokenType === NumberToken) {
offset = this.tokenEnd;
message = 'Percent sign is expected';
}
break;
}
this.error(message, offset);
}
this.next();
},
eatIdent(name) {
if (this.tokenType !== Ident || this.lookupValue(0, name) === false) {
this.error(`Identifier "${name}" is expected`);
}
this.next();
},
eatDelim(code) {
if (!this.isDelim(code)) {
this.error(`Delim "${String.fromCharCode(code)}" is expected`);
}
this.next();
},
getLocation(start, end) {
if (needPositions) {
return locationMap.getLocationRange(
start,
end,
filename
);
}
return null;
},
getLocationFromList(list) {
if (needPositions) {
const head = this.getFirstListNode(list);
const tail = this.getLastListNode(list);
return locationMap.getLocationRange(
head !== null ? head.loc.start.offset - locationMap.startOffset : this.tokenStart,
tail !== null ? tail.loc.end.offset - locationMap.startOffset : this.tokenStart,
filename
);
}
return null;
},
error(message, offset) {
const location = typeof offset !== 'undefined' && offset < source.length
? locationMap.getLocation(offset)
: this.eof
? locationMap.getLocation(findWhiteSpaceStart(source, source.length - 1))
: locationMap.getLocation(this.tokenStart);
throw new SyntaxError(
message || 'Unexpected input',
source,
location.offset,
location.line,
location.column
);
}
});
const parse = function(source_, options) {
source = source_;
options = options || {};
parser.setSource(source, tokenize);
locationMap.setSource(
source,
options.offset,
options.line,
options.column
);
filename = options.filename || '<unknown>';
needPositions = Boolean(options.positions);
onParseError = typeof options.onParseError === 'function' ? options.onParseError : NOOP;
onParseErrorThrow = false;
parser.parseAtrulePrelude = 'parseAtrulePrelude' in options ? Boolean(options.parseAtrulePrelude) : true;
parser.parseRulePrelude = 'parseRulePrelude' in options ? Boolean(options.parseRulePrelude) : true;
parser.parseValue = 'parseValue' in options ? Boolean(options.parseValue) : true;
parser.parseCustomProperty = 'parseCustomProperty' in options ? Boolean(options.parseCustomProperty) : false;
const { context = 'default', onComment } = options;
if (context in parser.context === false) {
throw new Error('Unknown context `' + context + '`');
}
if (typeof onComment === 'function') {
parser.forEachToken((type, start, end) => {
if (type === Comment) {
const loc = parser.getLocation(start, end);
const value = cmpStr(source, end - 2, end, '*/')
? source.slice(start + 2, end - 2)
: source.slice(start + 2, end);
onComment(value, loc);
}
});
}
const ast = parser.context[context].call(parser, options);
if (!parser.eof) {
parser.error();
}
return ast;
};
return Object.assign(parse, {
SyntaxError,
config: parser.config
});
};

4
node_modules/css-tree/lib/parser/index.js generated vendored Normal file
View File

@ -0,0 +1,4 @@
import { createParser } from './create.js';
import config from '../syntax/config/parser.js';
export default createParser(config);

4
node_modules/css-tree/lib/parser/parse-selector.js generated vendored Normal file
View File

@ -0,0 +1,4 @@
import { createParser } from './create.js';
import config from '../syntax/config/parser-selector.js';
export default createParser(config);

43
node_modules/css-tree/lib/parser/sequence.js generated vendored Normal file
View File

@ -0,0 +1,43 @@
import { WhiteSpace, Comment } from '../tokenizer/index.js';
export function readSequence(recognizer) {
const children = this.createList();
let space = false;
const context = {
recognizer
};
while (!this.eof) {
switch (this.tokenType) {
case Comment:
this.next();
continue;
case WhiteSpace:
space = true;
this.next();
continue;
}
let child = recognizer.getNode.call(this, context);
if (child === undefined) {
break;
}
if (space) {
if (recognizer.onWhiteSpace) {
recognizer.onWhiteSpace.call(this, child, children, context);
}
space = false;
}
children.push(child);
}
if (space && recognizer.onWhiteSpace) {
recognizer.onWhiteSpace.call(this, null, children, context);
}
return children;
};

8
node_modules/css-tree/lib/syntax/atrule/font-face.js generated vendored Normal file
View File

@ -0,0 +1,8 @@
export default {
parse: {
prelude: null,
block() {
return this.Block(true);
}
}
};

39
node_modules/css-tree/lib/syntax/atrule/import.js generated vendored Normal file
View File

@ -0,0 +1,39 @@
import {
String as StringToken,
Ident,
Url,
Function as FunctionToken,
LeftParenthesis
} from '../../tokenizer/index.js';
export default {
parse: {
prelude() {
const children = this.createList();
this.skipSC();
switch (this.tokenType) {
case StringToken:
children.push(this.String());
break;
case Url:
case FunctionToken:
children.push(this.Url());
break;
default:
this.error('String or url() is expected');
}
if (this.lookupNonWSType(0) === Ident ||
this.lookupNonWSType(0) === LeftParenthesis) {
children.push(this.MediaQueryList());
}
return children;
},
block: null
}
};

13
node_modules/css-tree/lib/syntax/atrule/index.js generated vendored Normal file
View File

@ -0,0 +1,13 @@
import fontFace from './font-face.js';
import importAtrule from './import.js';
import media from './media.js';
import page from './page.js';
import supports from './supports.js';
export default {
'font-face': fontFace,
'import': importAtrule,
media,
page,
supports
};

12
node_modules/css-tree/lib/syntax/atrule/media.js generated vendored Normal file
View File

@ -0,0 +1,12 @@
export default {
parse: {
prelude() {
return this.createSingleNodeList(
this.MediaQueryList()
);
},
block() {
return this.Block(false);
}
}
};

12
node_modules/css-tree/lib/syntax/atrule/page.js generated vendored Normal file
View File

@ -0,0 +1,12 @@
export default {
parse: {
prelude() {
return this.createSingleNodeList(
this.SelectorList()
);
},
block() {
return this.Block(true);
}
}
};

80
node_modules/css-tree/lib/syntax/atrule/supports.js generated vendored Normal file
View File

@ -0,0 +1,80 @@
import {
WhiteSpace,
Comment,
Ident,
Function,
Colon,
LeftParenthesis
} from '../../tokenizer/index.js';
function consumeRaw() {
return this.createSingleNodeList(
this.Raw(this.tokenIndex, null, false)
);
}
function parentheses() {
this.skipSC();
if (this.tokenType === Ident &&
this.lookupNonWSType(1) === Colon) {
return this.createSingleNodeList(
this.Declaration()
);
}
return readSequence.call(this);
}
function readSequence() {
const children = this.createList();
let child;
this.skipSC();
scan:
while (!this.eof) {
switch (this.tokenType) {
case Comment:
case WhiteSpace:
this.next();
continue;
case Function:
child = this.Function(consumeRaw, this.scope.AtrulePrelude);
break;
case Ident:
child = this.Identifier();
break;
case LeftParenthesis:
child = this.Parentheses(parentheses, this.scope.AtrulePrelude);
break;
default:
break scan;
}
children.push(child);
}
return children;
}
export default {
parse: {
prelude() {
const children = readSequence.call(this);
if (this.getFirstListNode(children) === null) {
this.error('Condition is expected');
}
return children;
},
block() {
return this.Block(false);
}
}
};

5
node_modules/css-tree/lib/syntax/config/generator.js generated vendored Normal file
View File

@ -0,0 +1,5 @@
import * as node from '../node/index-generate.js';
export default {
node
};

8
node_modules/css-tree/lib/syntax/config/lexer.js generated vendored Normal file
View File

@ -0,0 +1,8 @@
import definitions from '../../data.js';
import * as node from '../node/index.js';
export default {
generic: true,
...definitions,
node
};

138
node_modules/css-tree/lib/syntax/config/mix.js generated vendored Normal file
View File

@ -0,0 +1,138 @@
const { hasOwnProperty } = Object.prototype;
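// `shape` describes how each config section is merged on fork(): `true` copies the value,
// a function merges old and new values, a nested object recurses per entry, and an array
// lists which fields to merge for keyed collections (atrule, pseudo, node).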
const shape = {
generic: true,
types: appendOrAssign,
atrules: {
prelude: appendOrAssignOrNull,
descriptors: appendOrAssignOrNull
},
properties: appendOrAssign,
parseContext: assign,
scope: deepAssign,
atrule: ['parse'],
pseudo: ['parse'],
node: ['name', 'structure', 'parse', 'generate', 'walkContext']
};
function isObject(value) {
return value && value.constructor === Object;
}
function copy(value) {
return isObject(value)
? { ...value }
: value;
}
function assign(dest, src) {
return Object.assign(dest, src);
}
function deepAssign(dest, src) {
for (const key in src) {
if (hasOwnProperty.call(src, key)) {
if (isObject(dest[key])) {
deepAssign(dest[key], src[key]);
} else {
dest[key] = copy(src[key]);
}
}
}
return dest;
}
function append(a, b) {
if (typeof b === 'string' && /^\s*\|/.test(b)) {
return typeof a === 'string'
? a + b
: b.replace(/^\s*\|\s*/, '');
}
return b || null;
}
function appendOrAssign(a, b) {
if (typeof b === 'string') {
return append(a, b);
}
const result = { ...a };
for (let key in b) {
if (hasOwnProperty.call(b, key)) {
result[key] = append(hasOwnProperty.call(a, key) ? a[key] : undefined, b[key]);
}
}
return result;
}
function appendOrAssignOrNull(a, b) {
const result = appendOrAssign(a, b);
return !isObject(result) || Object.keys(result).length
? result
: null;
}
function mix(dest, src, shape) {
for (const key in shape) {
if (hasOwnProperty.call(shape, key) === false) {
continue;
}
if (shape[key] === true) {
if (hasOwnProperty.call(src, key)) {
dest[key] = copy(src[key]);
}
} else if (shape[key]) {
if (typeof shape[key] === 'function') {
const fn = shape[key];
dest[key] = fn({}, dest[key]);
dest[key] = fn(dest[key] || {}, src[key]);
} else if (isObject(shape[key])) {
const result = {};
for (let name in dest[key]) {
result[name] = mix({}, dest[key][name], shape[key]);
}
for (let name in src[key]) {
result[name] = mix(result[name] || {}, src[key][name], shape[key]);
}
dest[key] = result;
} else if (Array.isArray(shape[key])) {
const res = {};
const innerShape = shape[key].reduce(function(s, k) {
s[k] = true;
return s;
}, {});
for (const [name, value] of Object.entries(dest[key] || {})) {
res[name] = {};
if (value) {
mix(res[name], value, innerShape);
}
}
for (const name in src[key]) {
if (hasOwnProperty.call(src[key], name)) {
if (!res[name]) {
res[name] = {};
}
if (src[key] && src[key][name]) {
mix(res[name], src[key][name], innerShape);
}
}
}
dest[key] = res;
}
}
}
return dest;
}
export default (dest, src) => mix(dest, src, shape);

15
node_modules/css-tree/lib/syntax/config/parser-selector.js generated vendored Normal file
View File

@ -0,0 +1,15 @@
import { Selector } from '../scope/index.js';
import pseudo from '../pseudo/index.js';
import * as node from '../node/index-parse-selector.js';
export default {
parseContext: {
default: 'SelectorList',
selectorList: 'SelectorList',
selector: 'Selector'
},
scope: { Selector },
atrule: {},
pseudo,
node
};

30
node_modules/css-tree/lib/syntax/config/parser.js generated vendored Normal file
View File

@ -0,0 +1,30 @@
import * as scope from '../scope/index.js';
import atrule from '../atrule/index.js';
import pseudo from '../pseudo/index.js';
import * as node from '../node/index-parse.js';
export default {
parseContext: {
default: 'StyleSheet',
stylesheet: 'StyleSheet',
atrule: 'Atrule',
atrulePrelude(options) {
return this.AtrulePrelude(options.atrule ? String(options.atrule) : null);
},
mediaQueryList: 'MediaQueryList',
mediaQuery: 'MediaQuery',
rule: 'Rule',
selectorList: 'SelectorList',
selector: 'Selector',
block() {
return this.Block(true);
},
declarationList: 'DeclarationList',
declaration: 'Declaration',
value: 'Value'
},
scope,
atrule,
pseudo,
node
};

5
node_modules/css-tree/lib/syntax/config/walker.js generated vendored Normal file
View File

@ -0,0 +1,5 @@
import * as node from '../node/index.js';
export default {
node
};

53
node_modules/css-tree/lib/syntax/create.js generated vendored Normal file
View File

@ -0,0 +1,53 @@
import { tokenize } from '../tokenizer/index.js';
import { createParser } from '../parser/create.js';
import { createGenerator } from '../generator/create.js';
import { createConvertor } from '../convertor/create.js';
import { createWalker } from '../walker/create.js';
import { Lexer } from '../lexer/Lexer.js';
import mix from './config/mix.js';
function createSyntax(config) {
const parse = createParser(config);
const walk = createWalker(config);
const generate = createGenerator(config);
const { fromPlainObject, toPlainObject } = createConvertor(walk);
const syntax = {
lexer: null,
createLexer: config => new Lexer(config, syntax, syntax.lexer.structure),
tokenize,
parse,
generate,
walk,
find: walk.find,
findLast: walk.findLast,
findAll: walk.findAll,
fromPlainObject,
toPlainObject,
fork(extension) {
const base = mix({}, config); // copy of config
return createSyntax(
typeof extension === 'function'
? extension(base, Object.assign)
: mix(base, extension)
);
}
};
syntax.lexer = new Lexer({
generic: true,
types: config.types,
atrules: config.atrules,
properties: config.properties,
node: config.node
}, syntax);
return syntax;
};
export default config => createSyntax(mix({}, config));

7
node_modules/css-tree/lib/syntax/function/expression.js generated vendored Normal file
View File

@ -0,0 +1,7 @@
// legacy IE function
// expression( <any-value> )
export default function() {
return this.createSingleNodeList(
this.Raw(this.tokenIndex, null, false)
);
}

39
node_modules/css-tree/lib/syntax/function/var.js generated vendored Normal file
View File

@ -0,0 +1,39 @@
import { Comma, WhiteSpace } from '../../tokenizer/index.js';
// var( <ident> , <value>? )
export default function() {
const children = this.createList();
this.skipSC();
// NOTE: Only check that the first argument is an ident; the remaining checks are left to the lexer
children.push(this.Identifier());
this.skipSC();
if (this.tokenType === Comma) {
children.push(this.Operator());
const startIndex = this.tokenIndex;
const value = this.parseCustomProperty
? this.Value(null)
: this.Raw(this.tokenIndex, this.consumeUntilExclamationMarkOrSemicolon, false);
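// When the parsed fallback value is empty (e.g. `var(--x, )`), restore a single WhiteSpace
// node if whitespace was among the consumed tokens, so the fallback is not lost in the AST.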
if (value.type === 'Value' && value.children.isEmpty) {
for (let offset = startIndex - this.tokenIndex; offset <= 0; offset++) {
if (this.lookupType(offset) === WhiteSpace) {
value.children.appendData({
type: 'WhiteSpace',
loc: null,
value: ' '
});
break;
}
}
}
children.push(value);
}
return children;
};

10
node_modules/css-tree/lib/syntax/index.js generated vendored Normal file
View File

@ -0,0 +1,10 @@
import createSyntax from './create.js';
import lexerConfig from './config/lexer.js';
import parserConfig from './config/parser.js';
import walkerConfig from './config/walker.js';
export default createSyntax({
...lexerConfig,
...parserConfig,
...walkerConfig
});

292
node_modules/css-tree/lib/syntax/node/AnPlusB.js generated vendored Normal file
View File

@ -0,0 +1,292 @@
import {
isDigit,
WhiteSpace,
Comment,
Ident,
Number,
Dimension
} from '../../tokenizer/index.js';
const PLUSSIGN = 0x002B; // U+002B PLUS SIGN (+)
const HYPHENMINUS = 0x002D; // U+002D HYPHEN-MINUS (-)
const N = 0x006E; // U+006E LATIN SMALL LETTER N (n)
const DISALLOW_SIGN = true;
const ALLOW_SIGN = false;
function checkInteger(offset, disallowSign) {
let pos = this.tokenStart + offset;
const code = this.charCodeAt(pos);
if (code === PLUSSIGN || code === HYPHENMINUS) {
if (disallowSign) {
this.error('Number sign is not allowed');
}
pos++;
}
for (; pos < this.tokenEnd; pos++) {
if (!isDigit(this.charCodeAt(pos))) {
this.error('Integer is expected', pos);
}
}
}
function checkTokenIsInteger(disallowSign) {
return checkInteger.call(this, 0, disallowSign);
}
function expectCharCode(offset, code) {
if (!this.cmpChar(this.tokenStart + offset, code)) {
let msg = '';
switch (code) {
case N:
msg = 'N is expected';
break;
case HYPHENMINUS:
msg = 'HyphenMinus is expected';
break;
}
this.error(msg, this.tokenStart + offset);
}
}
// ... <signed-integer>
// ... ['+' | '-'] <signless-integer>
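// consume the optional B part of An+B and return it as a string (or null when absent)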
function consumeB() {
let offset = 0;
let sign = 0;
let type = this.tokenType;
while (type === WhiteSpace || type === Comment) {
type = this.lookupType(++offset);
}
if (type !== Number) {
if (this.isDelim(PLUSSIGN, offset) ||
this.isDelim(HYPHENMINUS, offset)) {
sign = this.isDelim(PLUSSIGN, offset) ? PLUSSIGN : HYPHENMINUS;
do {
type = this.lookupType(++offset);
} while (type === WhiteSpace || type === Comment);
if (type !== Number) {
this.skip(offset);
checkTokenIsInteger.call(this, DISALLOW_SIGN);
}
} else {
return null;
}
}
if (offset > 0) {
this.skip(offset);
}
if (sign === 0) {
type = this.charCodeAt(this.tokenStart);
if (type !== PLUSSIGN && type !== HYPHENMINUS) {
this.error('Number sign is expected');
}
}
checkTokenIsInteger.call(this, sign !== 0);
return sign === HYPHENMINUS ? '-' + this.consume(Number) : this.consume(Number);
}
// An+B microsyntax https://www.w3.org/TR/css-syntax-3/#anb
export const name = 'AnPlusB';
export const structure = {
a: [String, null],
b: [String, null]
};
export function parse() {
/* eslint-disable brace-style*/
const start = this.tokenStart;
let a = null;
let b = null;
// <integer>
if (this.tokenType === Number) {
checkTokenIsInteger.call(this, ALLOW_SIGN);
b = this.consume(Number);
}
// -n
// -n <signed-integer>
// -n ['+' | '-'] <signless-integer>
// -n- <signless-integer>
// <dashndashdigit-ident>
else if (this.tokenType === Ident && this.cmpChar(this.tokenStart, HYPHENMINUS)) {
a = '-1';
expectCharCode.call(this, 1, N);
switch (this.tokenEnd - this.tokenStart) {
// -n
// -n <signed-integer>
// -n ['+' | '-'] <signless-integer>
case 2:
this.next();
b = consumeB.call(this);
break;
// -n- <signless-integer>
case 3:
expectCharCode.call(this, 2, HYPHENMINUS);
this.next();
this.skipSC();
checkTokenIsInteger.call(this, DISALLOW_SIGN);
b = '-' + this.consume(Number);
break;
// <dashndashdigit-ident>
default:
expectCharCode.call(this, 2, HYPHENMINUS);
checkInteger.call(this, 3, DISALLOW_SIGN);
this.next();
b = this.substrToCursor(start + 2);
}
}
// '+'? n
// '+'? n <signed-integer>
// '+'? n ['+' | '-'] <signless-integer>
// '+'? n- <signless-integer>
// '+'? <ndashdigit-ident>
else if (this.tokenType === Ident || (this.isDelim(PLUSSIGN) && this.lookupType(1) === Ident)) {
let sign = 0;
a = '1';
// just ignore a plus
if (this.isDelim(PLUSSIGN)) {
sign = 1;
this.next();
}
expectCharCode.call(this, 0, N);
switch (this.tokenEnd - this.tokenStart) {
// '+'? n
// '+'? n <signed-integer>
// '+'? n ['+' | '-'] <signless-integer>
case 1:
this.next();
b = consumeB.call(this);
break;
// '+'? n- <signless-integer>
case 2:
expectCharCode.call(this, 1, HYPHENMINUS);
this.next();
this.skipSC();
checkTokenIsInteger.call(this, DISALLOW_SIGN);
b = '-' + this.consume(Number);
break;
// '+'? <ndashdigit-ident>
default:
expectCharCode.call(this, 1, HYPHENMINUS);
checkInteger.call(this, 2, DISALLOW_SIGN);
this.next();
b = this.substrToCursor(start + sign + 1);
}
}
// <ndashdigit-dimension>
// <ndash-dimension> <signless-integer>
// <n-dimension>
// <n-dimension> <signed-integer>
// <n-dimension> ['+' | '-'] <signless-integer>
else if (this.tokenType === Dimension) {
const code = this.charCodeAt(this.tokenStart);
const sign = code === PLUSSIGN || code === HYPHENMINUS;
let i = this.tokenStart + sign;
for (; i < this.tokenEnd; i++) {
if (!isDigit(this.charCodeAt(i))) {
break;
}
}
if (i === this.tokenStart + sign) {
this.error('Integer is expected', this.tokenStart + sign);
}
expectCharCode.call(this, i - this.tokenStart, N);
a = this.substring(start, i);
// <n-dimension>
// <n-dimension> <signed-integer>
// <n-dimension> ['+' | '-'] <signless-integer>
if (i + 1 === this.tokenEnd) {
this.next();
b = consumeB.call(this);
} else {
expectCharCode.call(this, i - this.tokenStart + 1, HYPHENMINUS);
// <ndash-dimension> <signless-integer>
if (i + 2 === this.tokenEnd) {
this.next();
this.skipSC();
checkTokenIsInteger.call(this, DISALLOW_SIGN);
b = '-' + this.consume(Number);
}
// <ndashdigit-dimension>
else {
checkInteger.call(this, i - this.tokenStart + 2, DISALLOW_SIGN);
this.next();
b = this.substrToCursor(i + 1);
}
}
} else {
this.error();
}
if (a !== null && a.charCodeAt(0) === PLUSSIGN) {
a = a.substr(1);
}
if (b !== null && b.charCodeAt(0) === PLUSSIGN) {
b = b.substr(1);
}
return {
type: 'AnPlusB',
loc: this.getLocation(start, this.tokenStart),
a,
b
};
}
export function generate(node) {
if (node.a) {
const a =
node.a === '+1' && 'n' ||
node.a === '1' && 'n' ||
node.a === '-1' && '-n' ||
node.a + 'n';
if (node.b) {
const b = node.b[0] === '-' || node.b[0] === '+'
? node.b
: '+' + node.b;
this.tokenize(a + b);
} else {
this.tokenize(a);
}
} else {
this.tokenize(node.b);
}
}

100
node_modules/css-tree/lib/syntax/node/Atrule.js generated vendored Normal file
View File

@ -0,0 +1,100 @@
import {
AtKeyword,
Semicolon,
LeftCurlyBracket,
RightCurlyBracket
} from '../../tokenizer/index.js';
function consumeRaw(startToken) {
return this.Raw(startToken, this.consumeUntilLeftCurlyBracketOrSemicolon, true);
}
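// look ahead to decide whether the at-rule block is a declaration block:
// a `}` reached before any `{` or at-keyword means declarations, not nested rules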
function isDeclarationBlockAtrule() {
for (let offset = 1, type; type = this.lookupType(offset); offset++) {
if (type === RightCurlyBracket) {
return true;
}
if (type === LeftCurlyBracket ||
type === AtKeyword) {
return false;
}
}
return false;
}
export const name = 'Atrule';
export const walkContext = 'atrule';
export const structure = {
name: String,
prelude: ['AtrulePrelude', 'Raw', null],
block: ['Block', null]
};
export function parse() {
const start = this.tokenStart;
let name;
let nameLowerCase;
let prelude = null;
let block = null;
this.eat(AtKeyword);
name = this.substrToCursor(start + 1);
nameLowerCase = name.toLowerCase();
this.skipSC();
// parse prelude
if (this.eof === false &&
this.tokenType !== LeftCurlyBracket &&
this.tokenType !== Semicolon) {
if (this.parseAtrulePrelude) {
prelude = this.parseWithFallback(this.AtrulePrelude.bind(this, name), consumeRaw);
} else {
prelude = consumeRaw.call(this, this.tokenIndex);
}
this.skipSC();
}
switch (this.tokenType) {
case Semicolon:
this.next();
break;
case LeftCurlyBracket:
if (hasOwnProperty.call(this.atrule, nameLowerCase) &&
typeof this.atrule[nameLowerCase].block === 'function') {
block = this.atrule[nameLowerCase].block.call(this);
} else {
// TODO: should consume block content as Raw?
block = this.Block(isDeclarationBlockAtrule.call(this));
}
break;
}
return {
type: 'Atrule',
loc: this.getLocation(start, this.tokenStart),
name,
prelude,
block
};
}
export function generate(node) {
this.token(AtKeyword, '@' + node.name);
if (node.prelude !== null) {
this.node(node.prelude);
}
if (node.block) {
this.node(node.block);
} else {
this.token(Semicolon, ';');
}
}

47
node_modules/css-tree/lib/syntax/node/AtrulePrelude.js generated vendored Normal file
View File

@ -0,0 +1,47 @@
import {
Semicolon,
LeftCurlyBracket
} from '../../tokenizer/index.js';
export const name = 'AtrulePrelude';
export const walkContext = 'atrulePrelude';
export const structure = {
children: [[]]
};
export function parse(name) {
let children = null;
if (name !== null) {
name = name.toLowerCase();
}
this.skipSC();
if (hasOwnProperty.call(this.atrule, name) &&
typeof this.atrule[name].prelude === 'function') {
// custom consumer
children = this.atrule[name].prelude.call(this);
} else {
// default consumer
children = this.readSequence(this.scope.AtrulePrelude);
}
this.skipSC();
if (this.eof !== true &&
this.tokenType !== LeftCurlyBracket &&
this.tokenType !== Semicolon) {
this.error('Semicolon or block is expected');
}
return {
type: 'AtrulePrelude',
loc: this.getLocationFromList(children),
children
};
}
export function generate(node) {
this.children(node);
}

View File

@ -0,0 +1,147 @@
import {
Ident,
String as StringToken,
Delim,
LeftSquareBracket,
RightSquareBracket
} from '../../tokenizer/index.js';
const DOLLARSIGN = 0x0024; // U+0024 DOLLAR SIGN ($)
const ASTERISK = 0x002A; // U+002A ASTERISK (*)
const EQUALSSIGN = 0x003D; // U+003D EQUALS SIGN (=)
const CIRCUMFLEXACCENT = 0x005E; // U+005E CIRCUMFLEX ACCENT (^)
const VERTICALLINE = 0x007C; // U+007C VERTICAL LINE (|)
const TILDE = 0x007E; // U+007E TILDE (~)
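// attribute name with an optional namespace prefix: name, ns|name, *|name or |name;
// a lone `|` directly followed by `=` is the |= matcher rather than a namespace separator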
function getAttributeName() {
if (this.eof) {
this.error('Unexpected end of input');
}
const start = this.tokenStart;
let expectIdent = false;
if (this.isDelim(ASTERISK)) {
expectIdent = true;
this.next();
} else if (!this.isDelim(VERTICALLINE)) {
this.eat(Ident);
}
if (this.isDelim(VERTICALLINE)) {
if (this.charCodeAt(this.tokenStart + 1) !== EQUALSSIGN) {
this.next();
this.eat(Ident);
} else if (expectIdent) {
this.error('Identifier is expected', this.tokenEnd);
}
} else if (expectIdent) {
this.error('Vertical line is expected');
}
return {
type: 'Identifier',
loc: this.getLocation(start, this.tokenStart),
name: this.substrToCursor(start)
};
}
function getOperator() {
const start = this.tokenStart;
const code = this.charCodeAt(start);
if (code !== EQUALSSIGN && // =
code !== TILDE && // ~=
code !== CIRCUMFLEXACCENT && // ^=
code !== DOLLARSIGN && // $=
code !== ASTERISK && // *=
code !== VERTICALLINE // |=
) {
this.error('Attribute selector (=, ~=, ^=, $=, *=, |=) is expected');
}
this.next();
if (code !== EQUALSSIGN) {
if (!this.isDelim(EQUALSSIGN)) {
this.error('Equal sign is expected');
}
this.next();
}
return this.substrToCursor(start);
}
// '[' <wq-name> ']'
// '[' <wq-name> <attr-matcher> [ <string-token> | <ident-token> ] <attr-modifier>? ']'
export const name = 'AttributeSelector';
export const structure = {
name: 'Identifier',
matcher: [String, null],
value: ['String', 'Identifier', null],
flags: [String, null]
};
export function parse() {
const start = this.tokenStart;
let name;
let matcher = null;
let value = null;
let flags = null;
this.eat(LeftSquareBracket);
this.skipSC();
name = getAttributeName.call(this);
this.skipSC();
if (this.tokenType !== RightSquareBracket) {
// if an ident follows the name it's the flags-only case (e.g. `[name i]`), so skip matcher and value
if (this.tokenType !== Ident) {
matcher = getOperator.call(this);
this.skipSC();
value = this.tokenType === StringToken
? this.String()
: this.Identifier();
this.skipSC();
}
// attribute flags
if (this.tokenType === Ident) {
flags = this.consume(Ident);
this.skipSC();
}
}
this.eat(RightSquareBracket);
return {
type: 'AttributeSelector',
loc: this.getLocation(start, this.tokenStart),
name,
matcher,
value,
flags
};
}
export function generate(node) {
this.token(Delim, '[');
this.node(node.name);
if (node.matcher !== null) {
this.tokenize(node.matcher);
this.node(node.value);
}
if (node.flags !== null) {
this.token(Ident, node.flags);
}
this.token(Delim, ']');
}

89
node_modules/css-tree/lib/syntax/node/Block.js generated vendored Normal file
View File

@ -0,0 +1,89 @@
import {
WhiteSpace,
Comment,
Semicolon,
AtKeyword,
LeftCurlyBracket,
RightCurlyBracket
} from '../../tokenizer/index.js';
function consumeRaw(startToken) {
return this.Raw(startToken, null, true);
}
function consumeRule() {
return this.parseWithFallback(this.Rule, consumeRaw);
}
function consumeRawDeclaration(startToken) {
return this.Raw(startToken, this.consumeUntilSemicolonIncluded, true);
}
function consumeDeclaration() {
if (this.tokenType === Semicolon) {
return consumeRawDeclaration.call(this, this.tokenIndex);
}
const node = this.parseWithFallback(this.Declaration, consumeRawDeclaration);
if (this.tokenType === Semicolon) {
this.next();
}
return node;
}
export const name = 'Block';
export const walkContext = 'block';
export const structure = {
children: [[
'Atrule',
'Rule',
'Declaration'
]]
};
export function parse(isDeclaration) {
const consumer = isDeclaration ? consumeDeclaration : consumeRule;
const start = this.tokenStart;
let children = this.createList();
this.eat(LeftCurlyBracket);
scan:
while (!this.eof) {
switch (this.tokenType) {
case RightCurlyBracket:
break scan;
case WhiteSpace:
case Comment:
this.next();
break;
case AtKeyword:
children.push(this.parseWithFallback(this.Atrule, consumeRaw));
break;
default:
children.push(consumer.call(this));
}
}
if (!this.eof) {
this.eat(RightCurlyBracket);
}
return {
type: 'Block',
loc: this.getLocation(start, this.tokenStart),
children
};
}
export function generate(node) {
this.token(LeftCurlyBracket, '{');
this.children(node, prev => {
if (prev.type === 'Declaration') {
this.token(Semicolon, ';');
}
});
this.token(RightCurlyBracket, '}');
}

35
node_modules/css-tree/lib/syntax/node/Brackets.js generated vendored Normal file
View File

@ -0,0 +1,35 @@
import {
Delim,
LeftSquareBracket,
RightSquareBracket
} from '../../tokenizer/index.js';
export const name = 'Brackets';
export const structure = {
children: [[]]
};
export function parse(readSequence, recognizer) {
const start = this.tokenStart;
let children = null;
this.eat(LeftSquareBracket);
children = readSequence.call(this, recognizer);
if (!this.eof) {
this.eat(RightSquareBracket);
}
return {
type: 'Brackets',
loc: this.getLocation(start, this.tokenStart),
children
};
}
export function generate(node) {
this.token(Delim, '[');
this.children(node);
this.token(Delim, ']');
}

19
node_modules/css-tree/lib/syntax/node/CDC.js generated vendored Normal file
View File

@ -0,0 +1,19 @@
import { CDC } from '../../tokenizer/index.js';
export const name = 'CDC';
export const structure = [];
export function parse() {
const start = this.tokenStart;
this.eat(CDC); // -->
return {
type: 'CDC',
loc: this.getLocation(start, this.tokenStart)
};
}
export function generate() {
this.token(CDC, '-->');
}

19
node_modules/css-tree/lib/syntax/node/CDO.js generated vendored Normal file
View File

@ -0,0 +1,19 @@
import { CDO } from '../../tokenizer/index.js';
export const name = 'CDO';
export const structure = [];
export function parse() {
const start = this.tokenStart;
this.eat(CDO); // <!--
return {
type: 'CDO',
loc: this.getLocation(start, this.tokenStart)
};
}
export function generate() {
this.token(CDO, '<!--');
}

24
node_modules/css-tree/lib/syntax/node/ClassSelector.js generated vendored Normal file
View File

@ -0,0 +1,24 @@
import { Delim, Ident } from '../../tokenizer/index.js';
const FULLSTOP = 0x002E; // U+002E FULL STOP (.)
// '.' ident
export const name = 'ClassSelector';
export const structure = {
name: String
};
export function parse() {
this.eatDelim(FULLSTOP);
return {
type: 'ClassSelector',
loc: this.getLocation(this.tokenStart - 1, this.tokenEnd),
name: this.consume(Ident)
};
}
export function generate(node) {
this.token(Delim, '.');
this.token(Ident, node.name);
}

54
node_modules/css-tree/lib/syntax/node/Combinator.js generated vendored Normal file
View File

@ -0,0 +1,54 @@
import { WhiteSpace, Delim } from '../../tokenizer/index.js';
const PLUSSIGN = 0x002B; // U+002B PLUS SIGN (+)
const SOLIDUS = 0x002F; // U+002F SOLIDUS (/)
const GREATERTHANSIGN = 0x003E; // U+003E GREATER-THAN SIGN (>)
const TILDE = 0x007E; // U+007E TILDE (~)
export const name = 'Combinator';
export const structure = {
name: String
};
// + | > | ~ | /deep/
export function parse() {
const start = this.tokenStart;
let name;
switch (this.tokenType) {
case WhiteSpace:
name = ' ';
break;
case Delim:
switch (this.charCodeAt(this.tokenStart)) {
case GREATERTHANSIGN:
case PLUSSIGN:
case TILDE:
this.next();
break;
case SOLIDUS:
this.next();
this.eatIdent('deep');
this.eatDelim(SOLIDUS);
break;
default:
this.error('Combinator is expected');
}
name = this.substrToCursor(start);
break;
}
return {
type: 'Combinator',
loc: this.getLocation(start, this.tokenStart),
name
};
}
export function generate(node) {
this.tokenize(node.name);
}

33
node_modules/css-tree/lib/syntax/node/Comment.js generated vendored Normal file
View File

@ -0,0 +1,33 @@
import { Comment } from '../../tokenizer/index.js';
const ASTERISK = 0x002A; // U+002A ASTERISK (*)
const SOLIDUS = 0x002F; // U+002F SOLIDUS (/)
export const name = 'Comment';
export const structure = {
value: String
};
export function parse() {
const start = this.tokenStart;
let end = this.tokenEnd;
this.eat(Comment);
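// strip the trailing */ only when the comment is actually closed
// (a comment may be terminated by the end of input instead)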
if ((end - start + 2) >= 2 &&
this.charCodeAt(end - 2) === ASTERISK &&
this.charCodeAt(end - 1) === SOLIDUS) {
end -= 2;
}
return {
type: 'Comment',
loc: this.getLocation(start, this.tokenStart),
value: this.substring(start + 2, end)
};
}
export function generate(node) {
this.token(Comment, '/*' + node.value + '*/');
}

165
node_modules/css-tree/lib/syntax/node/Declaration.js generated vendored Normal file
View File

@ -0,0 +1,165 @@
import { isCustomProperty } from '../../utils/names.js';
import {
Ident,
Hash,
Colon,
Semicolon,
Delim,
WhiteSpace
} from '../../tokenizer/index.js';
const EXCLAMATIONMARK = 0x0021; // U+0021 EXCLAMATION MARK (!)
const NUMBERSIGN = 0x0023; // U+0023 NUMBER SIGN (#)
const DOLLARSIGN = 0x0024; // U+0024 DOLLAR SIGN ($)
const AMPERSAND = 0x0026; // U+0026 AMPERSAND (&)
const ASTERISK = 0x002A; // U+002A ASTERISK (*)
const PLUSSIGN = 0x002B; // U+002B PLUS SIGN (+)
const SOLIDUS = 0x002F; // U+002F SOLIDUS (/)
function consumeValueRaw(startToken) {
return this.Raw(startToken, this.consumeUntilExclamationMarkOrSemicolon, true);
}
function consumeCustomPropertyRaw(startToken) {
return this.Raw(startToken, this.consumeUntilExclamationMarkOrSemicolon, false);
}
function consumeValue() {
const startValueToken = this.tokenIndex;
const value = this.Value();
if (value.type !== 'Raw' &&
this.eof === false &&
this.tokenType !== Semicolon &&
this.isDelim(EXCLAMATIONMARK) === false &&
this.isBalanceEdge(startValueToken) === false) {
this.error();
}
return value;
}
export const name = 'Declaration';
export const walkContext = 'declaration';
export const structure = {
important: [Boolean, String],
property: String,
value: ['Value', 'Raw']
};
export function parse() {
const start = this.tokenStart;
const startToken = this.tokenIndex;
const property = readProperty.call(this);
const customProperty = isCustomProperty(property);
const parseValue = customProperty ? this.parseCustomProperty : this.parseValue;
const consumeRaw = customProperty ? consumeCustomPropertyRaw : consumeValueRaw;
let important = false;
let value;
this.skipSC();
this.eat(Colon);
const valueStart = this.tokenIndex;
if (!customProperty) {
this.skipSC();
}
if (parseValue) {
value = this.parseWithFallback(consumeValue, consumeRaw);
} else {
value = consumeRaw.call(this, this.tokenIndex);
}
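// a custom property whose value is whitespace only keeps a single whitespace node
// so the declaration survives a parse/generate round trip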
if (customProperty && value.type === 'Value' && value.children.isEmpty) {
for (let offset = valueStart - this.tokenIndex; offset <= 0; offset++) {
if (this.lookupType(offset) === WhiteSpace) {
value.children.appendData({
type: 'WhiteSpace',
loc: null,
value: ' '
});
break;
}
}
}
if (this.isDelim(EXCLAMATIONMARK)) {
important = getImportant.call(this);
this.skipSC();
}
// Do not include semicolon to range per spec
// https://drafts.csswg.org/css-syntax/#declaration-diagram
if (this.eof === false &&
this.tokenType !== Semicolon &&
this.isBalanceEdge(startToken) === false) {
this.error();
}
return {
type: 'Declaration',
loc: this.getLocation(start, this.tokenStart),
important,
property,
value
};
}
export function generate(node) {
this.token(Ident, node.property);
this.token(Colon, ':');
this.node(node.value);
if (node.important) {
this.token(Delim, '!');
this.token(Ident, node.important === true ? 'important' : node.important);
}
}
function readProperty() {
const start = this.tokenStart;
// legacy property name hacks: a leading *, $, +, #, & or / (optionally //)
// is consumed as part of the property name
if (this.tokenType === Delim) {
switch (this.charCodeAt(this.tokenStart)) {
case ASTERISK:
case DOLLARSIGN:
case PLUSSIGN:
case NUMBERSIGN:
case AMPERSAND:
this.next();
break;
// TODO: not sure we should support this hack
case SOLIDUS:
this.next();
if (this.isDelim(SOLIDUS)) {
this.next();
}
break;
}
}
if (this.tokenType === Hash) {
this.eat(Hash);
} else {
this.eat(Ident);
}
return this.substrToCursor(start);
}
// ! ws* important
function getImportant() {
this.eat(Delim);
this.skipSC();
const important = this.consume(Ident);
// store the original value in case it differs from `important`,
// for better source restoring and support of hacks like `!ie`
return important === 'important' ? true : important;
}

View File

@ -0,0 +1,49 @@
import {
WhiteSpace,
Comment,
Semicolon
} from '../../tokenizer/index.js';
function consumeRaw(startToken) {
return this.Raw(startToken, this.consumeUntilSemicolonIncluded, true);
}
export const name = 'DeclarationList';
export const structure = {
children: [[
'Declaration'
]]
};
export function parse() {
const children = this.createList();
scan:
while (!this.eof) {
switch (this.tokenType) {
case WhiteSpace:
case Comment:
case Semicolon:
this.next();
break;
default:
children.push(this.parseWithFallback(this.Declaration, consumeRaw));
}
}
return {
type: 'DeclarationList',
loc: this.getLocationFromList(children),
children
};
}
export function generate(node) {
this.children(node, prev => {
if (prev.type === 'Declaration') {
this.token(Semicolon, ';');
}
});
}

23
node_modules/css-tree/lib/syntax/node/Dimension.js generated vendored Normal file
View File

@ -0,0 +1,23 @@
import { Dimension } from '../../tokenizer/index.js';
export const name = 'Dimension';
export const structure = {
value: String,
unit: String
};
export function parse() {
const start = this.tokenStart;
const value = this.consumeNumber(Dimension);
return {
type: 'Dimension',
loc: this.getLocation(start, this.tokenStart),
value,
unit: this.substring(start + value.length, this.tokenStart)
};
}
export function generate(node) {
this.token(Dimension, node.value + node.unit);
}

41
node_modules/css-tree/lib/syntax/node/Function.js generated vendored Normal file
View File

@ -0,0 +1,41 @@
import {
Function as FunctionToken,
RightParenthesis
} from '../../tokenizer/index.js';
export const name = 'Function';
export const walkContext = 'function';
export const structure = {
name: String,
children: [[]]
};
// <function-token> <sequence> )
export function parse(readSequence, recognizer) {
const start = this.tokenStart;
const name = this.consumeFunctionName();
const nameLowerCase = name.toLowerCase();
let children;
children = recognizer.hasOwnProperty(nameLowerCase)
? recognizer[nameLowerCase].call(this, recognizer)
: readSequence.call(this, recognizer);
if (!this.eof) {
this.eat(RightParenthesis);
}
return {
type: 'Function',
loc: this.getLocation(start, this.tokenStart),
name,
children
};
}
export function generate(node) {
this.token(FunctionToken, node.name + '(');
this.children(node);
this.token(RightParenthesis, ')');
}

23
node_modules/css-tree/lib/syntax/node/Hash.js generated vendored Normal file
View File

@ -0,0 +1,23 @@
import { Hash } from '../../tokenizer/index.js';
// '#' ident
export const name = 'Hash';
export const structure = {
value: String
};
export function parse() {
const start = this.tokenStart;
this.eat(Hash);
return {
type: 'Hash',
loc: this.getLocation(start, this.tokenStart),
value: this.substrToCursor(start + 1)
};
}
export function generate(node) {
this.token(Hash, '#' + node.value);
}

26
node_modules/css-tree/lib/syntax/node/IdSelector.js generated vendored Normal file
View File

@ -0,0 +1,26 @@
import { Hash, Delim } from '../../tokenizer/index.js';
export const name = 'IdSelector';
export const structure = {
name: String
};
export function parse() {
const start = this.tokenStart;
// TODO: check value is an ident
this.eat(Hash);
return {
type: 'IdSelector',
loc: this.getLocation(start, this.tokenStart),
name: this.substrToCursor(start + 1)
};
}
export function generate(node) {
// Using Delim instead of Hash is a hack to avoid inserting a whitespace between an ident and an
// id-selector in safe mode (e.g. "a#id"), because IE11 doesn't allow the sequence
// <ident-token> <hash-token> without a whitespace in values (e.g. "1px solid#000")
this.token(Delim, '#' + node.name);
}

18
node_modules/css-tree/lib/syntax/node/Identifier.js generated vendored Normal file
View File

@ -0,0 +1,18 @@
import { Ident } from '../../tokenizer/index.js';
export const name = 'Identifier';
export const structure = {
name: String
};
export function parse() {
return {
type: 'Identifier',
loc: this.getLocation(this.tokenStart, this.tokenEnd),
name: this.consume(Ident)
};
}
export function generate(node) {
this.token(Ident, node.name);
}

77
node_modules/css-tree/lib/syntax/node/MediaFeature.js generated vendored Normal file
View File

@ -0,0 +1,77 @@
import {
Ident,
Number,
Dimension,
LeftParenthesis,
RightParenthesis,
Colon,
Delim
} from '../../tokenizer/index.js';
export const name = 'MediaFeature';
export const structure = {
name: String,
value: ['Identifier', 'Number', 'Dimension', 'Ratio', null]
};
export function parse() {
const start = this.tokenStart;
let name;
let value = null;
this.eat(LeftParenthesis);
this.skipSC();
name = this.consume(Ident);
this.skipSC();
if (this.tokenType !== RightParenthesis) {
this.eat(Colon);
this.skipSC();
switch (this.tokenType) {
case Number:
if (this.lookupNonWSType(1) === Delim) {
value = this.Ratio();
} else {
value = this.Number();
}
break;
case Dimension:
value = this.Dimension();
break;
case Ident:
value = this.Identifier();
break;
default:
this.error('Number, dimension, ratio or identifier is expected');
}
this.skipSC();
}
this.eat(RightParenthesis);
return {
type: 'MediaFeature',
loc: this.getLocation(start, this.tokenStart),
name,
value
};
}
export function generate(node) {
this.token(LeftParenthesis, '(');
this.token(Ident, node.name);
if (node.value !== null) {
this.token(Colon, ':');
this.node(node.value);
}
this.token(RightParenthesis, ')');
}

60
node_modules/css-tree/lib/syntax/node/MediaQuery.js generated vendored Normal file
View File

@ -0,0 +1,60 @@
import {
WhiteSpace,
Comment,
Ident,
LeftParenthesis
} from '../../tokenizer/index.js';
export const name = 'MediaQuery';
export const structure = {
children: [[
'Identifier',
'MediaFeature',
'WhiteSpace'
]]
};
export function parse() {
const children = this.createList();
let child = null;
this.skipSC();
scan:
while (!this.eof) {
switch (this.tokenType) {
case Comment:
case WhiteSpace:
this.next();
continue;
case Ident:
child = this.Identifier();
break;
case LeftParenthesis:
child = this.MediaFeature();
break;
default:
break scan;
}
children.push(child);
}
if (child === null) {
this.error('Identifier or parenthesis is expected');
}
return {
type: 'MediaQuery',
loc: this.getLocationFromList(children),
children
};
}
export function generate(node) {
this.children(node);
}

View File

@ -0,0 +1,34 @@
import { Comma } from '../../tokenizer/index.js';
export const name = 'MediaQueryList';
export const structure = {
children: [[
'MediaQuery'
]]
};
export function parse() {
const children = this.createList();
this.skipSC();
while (!this.eof) {
children.push(this.MediaQuery());
if (this.tokenType !== Comma) {
break;
}
this.next();
}
return {
type: 'MediaQueryList',
loc: this.getLocationFromList(children),
children
};
}
export function generate(node) {
this.children(node, () => this.token(Comma, ','));
}

47
node_modules/css-tree/lib/syntax/node/Nth.js generated vendored Normal file
View File

@ -0,0 +1,47 @@
import { Ident } from '../../tokenizer/index.js';
export const name = 'Nth';
export const structure = {
nth: ['AnPlusB', 'Identifier'],
selector: ['SelectorList', null]
};
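// <an-plus-b> | even | odd, optionally followed by `of <selector-list>`
// (e.g. :nth-child(2n+1 of .item))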
export function parse() {
this.skipSC();
const start = this.tokenStart;
let end = start;
let selector = null;
let nth;
if (this.lookupValue(0, 'odd') || this.lookupValue(0, 'even')) {
nth = this.Identifier();
} else {
nth = this.AnPlusB();
}
end = this.tokenStart;
this.skipSC();
if (this.lookupValue(0, 'of')) {
this.next();
selector = this.SelectorList();
end = this.tokenStart;
}
return {
type: 'Nth',
loc: this.getLocation(start, end),
nth,
selector
};
}
export function generate(node) {
this.node(node.nth);
if (node.selector !== null) {
this.token(Ident, 'of');
this.node(node.selector);
}
}

18
node_modules/css-tree/lib/syntax/node/Number.js generated vendored Normal file
View File

@ -0,0 +1,18 @@
import { Number as NumberToken } from '../../tokenizer/index.js';
export const name = 'Number';
export const structure = {
value: String
};
export function parse() {
return {
type: 'Number',
loc: this.getLocation(this.tokenStart, this.tokenEnd),
value: this.consume(NumberToken)
};
}
export function generate(node) {
this.token(NumberToken, node.value);
}

21
node_modules/css-tree/lib/syntax/node/Operator.js generated vendored Normal file
View File

@ -0,0 +1,21 @@
// '/' | '*' | ',' | ':' | '+' | '-'
export const name = 'Operator';
export const structure = {
value: String
};
export function parse() {
const start = this.tokenStart;
this.next();
return {
type: 'Operator',
loc: this.getLocation(start, this.tokenStart),
value: this.substrToCursor(start)
};
}
export function generate(node) {
this.tokenize(node.value);
}

34
node_modules/css-tree/lib/syntax/node/Parentheses.js generated vendored Normal file
View File

@ -0,0 +1,34 @@
import {
LeftParenthesis,
RightParenthesis
} from '../../tokenizer/index.js';
export const name = 'Parentheses';
export const structure = {
children: [[]]
};
export function parse(readSequence, recognizer) {
const start = this.tokenStart;
let children = null;
this.eat(LeftParenthesis);
children = readSequence.call(this, recognizer);
if (!this.eof) {
this.eat(RightParenthesis);
}
return {
type: 'Parentheses',
loc: this.getLocation(start, this.tokenStart),
children
};
}
export function generate(node) {
this.token(LeftParenthesis, '(');
this.children(node);
this.token(RightParenthesis, ')');
}

18
node_modules/css-tree/lib/syntax/node/Percentage.js generated vendored Normal file
View File

@ -0,0 +1,18 @@
import { Percentage } from '../../tokenizer/index.js';
export const name = 'Percentage';
export const structure = {
value: String
};
export function parse() {
return {
type: 'Percentage',
loc: this.getLocation(this.tokenStart, this.tokenEnd),
value: this.consumeNumber(Percentage)
};
}
export function generate(node) {
this.token(Percentage, node.value + '%');
}

View File

@ -0,0 +1,63 @@
import {
Ident,
Function as FunctionToken,
Colon,
RightParenthesis
} from '../../tokenizer/index.js';
export const name = 'PseudoClassSelector';
export const walkContext = 'function';
export const structure = {
name: String,
children: [['Raw'], null]
};
// : [ <ident> | <function-token> <any-value>? ) ]
export function parse() {
const start = this.tokenStart;
let children = null;
let name;
let nameLowerCase;
this.eat(Colon);
if (this.tokenType === FunctionToken) {
name = this.consumeFunctionName();
nameLowerCase = name.toLowerCase();
if (hasOwnProperty.call(this.pseudo, nameLowerCase)) {
this.skipSC();
children = this.pseudo[nameLowerCase].call(this);
this.skipSC();
} else {
children = this.createList();
children.push(
this.Raw(this.tokenIndex, null, false)
);
}
this.eat(RightParenthesis);
} else {
name = this.consume(Ident);
}
return {
type: 'PseudoClassSelector',
loc: this.getLocation(start, this.tokenStart),
name,
children
};
}
export function generate(node) {
this.token(Colon, ':');
if (node.children === null) {
this.token(Ident, node.name);
} else {
this.token(FunctionToken, node.name + '(');
this.children(node);
this.token(RightParenthesis, ')');
}
}

View File

@ -0,0 +1,64 @@
import {
Ident,
Function as FunctionToken,
Colon,
RightParenthesis
} from '../../tokenizer/index.js';
export const name = 'PseudoElementSelector';
export const walkContext = 'function';
export const structure = {
name: String,
children: [['Raw'], null]
};
// :: [ <ident> | <function-token> <any-value>? ) ]
export function parse() {
const start = this.tokenStart;
let children = null;
let name;
let nameLowerCase;
this.eat(Colon);
this.eat(Colon);
if (this.tokenType === FunctionToken) {
name = this.consumeFunctionName();
nameLowerCase = name.toLowerCase();
if (hasOwnProperty.call(this.pseudo, nameLowerCase)) {
this.skipSC();
children = this.pseudo[nameLowerCase].call(this);
this.skipSC();
} else {
children = this.createList();
children.push(
this.Raw(this.tokenIndex, null, false)
);
}
this.eat(RightParenthesis);
} else {
name = this.consume(Ident);
}
return {
type: 'PseudoElementSelector',
loc: this.getLocation(start, this.tokenStart),
name,
children
};
}
export function generate(node) {
this.token(Colon, ':');
this.token(Colon, ':');
if (node.children === null) {
this.token(Ident, node.name);
} else {
this.token(FunctionToken, node.name + '(');
this.children(node);
this.token(RightParenthesis, ')');
}
}

59
node_modules/css-tree/lib/syntax/node/Ratio.js generated vendored Normal file
View File

@ -0,0 +1,59 @@
import { isDigit, Delim, Number as NumberToken } from '../../tokenizer/index.js';
const SOLIDUS = 0x002F; // U+002F SOLIDUS (/)
const FULLSTOP = 0x002E; // U+002E FULL STOP (.)
// Terms of <ratio> should be positive numbers (not zero or negative)
// (see https://drafts.csswg.org/mediaqueries-3/#values)
// However, -o-min-device-pixel-ratio takes fractional values as a ratio term
// and this is used by various sites. Therefore we relax the check on parse
// and only test that a term is an unsigned number without an exponent part.
// Additional checking may be applied during lexer validation.
function consumeNumber() {
this.skipSC();
const value = this.consume(NumberToken);
for (let i = 0; i < value.length; i++) {
const code = value.charCodeAt(i);
if (!isDigit(code) && code !== FULLSTOP) {
this.error('Unsigned number is expected', this.tokenStart - value.length + i);
}
}
if (Number(value) === 0) {
this.error('Zero number is not allowed', this.tokenStart - value.length);
}
return value;
}
export const name = 'Ratio';
export const structure = {
left: String,
right: String
};
// <positive-integer> S* '/' S* <positive-integer>
export function parse() {
const start = this.tokenStart;
const left = consumeNumber.call(this);
let right;
this.skipSC();
this.eatDelim(SOLIDUS);
right = consumeNumber.call(this);
return {
type: 'Ratio',
loc: this.getLocation(start, this.tokenStart),
left,
right
};
}
export function generate(node) {
this.token(NumberToken, node.left);
this.token(Delim, '/');
this.token(NumberToken, node.right);
}

41
node_modules/css-tree/lib/syntax/node/Raw.js generated vendored Normal file
View File

@ -0,0 +1,41 @@
import { WhiteSpace } from '../../tokenizer/index.js';
function getOffsetExcludeWS() {
if (this.tokenIndex > 0) {
if (this.lookupType(-1) === WhiteSpace) {
return this.tokenIndex > 1
? this.getTokenStart(this.tokenIndex - 1)
: this.firstCharOffset;
}
}
return this.tokenStart;
}
export const name = 'Raw';
export const structure = {
value: String
};
export function parse(startToken, consumeUntil, excludeWhiteSpace) {
const startOffset = this.getTokenStart(startToken);
let endOffset;
this.skipUntilBalanced(startToken, consumeUntil || this.consumeUntilBalanceEnd);
if (excludeWhiteSpace && this.tokenStart > startOffset) {
endOffset = getOffsetExcludeWS.call(this);
} else {
endOffset = this.tokenStart;
}
return {
type: 'Raw',
loc: this.getLocation(startOffset, endOffset),
value: this.substring(startOffset, endOffset)
};
}
export function generate(node) {
this.tokenize(node.value);
}

51
node_modules/css-tree/lib/syntax/node/Rule.js generated vendored Normal file
View File

@ -0,0 +1,51 @@
import { LeftCurlyBracket } from '../../tokenizer/index.js';
function consumeRaw(startToken) {
return this.Raw(startToken, this.consumeUntilLeftCurlyBracket, true);
}
function consumePrelude() {
const prelude = this.SelectorList();
if (prelude.type !== 'Raw' &&
this.eof === false &&
this.tokenType !== LeftCurlyBracket) {
this.error();
}
return prelude;
}
export const name = 'Rule';
export const walkContext = 'rule';
export const structure = {
prelude: ['SelectorList', 'Raw'],
block: ['Block']
};
export function parse() {
const startToken = this.tokenIndex;
const startOffset = this.tokenStart;
let prelude;
let block;
if (this.parseRulePrelude) {
prelude = this.parseWithFallback(consumePrelude, consumeRaw);
} else {
prelude = consumeRaw.call(this, startToken);
}
block = this.Block(true);
return {
type: 'Rule',
loc: this.getLocation(startOffset, this.tokenStart),
prelude,
block
};
}
export function generate(node) {
this.node(node.prelude);
this.node(node.block);
}

32
node_modules/css-tree/lib/syntax/node/Selector.js generated vendored Normal file
View File

@ -0,0 +1,32 @@
export const name = 'Selector';
export const structure = {
children: [[
'TypeSelector',
'IdSelector',
'ClassSelector',
'AttributeSelector',
'PseudoClassSelector',
'PseudoElementSelector',
'Combinator',
'WhiteSpace'
]]
};
export function parse() {
const children = this.readSequence(this.scope.Selector);
// nothing was consumed
if (this.getFirstListNode(children) === null) {
this.error('Selector is expected');
}
return {
type: 'Selector',
loc: this.getLocationFromList(children),
children
};
}
export function generate(node) {
this.children(node);
}

35
node_modules/css-tree/lib/syntax/node/SelectorList.js generated vendored Normal file
View File

@ -0,0 +1,35 @@
import { Comma } from '../../tokenizer/index.js';
export const name = 'SelectorList';
export const walkContext = 'selector';
export const structure = {
children: [[
'Selector',
'Raw'
]]
};
export function parse() {
const children = this.createList();
while (!this.eof) {
children.push(this.Selector());
if (this.tokenType === Comma) {
this.next();
continue;
}
break;
}
return {
type: 'SelectorList',
loc: this.getLocationFromList(children),
children
};
}
export function generate(node) {
this.children(node, () => this.token(Comma, ','));
}

19
node_modules/css-tree/lib/syntax/node/String.js generated vendored Normal file
View File

@ -0,0 +1,19 @@
import { String as StringToken } from '../../tokenizer/index.js';
import { decode, encode } from '../../utils/string.js';
export const name = 'String';
export const structure = {
value: String
};
export function parse() {
return {
type: 'String',
loc: this.getLocation(this.tokenStart, this.tokenEnd),
value: decode(this.consume(StringToken))
};
}
export function generate(node) {
this.token(StringToken, encode(node.value));
}

82
node_modules/css-tree/lib/syntax/node/StyleSheet.js generated vendored Normal file
View File

@ -0,0 +1,82 @@
import {
WhiteSpace,
Comment,
AtKeyword,
CDO,
CDC
} from '../../tokenizer/index.js';
const EXCLAMATIONMARK = 0x0021; // U+0021 EXCLAMATION MARK (!)
function consumeRaw(startToken) {
return this.Raw(startToken, null, false);
}
export const name = 'StyleSheet';
export const walkContext = 'stylesheet';
export const structure = {
children: [[
'Comment',
'CDO',
'CDC',
'Atrule',
'Rule',
'Raw'
]]
};
export function parse() {
const start = this.tokenStart;
const children = this.createList();
let child;
scan:
while (!this.eof) {
switch (this.tokenType) {
case WhiteSpace:
this.next();
continue;
case Comment:
// ignore comments except exclamation comments (i.e. /*! .. */) on top level
if (this.charCodeAt(this.tokenStart + 2) !== EXCLAMATIONMARK) {
this.next();
continue;
}
child = this.Comment();
break;
case CDO: // <!--
child = this.CDO();
break;
case CDC: // -->
child = this.CDC();
break;
// CSS Syntax Module Level 3
// §2.2 Error handling
// At the "top level" of a stylesheet, an <at-keyword-token> starts an at-rule.
case AtKeyword:
child = this.parseWithFallback(this.Atrule, consumeRaw);
break;
// Anything else starts a qualified rule ...
default:
child = this.parseWithFallback(this.Rule, consumeRaw);
}
children.push(child);
}
return {
type: 'StyleSheet',
loc: this.getLocation(start, this.tokenStart),
children
};
}
export function generate(node) {
this.children(node);
}

52
node_modules/css-tree/lib/syntax/node/TypeSelector.js generated vendored Normal file
View File

@ -0,0 +1,52 @@
import { Ident } from '../../tokenizer/index.js';
const ASTERISK = 0x002A; // U+002A ASTERISK (*)
const VERTICALLINE = 0x007C; // U+007C VERTICAL LINE (|)
function eatIdentifierOrAsterisk() {
if (this.tokenType !== Ident &&
this.isDelim(ASTERISK) === false) {
this.error('Identifier or asterisk is expected');
}
this.next();
}
export const name = 'TypeSelector';
export const structure = {
name: String
};
// ident
// ident|ident
// ident|*
// *
// *|ident
// *|*
// |ident
// |*
export function parse() {
const start = this.tokenStart;
if (this.isDelim(VERTICALLINE)) {
this.next();
eatIdentifierOrAsterisk.call(this);
} else {
eatIdentifierOrAsterisk.call(this);
if (this.isDelim(VERTICALLINE)) {
this.next();
eatIdentifierOrAsterisk.call(this);
}
}
return {
type: 'TypeSelector',
loc: this.getLocation(start, this.tokenStart),
name: this.substrToCursor(start)
};
}
export function generate(node) {
this.tokenize(node.name);
}

156
node_modules/css-tree/lib/syntax/node/UnicodeRange.js generated vendored Normal file
View File

@ -0,0 +1,156 @@
import {
isHexDigit,
Ident,
Number,
Dimension
} from '../../tokenizer/index.js';
const PLUSSIGN = 0x002B; // U+002B PLUS SIGN (+)
const HYPHENMINUS = 0x002D; // U+002D HYPHEN-MINUS (-)
const QUESTIONMARK = 0x003F; // U+003F QUESTION MARK (?)
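// consume up to six hex digits starting at `offset` within the current token;
// when allowDash is true a hyphen may introduce the upper bound of a range,
// in which case the rest is validated recursively and -1 is returned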
function eatHexSequence(offset, allowDash) {
let len = 0;
for (let pos = this.tokenStart + offset; pos < this.tokenEnd; pos++) {
const code = this.charCodeAt(pos);
if (code === HYPHENMINUS && allowDash && len !== 0) {
eatHexSequence.call(this, offset + len + 1, false);
return -1;
}
if (!isHexDigit(code)) {
this.error(
allowDash && len !== 0
? 'Hyphen minus' + (len < 6 ? ' or hex digit' : '') + ' is expected'
: (len < 6 ? 'Hex digit is expected' : 'Unexpected input'),
pos
);
}
if (++len > 6) {
this.error('Too many hex digits', pos);
};
}
this.next();
return len;
}
function eatQuestionMarkSequence(max) {
let count = 0;
while (this.isDelim(QUESTIONMARK)) {
if (++count > max) {
this.error('Too many question marks');
}
this.next();
}
}
function startsWith(code) {
if (this.charCodeAt(this.tokenStart) !== code) {
this.error((code === PLUSSIGN ? 'Plus sign' : 'Hyphen minus') + ' is expected');
}
}
// https://drafts.csswg.org/css-syntax/#urange
// Informally, the <urange> production has three forms:
// U+0001
// Defines a range consisting of a single code point, in this case the code point "1".
// U+0001-00ff
// Defines a range of codepoints between the first and the second value, in this case
// the range between "1" and "ff" (255 in decimal) inclusive.
// U+00??
// Defines a range of codepoints where the "?" characters range over all hex digits,
// in this case defining the same as the value U+0000-00ff.
// In each form, a maximum of 6 digits is allowed for each hexadecimal number (if you treat "?" as a hexadecimal digit).
//
// <urange> =
// u '+' <ident-token> '?'* |
// u <dimension-token> '?'* |
// u <number-token> '?'* |
// u <number-token> <dimension-token> |
// u <number-token> <number-token> |
// u '+' '?'+
function scanUnicodeRange() {
let hexLength = 0;
switch (this.tokenType) {
case Number:
// u <number-token> '?'*
// u <number-token> <dimension-token>
// u <number-token> <number-token>
hexLength = eatHexSequence.call(this, 1, true);
if (this.isDelim(QUESTIONMARK)) {
eatQuestionMarkSequence.call(this, 6 - hexLength);
break;
}
if (this.tokenType === Dimension ||
this.tokenType === Number) {
startsWith.call(this, HYPHENMINUS);
eatHexSequence.call(this, 1, false);
break;
}
break;
case Dimension:
// u <dimension-token> '?'*
hexLength = eatHexSequence.call(this, 1, true);
if (hexLength > 0) {
eatQuestionMarkSequence.call(this, 6 - hexLength);
}
break;
default:
// u '+' <ident-token> '?'*
// u '+' '?'+
this.eatDelim(PLUSSIGN);
if (this.tokenType === Ident) {
hexLength = eatHexSequence.call(this, 0, true);
if (hexLength > 0) {
eatQuestionMarkSequence.call(this, 6 - hexLength);
}
break;
}
if (this.isDelim(QUESTIONMARK)) {
this.next();
eatQuestionMarkSequence.call(this, 5);
break;
}
this.error('Hex digit or question mark is expected');
}
}
export const name = 'UnicodeRange';
export const structure = {
value: String
};
export function parse() {
const start = this.tokenStart;
// U or u
this.eatIdent('u');
scanUnicodeRange.call(this);
return {
type: 'UnicodeRange',
loc: this.getLocation(start, this.tokenStart),
value: this.substrToCursor(start)
};
}
export function generate(node) {
this.tokenize(node.value);
}

52
node_modules/css-tree/lib/syntax/node/Url.js generated vendored Normal file
View File

@ -0,0 +1,52 @@
import * as url from '../../utils/url.js';
import * as string from '../../utils/string.js';
import {
Function as FunctionToken,
String as StringToken,
Url,
RightParenthesis
} from '../../tokenizer/index.js';
export const name = 'Url';
export const structure = {
value: String
};
// <url-token> | <function-token> <string> )
export function parse() {
const start = this.tokenStart;
let value;
switch (this.tokenType) {
case Url:
value = url.decode(this.consume(Url));
break;
case FunctionToken:
if (!this.cmpStr(this.tokenStart, this.tokenEnd, 'url(')) {
this.error('Function name must be `url`');
}
this.eat(FunctionToken);
this.skipSC();
value = string.decode(this.consume(StringToken));
this.skipSC();
if (!this.eof) {
this.eat(RightParenthesis);
}
break;
default:
this.error('Url or Function is expected');
}
return {
type: 'Url',
loc: this.getLocation(start, this.tokenStart),
value
};
}
export function generate(node) {
this.token(Url, url.encode(node.value));
}

19
node_modules/css-tree/lib/syntax/node/Value.js generated vendored Normal file
View File

@ -0,0 +1,19 @@
export const name = 'Value';
export const structure = {
children: [[]]
};
export function parse() {
const start = this.tokenStart;
const children = this.readSequence(this.scope.Value);
return {
type: 'Value',
loc: this.getLocation(start, this.tokenStart),
children
};
}
export function generate(node) {
this.children(node);
}

27
node_modules/css-tree/lib/syntax/node/WhiteSpace.js generated vendored Normal file
View File

@ -0,0 +1,27 @@
import { WhiteSpace } from '../../tokenizer/index.js';
const SPACE = Object.freeze({
type: 'WhiteSpace',
loc: null,
value: ' '
});
export const name = 'WhiteSpace';
export const structure = {
value: String
};
export function parse() {
this.eat(WhiteSpace);
return SPACE;
// return {
// type: 'WhiteSpace',
// loc: this.getLocation(this.tokenStart, this.tokenEnd),
// value: this.consume(WHITESPACE)
// };
}
export function generate(node) {
this.token(WhiteSpace, node.value);
}

View File

@ -0,0 +1,40 @@
export { generate as AnPlusB } from './AnPlusB.js';
export { generate as Atrule } from './Atrule.js';
export { generate as AtrulePrelude } from './AtrulePrelude.js';
export { generate as AttributeSelector } from './AttributeSelector.js';
export { generate as Block } from './Block.js';
export { generate as Brackets } from './Brackets.js';
export { generate as CDC } from './CDC.js';
export { generate as CDO } from './CDO.js';
export { generate as ClassSelector } from './ClassSelector.js';
export { generate as Combinator } from './Combinator.js';
export { generate as Comment } from './Comment.js';
export { generate as Declaration } from './Declaration.js';
export { generate as DeclarationList } from './DeclarationList.js';
export { generate as Dimension } from './Dimension.js';
export { generate as Function } from './Function.js';
export { generate as Hash } from './Hash.js';
export { generate as Identifier } from './Identifier.js';
export { generate as IdSelector } from './IdSelector.js';
export { generate as MediaFeature } from './MediaFeature.js';
export { generate as MediaQuery } from './MediaQuery.js';
export { generate as MediaQueryList } from './MediaQueryList.js';
export { generate as Nth } from './Nth.js';
export { generate as Number } from './Number.js';
export { generate as Operator } from './Operator.js';
export { generate as Parentheses } from './Parentheses.js';
export { generate as Percentage } from './Percentage.js';
export { generate as PseudoClassSelector } from './PseudoClassSelector.js';
export { generate as PseudoElementSelector } from './PseudoElementSelector.js';
export { generate as Ratio } from './Ratio.js';
export { generate as Raw } from './Raw.js';
export { generate as Rule } from './Rule.js';
export { generate as Selector } from './Selector.js';
export { generate as SelectorList } from './SelectorList.js';
export { generate as String } from './String.js';
export { generate as StyleSheet } from './StyleSheet.js';
export { generate as TypeSelector } from './TypeSelector.js';
export { generate as UnicodeRange } from './UnicodeRange.js';
export { generate as Url } from './Url.js';
export { generate as Value } from './Value.js';
export { generate as WhiteSpace } from './WhiteSpace.js';

View File

@ -0,0 +1,15 @@
export { parse as AnPlusB } from './AnPlusB.js';
export { parse as AttributeSelector } from './AttributeSelector.js';
export { parse as ClassSelector } from './ClassSelector.js';
export { parse as Combinator } from './Combinator.js';
export { parse as Identifier } from './Identifier.js';
export { parse as IdSelector } from './IdSelector.js';
export { parse as Nth } from './Nth.js';
export { parse as Percentage } from './Percentage.js';
export { parse as PseudoClassSelector } from './PseudoClassSelector.js';
export { parse as PseudoElementSelector } from './PseudoElementSelector.js';
export { parse as Raw } from './Raw.js';
export { parse as Selector } from './Selector.js';
export { parse as SelectorList } from './SelectorList.js';
export { parse as String } from './String.js';
export { parse as TypeSelector } from './TypeSelector.js';

40
node_modules/css-tree/lib/syntax/node/index-parse.js generated vendored Normal file
View File

@ -0,0 +1,40 @@
export { parse as AnPlusB } from './AnPlusB.js';
export { parse as Atrule } from './Atrule.js';
export { parse as AtrulePrelude } from './AtrulePrelude.js';
export { parse as AttributeSelector } from './AttributeSelector.js';
export { parse as Block } from './Block.js';
export { parse as Brackets } from './Brackets.js';
export { parse as CDC } from './CDC.js';
export { parse as CDO } from './CDO.js';
export { parse as ClassSelector } from './ClassSelector.js';
export { parse as Combinator } from './Combinator.js';
export { parse as Comment } from './Comment.js';
export { parse as Declaration } from './Declaration.js';
export { parse as DeclarationList } from './DeclarationList.js';
export { parse as Dimension } from './Dimension.js';
export { parse as Function } from './Function.js';
export { parse as Hash } from './Hash.js';
export { parse as Identifier } from './Identifier.js';
export { parse as IdSelector } from './IdSelector.js';
export { parse as MediaFeature } from './MediaFeature.js';
export { parse as MediaQuery } from './MediaQuery.js';
export { parse as MediaQueryList } from './MediaQueryList.js';
export { parse as Nth } from './Nth.js';
export { parse as Number } from './Number.js';
export { parse as Operator } from './Operator.js';
export { parse as Parentheses } from './Parentheses.js';
export { parse as Percentage } from './Percentage.js';
export { parse as PseudoClassSelector } from './PseudoClassSelector.js';
export { parse as PseudoElementSelector } from './PseudoElementSelector.js';
export { parse as Ratio } from './Ratio.js';
export { parse as Raw } from './Raw.js';
export { parse as Rule } from './Rule.js';
export { parse as Selector } from './Selector.js';
export { parse as SelectorList } from './SelectorList.js';
export { parse as String } from './String.js';
export { parse as StyleSheet } from './StyleSheet.js';
export { parse as TypeSelector } from './TypeSelector.js';
export { parse as UnicodeRange } from './UnicodeRange.js';
export { parse as Url } from './Url.js';
export { parse as Value } from './Value.js';
export { parse as WhiteSpace } from './WhiteSpace.js';

40
node_modules/css-tree/lib/syntax/node/index.js generated vendored Normal file
View File

@ -0,0 +1,40 @@
export * as AnPlusB from './AnPlusB.js';
export * as Atrule from './Atrule.js';
export * as AtrulePrelude from './AtrulePrelude.js';
export * as AttributeSelector from './AttributeSelector.js';
export * as Block from './Block.js';
export * as Brackets from './Brackets.js';
export * as CDC from './CDC.js';
export * as CDO from './CDO.js';
export * as ClassSelector from './ClassSelector.js';
export * as Combinator from './Combinator.js';
export * as Comment from './Comment.js';
export * as Declaration from './Declaration.js';
export * as DeclarationList from './DeclarationList.js';
export * as Dimension from './Dimension.js';
export * as Function from './Function.js';
export * as Hash from './Hash.js';
export * as Identifier from './Identifier.js';
export * as IdSelector from './IdSelector.js';
export * as MediaFeature from './MediaFeature.js';
export * as MediaQuery from './MediaQuery.js';
export * as MediaQueryList from './MediaQueryList.js';
export * as Nth from './Nth.js';
export * as Number from './Number.js';
export * as Operator from './Operator.js';
export * as Parentheses from './Parentheses.js';
export * as Percentage from './Percentage.js';
export * as PseudoClassSelector from './PseudoClassSelector.js';
export * as PseudoElementSelector from './PseudoElementSelector.js';
export * as Ratio from './Ratio.js';
export * as Raw from './Raw.js';
export * as Rule from './Rule.js';
export * as Selector from './Selector.js';
export * as SelectorList from './SelectorList.js';
export * as String from './String.js';
export * as StyleSheet from './StyleSheet.js';
export * as TypeSelector from './TypeSelector.js';
export * as UnicodeRange from './UnicodeRange.js';
export * as Url from './Url.js';
export * as Value from './Value.js';
export * as WhiteSpace from './WhiteSpace.js';

48
node_modules/css-tree/lib/syntax/pseudo/index.js generated vendored Normal file
View File

@ -0,0 +1,48 @@
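// prelude parsers for pseudo-classes with a known argument grammar;
// pseudos not listed here fall back to a Raw consumer in PseudoClassSelector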
const selectorList = {
parse() {
return this.createSingleNodeList(
this.SelectorList()
);
}
};
const selector = {
parse() {
return this.createSingleNodeList(
this.Selector()
);
}
};
const identList = {
parse() {
return this.createSingleNodeList(
this.Identifier()
);
}
};
const nth = {
parse() {
return this.createSingleNodeList(
this.Nth()
);
}
};
export default {
'dir': identList,
'has': selectorList,
'lang': identList,
'matches': selectorList,
'is': selectorList,
'-moz-any': selectorList,
'-webkit-any': selectorList,
'where': selectorList,
'not': selectorList,
'nth-child': nth,
'nth-last-child': nth,
'nth-last-of-type': nth,
'nth-of-type': nth,
'slotted': selector
};

View File

@ -0,0 +1,5 @@
import getNode from './default.js';
export default {
getNode
};

85
node_modules/css-tree/lib/syntax/scope/default.js generated vendored Normal file
View File

@ -0,0 +1,85 @@
import {
Ident,
String as StringToken,
Number as NumberToken,
Function as FunctionToken,
Url,
Hash,
Dimension,
Percentage,
LeftParenthesis,
LeftSquareBracket,
Comma,
Delim
} from '../../tokenizer/index.js';
const NUMBERSIGN = 0x0023; // U+0023 NUMBER SIGN (#)
const ASTERISK = 0x002A; // U+002A ASTERISK (*)
const PLUSSIGN = 0x002B; // U+002B PLUS SIGN (+)
const HYPHENMINUS = 0x002D; // U+002D HYPHEN-MINUS (-)
const SOLIDUS = 0x002F; // U+002F SOLIDUS (/)
const U = 0x0075; // U+0075 LATIN SMALL LETTER U (u)
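// map the current token to an AST node when reading a generic value-like sequence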
export default function defaultRecognizer(context) {
switch (this.tokenType) {
case Hash:
return this.Hash();
case Comma:
return this.Operator();
case LeftParenthesis:
return this.Parentheses(this.readSequence, context.recognizer);
case LeftSquareBracket:
return this.Brackets(this.readSequence, context.recognizer);
case StringToken:
return this.String();
case Dimension:
return this.Dimension();
case Percentage:
return this.Percentage();
case NumberToken:
return this.Number();
case FunctionToken:
return this.cmpStr(this.tokenStart, this.tokenEnd, 'url(')
? this.Url()
: this.Function(this.readSequence, context.recognizer);
case Url:
return this.Url();
case Ident:
// check for unicode range, it should start with u+ or U+
if (this.cmpChar(this.tokenStart, U) &&
this.cmpChar(this.tokenStart + 1, PLUSSIGN)) {
return this.UnicodeRange();
} else {
return this.Identifier();
}
case Delim: {
const code = this.charCodeAt(this.tokenStart);
if (code === SOLIDUS ||
code === ASTERISK ||
code === PLUSSIGN ||
code === HYPHENMINUS) {
return this.Operator(); // TODO: replace with Delim
}
// TODO: produce a node with Delim node type
if (code === NUMBERSIGN) {
this.error('Hex or identifier is expected', this.tokenStart + 1);
}
break;
}
}
};
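A brief usage sketch (assuming the documented parse API with the 'value' context; the exact node list is illustrative): each term below is dispatched by the token-type switch above into a typed node.
import { parse } from 'css-tree';

const value = parse('url(img.png) 10px 50% #fff3 fn(1, 2)', { context: 'value' });

// top-level children of the Value node, one per term
console.log(value.children.toArray().map(node => node.type));
// roughly: [ 'Url', 'Dimension', 'Percentage', 'Hash', 'Function' ]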

3
node_modules/css-tree/lib/syntax/scope/index.js generated vendored Normal file
View File

@ -0,0 +1,3 @@
export { default as AtrulePrelude } from './atrulePrelude.js';
export { default as Selector } from './selector.js';
export { default as Value } from './value.js';

90
node_modules/css-tree/lib/syntax/scope/selector.js generated vendored Normal file
View File

@ -0,0 +1,90 @@
import {
Delim,
Ident,
Dimension,
Percentage,
Number as NumberToken,
Hash,
Colon,
LeftSquareBracket
} from '../../tokenizer/index.js';
const NUMBERSIGN = 0x0023; // U+0023 NUMBER SIGN (#)
const ASTERISK = 0x002A; // U+002A ASTERISK (*)
const PLUSSIGN = 0x002B; // U+002B PLUS SIGN (+)
const SOLIDUS = 0x002F; // U+002F SOLIDUS (/)
const FULLSTOP = 0x002E; // U+002E FULL STOP (.)
const GREATERTHANSIGN = 0x003E; // U+003E GREATER-THAN SIGN (>)
const VERTICALLINE = 0x007C; // U+007C VERTICAL LINE (|)
const TILDE = 0x007E; // U+007E TILDE (~)
function onWhiteSpace(next, children) {
if (children.last !== null && children.last.type !== 'Combinator' &&
next !== null && next.type !== 'Combinator') {
children.push({ // FIXME: this.Combinator() should be used instead
type: 'Combinator',
loc: null,
name: ' '
});
}
}
function getNode() {
switch (this.tokenType) {
case LeftSquareBracket:
return this.AttributeSelector();
case Hash:
return this.IdSelector();
case Colon:
if (this.lookupType(1) === Colon) {
return this.PseudoElementSelector();
} else {
return this.PseudoClassSelector();
}
case Ident:
return this.TypeSelector();
case NumberToken:
case Percentage:
return this.Percentage();
case Dimension:
// throws when .123ident
if (this.charCodeAt(this.tokenStart) === FULLSTOP) {
this.error('Identifier is expected', this.tokenStart + 1);
}
break;
case Delim: {
const code = this.charCodeAt(this.tokenStart);
switch (code) {
case PLUSSIGN:
case GREATERTHANSIGN:
case TILDE:
case SOLIDUS: // /deep/
return this.Combinator();
case FULLSTOP:
return this.ClassSelector();
case ASTERISK:
case VERTICALLINE:
return this.TypeSelector();
case NUMBERSIGN:
return this.IdSelector();
}
break;
}
}
};
export default {
onWhiteSpace,
getNode
};
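A brief usage sketch (assuming the documented parse API with the 'selector' context): onWhiteSpace above synthesizes a descendant Combinator between two adjacent non-combinator children, so plain whitespace in a selector becomes an explicit node.
import { parse } from 'css-tree';

const selector = parse('ul > li a', { context: 'selector' });

console.log(selector.children.toArray().map(node =>
    node.type === 'Combinator' ? `Combinator(${JSON.stringify(node.name)})` : node.type
));
// roughly: [ 'TypeSelector', 'Combinator(">")', 'TypeSelector', 'Combinator(" ")', 'TypeSelector' ]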

25
node_modules/css-tree/lib/syntax/scope/value.js generated vendored Normal file
View File

@ -0,0 +1,25 @@
import getNode from './default.js';
import expressionFn from '../function/expression.js';
import varFn from '../function/var.js';
function isPlusMinusOperator(node) {
return (
node !== null &&
node.type === 'Operator' &&
(node.value[node.value.length - 1] === '-' || node.value[node.value.length - 1] === '+')
);
}
export default {
getNode,
onWhiteSpace(next, children) {
if (isPlusMinusOperator(next)) {
next.value = ' ' + next.value;
}
if (isPlusMinusOperator(children.last)) {
children.last.value += ' ';
}
},
'expression': expressionFn,
'var': varFn
};
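A brief usage sketch (assuming the documented parse/generate API): by folding the surrounding spaces into '+'/'-' Operator nodes, the hook above keeps whitespace that is significant inside calc() intact on re-serialization.
import { parse, generate } from 'css-tree';

const value = parse('calc(100% - 20px)', { context: 'value' });
console.log(generate(value)); // 'calc(100% - 20px)': the spaces around '-' survive the round trip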

87
node_modules/css-tree/lib/tokenizer/OffsetToLocation.js generated vendored Normal file
View File

@ -0,0 +1,87 @@
import { adoptBuffer } from './adopt-buffer.js';
import { isBOM } from './char-code-definitions.js';
const N = 10; // U+000A LINE FEED (\n)
const F = 12; // U+000C FORM FEED (\f)
const R = 13; // U+000D CARRIAGE RETURN (\r)
function computeLinesAndColumns(host) {
const source = host.source;
const sourceLength = source.length;
const startOffset = source.length > 0 ? isBOM(source.charCodeAt(0)) : 0; // 1 when the source starts with a BOM (skip it), otherwise 0
const lines = adoptBuffer(host.lines, sourceLength);
const columns = adoptBuffer(host.columns, sourceLength);
let line = host.startLine;
let column = host.startColumn;
for (let i = startOffset; i < sourceLength; i++) {
const code = source.charCodeAt(i);
lines[i] = line;
columns[i] = column++;
if (code === N || code === R || code === F) {
if (code === R && i + 1 < sourceLength && source.charCodeAt(i + 1) === N) {
i++;
lines[i] = line;
columns[i] = column;
}
line++;
column = 1;
}
}
lines[sourceLength] = line;
columns[sourceLength] = column;
host.lines = lines;
host.columns = columns;
host.computed = true;
}
export class OffsetToLocation {
constructor() {
this.lines = null;
this.columns = null;
this.computed = false;
}
setSource(source, startOffset = 0, startLine = 1, startColumn = 1) {
this.source = source;
this.startOffset = startOffset;
this.startLine = startLine;
this.startColumn = startColumn;
this.computed = false;
}
getLocation(offset, filename) {
if (!this.computed) {
computeLinesAndColumns(this);
}
return {
source: filename,
offset: this.startOffset + offset,
line: this.lines[offset],
column: this.columns[offset]
};
}
getLocationRange(start, end, filename) {
if (!this.computed) {
computeLinesAndColumns(this);
}
return {
source: filename,
start: {
offset: this.startOffset + start,
line: this.lines[start],
column: this.columns[start]
},
end: {
offset: this.startOffset + end,
line: this.lines[end],
column: this.columns[end]
}
};
}
};
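A brief usage sketch (assuming the documented parse API and its positions option; the numbers shown are illustrative): this class backs the loc info attached to nodes, translating raw offsets into 1-based line/column pairs on demand.
import { parse } from 'css-tree';

const ast = parse('.a {\n  color: red;\n}', { positions: true });
const decl = ast.children.first.block.children.first; // the 'color' Declaration
console.log(decl.loc.start); // roughly: { offset: 7, line: 2, column: 3 }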

Some files were not shown because too many files have changed in this diff.