🔧 npm update

2025-04-15 20:50:11 +02:00
parent ce5b9ac0c8
commit 94a90edabd
828 changed files with 256807 additions and 197099 deletions


@@ -1,11 +1,3 @@
import { fileURLToPath as __cjs_fileURLToPath } from 'node:url';
import { dirname as __cjs_dirname } from 'node:path';
import { createRequire as __cjs_createRequire } from 'node:module';
const __filename = __cjs_fileURLToPath(import.meta.url);
const __dirname = __cjs_dirname(__filename);
const require = __cjs_createRequire(import.meta.url);
const __require = require;
var openParentheses = "(".charCodeAt(0);
var closeParentheses = ")".charCodeAt(0);
var singleQuote = "'".charCodeAt(0);

File diff suppressed because one or more lines are too long

node_modules/vite/dist/node/chunks/dep-BXMtZB7a.js generated vendored Normal file

@@ -0,0 +1,822 @@
import { P as getDefaultExportFromCjs } from './dep-Bid9ssRr.js';
import require$$0 from 'path';
import { l as lib } from './dep-3RmXg9uo.js';
import { createRequire as __cjs_createRequire } from 'node:module';
const __require = __cjs_createRequire(import.meta.url);
function _mergeNamespaces(n, m) {
for (var i = 0; i < m.length; i++) {
var e = m[i];
if (typeof e !== 'string' && !Array.isArray(e)) { for (var k in e) {
if (k !== 'default' && !(k in n)) {
n[k] = e[k];
}
} }
}
return n;
}
var formatImportPrelude$2 = function formatImportPrelude(layer, media, supports) {
const parts = [];
if (typeof layer !== "undefined") {
let layerParams = "layer";
if (layer) {
layerParams = `layer(${layer})`;
}
parts.push(layerParams);
}
if (typeof supports !== "undefined") {
parts.push(`supports(${supports})`);
}
if (typeof media !== "undefined") {
parts.push(media);
}
return parts.join(" ")
};
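// Illustrative sketch (editor addition, not part of the generated bundle): the prelude
// pieces are rendered in layer / supports / media order, e.g.
//   formatImportPrelude("base", "screen", "display: flex")
//     -> 'layer(base) supports(display: flex) screen'
//   formatImportPrelude("", "print", undefined)
//     -> 'layer print'   (an empty layer name still emits the bare `layer` keyword)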
const formatImportPrelude$1 = formatImportPrelude$2;
// Base64 encode an import with conditions
// The order of conditions is important and is interleaved with cascade layer declarations
// Each group of conditions and cascade layers needs to be interpreted in order
// To achieve this we create a list of base64 encoded imports, where each import contains a stylesheet with another import.
// Each import can define a single group of conditions and a single cascade layer.
var base64EncodedImport = function base64EncodedConditionalImport(prelude, conditions) {
conditions.reverse();
const first = conditions.pop();
let params = `${prelude} ${formatImportPrelude$1(
first.layer,
first.media,
first.supports,
)}`;
for (const condition of conditions) {
params = `'data:text/css;base64,${Buffer.from(`@import ${params}`).toString(
"base64",
)}' ${formatImportPrelude$1(
condition.layer,
condition.media,
condition.supports,
)}`;
}
return params
};
const base64EncodedConditionalImport = base64EncodedImport;
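// Illustrative sketch (editor addition, not part of the generated bundle): a single
// condition keeps the prelude inline, additional conditions wrap the previous import in
// a base64-encoded data: URL stylesheet, e.g.
//   base64EncodedConditionalImport('url(foo.css)', [{ layer: "base" }])
//     -> 'url(foo.css) layer(base)'
//   base64EncodedConditionalImport('url(foo.css)', [{ media: "screen" }, { layer: "base" }])
//     -> `'data:text/css;base64,<base64 of "@import url(foo.css) screen">' layer(base)`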
var applyConditions$1 = function applyConditions(bundle, atRule) {
bundle.forEach(stmt => {
if (
stmt.type === "charset" ||
stmt.type === "warning" ||
!stmt.conditions?.length
) {
return
}
if (stmt.type === "import") {
stmt.node.params = base64EncodedConditionalImport(
stmt.fullUri,
stmt.conditions,
);
return
}
const { nodes } = stmt;
const { parent } = nodes[0];
const atRules = [];
// Convert conditions to at-rules
for (const condition of stmt.conditions) {
if (typeof condition.media !== "undefined") {
const mediaNode = atRule({
name: "media",
params: condition.media,
source: parent.source,
});
atRules.push(mediaNode);
}
if (typeof condition.supports !== "undefined") {
const supportsNode = atRule({
name: "supports",
params: `(${condition.supports})`,
source: parent.source,
});
atRules.push(supportsNode);
}
if (typeof condition.layer !== "undefined") {
const layerNode = atRule({
name: "layer",
params: condition.layer,
source: parent.source,
});
atRules.push(layerNode);
}
}
// Add nodes to AST
const outerAtRule = atRules.shift();
const innerAtRule = atRules.reduce((previous, next) => {
previous.append(next);
return next
}, outerAtRule);
parent.insertBefore(nodes[0], outerAtRule);
// remove nodes
nodes.forEach(node => {
node.parent = undefined;
});
// better output
nodes[0].raws.before = nodes[0].raws.before || "\n";
// wrap new rules with media query and/or layer at rule
innerAtRule.append(nodes);
stmt.type = "nodes";
stmt.nodes = [outerAtRule];
delete stmt.node;
});
};
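// Illustrative sketch (editor addition, not part of the generated bundle): a "nodes"
// statement carrying conditions [{ media: "screen" }, { layer: "base" }] is re-inserted as
//   @media screen { @layer base { ...original nodes... } }
// i.e. one wrapping at-rule per condition, nested in the order the conditions were collected.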
var applyRaws$1 = function applyRaws(bundle) {
bundle.forEach((stmt, index) => {
if (index === 0) return
if (stmt.parent) {
const { before } = stmt.parent.node.raws;
if (stmt.type === "nodes") stmt.nodes[0].raws.before = before;
else stmt.node.raws.before = before;
} else if (stmt.type === "nodes") {
stmt.nodes[0].raws.before = stmt.nodes[0].raws.before || "\n";
}
});
};
var applyStyles$1 = function applyStyles(bundle, styles) {
styles.nodes = [];
// Strip additional statements.
bundle.forEach(stmt => {
if (["charset", "import"].includes(stmt.type)) {
stmt.node.parent = undefined;
styles.append(stmt.node);
} else if (stmt.type === "nodes") {
stmt.nodes.forEach(node => {
node.parent = undefined;
styles.append(node);
});
}
});
};
const anyDataURLRegexp = /^data:text\/css(?:;(base64|plain))?,/i;
const base64DataURLRegexp = /^data:text\/css;base64,/i;
const plainDataURLRegexp = /^data:text\/css;plain,/i;
function isValid(url) {
return anyDataURLRegexp.test(url)
}
function contents(url) {
if (base64DataURLRegexp.test(url)) {
// "data:text/css;base64,".length === 21
return Buffer.from(url.slice(21), "base64").toString()
}
if (plainDataURLRegexp.test(url)) {
// "data:text/css;plain,".length === 20
return decodeURIComponent(url.slice(20))
}
// "data:text/css,".length === 14
return decodeURIComponent(url.slice(14))
}
var dataUrl = {
isValid,
contents,
};
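// Illustrative sketch (editor addition, not part of the generated bundle):
//   isValid("data:text/css;base64,Ym9keXtjb2xvcjpyZWR9")   -> true
//   isValid("https://example.com/a.css")                   -> false
//   contents("data:text/css;base64,Ym9keXtjb2xvcjpyZWR9")  -> 'body{color:red}'
//   contents("data:text/css;plain,body%7Bcolor%3Ared%7D")  -> 'body{color:red}'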
// external tooling
const valueParser = lib;
// extended tooling
const { stringify } = valueParser;
var parseStatements$1 = function parseStatements(result, styles, conditions, from) {
const statements = [];
let nodes = [];
styles.each(node => {
let stmt;
if (node.type === "atrule") {
if (node.name === "import")
stmt = parseImport(result, node, conditions, from);
else if (node.name === "charset")
stmt = parseCharset(result, node, conditions, from);
}
if (stmt) {
if (nodes.length) {
statements.push({
type: "nodes",
nodes,
conditions: [...conditions],
from,
});
nodes = [];
}
statements.push(stmt);
} else nodes.push(node);
});
if (nodes.length) {
statements.push({
type: "nodes",
nodes,
conditions: [...conditions],
from,
});
}
return statements
};
function parseCharset(result, atRule, conditions, from) {
if (atRule.prev()) {
return result.warn("@charset must precede all other statements", {
node: atRule,
})
}
return {
type: "charset",
node: atRule,
conditions: [...conditions],
from,
}
}
function parseImport(result, atRule, conditions, from) {
let prev = atRule.prev();
// `@import` statements may follow other `@import` statements.
if (prev) {
do {
if (
prev.type === "comment" ||
(prev.type === "atrule" && prev.name === "import")
) {
prev = prev.prev();
continue
}
break
} while (prev)
}
// All `@import` statements may be preceded by `@charset` or `@layer` statements.
// But the `@import` statements must be consecutive.
if (prev) {
do {
if (
prev.type === "comment" ||
(prev.type === "atrule" &&
(prev.name === "charset" || (prev.name === "layer" && !prev.nodes)))
) {
prev = prev.prev();
continue
}
return result.warn(
"@import must precede all other statements (besides @charset or empty @layer)",
{ node: atRule },
)
} while (prev)
}
if (atRule.nodes) {
return result.warn(
"It looks like you didn't end your @import statement correctly. " +
"Child nodes are attached to it.",
{ node: atRule },
)
}
const params = valueParser(atRule.params).nodes;
const stmt = {
type: "import",
uri: "",
fullUri: "",
node: atRule,
conditions: [...conditions],
from,
};
let layer;
let media;
let supports;
for (let i = 0; i < params.length; i++) {
const node = params[i];
if (node.type === "space" || node.type === "comment") continue
if (node.type === "string") {
if (stmt.uri) {
return result.warn(`Multiple url's in '${atRule.toString()}'`, {
node: atRule,
})
}
if (!node.value) {
return result.warn(`Unable to find uri in '${atRule.toString()}'`, {
node: atRule,
})
}
stmt.uri = node.value;
stmt.fullUri = stringify(node);
continue
}
if (node.type === "function" && /^url$/i.test(node.value)) {
if (stmt.uri) {
return result.warn(`Multiple url's in '${atRule.toString()}'`, {
node: atRule,
})
}
if (!node.nodes?.[0]?.value) {
return result.warn(`Unable to find uri in '${atRule.toString()}'`, {
node: atRule,
})
}
stmt.uri = node.nodes[0].value;
stmt.fullUri = stringify(node);
continue
}
if (!stmt.uri) {
return result.warn(`Unable to find uri in '${atRule.toString()}'`, {
node: atRule,
})
}
if (
(node.type === "word" || node.type === "function") &&
/^layer$/i.test(node.value)
) {
if (typeof layer !== "undefined") {
return result.warn(`Multiple layers in '${atRule.toString()}'`, {
node: atRule,
})
}
if (typeof supports !== "undefined") {
return result.warn(
`layers must be defined before support conditions in '${atRule.toString()}'`,
{
node: atRule,
},
)
}
if (node.nodes) {
layer = stringify(node.nodes);
} else {
layer = "";
}
continue
}
if (node.type === "function" && /^supports$/i.test(node.value)) {
if (typeof supports !== "undefined") {
return result.warn(
`Multiple support conditions in '${atRule.toString()}'`,
{
node: atRule,
},
)
}
supports = stringify(node.nodes);
continue
}
media = stringify(params.slice(i));
break
}
if (!stmt.uri) {
return result.warn(`Unable to find uri in '${atRule.toString()}'`, {
node: atRule,
})
}
if (
typeof media !== "undefined" ||
typeof layer !== "undefined" ||
typeof supports !== "undefined"
) {
stmt.conditions.push({
layer,
media,
supports,
});
}
return stmt
}
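// Illustrative sketch (editor addition, not part of the generated bundle): for
//   @import url("theme.css") layer(base) supports(display: flex) screen
// the returned statement has uri: "theme.css", fullUri: 'url("theme.css")', and the
// condition { layer: "base", media: "screen", supports: "display: flex" } appended to
// the conditions inherited from the importing file.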
// builtin tooling
const path$2 = require$$0;
// placeholder tooling
let sugarss;
var processContent$1 = function processContent(
result,
content,
filename,
options,
postcss,
) {
const { plugins } = options;
const ext = path$2.extname(filename);
const parserList = [];
// SugarSS support:
if (ext === ".sss") {
if (!sugarss) {
/* c8 ignore next 3 */
try {
sugarss = __require('sugarss');
} catch {} // Ignore
}
if (sugarss)
return runPostcss(postcss, content, filename, plugins, [sugarss])
}
// Syntax support:
if (result.opts.syntax?.parse) {
parserList.push(result.opts.syntax.parse);
}
// Parser support:
if (result.opts.parser) parserList.push(result.opts.parser);
// Try the default as a last resort:
parserList.push(null);
return runPostcss(postcss, content, filename, plugins, parserList)
};
function runPostcss(postcss, content, filename, plugins, parsers, index) {
if (!index) index = 0;
return postcss(plugins)
.process(content, {
from: filename,
parser: parsers[index],
})
.catch(err => {
// If there's an error, try the next parser
index++;
// If there are no parsers left, throw it
if (index === parsers.length) throw err
return runPostcss(postcss, content, filename, plugins, parsers, index)
})
}
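// Illustrative sketch (editor addition, not part of the generated bundle): for an ".sss"
// file the content is parsed with sugarss when it can be required; otherwise the parser
// list is tried in order [options.syntax.parse, options.parser, null], and runPostcss
// falls back to the next entry whenever process() rejects, rethrowing only when the
// list is exhausted.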
const path$1 = require$$0;
const dataURL = dataUrl;
const parseStatements = parseStatements$1;
const processContent = processContent$1;
const resolveId$1 = (id) => id;
const formatImportPrelude = formatImportPrelude$2;
async function parseStyles$1(
result,
styles,
options,
state,
conditions,
from,
postcss,
) {
const statements = parseStatements(result, styles, conditions, from);
for (const stmt of statements) {
if (stmt.type !== "import" || !isProcessableURL(stmt.uri)) {
continue
}
if (options.filter && !options.filter(stmt.uri)) {
// rejected by filter
continue
}
await resolveImportId(result, stmt, options, state, postcss);
}
let charset;
const imports = [];
const bundle = [];
function handleCharset(stmt) {
if (!charset) charset = stmt;
// charsets aren't case-sensitive, so convert to lower case to compare
else if (
stmt.node.params.toLowerCase() !== charset.node.params.toLowerCase()
) {
throw stmt.node.error(
`Incompatible @charset statements:
${stmt.node.params} specified in ${stmt.node.source.input.file}
${charset.node.params} specified in ${charset.node.source.input.file}`,
)
}
}
// squash statements and their children
statements.forEach(stmt => {
if (stmt.type === "charset") handleCharset(stmt);
else if (stmt.type === "import") {
if (stmt.children) {
stmt.children.forEach((child, index) => {
if (child.type === "import") imports.push(child);
else if (child.type === "charset") handleCharset(child);
else bundle.push(child);
// For better output
if (index === 0) child.parent = stmt;
});
} else imports.push(stmt);
} else if (stmt.type === "nodes") {
bundle.push(stmt);
}
});
return charset ? [charset, ...imports.concat(bundle)] : imports.concat(bundle)
}
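// Illustrative sketch (editor addition, not part of the generated bundle): the bundle
// returned here is ordered as [@charset (if any), hoisted @import statements, remaining
// nodes], which is what applyRaws / applyConditions / applyStyles consume in Once() below.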
async function resolveImportId(result, stmt, options, state, postcss) {
if (dataURL.isValid(stmt.uri)) {
// eslint-disable-next-line require-atomic-updates
stmt.children = await loadImportContent(
result,
stmt,
stmt.uri,
options,
state,
postcss,
);
return
} else if (dataURL.isValid(stmt.from.slice(-1))) {
// Data urls can't be used as a base url to resolve imports.
throw stmt.node.error(
`Unable to import '${stmt.uri}' from a stylesheet that is embedded in a data url`,
)
}
const atRule = stmt.node;
let sourceFile;
if (atRule.source?.input?.file) {
sourceFile = atRule.source.input.file;
}
const base = sourceFile
? path$1.dirname(atRule.source.input.file)
: options.root;
const paths = [await options.resolve(stmt.uri, base, options, atRule)].flat();
// Ensure that each path is absolute:
const resolved = await Promise.all(
paths.map(file => {
return !path$1.isAbsolute(file)
? resolveId$1(file)
: file
}),
);
// Add dependency messages:
resolved.forEach(file => {
result.messages.push({
type: "dependency",
plugin: "postcss-import",
file,
parent: sourceFile,
});
});
const importedContent = await Promise.all(
resolved.map(file => {
return loadImportContent(result, stmt, file, options, state, postcss)
}),
);
// Merge loaded statements
// eslint-disable-next-line require-atomic-updates
stmt.children = importedContent.flat().filter(x => !!x);
}
async function loadImportContent(
result,
stmt,
filename,
options,
state,
postcss,
) {
const atRule = stmt.node;
const { conditions, from } = stmt;
const stmtDuplicateCheckKey = conditions
.map(condition =>
formatImportPrelude(condition.layer, condition.media, condition.supports),
)
.join(":");
if (options.skipDuplicates) {
// skip files already imported at the same scope
if (state.importedFiles[filename]?.[stmtDuplicateCheckKey]) {
return
}
// save imported files to skip them next time
if (!state.importedFiles[filename]) {
state.importedFiles[filename] = {};
}
state.importedFiles[filename][stmtDuplicateCheckKey] = true;
}
if (from.includes(filename)) {
return
}
const content = await options.load(filename, options);
if (content.trim() === "" && options.warnOnEmpty) {
result.warn(`${filename} is empty`, { node: atRule });
return
}
// skip previous imported files not containing @import rules
if (
options.skipDuplicates &&
state.hashFiles[content]?.[stmtDuplicateCheckKey]
) {
return
}
const importedResult = await processContent(
result,
content,
filename,
options,
postcss,
);
const styles = importedResult.root;
result.messages = result.messages.concat(importedResult.messages);
if (options.skipDuplicates) {
const hasImport = styles.some(child => {
return child.type === "atrule" && child.name === "import"
});
if (!hasImport) {
// save hash files to skip them next time
if (!state.hashFiles[content]) {
state.hashFiles[content] = {};
}
state.hashFiles[content][stmtDuplicateCheckKey] = true;
}
}
// recursion: import @import from imported file
return parseStyles$1(
result,
styles,
options,
state,
conditions,
[...from, filename],
postcss,
)
}
function isProcessableURL(uri) {
// skip protocol base uri (protocol://url) or protocol-relative
if (/^(?:[a-z]+:)?\/\//i.test(uri)) {
return false
}
// check for fragment or query
try {
// needs a base to parse properly
const url = new URL(uri, "https://example.com");
if (url.search) {
return false
}
} catch {} // Ignore
return true
}
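// Illustrative sketch (editor addition, not part of the generated bundle):
//   isProcessableURL("http://example.com/a.css") -> false  (absolute protocol)
//   isProcessableURL("//cdn.example.com/a.css")  -> false  (protocol-relative)
//   isProcessableURL("./a.css?inline")           -> false  (carries a query string)
//   isProcessableURL("./a.css")                  -> true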
var parseStyles_1 = parseStyles$1;
// builtin tooling
const path = require$$0;
// internal tooling
const applyConditions = applyConditions$1;
const applyRaws = applyRaws$1;
const applyStyles = applyStyles$1;
const loadContent = () => "";
const parseStyles = parseStyles_1;
const resolveId = (id) => id;
function AtImport(options) {
options = {
root: process.cwd(),
path: [],
skipDuplicates: true,
resolve: resolveId,
load: loadContent,
plugins: [],
addModulesDirectories: [],
warnOnEmpty: true,
...options,
};
options.root = path.resolve(options.root);
// convert string to an array of a single element
if (typeof options.path === "string") options.path = [options.path];
if (!Array.isArray(options.path)) options.path = [];
options.path = options.path.map(p => path.resolve(options.root, p));
return {
postcssPlugin: "postcss-import",
async Once(styles, { result, atRule, postcss }) {
const state = {
importedFiles: {},
hashFiles: {},
};
if (styles.source?.input?.file) {
state.importedFiles[styles.source.input.file] = {};
}
if (options.plugins && !Array.isArray(options.plugins)) {
throw new Error("plugins option must be an array")
}
const bundle = await parseStyles(
result,
styles,
options,
state,
[],
[],
postcss,
);
applyRaws(bundle);
applyConditions(bundle, atRule);
applyStyles(bundle, styles);
},
}
}
AtImport.postcss = true;
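// Illustrative usage sketch (editor addition, not part of the generated bundle). This
// vendored copy stubs `resolve` and `load` (identity / empty string), so real
// implementations are presumably supplied through the options by the caller, e.g.:
//   const postcss = require("postcss");
//   const fs = require("fs");
//   postcss([AtImport({ root: process.cwd(), load: f => fs.readFileSync(f, "utf8") })])
//     .process('@import "theme.css" layer(base);', { from: "src/index.css" })
//     .then(result => console.log(result.css));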
var postcssImport = AtImport;
var index = /*@__PURE__*/getDefaultExportFromCjs(postcssImport);
var index$1 = /*#__PURE__*/_mergeNamespaces({
__proto__: null,
default: index
}, [postcssImport]);
export { index$1 as i };

node_modules/vite/dist/node/chunks/dep-Bid9ssRr.js generated vendored Normal file

File diff suppressed because one or more lines are too long


@@ -1,914 +0,0 @@
import { y as getDefaultExportFromCjs } from './dep-9A4-l-43.js';
import require$$0 from 'path';
import require$$0__default from 'fs';
import { l as lib } from './dep-8a-6Quh6.js';
import { fileURLToPath as __cjs_fileURLToPath } from 'node:url';
import { dirname as __cjs_dirname } from 'node:path';
import { createRequire as __cjs_createRequire } from 'node:module';
const __filename = __cjs_fileURLToPath(import.meta.url);
const __dirname = __cjs_dirname(__filename);
const require = __cjs_createRequire(import.meta.url);
const __require = require;
function _mergeNamespaces(n, m) {
for (var i = 0; i < m.length; i++) {
var e = m[i];
if (typeof e !== 'string' && !Array.isArray(e)) { for (var k in e) {
if (k !== 'default' && !(k in n)) {
n[k] = e[k];
}
} }
}
return n;
}
const startsWithKeywordRegexp = /^(all|not|only|print|screen)/i;
var joinMedia$1 = function (parentMedia, childMedia) {
if (!parentMedia.length && childMedia.length) return childMedia
if (parentMedia.length && !childMedia.length) return parentMedia
if (!parentMedia.length && !childMedia.length) return []
const media = [];
parentMedia.forEach(parentItem => {
const parentItemStartsWithKeyword = startsWithKeywordRegexp.test(parentItem);
childMedia.forEach(childItem => {
const childItemStartsWithKeyword = startsWithKeywordRegexp.test(childItem);
if (parentItem !== childItem) {
if (childItemStartsWithKeyword && !parentItemStartsWithKeyword) {
media.push(`${childItem} and ${parentItem}`);
} else {
media.push(`${parentItem} and ${childItem}`);
}
}
});
});
return media
};
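// Illustrative sketch (editor addition, not part of the generated bundle): child queries
// starting with a keyword (all/not/only/print/screen) are placed first when combined, e.g.
//   joinMedia$1(["(min-width: 600px)"], ["print"])  -> ["print and (min-width: 600px)"]
//   joinMedia$1(["screen"], ["(min-width: 600px)"]) -> ["screen and (min-width: 600px)"]
//   joinMedia$1([], ["print"])                      -> ["print"]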
var joinLayer$1 = function (parentLayer, childLayer) {
if (!parentLayer.length && childLayer.length) return childLayer
if (parentLayer.length && !childLayer.length) return parentLayer
if (!parentLayer.length && !childLayer.length) return []
return parentLayer.concat(childLayer)
};
var readCache$1 = {exports: {}};
var pify$2 = {exports: {}};
var processFn = function (fn, P, opts) {
return function () {
var that = this;
var args = new Array(arguments.length);
for (var i = 0; i < arguments.length; i++) {
args[i] = arguments[i];
}
return new P(function (resolve, reject) {
args.push(function (err, result) {
if (err) {
reject(err);
} else if (opts.multiArgs) {
var results = new Array(arguments.length - 1);
for (var i = 1; i < arguments.length; i++) {
results[i - 1] = arguments[i];
}
resolve(results);
} else {
resolve(result);
}
});
fn.apply(that, args);
});
};
};
var pify$1 = pify$2.exports = function (obj, P, opts) {
if (typeof P !== 'function') {
opts = P;
P = Promise;
}
opts = opts || {};
opts.exclude = opts.exclude || [/.+Sync$/];
var filter = function (key) {
var match = function (pattern) {
return typeof pattern === 'string' ? key === pattern : pattern.test(key);
};
return opts.include ? opts.include.some(match) : !opts.exclude.some(match);
};
var ret = typeof obj === 'function' ? function () {
if (opts.excludeMain) {
return obj.apply(this, arguments);
}
return processFn(obj, P, opts).apply(this, arguments);
} : {};
return Object.keys(obj).reduce(function (ret, key) {
var x = obj[key];
ret[key] = typeof x === 'function' && filter(key) ? processFn(x, P, opts) : x;
return ret;
}, ret);
};
pify$1.all = pify$1;
var pifyExports = pify$2.exports;
var fs = require$$0__default;
var path$2 = require$$0;
var pify = pifyExports;
var stat = pify(fs.stat);
var readFile = pify(fs.readFile);
var resolve = path$2.resolve;
var cache = Object.create(null);
function convert(content, encoding) {
if (Buffer.isEncoding(encoding)) {
return content.toString(encoding);
}
return content;
}
readCache$1.exports = function (path, encoding) {
path = resolve(path);
return stat(path).then(function (stats) {
var item = cache[path];
if (item && item.mtime.getTime() === stats.mtime.getTime()) {
return convert(item.content, encoding);
}
return readFile(path).then(function (data) {
cache[path] = {
mtime: stats.mtime,
content: data
};
return convert(data, encoding);
});
}).catch(function (err) {
cache[path] = null;
return Promise.reject(err);
});
};
readCache$1.exports.sync = function (path, encoding) {
path = resolve(path);
try {
var stats = fs.statSync(path);
var item = cache[path];
if (item && item.mtime.getTime() === stats.mtime.getTime()) {
return convert(item.content, encoding);
}
var data = fs.readFileSync(path);
cache[path] = {
mtime: stats.mtime,
content: data
};
return convert(data, encoding);
} catch (err) {
cache[path] = null;
throw err;
}
};
readCache$1.exports.get = function (path, encoding) {
path = resolve(path);
if (cache[path]) {
return convert(cache[path].content, encoding);
}
return null;
};
readCache$1.exports.clear = function () {
cache = Object.create(null);
};
var readCacheExports = readCache$1.exports;
const dataURLRegexp = /^data:text\/css;base64,/i;
function isValid(url) {
return dataURLRegexp.test(url)
}
function contents(url) {
// "data:text/css;base64,".length === 21
return Buffer.from(url.slice(21), "base64").toString()
}
var dataUrl = {
isValid,
contents,
};
const readCache = readCacheExports;
const dataURL$1 = dataUrl;
var loadContent$1 = filename => {
if (dataURL$1.isValid(filename)) {
return dataURL$1.contents(filename)
}
return readCache(filename, "utf-8")
};
// builtin tooling
const path$1 = require$$0;
// placeholder tooling
let sugarss;
var processContent$1 = function processContent(
result,
content,
filename,
options,
postcss
) {
const { plugins } = options;
const ext = path$1.extname(filename);
const parserList = [];
// SugarSS support:
if (ext === ".sss") {
if (!sugarss) {
try {
sugarss = __require('sugarss');
} catch {} // Ignore
}
if (sugarss)
return runPostcss(postcss, content, filename, plugins, [sugarss])
}
// Syntax support:
if (result.opts.syntax?.parse) {
parserList.push(result.opts.syntax.parse);
}
// Parser support:
if (result.opts.parser) parserList.push(result.opts.parser);
// Try the default as a last resort:
parserList.push(null);
return runPostcss(postcss, content, filename, plugins, parserList)
};
function runPostcss(postcss, content, filename, plugins, parsers, index) {
if (!index) index = 0;
return postcss(plugins)
.process(content, {
from: filename,
parser: parsers[index],
})
.catch(err => {
// If there's an error, try the next parser
index++;
// If there are no parsers left, throw it
if (index === parsers.length) throw err
return runPostcss(postcss, content, filename, plugins, parsers, index)
})
}
// external tooling
const valueParser = lib;
// extended tooling
const { stringify } = valueParser;
function split(params, start) {
const list = [];
const last = params.reduce((item, node, index) => {
if (index < start) return ""
if (node.type === "div" && node.value === ",") {
list.push(item);
return ""
}
return item + stringify(node)
}, "");
list.push(last);
return list
}
var parseStatements$1 = function (result, styles) {
const statements = [];
let nodes = [];
styles.each(node => {
let stmt;
if (node.type === "atrule") {
if (node.name === "import") stmt = parseImport(result, node);
else if (node.name === "media") stmt = parseMedia(result, node);
else if (node.name === "charset") stmt = parseCharset(result, node);
}
if (stmt) {
if (nodes.length) {
statements.push({
type: "nodes",
nodes,
media: [],
layer: [],
});
nodes = [];
}
statements.push(stmt);
} else nodes.push(node);
});
if (nodes.length) {
statements.push({
type: "nodes",
nodes,
media: [],
layer: [],
});
}
return statements
};
function parseMedia(result, atRule) {
const params = valueParser(atRule.params).nodes;
return {
type: "media",
node: atRule,
media: split(params, 0),
layer: [],
}
}
function parseCharset(result, atRule) {
if (atRule.prev()) {
return result.warn("@charset must precede all other statements", {
node: atRule,
})
}
return {
type: "charset",
node: atRule,
media: [],
layer: [],
}
}
function parseImport(result, atRule) {
let prev = atRule.prev();
if (prev) {
do {
if (
prev.type !== "comment" &&
(prev.type !== "atrule" ||
(prev.name !== "import" &&
prev.name !== "charset" &&
!(prev.name === "layer" && !prev.nodes)))
) {
return result.warn(
"@import must precede all other statements (besides @charset or empty @layer)",
{ node: atRule }
)
}
prev = prev.prev();
} while (prev)
}
if (atRule.nodes) {
return result.warn(
"It looks like you didn't end your @import statement correctly. " +
"Child nodes are attached to it.",
{ node: atRule }
)
}
const params = valueParser(atRule.params).nodes;
const stmt = {
type: "import",
node: atRule,
media: [],
layer: [],
};
// prettier-ignore
if (
!params.length ||
(
params[0].type !== "string" ||
!params[0].value
) &&
(
params[0].type !== "function" ||
params[0].value !== "url" ||
!params[0].nodes.length ||
!params[0].nodes[0].value
)
) {
return result.warn(`Unable to find uri in '${ atRule.toString() }'`, {
node: atRule,
})
}
if (params[0].type === "string") stmt.uri = params[0].value;
else stmt.uri = params[0].nodes[0].value;
stmt.fullUri = stringify(params[0]);
let remainder = params;
if (remainder.length > 2) {
if (
(remainder[2].type === "word" || remainder[2].type === "function") &&
remainder[2].value === "layer"
) {
if (remainder[1].type !== "space") {
return result.warn("Invalid import layer statement", { node: atRule })
}
if (remainder[2].nodes) {
stmt.layer = [stringify(remainder[2].nodes)];
} else {
stmt.layer = [""];
}
remainder = remainder.slice(2);
}
}
if (remainder.length > 2) {
if (remainder[1].type !== "space") {
return result.warn("Invalid import media statement", { node: atRule })
}
stmt.media = split(remainder, 2);
}
return stmt
}
var assignLayerNames$1 = function (layer, node, state, options) {
layer.forEach((layerPart, i) => {
if (layerPart.trim() === "") {
if (options.nameLayer) {
layer[i] = options
.nameLayer(state.anonymousLayerCounter++, state.rootFilename)
.toString();
} else {
throw node.error(
`When using anonymous layers in @import you must also set the "nameLayer" plugin option`
)
}
}
});
};
// builtin tooling
const path = require$$0;
// internal tooling
const joinMedia = joinMedia$1;
const joinLayer = joinLayer$1;
const resolveId = (id) => id;
const loadContent = loadContent$1;
const processContent = processContent$1;
const parseStatements = parseStatements$1;
const assignLayerNames = assignLayerNames$1;
const dataURL = dataUrl;
function AtImport(options) {
options = {
root: process.cwd(),
path: [],
skipDuplicates: true,
resolve: resolveId,
load: loadContent,
plugins: [],
addModulesDirectories: [],
nameLayer: null,
...options,
};
options.root = path.resolve(options.root);
// convert string to an array of a single element
if (typeof options.path === "string") options.path = [options.path];
if (!Array.isArray(options.path)) options.path = [];
options.path = options.path.map(p => path.resolve(options.root, p));
return {
postcssPlugin: "postcss-import",
Once(styles, { result, atRule, postcss }) {
const state = {
importedFiles: {},
hashFiles: {},
rootFilename: null,
anonymousLayerCounter: 0,
};
if (styles.source?.input?.file) {
state.rootFilename = styles.source.input.file;
state.importedFiles[styles.source.input.file] = {};
}
if (options.plugins && !Array.isArray(options.plugins)) {
throw new Error("plugins option must be an array")
}
if (options.nameLayer && typeof options.nameLayer !== "function") {
throw new Error("nameLayer option must be a function")
}
return parseStyles(result, styles, options, state, [], []).then(
bundle => {
applyRaws(bundle);
applyMedia(bundle);
applyStyles(bundle, styles);
}
)
function applyRaws(bundle) {
bundle.forEach((stmt, index) => {
if (index === 0) return
if (stmt.parent) {
const { before } = stmt.parent.node.raws;
if (stmt.type === "nodes") stmt.nodes[0].raws.before = before;
else stmt.node.raws.before = before;
} else if (stmt.type === "nodes") {
stmt.nodes[0].raws.before = stmt.nodes[0].raws.before || "\n";
}
});
}
function applyMedia(bundle) {
bundle.forEach(stmt => {
if (
(!stmt.media.length && !stmt.layer.length) ||
stmt.type === "charset"
) {
return
}
if (stmt.layer.length > 1) {
assignLayerNames(stmt.layer, stmt.node, state, options);
}
if (stmt.type === "import") {
const parts = [stmt.fullUri];
const media = stmt.media.join(", ");
if (stmt.layer.length) {
const layerName = stmt.layer.join(".");
let layerParams = "layer";
if (layerName) {
layerParams = `layer(${layerName})`;
}
parts.push(layerParams);
}
if (media) {
parts.push(media);
}
stmt.node.params = parts.join(" ");
} else if (stmt.type === "media") {
if (stmt.layer.length) {
const layerNode = atRule({
name: "layer",
params: stmt.layer.join("."),
source: stmt.node.source,
});
if (stmt.parentMedia?.length) {
const mediaNode = atRule({
name: "media",
params: stmt.parentMedia.join(", "),
source: stmt.node.source,
});
mediaNode.append(layerNode);
layerNode.append(stmt.node);
stmt.node = mediaNode;
} else {
layerNode.append(stmt.node);
stmt.node = layerNode;
}
} else {
stmt.node.params = stmt.media.join(", ");
}
} else {
const { nodes } = stmt;
const { parent } = nodes[0];
let outerAtRule;
let innerAtRule;
if (stmt.media.length && stmt.layer.length) {
const mediaNode = atRule({
name: "media",
params: stmt.media.join(", "),
source: parent.source,
});
const layerNode = atRule({
name: "layer",
params: stmt.layer.join("."),
source: parent.source,
});
mediaNode.append(layerNode);
innerAtRule = layerNode;
outerAtRule = mediaNode;
} else if (stmt.media.length) {
const mediaNode = atRule({
name: "media",
params: stmt.media.join(", "),
source: parent.source,
});
innerAtRule = mediaNode;
outerAtRule = mediaNode;
} else if (stmt.layer.length) {
const layerNode = atRule({
name: "layer",
params: stmt.layer.join("."),
source: parent.source,
});
innerAtRule = layerNode;
outerAtRule = layerNode;
}
parent.insertBefore(nodes[0], outerAtRule);
// remove nodes
nodes.forEach(node => {
node.parent = undefined;
});
// better output
nodes[0].raws.before = nodes[0].raws.before || "\n";
// wrap new rules with media query and/or layer at rule
innerAtRule.append(nodes);
stmt.type = "media";
stmt.node = outerAtRule;
delete stmt.nodes;
}
});
}
function applyStyles(bundle, styles) {
styles.nodes = [];
// Strip additional statements.
bundle.forEach(stmt => {
if (["charset", "import", "media"].includes(stmt.type)) {
stmt.node.parent = undefined;
styles.append(stmt.node);
} else if (stmt.type === "nodes") {
stmt.nodes.forEach(node => {
node.parent = undefined;
styles.append(node);
});
}
});
}
function parseStyles(result, styles, options, state, media, layer) {
const statements = parseStatements(result, styles);
return Promise.resolve(statements)
.then(stmts => {
// process each statement in series
return stmts.reduce((promise, stmt) => {
return promise.then(() => {
stmt.media = joinMedia(media, stmt.media || []);
stmt.parentMedia = media;
stmt.layer = joinLayer(layer, stmt.layer || []);
// skip protocol base uri (protocol://url) or protocol-relative
if (
stmt.type !== "import" ||
/^(?:[a-z]+:)?\/\//i.test(stmt.uri)
) {
return
}
if (options.filter && !options.filter(stmt.uri)) {
// rejected by filter
return
}
return resolveImportId(result, stmt, options, state)
})
}, Promise.resolve())
})
.then(() => {
let charset;
const imports = [];
const bundle = [];
function handleCharset(stmt) {
if (!charset) charset = stmt;
// charsets aren't case-sensitive, so convert to lower case to compare
else if (
stmt.node.params.toLowerCase() !==
charset.node.params.toLowerCase()
) {
throw new Error(
`Incompatible @charset statements:
${stmt.node.params} specified in ${stmt.node.source.input.file}
${charset.node.params} specified in ${charset.node.source.input.file}`
)
}
}
// squash statements and their children
statements.forEach(stmt => {
if (stmt.type === "charset") handleCharset(stmt);
else if (stmt.type === "import") {
if (stmt.children) {
stmt.children.forEach((child, index) => {
if (child.type === "import") imports.push(child);
else if (child.type === "charset") handleCharset(child);
else bundle.push(child);
// For better output
if (index === 0) child.parent = stmt;
});
} else imports.push(stmt);
} else if (stmt.type === "media" || stmt.type === "nodes") {
bundle.push(stmt);
}
});
return charset
? [charset, ...imports.concat(bundle)]
: imports.concat(bundle)
})
}
function resolveImportId(result, stmt, options, state) {
if (dataURL.isValid(stmt.uri)) {
return loadImportContent(result, stmt, stmt.uri, options, state).then(
result => {
stmt.children = result;
}
)
}
const atRule = stmt.node;
let sourceFile;
if (atRule.source?.input?.file) {
sourceFile = atRule.source.input.file;
}
const base = sourceFile
? path.dirname(atRule.source.input.file)
: options.root;
return Promise.resolve(options.resolve(stmt.uri, base, options))
.then(paths => {
if (!Array.isArray(paths)) paths = [paths];
// Ensure that each path is absolute:
return Promise.all(
paths.map(file => {
return !path.isAbsolute(file)
? resolveId(file)
: file
})
)
})
.then(resolved => {
// Add dependency messages:
resolved.forEach(file => {
result.messages.push({
type: "dependency",
plugin: "postcss-import",
file,
parent: sourceFile,
});
});
return Promise.all(
resolved.map(file => {
return loadImportContent(result, stmt, file, options, state)
})
)
})
.then(result => {
// Merge loaded statements
stmt.children = result.reduce((result, statements) => {
return statements ? result.concat(statements) : result
}, []);
})
}
function loadImportContent(result, stmt, filename, options, state) {
const atRule = stmt.node;
const { media, layer } = stmt;
assignLayerNames(layer, atRule, state, options);
if (options.skipDuplicates) {
// skip files already imported at the same scope
if (state.importedFiles[filename]?.[media]?.[layer]) {
return
}
// save imported files to skip them next time
if (!state.importedFiles[filename]) {
state.importedFiles[filename] = {};
}
if (!state.importedFiles[filename][media]) {
state.importedFiles[filename][media] = {};
}
state.importedFiles[filename][media][layer] = true;
}
return Promise.resolve(options.load(filename, options)).then(
content => {
if (content.trim() === "") {
result.warn(`${filename} is empty`, { node: atRule });
return
}
// skip previous imported files not containing @import rules
if (state.hashFiles[content]?.[media]?.[layer]) {
return
}
return processContent(
result,
content,
filename,
options,
postcss
).then(importedResult => {
const styles = importedResult.root;
result.messages = result.messages.concat(importedResult.messages);
if (options.skipDuplicates) {
const hasImport = styles.some(child => {
return child.type === "atrule" && child.name === "import"
});
if (!hasImport) {
// save hash files to skip them next time
if (!state.hashFiles[content]) {
state.hashFiles[content] = {};
}
if (!state.hashFiles[content][media]) {
state.hashFiles[content][media] = {};
}
state.hashFiles[content][media][layer] = true;
}
}
// recursion: import @import from imported file
return parseStyles(result, styles, options, state, media, layer)
})
}
)
}
},
}
}
AtImport.postcss = true;
var postcssImport = AtImport;
var index = /*@__PURE__*/getDefaultExportFromCjs(postcssImport);
var index$1 = /*#__PURE__*/_mergeNamespaces({
__proto__: null,
default: index
}, [postcssImport]);
export { index$1 as i };

node_modules/vite/dist/node/cli.js generated vendored

@@ -1,20 +1,21 @@
import path from 'node:path';
import fs from 'node:fs';
import fs__default from 'node:fs';
import { performance } from 'node:perf_hooks';
import { EventEmitter } from 'events';
import { x as colors, k as createLogger, r as resolveConfig } from './chunks/dep-9A4-l-43.js';
import { O as colors, I as createLogger, r as resolveConfig } from './chunks/dep-Bid9ssRr.js';
import { VERSION } from './constants.js';
import 'node:fs/promises';
import 'node:url';
import 'node:util';
import 'node:module';
import 'tty';
import 'path';
import 'node:crypto';
import 'esbuild';
import 'path';
import 'fs';
import 'assert';
import 'node:child_process';
import 'node:http';
import 'node:https';
import 'tty';
import 'util';
import 'net';
import 'url';
@@ -23,26 +24,26 @@ import 'stream';
import 'os';
import 'child_process';
import 'node:os';
import 'node:child_process';
import 'node:crypto';
import 'node:net';
import 'node:dns';
import 'crypto';
import 'module';
import 'node:assert';
import 'node:process';
import 'node:v8';
import 'node:buffer';
import 'rollup';
import 'vite/module-runner';
import 'rollup/parseAst';
import 'querystring';
import 'node:buffer';
import 'module';
import 'node:readline';
import 'node:process';
import 'node:events';
import 'zlib';
import 'buffer';
import 'crypto';
import 'node:assert';
import 'node:v8';
import 'node:worker_threads';
import 'https';
import 'tls';
import 'zlib';
import 'buffer';
import 'assert';
import 'node:querystring';
import 'node:zlib';
import 'worker_threads';
function toArr(any) {
return any == null ? [] : Array.isArray(any) ? any : [any];
@@ -657,271 +658,287 @@ class CAC extends EventEmitter {
const cac = (name = "") => new CAC(name);
const cli = cac('vite');
const cli = cac("vite");
let profileSession = global.__vite_profile_session;
let profileCount = 0;
const stopProfiler = (log) => {
if (!profileSession)
return;
return new Promise((res, rej) => {
profileSession.post('Profiler.stop', (err, { profile }) => {
// Write profile to disk, upload, etc.
if (!err) {
const outPath = path.resolve(`./vite-profile-${profileCount++}.cpuprofile`);
fs.writeFileSync(outPath, JSON.stringify(profile));
log(colors.yellow(`CPU profile written to ${colors.white(colors.dim(outPath))}`));
profileSession = undefined;
res();
}
else {
rej(err);
}
});
if (!profileSession) return;
return new Promise((res, rej) => {
profileSession.post("Profiler.stop", (err, { profile }) => {
if (!err) {
const outPath = path.resolve(
`./vite-profile-${profileCount++}.cpuprofile`
);
fs__default.writeFileSync(outPath, JSON.stringify(profile));
log(
colors.yellow(
`CPU profile written to ${colors.white(colors.dim(outPath))}`
)
);
profileSession = void 0;
res();
} else {
rej(err);
}
});
});
};
const filterDuplicateOptions = (options) => {
for (const [key, value] of Object.entries(options)) {
if (Array.isArray(value)) {
options[key] = value[value.length - 1];
}
for (const [key, value] of Object.entries(options)) {
if (Array.isArray(value)) {
options[key] = value[value.length - 1];
}
}
};
/**
* removing global flags before passing as command specific sub-configs
*/
function cleanOptions(options) {
const ret = { ...options };
delete ret['--'];
delete ret.c;
delete ret.config;
delete ret.base;
delete ret.l;
delete ret.logLevel;
delete ret.clearScreen;
delete ret.d;
delete ret.debug;
delete ret.f;
delete ret.filter;
delete ret.m;
delete ret.mode;
// convert the sourcemap option to a boolean if necessary
if ('sourcemap' in ret) {
const sourcemap = ret.sourcemap;
ret.sourcemap =
sourcemap === 'true'
? true
: sourcemap === 'false'
? false
: ret.sourcemap;
}
return ret;
function cleanGlobalCLIOptions(options) {
const ret = { ...options };
delete ret["--"];
delete ret.c;
delete ret.config;
delete ret.base;
delete ret.l;
delete ret.logLevel;
delete ret.clearScreen;
delete ret.configLoader;
delete ret.d;
delete ret.debug;
delete ret.f;
delete ret.filter;
delete ret.m;
delete ret.mode;
delete ret.w;
if ("sourcemap" in ret) {
const sourcemap = ret.sourcemap;
ret.sourcemap = sourcemap === "true" ? true : sourcemap === "false" ? false : ret.sourcemap;
}
if ("watch" in ret) {
const watch = ret.watch;
ret.watch = watch ? {} : void 0;
}
return ret;
}
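// Illustrative sketch (editor addition, not part of the bundled CLI): global flags are
// stripped and string/boolean shorthands normalized before the rest is forwarded as the
// command-specific config, e.g.
//   cleanGlobalCLIOptions({ config: "vite.config.ts", mode: "dev", sourcemap: "true", watch: true, port: 5173 })
//     -> { sourcemap: true, watch: {}, port: 5173 }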
function cleanBuilderCLIOptions(options) {
const ret = { ...options };
delete ret.app;
return ret;
}
/**
* host may be a number (like 0), should convert to string
*/
const convertHost = (v) => {
if (typeof v === 'number') {
return String(v);
}
return v;
if (typeof v === "number") {
return String(v);
}
return v;
};
/**
* base may be a number (like 0), should convert to empty string
*/
const convertBase = (v) => {
if (v === 0) {
return '';
}
return v;
if (v === 0) {
return "";
}
return v;
};
cli
.option('-c, --config <file>', `[string] use specified config file`)
.option('--base <path>', `[string] public base path (default: /)`, {
type: [convertBase],
})
.option('-l, --logLevel <level>', `[string] info | warn | error | silent`)
.option('--clearScreen', `[boolean] allow/disable clear screen when logging`)
.option('-d, --debug [feat]', `[string | boolean] show debug logs`)
.option('-f, --filter <filter>', `[string] filter debug logs`)
.option('-m, --mode <mode>', `[string] set env mode`);
// dev
cli
.command('[root]', 'start dev server') // default command
.alias('serve') // the command is called 'serve' in Vite's API
.alias('dev') // alias to align with the script name
.option('--host [host]', `[string] specify hostname`, { type: [convertHost] })
.option('--port <port>', `[number] specify port`)
.option('--open [path]', `[boolean | string] open browser on startup`)
.option('--cors', `[boolean] enable CORS`)
.option('--strictPort', `[boolean] exit if specified port is already in use`)
.option('--force', `[boolean] force the optimizer to ignore the cache and re-bundle`)
.action(async (root, options) => {
filterDuplicateOptions(options);
// output structure is preserved even after bundling so require()
// is ok here
const { createServer } = await import('./chunks/dep-9A4-l-43.js').then(function (n) { return n.A; });
try {
const server = await createServer({
root,
base: options.base,
mode: options.mode,
configFile: options.config,
logLevel: options.logLevel,
clearScreen: options.clearScreen,
optimizeDeps: { force: options.force },
server: cleanOptions(options),
});
if (!server.httpServer) {
throw new Error('HTTP server not available');
}
await server.listen();
const info = server.config.logger.info;
const viteStartTime = global.__vite_start_time ?? false;
const startupDurationString = viteStartTime
? colors.dim(`ready in ${colors.reset(colors.bold(Math.ceil(performance.now() - viteStartTime)))} ms`)
: '';
const hasExistingLogs = process.stdout.bytesWritten > 0 || process.stderr.bytesWritten > 0;
info(`\n ${colors.green(`${colors.bold('VITE')} v${VERSION}`)} ${startupDurationString}\n`, {
clear: !hasExistingLogs,
});
server.printUrls();
const customShortcuts = [];
if (profileSession) {
customShortcuts.push({
key: 'p',
description: 'start/stop the profiler',
async action(server) {
if (profileSession) {
await stopProfiler(server.config.logger.info);
}
else {
const inspector = await import('node:inspector').then((r) => r.default);
await new Promise((res) => {
profileSession = new inspector.Session();
profileSession.connect();
profileSession.post('Profiler.enable', () => {
profileSession.post('Profiler.start', () => {
server.config.logger.info('Profiler started');
res();
});
});
});
}
},
cli.option("-c, --config <file>", `[string] use specified config file`).option("--base <path>", `[string] public base path (default: /)`, {
type: [convertBase]
}).option("-l, --logLevel <level>", `[string] info | warn | error | silent`).option("--clearScreen", `[boolean] allow/disable clear screen when logging`).option(
"--configLoader <loader>",
`[string] use 'bundle' to bundle the config with esbuild, or 'runner' (experimental) to process it on the fly, or 'native' (experimental) to load using the native runtime (default: bundle)`
).option("-d, --debug [feat]", `[string | boolean] show debug logs`).option("-f, --filter <filter>", `[string] filter debug logs`).option("-m, --mode <mode>", `[string] set env mode`);
cli.command("[root]", "start dev server").alias("serve").alias("dev").option("--host [host]", `[string] specify hostname`, { type: [convertHost] }).option("--port <port>", `[number] specify port`).option("--open [path]", `[boolean | string] open browser on startup`).option("--cors", `[boolean] enable CORS`).option("--strictPort", `[boolean] exit if specified port is already in use`).option(
"--force",
`[boolean] force the optimizer to ignore the cache and re-bundle`
).action(async (root, options) => {
filterDuplicateOptions(options);
const { createServer } = await import('./chunks/dep-Bid9ssRr.js').then(function (n) { return n.S; });
try {
const server = await createServer({
root,
base: options.base,
mode: options.mode,
configFile: options.config,
configLoader: options.configLoader,
logLevel: options.logLevel,
clearScreen: options.clearScreen,
server: cleanGlobalCLIOptions(options),
forceOptimizeDeps: options.force
});
if (!server.httpServer) {
throw new Error("HTTP server not available");
}
await server.listen();
const info = server.config.logger.info;
const modeString = options.mode && options.mode !== "development" ? ` ${colors.bgGreen(` ${colors.bold(options.mode)} `)}` : "";
const viteStartTime = global.__vite_start_time ?? false;
const startupDurationString = viteStartTime ? colors.dim(
`ready in ${colors.reset(
colors.bold(Math.ceil(performance.now() - viteStartTime))
)} ms`
) : "";
const hasExistingLogs = process.stdout.bytesWritten > 0 || process.stderr.bytesWritten > 0;
info(
`
${colors.green(
`${colors.bold("VITE")} v${VERSION}`
)}${modeString} ${startupDurationString}
`,
{
clear: !hasExistingLogs
}
);
server.printUrls();
const customShortcuts = [];
if (profileSession) {
customShortcuts.push({
key: "p",
description: "start/stop the profiler",
async action(server2) {
if (profileSession) {
await stopProfiler(server2.config.logger.info);
} else {
const inspector = await import('node:inspector').then(
(r) => r.default
);
await new Promise((res) => {
profileSession = new inspector.Session();
profileSession.connect();
profileSession.post("Profiler.enable", () => {
profileSession.post("Profiler.start", () => {
server2.config.logger.info("Profiler started");
res();
});
});
});
}
}
server.bindCLIShortcuts({ print: true, customShortcuts });
}
catch (e) {
const logger = createLogger(options.logLevel);
logger.error(colors.red(`error when starting dev server:\n${e.stack}`), {
error: e,
});
stopProfiler(logger.info);
process.exit(1);
});
}
server.bindCLIShortcuts({ print: true, customShortcuts });
} catch (e) {
const logger = createLogger(options.logLevel);
logger.error(colors.red(`error when starting dev server:
${e.stack}`), {
error: e
});
stopProfiler(logger.info);
process.exit(1);
}
});
// build
cli
.command('build [root]', 'build for production')
.option('--target <target>', `[string] transpile target (default: 'modules')`)
.option('--outDir <dir>', `[string] output directory (default: dist)`)
.option('--assetsDir <dir>', `[string] directory under outDir to place assets in (default: assets)`)
.option('--assetsInlineLimit <number>', `[number] static asset base64 inline threshold in bytes (default: 4096)`)
.option('--ssr [entry]', `[string] build specified entry for server-side rendering`)
.option('--sourcemap [output]', `[boolean | "inline" | "hidden"] output source maps for build (default: false)`)
.option('--minify [minifier]', `[boolean | "terser" | "esbuild"] enable/disable minification, ` +
`or specify minifier to use (default: esbuild)`)
.option('--manifest [name]', `[boolean | string] emit build manifest json`)
.option('--ssrManifest [name]', `[boolean | string] emit ssr manifest json`)
.option('--force', `[boolean] force the optimizer to ignore the cache and re-bundle (experimental)`)
.option('--emptyOutDir', `[boolean] force empty outDir when it's outside of root`)
.option('-w, --watch', `[boolean] rebuilds when modules have changed on disk`)
.action(async (root, options) => {
cli.command("build [root]", "build for production").option("--target <target>", `[string] transpile target (default: 'modules')`).option("--outDir <dir>", `[string] output directory (default: dist)`).option(
"--assetsDir <dir>",
`[string] directory under outDir to place assets in (default: assets)`
).option(
"--assetsInlineLimit <number>",
`[number] static asset base64 inline threshold in bytes (default: 4096)`
).option(
"--ssr [entry]",
`[string] build specified entry for server-side rendering`
).option(
"--sourcemap [output]",
`[boolean | "inline" | "hidden"] output source maps for build (default: false)`
).option(
"--minify [minifier]",
`[boolean | "terser" | "esbuild"] enable/disable minification, or specify minifier to use (default: esbuild)`
).option("--manifest [name]", `[boolean | string] emit build manifest json`).option("--ssrManifest [name]", `[boolean | string] emit ssr manifest json`).option(
"--emptyOutDir",
`[boolean] force empty outDir when it's outside of root`
).option("-w, --watch", `[boolean] rebuilds when modules have changed on disk`).option("--app", `[boolean] same as \`builder: {}\``).action(
async (root, options) => {
filterDuplicateOptions(options);
const { build } = await import('./chunks/dep-9A4-l-43.js').then(function (n) { return n.C; });
const buildOptions = cleanOptions(options);
const { createBuilder } = await import('./chunks/dep-Bid9ssRr.js').then(function (n) { return n.T; });
const buildOptions = cleanGlobalCLIOptions(
cleanBuilderCLIOptions(options)
);
try {
await build({
root,
base: options.base,
mode: options.mode,
configFile: options.config,
logLevel: options.logLevel,
clearScreen: options.clearScreen,
optimizeDeps: { force: options.force },
build: buildOptions,
});
const inlineConfig = {
root,
base: options.base,
mode: options.mode,
configFile: options.config,
configLoader: options.configLoader,
logLevel: options.logLevel,
clearScreen: options.clearScreen,
build: buildOptions,
...options.app ? { builder: {} } : {}
};
const builder = await createBuilder(inlineConfig, null);
await builder.buildApp();
} catch (e) {
createLogger(options.logLevel).error(
colors.red(`error during build:
${e.stack}`),
{ error: e }
);
process.exit(1);
} finally {
stopProfiler((message) => createLogger(options.logLevel).info(message));
}
catch (e) {
createLogger(options.logLevel).error(colors.red(`error during build:\n${e.stack}`), { error: e });
process.exit(1);
}
finally {
stopProfiler((message) => createLogger(options.logLevel).info(message));
}
});
// optimize
cli
.command('optimize [root]', 'pre-bundle dependencies')
.option('--force', `[boolean] force the optimizer to ignore the cache and re-bundle`)
.action(async (root, options) => {
}
);
cli.command(
"optimize [root]",
"pre-bundle dependencies (deprecated, the pre-bundle process runs automatically and does not need to be called)"
).option(
"--force",
`[boolean] force the optimizer to ignore the cache and re-bundle`
).action(
async (root, options) => {
filterDuplicateOptions(options);
const { optimizeDeps } = await import('./chunks/dep-9A4-l-43.js').then(function (n) { return n.B; });
const { optimizeDeps } = await import('./chunks/dep-Bid9ssRr.js').then(function (n) { return n.R; });
try {
const config = await resolveConfig({
root,
base: options.base,
configFile: options.config,
logLevel: options.logLevel,
mode: options.mode,
}, 'serve');
await optimizeDeps(config, options.force, true);
const config = await resolveConfig(
{
root,
base: options.base,
configFile: options.config,
configLoader: options.configLoader,
logLevel: options.logLevel,
mode: options.mode
},
"serve"
);
await optimizeDeps(config, options.force, true);
} catch (e) {
createLogger(options.logLevel).error(
colors.red(`error when optimizing deps:
${e.stack}`),
{ error: e }
);
process.exit(1);
}
catch (e) {
createLogger(options.logLevel).error(colors.red(`error when optimizing deps:\n${e.stack}`), { error: e });
process.exit(1);
}
});
// preview
cli
.command('preview [root]', 'locally preview production build')
.option('--host [host]', `[string] specify hostname`, { type: [convertHost] })
.option('--port <port>', `[number] specify port`)
.option('--strictPort', `[boolean] exit if specified port is already in use`)
.option('--open [path]', `[boolean | string] open browser on startup`)
.option('--outDir <dir>', `[string] output directory (default: dist)`)
.action(async (root, options) => {
}
);
cli.command("preview [root]", "locally preview production build").option("--host [host]", `[string] specify hostname`, { type: [convertHost] }).option("--port <port>", `[number] specify port`).option("--strictPort", `[boolean] exit if specified port is already in use`).option("--open [path]", `[boolean | string] open browser on startup`).option("--outDir <dir>", `[string] output directory (default: dist)`).action(
async (root, options) => {
filterDuplicateOptions(options);
const { preview } = await import('./chunks/dep-9A4-l-43.js').then(function (n) { return n.D; });
const { preview } = await import('./chunks/dep-Bid9ssRr.js').then(function (n) { return n.U; });
try {
const server = await preview({
root,
base: options.base,
configFile: options.config,
logLevel: options.logLevel,
mode: options.mode,
build: {
outDir: options.outDir,
},
preview: {
port: options.port,
strictPort: options.strictPort,
host: options.host,
open: options.open,
},
});
server.printUrls();
server.bindCLIShortcuts({ print: true });
const server = await preview({
root,
base: options.base,
configFile: options.config,
configLoader: options.configLoader,
logLevel: options.logLevel,
mode: options.mode,
build: {
outDir: options.outDir
},
preview: {
port: options.port,
strictPort: options.strictPort,
host: options.host,
open: options.open
}
});
server.printUrls();
server.bindCLIShortcuts({ print: true });
} catch (e) {
createLogger(options.logLevel).error(
colors.red(`error when starting preview server:
${e.stack}`),
{ error: e }
);
process.exit(1);
} finally {
stopProfiler((message) => createLogger(options.logLevel).info(message));
}
catch (e) {
createLogger(options.logLevel).error(colors.red(`error when starting preview server:\n${e.stack}`), { error: e });
process.exit(1);
}
finally {
stopProfiler((message) => createLogger(options.logLevel).info(message));
}
});
}
);
cli.help();
cli.version(VERSION);
cli.parse();


@@ -2,126 +2,147 @@ import path, { resolve } from 'node:path';
import { fileURLToPath } from 'node:url';
import { readFileSync } from 'node:fs';
const { version } = JSON.parse(readFileSync(new URL('../../package.json', import.meta.url)).toString());
const { version } = JSON.parse(
readFileSync(new URL("../../package.json", import.meta.url)).toString()
);
const ROLLUP_HOOKS = [
"options",
"buildStart",
"buildEnd",
"renderStart",
"renderError",
"renderChunk",
"writeBundle",
"generateBundle",
"banner",
"footer",
"augmentChunkHash",
"outputOptions",
"renderDynamicImport",
"resolveFileUrl",
"resolveImportMeta",
"intro",
"outro",
"closeBundle",
"closeWatcher",
"load",
"moduleParsed",
"watchChange",
"resolveDynamicImport",
"resolveId",
"shouldTransformCachedModule",
"transform",
"onLog"
];
const VERSION = version;
const DEFAULT_MAIN_FIELDS = [
'browser',
'module',
'jsnext:main',
'jsnext',
"browser",
"module",
"jsnext:main",
// moment still uses this...
"jsnext"
];
// Baseline support browserslist
// "defaults and supports es6-module and supports es6-module-dynamic-import"
// Higher browser versions may be needed for extra features.
const DEFAULT_CLIENT_MAIN_FIELDS = Object.freeze(DEFAULT_MAIN_FIELDS);
const DEFAULT_SERVER_MAIN_FIELDS = Object.freeze(
DEFAULT_MAIN_FIELDS.filter((f) => f !== "browser")
);
const DEV_PROD_CONDITION = `development|production`;
const DEFAULT_CONDITIONS = ["module", "browser", "node", DEV_PROD_CONDITION];
const DEFAULT_CLIENT_CONDITIONS = Object.freeze(
DEFAULT_CONDITIONS.filter((c) => c !== "node")
);
const DEFAULT_SERVER_CONDITIONS = Object.freeze(
DEFAULT_CONDITIONS.filter((c) => c !== "browser")
);
const ESBUILD_MODULES_TARGET = [
"es2020",
"edge88",
"firefox78",
"chrome87",
"safari14"
];
const DEFAULT_CONFIG_FILES = [
"vite.config.js",
"vite.config.mjs",
"vite.config.ts",
"vite.config.cjs",
"vite.config.mts",
"vite.config.cts"
];
const JS_TYPES_RE = /\.(?:j|t)sx?$|\.mjs$/;
const CSS_LANGS_RE = /\.(css|less|sass|scss|styl|stylus|pcss|postcss|sss)(?:$|\?)/;
const OPTIMIZABLE_ENTRY_RE = /\.[cm]?[jt]s$/;
const SPECIAL_QUERY_RE = /[?&](?:worker|sharedworker|raw|url)\b/;
/**
* Prefix for resolved fs paths, since windows paths may not be valid as URLs.
*/
const FS_PREFIX = `/@fs/`;
/**
* Prefix for resolved Ids that are not valid browser import specifiers
*/
const VALID_ID_PREFIX = `/@id/`;
/**
* Plugins that use 'virtual modules' (e.g. for helper functions), prefix the
* module ID with `\0`, a convention from the rollup ecosystem.
* This prevents other plugins from trying to process the id (like node resolution),
* and core features like sourcemaps can use this info to differentiate between
* virtual modules and regular files.
* `\0` is not a permitted char in import URLs so we have to replace them during
* import analysis. The id will be decoded back before entering the plugins pipeline.
* These encoded virtual ids are also prefixed by the VALID_ID_PREFIX, so virtual
* modules in the browser end up encoded as `/@id/__x00__{id}`
*/
const NULL_BYTE_PLACEHOLDER = `__x00__`;
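// Illustrative sketch (not part of this bundle) of how the prefixes above combine for a
// virtual module id; the concrete id below is a hypothetical example.
//   const id = "\0virtual:my-helpers";
//   const browserUrl = VALID_ID_PREFIX + id.replace("\0", NULL_BYTE_PLACEHOLDER);
//   // => "/@id/__x00__virtual:my-helpers"
//   // Import analysis reverses the replacement before the id re-enters the plugin pipeline.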
const CLIENT_PUBLIC_PATH = `/@vite/client`;
const ENV_PUBLIC_PATH = `/@vite/env`;
const VITE_PACKAGE_DIR = resolve(
// import.meta.url is `dist/node/constants.js` after bundle
fileURLToPath(import.meta.url),
"../../.."
);
const CLIENT_ENTRY = resolve(VITE_PACKAGE_DIR, "dist/client/client.mjs");
const ENV_ENTRY = resolve(VITE_PACKAGE_DIR, "dist/client/env.mjs");
const CLIENT_DIR = path.dirname(CLIENT_ENTRY);
// ** READ THIS ** before editing `KNOWN_ASSET_TYPES`.
// If you add an asset to `KNOWN_ASSET_TYPES`, make sure to also add it
// to the TypeScript declaration file `packages/vite/client.d.ts` and
// add a mime type to the `registerCustomMime` in
// `packages/vite/src/node/plugin/assets.ts` if mime type cannot be
// looked up by mrmime.
const KNOWN_ASSET_TYPES = [
// images
"apng",
"bmp",
"png",
"jpe?g",
"jfif",
"pjpeg",
"pjp",
"gif",
"svg",
"ico",
"webp",
"avif",
"cur",
"jxl",
// media
"mp4",
"webm",
"ogg",
"mp3",
"wav",
"flac",
"aac",
"opus",
"mov",
"m4a",
"vtt",
// fonts
"woff2?",
"eot",
"ttf",
"otf",
// other
"webmanifest",
"pdf",
"txt"
];
const DEFAULT_ASSETS_RE = new RegExp(
`\\.(` + KNOWN_ASSET_TYPES.join("|") + `)(\\?.*)?$`
);
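// Quick illustration of what DEFAULT_ASSETS_RE accepts (the URLs are assumed examples):
//   DEFAULT_ASSETS_RE.test("/img/logo.png")        // true
//   DEFAULT_ASSETS_RE.test("/img/logo.png?inline") // true, a query string is allowed
//   DEFAULT_ASSETS_RE.test("/src/main.ts")         // false, "ts" is not in KNOWN_ASSET_TYPES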
const DEP_VERSION_RE = /[?&](v=[\w.-]+)\b/;
const loopbackHosts = /* @__PURE__ */ new Set([
"localhost",
"127.0.0.1",
"::1",
"0000:0000:0000:0000:0000:0000:0000:0001"
]);
const wildcardHosts = /* @__PURE__ */ new Set([
"0.0.0.0",
"::",
"0000:0000:0000:0000:0000:0000:0000:0000"
]);
const DEFAULT_DEV_PORT = 5173;
const DEFAULT_PREVIEW_PORT = 4173;
const DEFAULT_ASSETS_INLINE_LIMIT = 4096;
const defaultAllowedOrigins = /^https?:\/\/(?:(?:[^:]+\.)?localhost|127\.0\.0\.1|\[::1\])(?::\d+)?$/;
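// Illustration of what the defaultAllowedOrigins pattern above matches (example origins only):
//   defaultAllowedOrigins.test("http://localhost:5173")  // true
//   defaultAllowedOrigins.test("http://127.0.0.1:4173")  // true
//   defaultAllowedOrigins.test("https://example.com")    // false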
const METADATA_FILENAME = "_metadata.json";
const ERR_OPTIMIZE_DEPS_PROCESSING_ERROR = "ERR_OPTIMIZE_DEPS_PROCESSING_ERROR";
const ERR_FILE_NOT_FOUND_IN_OPTIMIZED_DEP_DIR = "ERR_FILE_NOT_FOUND_IN_OPTIMIZED_DEP_DIR";
export { CLIENT_DIR, CLIENT_ENTRY, CLIENT_PUBLIC_PATH, CSS_LANGS_RE, DEFAULT_ASSETS_INLINE_LIMIT, DEFAULT_ASSETS_RE, DEFAULT_CLIENT_CONDITIONS, DEFAULT_CLIENT_MAIN_FIELDS, DEFAULT_CONFIG_FILES, DEFAULT_DEV_PORT, DEFAULT_PREVIEW_PORT, DEFAULT_SERVER_CONDITIONS, DEFAULT_SERVER_MAIN_FIELDS, DEP_VERSION_RE, DEV_PROD_CONDITION, ENV_ENTRY, ENV_PUBLIC_PATH, ERR_FILE_NOT_FOUND_IN_OPTIMIZED_DEP_DIR, ERR_OPTIMIZE_DEPS_PROCESSING_ERROR, ESBUILD_MODULES_TARGET, FS_PREFIX, JS_TYPES_RE, KNOWN_ASSET_TYPES, METADATA_FILENAME, OPTIMIZABLE_ENTRY_RE, ROLLUP_HOOKS, SPECIAL_QUERY_RE, VERSION, VITE_PACKAGE_DIR, defaultAllowedOrigins, loopbackHosts, wildcardHosts };

node_modules/vite/dist/node/index.d.ts generated vendored

File diff suppressed because it is too large

node_modules/vite/dist/node/index.js generated vendored

@@ -1,160 +1,153 @@
export { parseAst, parseAstAsync } from 'rollup/parseAst';
import { a as arraify, i as isInNodeModules } from './chunks/dep-Bid9ssRr.js';
export { B as BuildEnvironment, D as DevEnvironment, f as build, m as buildErrorMessage, g as createBuilder, F as createFilter, h as createIdResolver, I as createLogger, n as createRunnableDevEnvironment, c as createServer, y as createServerHotChannel, w as createServerModuleRunner, x as createServerModuleRunnerTransport, d as defineConfig, v as fetchModule, j as formatPostcssSourceMap, L as isFileLoadingAllowed, K as isFileServingAllowed, q as isRunnableDevEnvironment, l as loadConfigFromFile, M as loadEnv, E as mergeAlias, C as mergeConfig, z as moduleRunnerTransform, A as normalizePath, o as optimizeDeps, p as perEnvironmentPlugin, b as perEnvironmentState, k as preprocessCSS, e as preview, r as resolveConfig, N as resolveEnvPrefix, G as rollupVersion, u as runnerImport, J as searchForWorkspaceRoot, H as send, s as sortUserPlugins, t as transformWithEsbuild } from './chunks/dep-Bid9ssRr.js';
export { defaultAllowedOrigins, DEFAULT_CLIENT_CONDITIONS as defaultClientConditions, DEFAULT_CLIENT_MAIN_FIELDS as defaultClientMainFields, DEFAULT_SERVER_CONDITIONS as defaultServerConditions, DEFAULT_SERVER_MAIN_FIELDS as defaultServerMainFields, VERSION as version } from './constants.js';
export { version as esbuildVersion } from 'esbuild';
import 'node:fs';
import 'node:fs/promises';
import 'node:path';
import 'node:url';
import 'node:util';
import 'node:perf_hooks';
import 'node:module';
import 'tty';
import 'node:crypto';
import 'path';
import 'fs';
import 'events';
import 'assert';
import 'node:child_process';
import 'node:http';
import 'node:https';
import 'util';
import 'net';
import 'url';
import 'http';
import 'stream';
import 'os';
import 'child_process';
import 'node:os';
import 'node:net';
import 'node:dns';
import 'crypto';
import 'module';
import 'node:assert';
import 'node:process';
import 'node:v8';
import 'vite/module-runner';
import 'node:buffer';
import 'querystring';
import 'node:readline';
import 'node:events';
import 'zlib';
import 'buffer';
import 'node:worker_threads';
import 'https';
import 'tls';
import 'node:querystring';
import 'node:zlib';
import 'worker_threads';
// This file will be built for both ESM and CJS. Avoid relying on other modules as possible.
// copy from constants.ts
const CSS_LANGS_RE = (
// eslint-disable-next-line regexp/no-unused-capturing-group
/\.(css|less|sass|scss|styl|stylus|pcss|postcss|sss)(?:$|\?)/
);
const isCSSRequest = (request) => CSS_LANGS_RE.test(request);
// Use splitVendorChunkPlugin() to get the same manualChunks strategy as Vite 2.7
// We don't recommend using this strategy as a general solution moving forward
// splitVendorChunk is a simple index/vendor strategy that was used in Vite
// until v2.8. It is exposed to let people continue to use it in case it was
// working well for their setups.
// The cache needs to be reset on buildStart for watch mode to work correctly
// Don't use this manualChunks strategy for ssr, lib mode, and 'umd' or 'iife'
class SplitVendorChunkCache {
cache;
constructor() {
this.cache = /* @__PURE__ */ new Map();
}
reset() {
this.cache = /* @__PURE__ */ new Map();
}
}
function splitVendorChunk(options = {}) {
const cache = options.cache ?? new SplitVendorChunkCache();
return (id, { getModuleInfo }) => {
if (isInNodeModules(id) && !isCSSRequest(id) && staticImportedByEntry(id, getModuleInfo, cache.cache)) {
return "vendor";
}
};
}
function staticImportedByEntry(id, getModuleInfo, cache, importStack = []) {
if (cache.has(id)) {
return cache.get(id);
}
if (importStack.includes(id)) {
cache.set(id, false);
return false;
}
const mod = getModuleInfo(id);
if (!mod) {
cache.set(id, false);
return false;
}
if (mod.isEntry) {
cache.set(id, true);
return true;
}
const someImporterIs = mod.importers.some(
(importer) => staticImportedByEntry(
importer,
getModuleInfo,
cache,
importStack.concat(id)
)
);
cache.set(id, someImporterIs);
return someImporterIs;
}
function splitVendorChunkPlugin() {
const caches = [];
function createSplitVendorChunk(output, config) {
const cache = new SplitVendorChunkCache();
caches.push(cache);
const build = config.build ?? {};
const format = output.format;
if (!build.ssr && !build.lib && format !== "umd" && format !== "iife") {
return splitVendorChunk({ cache });
}
  }
return {
name: "vite:split-vendor-chunk",
config(config) {
let outputs = config.build?.rollupOptions?.output;
if (outputs) {
outputs = arraify(outputs);
for (const output of outputs) {
const viteManualChunks = createSplitVendorChunk(output, config);
if (viteManualChunks) {
if (output.manualChunks) {
if (typeof output.manualChunks === "function") {
const userManualChunks = output.manualChunks;
output.manualChunks = (id, api) => {
return userManualChunks(id, api) ?? viteManualChunks(id, api);
};
} else {
console.warn(
"(!) the `splitVendorChunk` plugin doesn't have any effect when using the object form of `build.rollupOptions.output.manualChunks`. Consider using the function form instead."
);
}
} else {
output.manualChunks = viteManualChunks;
}
          }
        }
} else {
return {
build: {
rollupOptions: {
output: {
manualChunks: createSplitVendorChunk({}, config)
}
}
}
};
}
},
buildStart() {
caches.forEach((cache) => cache.reset());
}
};
}
export { isCSSRequest, splitVendorChunk, splitVendorChunkPlugin };
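// Usage sketch (not part of this file): how a user config might opt back into the legacy
// index/vendor split described above. The config shape is a minimal assumption.
//   // vite.config.js
//   import { defineConfig, splitVendorChunkPlugin } from "vite";
//   export default defineConfig({
//     plugins: [splitVendorChunkPlugin()],
//   });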

node_modules/vite/dist/node/module-runner.d.ts generated vendored Normal file

@@ -0,0 +1,288 @@
import { ModuleNamespace, ViteHotContext } from '../../types/hot.js';
import { Update, HotPayload } from '../../types/hmrPayload.js';
import { InferCustomEventPayload } from '../../types/customEvent.js';
import { N as NormalizedModuleRunnerTransport, E as ExternalFetchResult, V as ViteFetchResult, M as ModuleRunnerTransport, F as FetchFunctionOptions, a as FetchResult } from './moduleRunnerTransport.d-CXw_Ws6P.js';
export { b as ModuleRunnerTransportHandlers, c as createWebSocketModuleRunnerTransport } from './moduleRunnerTransport.d-CXw_Ws6P.js';
interface SourceMapLike {
version: number;
mappings?: string;
names?: string[];
sources?: string[];
sourcesContent?: string[];
}
declare class DecodedMap {
map: SourceMapLike;
_encoded: string;
_decoded: undefined | number[][][];
_decodedMemo: Stats;
url: string;
version: number;
names: string[];
resolvedSources: string[];
constructor(map: SourceMapLike, from: string);
}
interface Stats {
lastKey: number;
lastNeedle: number;
lastIndex: number;
}
type CustomListenersMap = Map<string, ((data: any) => void)[]>;
interface HotModule {
id: string;
callbacks: HotCallback[];
}
interface HotCallback {
deps: string[];
fn: (modules: Array<ModuleNamespace | undefined>) => void;
}
interface HMRLogger {
error(msg: string | Error): void;
debug(...msg: unknown[]): void;
}
declare class HMRClient {
logger: HMRLogger;
private transport;
private importUpdatedModule;
hotModulesMap: Map<string, HotModule>;
disposeMap: Map<string, (data: any) => void | Promise<void>>;
pruneMap: Map<string, (data: any) => void | Promise<void>>;
dataMap: Map<string, any>;
customListenersMap: CustomListenersMap;
ctxToListenersMap: Map<string, CustomListenersMap>;
constructor(logger: HMRLogger, transport: NormalizedModuleRunnerTransport, importUpdatedModule: (update: Update) => Promise<ModuleNamespace>);
notifyListeners<T extends string>(event: T, data: InferCustomEventPayload<T>): Promise<void>;
send(payload: HotPayload): void;
clear(): void;
prunePaths(paths: string[]): Promise<void>;
protected warnFailedUpdate(err: Error, path: string | string[]): void;
private updateQueue;
private pendingUpdateQueue;
/**
* buffer multiple hot updates triggered by the same src change
* so that they are invoked in the same order they were sent.
* (otherwise the order may be inconsistent because of the http request round trip)
*/
queueUpdate(payload: Update): Promise<void>;
private fetchUpdate;
}
interface DefineImportMetadata {
/**
* Imported names before being transformed to `ssrImportKey`
*
* import foo, { bar as baz, qux } from 'hello'
* => ['default', 'bar', 'qux']
*
 * import * as namespace from 'world'
* => undefined
*/
importedNames?: string[];
}
interface SSRImportMetadata extends DefineImportMetadata {
isDynamicImport?: boolean;
}
declare const ssrModuleExportsKey = "__vite_ssr_exports__";
declare const ssrImportKey = "__vite_ssr_import__";
declare const ssrDynamicImportKey = "__vite_ssr_dynamic_import__";
declare const ssrExportAllKey = "__vite_ssr_exportAll__";
declare const ssrImportMetaKey = "__vite_ssr_import_meta__";
interface ModuleRunnerDebugger {
(formatter: unknown, ...args: unknown[]): void;
}
declare class ModuleRunner {
options: ModuleRunnerOptions;
evaluator: ModuleEvaluator;
private debug?;
evaluatedModules: EvaluatedModules;
hmrClient?: HMRClient;
private readonly envProxy;
private readonly transport;
private readonly resetSourceMapSupport?;
private readonly concurrentModuleNodePromises;
private closed;
constructor(options: ModuleRunnerOptions, evaluator?: ModuleEvaluator, debug?: ModuleRunnerDebugger | undefined);
/**
* URL to execute. Accepts file path, server path or id relative to the root.
*/
import<T = any>(url: string): Promise<T>;
/**
* Clear all caches including HMR listeners.
*/
clearCache(): void;
/**
* Clears all caches, removes all HMR listeners, and resets source map support.
* This method doesn't stop the HMR connection.
*/
close(): Promise<void>;
/**
* Returns `true` if the runtime has been closed by calling `close()` method.
*/
isClosed(): boolean;
private processImport;
private isCircularModule;
private isCircularImport;
private cachedRequest;
private cachedModule;
private getModuleInformation;
protected directRequest(url: string, mod: EvaluatedModuleNode, _callstack: string[]): Promise<any>;
}
interface RetrieveFileHandler {
(path: string): string | null | undefined | false;
}
interface RetrieveSourceMapHandler {
(path: string): null | {
url: string;
map: any;
};
}
interface InterceptorOptions {
retrieveFile?: RetrieveFileHandler;
retrieveSourceMap?: RetrieveSourceMapHandler;
}
interface ModuleRunnerImportMeta extends ImportMeta {
url: string;
env: ImportMetaEnv;
hot?: ViteHotContext;
[key: string]: any;
}
interface ModuleRunnerContext {
[ssrModuleExportsKey]: Record<string, any>;
[ssrImportKey]: (id: string, metadata?: DefineImportMetadata) => Promise<any>;
[ssrDynamicImportKey]: (id: string, options?: ImportCallOptions) => Promise<any>;
[ssrExportAllKey]: (obj: any) => void;
[ssrImportMetaKey]: ModuleRunnerImportMeta;
}
interface ModuleEvaluator {
/**
* Number of prefixed lines in the transformed code.
*/
startOffset?: number;
/**
* Run code that was transformed by Vite.
* @param context Function context
* @param code Transformed code
* @param module The module node
*/
runInlinedModule(context: ModuleRunnerContext, code: string, module: Readonly<EvaluatedModuleNode>): Promise<any>;
/**
* Run externalized module.
* @param file File URL to the external module
*/
runExternalModule(file: string): Promise<any>;
}
type ResolvedResult = (ExternalFetchResult | ViteFetchResult) & {
url: string;
id: string;
};
type FetchFunction = (id: string, importer?: string, options?: FetchFunctionOptions) => Promise<FetchResult>;
interface ModuleRunnerHmr {
/**
* Configure HMR logger.
*/
logger?: false | HMRLogger;
}
interface ModuleRunnerOptions {
/**
* Root of the project
* @deprecated not used and to be removed
*/
root?: string;
/**
* A set of methods to communicate with the server.
*/
transport: ModuleRunnerTransport;
/**
* Configure how source maps are resolved. Prefers `node` if `process.setSourceMapsEnabled` is available.
* Otherwise it will use `prepareStackTrace` by default which overrides `Error.prepareStackTrace` method.
* You can provide an object to configure how file contents and source maps are resolved for files that were not processed by Vite.
*/
sourcemapInterceptor?: false | 'node' | 'prepareStackTrace' | InterceptorOptions;
/**
* Disable HMR or configure HMR options.
*
* @default true
*/
hmr?: boolean | ModuleRunnerHmr;
/**
* Custom module cache. If not provided, creates a separate module cache for each ModuleRunner instance.
*/
evaluatedModules?: EvaluatedModules;
}
interface ImportMetaEnv {
[key: string]: any;
BASE_URL: string;
MODE: string;
DEV: boolean;
PROD: boolean;
SSR: boolean;
}
declare class EvaluatedModuleNode {
id: string;
url: string;
importers: Set<string>;
imports: Set<string>;
evaluated: boolean;
meta: ResolvedResult | undefined;
promise: Promise<any> | undefined;
exports: any | undefined;
file: string;
map: DecodedMap | undefined;
constructor(id: string, url: string);
}
declare class EvaluatedModules {
readonly idToModuleMap: Map<string, EvaluatedModuleNode>;
readonly fileToModulesMap: Map<string, Set<EvaluatedModuleNode>>;
readonly urlToIdModuleMap: Map<string, EvaluatedModuleNode>;
/**
* Returns the module node by the resolved module ID. Usually, module ID is
* the file system path with query and/or hash. It can also be a virtual module.
*
* Module runner graph will have 1 to 1 mapping with the server module graph.
* @param id Resolved module ID
*/
getModuleById(id: string): EvaluatedModuleNode | undefined;
/**
* Returns all modules related to the file system path. Different modules
* might have different query parameters or hash, so it's possible to have
* multiple modules for the same file.
* @param file The file system path of the module
*/
getModulesByFile(file: string): Set<EvaluatedModuleNode> | undefined;
/**
* Returns the module node by the URL that was used in the import statement.
* Unlike module graph on the server, the URL is not resolved and is used as is.
* @param url Server URL that was used in the import statement
*/
getModuleByUrl(url: string): EvaluatedModuleNode | undefined;
/**
* Ensure that module is in the graph. If the module is already in the graph,
* it will return the existing module node. Otherwise, it will create a new
* module node and add it to the graph.
* @param id Resolved module ID
* @param url URL that was used in the import statement
*/
ensureModule(id: string, url: string): EvaluatedModuleNode;
invalidateModule(node: EvaluatedModuleNode): void;
/**
* Extracts the inlined source map from the module code and returns the decoded
* source map. If the source map is not inlined, it will return null.
* @param id Resolved module ID
*/
getModuleSourceMapById(id: string): DecodedMap | null;
clear(): void;
}
declare class ESModulesEvaluator implements ModuleEvaluator {
readonly startOffset: number;
runInlinedModule(context: ModuleRunnerContext, code: string): Promise<any>;
runExternalModule(filepath: string): Promise<any>;
}
export { ESModulesEvaluator, EvaluatedModuleNode, EvaluatedModules, type FetchFunction, FetchFunctionOptions, FetchResult, type HMRLogger, type InterceptorOptions, type ModuleEvaluator, ModuleRunner, type ModuleRunnerContext, type ModuleRunnerHmr, type ModuleRunnerImportMeta, type ModuleRunnerOptions, ModuleRunnerTransport, type ResolvedResult, type SSRImportMetadata, ssrDynamicImportKey, ssrExportAllKey, ssrImportKey, ssrImportMetaKey, ssrModuleExportsKey };
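// Usage sketch (illustration only; the WebSocket URL and entry path are assumptions):
//   import {
//     ModuleRunner,
//     ESModulesEvaluator,
//     createWebSocketModuleRunnerTransport,
//   } from "vite/module-runner";
//   const runner = new ModuleRunner(
//     {
//       transport: createWebSocketModuleRunnerTransport({
//         createConnection: () => new WebSocket("ws://localhost:5173"),
//       }),
//     },
//     new ESModulesEvaluator(),
//   );
//   const mod = await runner.import("/src/entry.ts");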

node_modules/vite/dist/node/module-runner.js generated vendored Normal file

File diff suppressed because it is too large


@@ -0,0 +1,86 @@
import { HotPayload } from '../../types/hmrPayload.js';
interface FetchFunctionOptions {
cached?: boolean;
startOffset?: number;
}
type FetchResult = CachedFetchResult | ExternalFetchResult | ViteFetchResult;
interface CachedFetchResult {
/**
* If module cached in the runner, we can just confirm
* it wasn't invalidated on the server side.
*/
cache: true;
}
interface ExternalFetchResult {
/**
* The path to the externalized module starting with file://,
* by default this will be imported via a dynamic "import"
* instead of being transformed by vite and loaded with vite runner
*/
externalize: string;
/**
* Type of the module. Will be used to determine if import statement is correct.
* For example, if Vite needs to throw an error if variable is not actually exported
*/
type: 'module' | 'commonjs' | 'builtin' | 'network';
}
interface ViteFetchResult {
/**
* Code that will be evaluated by vite runner
* by default this will be wrapped in an async function
*/
code: string;
/**
* File path of the module on disk.
* This will be resolved as import.meta.url/filename
* Will be equal to `null` for virtual modules
*/
file: string | null;
/**
* Module ID in the server module graph.
*/
id: string;
/**
* Module URL used in the import.
*/
url: string;
/**
* Invalidate module on the client side.
*/
invalidate: boolean;
}
type InvokeMethods = {
fetchModule: (id: string, importer?: string, options?: FetchFunctionOptions) => Promise<FetchResult>;
};
type ModuleRunnerTransportHandlers = {
onMessage: (data: HotPayload) => void;
onDisconnection: () => void;
};
/**
* "send and connect" or "invoke" must be implemented
*/
interface ModuleRunnerTransport {
connect?(handlers: ModuleRunnerTransportHandlers): Promise<void> | void;
disconnect?(): Promise<void> | void;
send?(data: HotPayload): Promise<void> | void;
invoke?(data: HotPayload): Promise<{
result: any;
} | {
error: any;
}>;
timeout?: number;
}
interface NormalizedModuleRunnerTransport {
connect?(onMessage?: (data: HotPayload) => void): Promise<void> | void;
disconnect?(): Promise<void> | void;
send(data: HotPayload): Promise<void>;
invoke<T extends keyof InvokeMethods>(name: T, data: Parameters<InvokeMethods[T]>): Promise<ReturnType<Awaited<InvokeMethods[T]>>>;
}
declare const createWebSocketModuleRunnerTransport: (options: {
createConnection: () => WebSocket;
pingInterval?: number;
}) => Required<Pick<ModuleRunnerTransport, "connect" | "disconnect" | "send">>;
export { type ExternalFetchResult as E, type FetchFunctionOptions as F, type ModuleRunnerTransport as M, type NormalizedModuleRunnerTransport as N, type ViteFetchResult as V, type FetchResult as a, type ModuleRunnerTransportHandlers as b, createWebSocketModuleRunnerTransport as c };
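// Minimal sketch of the "invoke"-only transport shape described above; the bridge
// function is a placeholder assumption, not a real API.
//   const transport: ModuleRunnerTransport = {
//     async invoke(payload) {
//       try {
//         return { result: await sendPayloadToServer(payload) }; // hypothetical bridge to the server
//       } catch (error) {
//         return { error };
//       }
//     },
//   };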