source: imaps-frontend/node_modules/vite/dist/node/chunks/dep-VqAwxVIc.js@ d565449

Last change on this file was d565449, checked in by stefan toskovski <stefantoska84@…>, 4 weeks ago

Update repo after prototype presentation

  • Property mode set to 100644
File size: 22.7 KB
import { B as getDefaultExportFromCjs } from './dep-mCdpKltl.js';
import require$$0 from 'path';
import require$$0__default from 'fs';
import { l as lib } from './dep-IQS-Za7F.js';

import { fileURLToPath as __cjs_fileURLToPath } from 'node:url';
import { dirname as __cjs_dirname } from 'node:path';
import { createRequire as __cjs_createRequire } from 'node:module';

const __filename = __cjs_fileURLToPath(import.meta.url);
const __dirname = __cjs_dirname(__filename);
const require = __cjs_createRequire(import.meta.url);
const __require = require;
function _mergeNamespaces(n, m) {
  for (var i = 0; i < m.length; i++) {
    var e = m[i];
    if (typeof e !== 'string' && !Array.isArray(e)) { for (var k in e) {
      if (k !== 'default' && !(k in n)) {
        n[k] = e[k];
      }
    } }
  }
  return n;
}

var formatImportPrelude$2 = function formatImportPrelude(layer, media, supports) {
  const parts = [];

  if (typeof layer !== "undefined") {
    let layerParams = "layer";
    if (layer) {
      layerParams = `layer(${layer})`;
    }

    parts.push(layerParams);
  }

  if (typeof supports !== "undefined") {
    parts.push(`supports(${supports})`);
  }

  if (typeof media !== "undefined") {
    parts.push(media);
  }

  return parts.join(" ")
};

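// Example (illustrative): formatImportPrelude("base", "screen", "display: flex")
// returns 'layer(base) supports(display: flex) screen'; an empty-string layer
// produces the bare 'layer' keyword instead of 'layer(...)'.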
const formatImportPrelude$1 = formatImportPrelude$2;

// Base64 encode an import with conditions
// The order of conditions is important and is interleaved with cascade layer declarations
// Each group of conditions and cascade layers needs to be interpreted in order
// To achieve this we create a list of base64 encoded imports, where each import contains a stylesheet with another import.
// Each import can define a single group of conditions and a single cascade layer.
var base64EncodedImport = function base64EncodedConditionalImport(prelude, conditions) {
  conditions.reverse();
  const first = conditions.pop();
  let params = `${prelude} ${formatImportPrelude$1(
    first.layer,
    first.media,
    first.supports,
  )}`;

  for (const condition of conditions) {
    params = `'data:text/css;base64,${Buffer.from(`@import ${params}`).toString(
      "base64",
    )}' ${formatImportPrelude$1(
      condition.layer,
      condition.media,
      condition.supports,
    )}`;
  }

  return params
};

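// Example (illustrative): with prelude `'foo.css'` and conditions
// [{ media: "screen" }, { media: "print" }], the first condition stays on the
// innermost import and each following condition wraps it in a new data URL:
//   'data:text/css;base64,<base64 of "@import 'foo.css' screen">' print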
const base64EncodedConditionalImport = base64EncodedImport;

var applyConditions$1 = function applyConditions(bundle, atRule) {
  bundle.forEach(stmt => {
    if (
      stmt.type === "charset" ||
      stmt.type === "warning" ||
      !stmt.conditions?.length
    ) {
      return
    }

    if (stmt.type === "import") {
      stmt.node.params = base64EncodedConditionalImport(
        stmt.fullUri,
        stmt.conditions,
      );
      return
    }

    const { nodes } = stmt;
    const { parent } = nodes[0];

    const atRules = [];

    // Convert conditions to at-rules
    for (const condition of stmt.conditions) {
      if (typeof condition.media !== "undefined") {
        const mediaNode = atRule({
          name: "media",
          params: condition.media,
          source: parent.source,
        });

        atRules.push(mediaNode);
      }

      if (typeof condition.supports !== "undefined") {
        const supportsNode = atRule({
          name: "supports",
          params: `(${condition.supports})`,
          source: parent.source,
        });

        atRules.push(supportsNode);
      }

      if (typeof condition.layer !== "undefined") {
        const layerNode = atRule({
          name: "layer",
          params: condition.layer,
          source: parent.source,
        });

        atRules.push(layerNode);
      }
    }

    // Add nodes to AST
    const outerAtRule = atRules.shift();
    const innerAtRule = atRules.reduce((previous, next) => {
      previous.append(next);
      return next
    }, outerAtRule);

    parent.insertBefore(nodes[0], outerAtRule);

    // remove nodes
    nodes.forEach(node => {
      node.parent = undefined;
    });

    // better output
    nodes[0].raws.before = nodes[0].raws.before || "\n";

    // wrap new rules with media query and/or layer at rule
    innerAtRule.append(nodes);

    stmt.type = "nodes";
    stmt.nodes = [outerAtRule];
    delete stmt.node;
  });
};

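// Example (illustrative): a "nodes" statement carrying conditions
// [{ media: "screen" }, { layer: "base" }] ends up wrapped as
// @media screen { @layer base { ...original nodes... } }.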
var applyRaws$1 = function applyRaws(bundle) {
  bundle.forEach((stmt, index) => {
    if (index === 0) return

    if (stmt.parent) {
      const { before } = stmt.parent.node.raws;
      if (stmt.type === "nodes") stmt.nodes[0].raws.before = before;
      else stmt.node.raws.before = before;
    } else if (stmt.type === "nodes") {
      stmt.nodes[0].raws.before = stmt.nodes[0].raws.before || "\n";
    }
  });
};

var applyStyles$1 = function applyStyles(bundle, styles) {
  styles.nodes = [];

  // Strip additional statements.
  bundle.forEach(stmt => {
    if (["charset", "import"].includes(stmt.type)) {
      stmt.node.parent = undefined;
      styles.append(stmt.node);
    } else if (stmt.type === "nodes") {
      stmt.nodes.forEach(node => {
        node.parent = undefined;
        styles.append(node);
      });
    }
  });
};

var readCache$1 = {exports: {}};

var pify$2 = {exports: {}};

var processFn = function (fn, P, opts) {
  return function () {
    var that = this;
    var args = new Array(arguments.length);

    for (var i = 0; i < arguments.length; i++) {
      args[i] = arguments[i];
    }

    return new P(function (resolve, reject) {
      args.push(function (err, result) {
        if (err) {
          reject(err);
        } else if (opts.multiArgs) {
          var results = new Array(arguments.length - 1);

          for (var i = 1; i < arguments.length; i++) {
            results[i - 1] = arguments[i];
          }

          resolve(results);
        } else {
          resolve(result);
        }
      });

      fn.apply(that, args);
    });
  };
};

var pify$1 = pify$2.exports = function (obj, P, opts) {
  if (typeof P !== 'function') {
    opts = P;
    P = Promise;
  }

  opts = opts || {};
  opts.exclude = opts.exclude || [/.+Sync$/];

  var filter = function (key) {
    var match = function (pattern) {
      return typeof pattern === 'string' ? key === pattern : pattern.test(key);
    };

    return opts.include ? opts.include.some(match) : !opts.exclude.some(match);
  };

  var ret = typeof obj === 'function' ? function () {
    if (opts.excludeMain) {
      return obj.apply(this, arguments);
    }

    return processFn(obj, P, opts).apply(this, arguments);
  } : {};

  return Object.keys(obj).reduce(function (ret, key) {
    var x = obj[key];

    ret[key] = typeof x === 'function' && filter(key) ? processFn(x, P, opts) : x;

    return ret;
  }, ret);
};

pify$1.all = pify$1;

var pifyExports = pify$2.exports;

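// Example (illustrative): pify(fs.readFile)("a.css").then(buf => ...) resolves with the
// callback's result; passing { multiArgs: true } resolves with an array of all callback
// arguments, and object methods matching /.+Sync$/ are left untouched by default.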
var fs = require$$0__default;
var path$3 = require$$0;
var pify = pifyExports;

var stat = pify(fs.stat);
var readFile = pify(fs.readFile);
var resolve = path$3.resolve;

var cache = Object.create(null);

function convert(content, encoding) {
  if (Buffer.isEncoding(encoding)) {
    return content.toString(encoding);
  }
  return content;
}

readCache$1.exports = function (path, encoding) {
  path = resolve(path);

  return stat(path).then(function (stats) {
    var item = cache[path];

    if (item && item.mtime.getTime() === stats.mtime.getTime()) {
      return convert(item.content, encoding);
    }

    return readFile(path).then(function (data) {
      cache[path] = {
        mtime: stats.mtime,
        content: data
      };

      return convert(data, encoding);
    });
  }).catch(function (err) {
    cache[path] = null;
    return Promise.reject(err);
  });
};

readCache$1.exports.sync = function (path, encoding) {
  path = resolve(path);

  try {
    var stats = fs.statSync(path);
    var item = cache[path];

    if (item && item.mtime.getTime() === stats.mtime.getTime()) {
      return convert(item.content, encoding);
    }

    var data = fs.readFileSync(path);

    cache[path] = {
      mtime: stats.mtime,
      content: data
    };

    return convert(data, encoding);
  } catch (err) {
    cache[path] = null;
    throw err;
  }

};

readCache$1.exports.get = function (path, encoding) {
  path = resolve(path);
  if (cache[path]) {
    return convert(cache[path].content, encoding);
  }
  return null;
};

readCache$1.exports.clear = function () {
  cache = Object.create(null);
};

var readCacheExports = readCache$1.exports;

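// Example (illustrative): readCache("./a.css", "utf-8") stats the file and only re-reads
// it when the mtime has changed; .sync(), .get() and .clear() operate on the same
// in-memory cache, which is keyed by absolute path.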
const anyDataURLRegexp = /^data:text\/css(?:;(base64|plain))?,/i;
const base64DataURLRegexp = /^data:text\/css;base64,/i;
const plainDataURLRegexp = /^data:text\/css;plain,/i;

function isValid(url) {
  return anyDataURLRegexp.test(url)
}

function contents(url) {
  if (base64DataURLRegexp.test(url)) {
    // "data:text/css;base64,".length === 21
    return Buffer.from(url.slice(21), "base64").toString()
  }

  if (plainDataURLRegexp.test(url)) {
    // "data:text/css;plain,".length === 20
    return decodeURIComponent(url.slice(20))
  }

  // "data:text/css,".length === 14
  return decodeURIComponent(url.slice(14))
}

var dataUrl = {
  isValid,
  contents,
};

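// Example (illustrative): both forms decode to the same CSS text "a{}":
//   contents("data:text/css;base64," + Buffer.from("a{}").toString("base64"))
//   contents("data:text/css,a%7B%7D")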
const readCache = readCacheExports;
const dataURL$1 = dataUrl;

var loadContent$1 = function loadContent(filename) {
  if (dataURL$1.isValid(filename)) {
    return dataURL$1.contents(filename)
  }

  return readCache(filename, "utf-8")
};

// external tooling
const valueParser = lib;

// extended tooling
const { stringify } = valueParser;

var parseStatements$1 = function parseStatements(result, styles, conditions, from) {
  const statements = [];
  let nodes = [];

  styles.each(node => {
    let stmt;
    if (node.type === "atrule") {
      if (node.name === "import")
        stmt = parseImport(result, node, conditions, from);
      else if (node.name === "charset")
        stmt = parseCharset(result, node, conditions, from);
    }

    if (stmt) {
      if (nodes.length) {
        statements.push({
          type: "nodes",
          nodes,
          conditions: [...conditions],
          from,
        });
        nodes = [];
      }
      statements.push(stmt);
    } else nodes.push(node);
  });

  if (nodes.length) {
    statements.push({
      type: "nodes",
      nodes,
      conditions: [...conditions],
      from,
    });
  }

  return statements
};

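// Example (illustrative): for a stylesheet like
//   @charset "utf-8"; @import url(a.css) screen; body {}
// parseStatements yields a "charset" statement, an "import" statement whose media
// condition is "screen", and a trailing "nodes" statement holding the body rule.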
function parseCharset(result, atRule, conditions, from) {
  if (atRule.prev()) {
    return result.warn("@charset must precede all other statements", {
      node: atRule,
    })
  }
  return {
    type: "charset",
    node: atRule,
    conditions: [...conditions],
    from,
  }
}

function parseImport(result, atRule, conditions, from) {
  let prev = atRule.prev();

  // `@import` statements may follow other `@import` statements.
  if (prev) {
    do {
      if (
        prev.type === "comment" ||
        (prev.type === "atrule" && prev.name === "import")
      ) {
        prev = prev.prev();
        continue
      }

      break
    } while (prev)
  }

  // All `@import` statements may be preceded by `@charset` or `@layer` statements.
  // But the `@import` statements must be consecutive.
  if (prev) {
    do {
      if (
        prev.type === "comment" ||
        (prev.type === "atrule" &&
          (prev.name === "charset" || (prev.name === "layer" && !prev.nodes)))
      ) {
        prev = prev.prev();
        continue
      }

      return result.warn(
        "@import must precede all other statements (besides @charset or empty @layer)",
        { node: atRule },
      )
    } while (prev)
  }

  if (atRule.nodes) {
    return result.warn(
      "It looks like you didn't end your @import statement correctly. " +
        "Child nodes are attached to it.",
      { node: atRule },
    )
  }

  const params = valueParser(atRule.params).nodes;
  const stmt = {
    type: "import",
    uri: "",
    fullUri: "",
    node: atRule,
    conditions: [...conditions],
    from,
  };

  let layer;
  let media;
  let supports;

  for (let i = 0; i < params.length; i++) {
    const node = params[i];

    if (node.type === "space" || node.type === "comment") continue

    if (node.type === "string") {
      if (stmt.uri) {
        return result.warn(`Multiple url's in '${atRule.toString()}'`, {
          node: atRule,
        })
      }

      if (!node.value) {
        return result.warn(`Unable to find uri in '${atRule.toString()}'`, {
          node: atRule,
        })
      }

      stmt.uri = node.value;
      stmt.fullUri = stringify(node);
      continue
    }

    if (node.type === "function" && /^url$/i.test(node.value)) {
      if (stmt.uri) {
        return result.warn(`Multiple url's in '${atRule.toString()}'`, {
          node: atRule,
        })
      }

      if (!node.nodes?.[0]?.value) {
        return result.warn(`Unable to find uri in '${atRule.toString()}'`, {
          node: atRule,
        })
      }

      stmt.uri = node.nodes[0].value;
      stmt.fullUri = stringify(node);
      continue
    }

    if (!stmt.uri) {
      return result.warn(`Unable to find uri in '${atRule.toString()}'`, {
        node: atRule,
      })
    }

    if (
      (node.type === "word" || node.type === "function") &&
      /^layer$/i.test(node.value)
    ) {
      if (typeof layer !== "undefined") {
        return result.warn(`Multiple layers in '${atRule.toString()}'`, {
          node: atRule,
        })
      }

      if (typeof supports !== "undefined") {
        return result.warn(
          `layers must be defined before support conditions in '${atRule.toString()}'`,
          {
            node: atRule,
          },
        )
      }

      if (node.nodes) {
        layer = stringify(node.nodes);
      } else {
        layer = "";
      }

      continue
    }

    if (node.type === "function" && /^supports$/i.test(node.value)) {
      if (typeof supports !== "undefined") {
        return result.warn(
          `Multiple support conditions in '${atRule.toString()}'`,
          {
            node: atRule,
          },
        )
      }

      supports = stringify(node.nodes);

      continue
    }

    media = stringify(params.slice(i));
    break
  }

  if (!stmt.uri) {
    return result.warn(`Unable to find uri in '${atRule.toString()}'`, {
      node: atRule,
    })
  }

  if (
    typeof media !== "undefined" ||
    typeof layer !== "undefined" ||
    typeof supports !== "undefined"
  ) {
    stmt.conditions.push({
      layer,
      media,
      supports,
    });
  }

  return stmt
}

// builtin tooling
const path$2 = require$$0;

// placeholder tooling
let sugarss;

var processContent$1 = function processContent(
  result,
  content,
  filename,
  options,
  postcss,
) {
  const { plugins } = options;
  const ext = path$2.extname(filename);

  const parserList = [];

  // SugarSS support:
  if (ext === ".sss") {
    if (!sugarss) {
      /* c8 ignore next 3 */
      try {
        sugarss = __require('sugarss');
      } catch {} // Ignore
    }
    if (sugarss)
      return runPostcss(postcss, content, filename, plugins, [sugarss])
  }

  // Syntax support:
  if (result.opts.syntax?.parse) {
    parserList.push(result.opts.syntax.parse);
  }

  // Parser support:
  if (result.opts.parser) parserList.push(result.opts.parser);
  // Try the default as a last resort:
  parserList.push(null);

  return runPostcss(postcss, content, filename, plugins, parserList)
};

function runPostcss(postcss, content, filename, plugins, parsers, index) {
  if (!index) index = 0;
  return postcss(plugins)
    .process(content, {
      from: filename,
      parser: parsers[index],
    })
    .catch(err => {
      // If there's an error, try the next parser
      index++;
      // If there are no parsers left, throw it
      if (index === parsers.length) throw err
      return runPostcss(postcss, content, filename, plugins, parsers, index)
    })
}

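// Example (illustrative): with both a custom syntax and a custom parser configured,
// processContent builds the parser list [result.opts.syntax.parse, result.opts.parser, null]
// and runPostcss tries each entry in order, only rethrowing once every parser has failed.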
const path$1 = require$$0;

const dataURL = dataUrl;
const parseStatements = parseStatements$1;
const processContent = processContent$1;
const resolveId$1 = (id) => id;
const formatImportPrelude = formatImportPrelude$2;

async function parseStyles$1(
  result,
  styles,
  options,
  state,
  conditions,
  from,
  postcss,
) {
  const statements = parseStatements(result, styles, conditions, from);

  for (const stmt of statements) {
    if (stmt.type !== "import" || !isProcessableURL(stmt.uri)) {
      continue
    }

    if (options.filter && !options.filter(stmt.uri)) {
      // rejected by filter
      continue
    }

    await resolveImportId(result, stmt, options, state, postcss);
  }

  let charset;
  const imports = [];
  const bundle = [];

  function handleCharset(stmt) {
    if (!charset) charset = stmt;
    // charsets aren't case-sensitive, so convert to lower case to compare
    else if (
      stmt.node.params.toLowerCase() !== charset.node.params.toLowerCase()
    ) {
      throw stmt.node.error(
        `Incompatible @charset statements:
  ${stmt.node.params} specified in ${stmt.node.source.input.file}
  ${charset.node.params} specified in ${charset.node.source.input.file}`,
      )
    }
  }

  // squash statements and their children
  statements.forEach(stmt => {
    if (stmt.type === "charset") handleCharset(stmt);
    else if (stmt.type === "import") {
      if (stmt.children) {
        stmt.children.forEach((child, index) => {
          if (child.type === "import") imports.push(child);
          else if (child.type === "charset") handleCharset(child);
          else bundle.push(child);
          // For better output
          if (index === 0) child.parent = stmt;
        });
      } else imports.push(stmt);
    } else if (stmt.type === "nodes") {
      bundle.push(stmt);
    }
  });

  return charset ? [charset, ...imports.concat(bundle)] : imports.concat(bundle)
}

async function resolveImportId(result, stmt, options, state, postcss) {
  if (dataURL.isValid(stmt.uri)) {
    // eslint-disable-next-line require-atomic-updates
    stmt.children = await loadImportContent(
      result,
      stmt,
      stmt.uri,
      options,
      state,
      postcss,
    );

    return
  } else if (dataURL.isValid(stmt.from.slice(-1))) {
    // Data urls can't be used as a base url to resolve imports.
    throw stmt.node.error(
      `Unable to import '${stmt.uri}' from a stylesheet that is embedded in a data url`,
    )
  }

  const atRule = stmt.node;
  let sourceFile;
  if (atRule.source?.input?.file) {
    sourceFile = atRule.source.input.file;
  }
  const base = sourceFile
    ? path$1.dirname(atRule.source.input.file)
    : options.root;

  const paths = [await options.resolve(stmt.uri, base, options, atRule)].flat();

  // Ensure that each path is absolute:
  const resolved = await Promise.all(
    paths.map(file => {
      return !path$1.isAbsolute(file)
        ? resolveId$1(file)
        : file
    }),
  );

  // Add dependency messages:
  resolved.forEach(file => {
    result.messages.push({
      type: "dependency",
      plugin: "postcss-import",
      file,
      parent: sourceFile,
    });
  });

  const importedContent = await Promise.all(
    resolved.map(file => {
      return loadImportContent(result, stmt, file, options, state, postcss)
    }),
  );

  // Merge loaded statements
  // eslint-disable-next-line require-atomic-updates
  stmt.children = importedContent.flat().filter(x => !!x);
}

async function loadImportContent(
  result,
  stmt,
  filename,
  options,
  state,
  postcss,
) {
  const atRule = stmt.node;
  const { conditions, from } = stmt;
  const stmtDuplicateCheckKey = conditions
    .map(condition =>
      formatImportPrelude(condition.layer, condition.media, condition.supports),
    )
    .join(":");

  if (options.skipDuplicates) {
    // skip files already imported at the same scope
    if (state.importedFiles[filename]?.[stmtDuplicateCheckKey]) {
      return
    }

    // save imported files to skip them next time
    if (!state.importedFiles[filename]) {
      state.importedFiles[filename] = {};
    }
    state.importedFiles[filename][stmtDuplicateCheckKey] = true;
  }

  if (from.includes(filename)) {
    return
  }

  const content = await options.load(filename, options);

  if (content.trim() === "" && options.warnOnEmpty) {
    result.warn(`${filename} is empty`, { node: atRule });
    return
  }

  // skip previous imported files not containing @import rules
  if (
    options.skipDuplicates &&
    state.hashFiles[content]?.[stmtDuplicateCheckKey]
  ) {
    return
  }

  const importedResult = await processContent(
    result,
    content,
    filename,
    options,
    postcss,
  );

  const styles = importedResult.root;
  result.messages = result.messages.concat(importedResult.messages);

  if (options.skipDuplicates) {
    const hasImport = styles.some(child => {
      return child.type === "atrule" && child.name === "import"
    });
    if (!hasImport) {
      // save hash files to skip them next time
      if (!state.hashFiles[content]) {
        state.hashFiles[content] = {};
      }

      state.hashFiles[content][stmtDuplicateCheckKey] = true;
    }
  }

  // recursion: import @import from imported file
  return parseStyles$1(
    result,
    styles,
    options,
    state,
    conditions,
    [...from, filename],
    postcss,
  )
}

function isProcessableURL(uri) {
  // skip protocol base uri (protocol://url) or protocol-relative
  if (/^(?:[a-z]+:)?\/\//i.test(uri)) {
    return false
  }

  // check for fragment or query
  try {
    // needs a base to parse properly
    const url = new URL(uri, "https://example.com");
    if (url.search) {
      return false
    }
  } catch {} // Ignore

  return true
}

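// Example (illustrative):
//   isProcessableURL("foo/bar.css")               -> true  (relative path, inlined)
//   isProcessableURL("https://example.com/a.css") -> false (protocol URL, left as-is)
//   isProcessableURL("//cdn.example.com/a.css")   -> false (protocol-relative)
//   isProcessableURL("a.css?v=2")                 -> false (query string, left as-is)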
var parseStyles_1 = parseStyles$1;

// builtin tooling
const path = require$$0;

// internal tooling
const applyConditions = applyConditions$1;
const applyRaws = applyRaws$1;
const applyStyles = applyStyles$1;
const loadContent = loadContent$1;
const parseStyles = parseStyles_1;
const resolveId = (id) => id;

function AtImport(options) {
  options = {
    root: process.cwd(),
    path: [],
    skipDuplicates: true,
    resolve: resolveId,
    load: loadContent,
    plugins: [],
    addModulesDirectories: [],
    warnOnEmpty: true,
    ...options,
  };

  options.root = path.resolve(options.root);

  // convert string to an array of a single element
  if (typeof options.path === "string") options.path = [options.path];

  if (!Array.isArray(options.path)) options.path = [];

  options.path = options.path.map(p => path.resolve(options.root, p));

  return {
    postcssPlugin: "postcss-import",
    async Once(styles, { result, atRule, postcss }) {
      const state = {
        importedFiles: {},
        hashFiles: {},
      };

      if (styles.source?.input?.file) {
        state.importedFiles[styles.source.input.file] = {};
      }

      if (options.plugins && !Array.isArray(options.plugins)) {
        throw new Error("plugins option must be an array")
      }

      const bundle = await parseStyles(
        result,
        styles,
        options,
        state,
        [],
        [],
        postcss,
      );

      applyRaws(bundle);
      applyConditions(bundle, atRule);
      applyStyles(bundle, styles);
    },
  }
}

AtImport.postcss = true;

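// Example usage (illustrative), assuming a postcss instance is available to the caller:
//   postcss([AtImport({ path: ["src/css"] })])
//     .process(css, { from: "src/css/app.css" })
//     .then(result => console.log(result.css));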
var postcssImport = AtImport;

var index = /*@__PURE__*/getDefaultExportFromCjs(postcssImport);

var index$1 = /*#__PURE__*/_mergeNamespaces({
  __proto__: null,
  default: index
}, [postcssImport]);

export { index$1 as i };