diff --git a/CHANGELOG.md b/CHANGELOG.md
index e7929a669..b51071521 100644
--- a/CHANGELOG.md
+++ b/CHANGELOG.md
@@ -11,6 +11,7 @@ and this project adheres to [Semantic Versioning](https://semver.org/spec/v2.0.0
- Add support for `inline` option when defining `@theme` values ([#14095](https://github.com/tailwindlabs/tailwindcss/pull/14095))
- Add `inert` variant ([#14129](https://github.com/tailwindlabs/tailwindcss/pull/14129))
+- Add support for explicitly registering content paths using new `@source` at-rule ([#14078](https://github.com/tailwindlabs/tailwindcss/pull/14078))
## [4.0.0-alpha.18] - 2024-07-25
diff --git a/Cargo.lock b/Cargo.lock
index 1b67330dc..014a33c0d 100644
--- a/Cargo.lock
+++ b/Cargo.lock
@@ -34,9 +34,9 @@ checksum = "bef38d45163c2f1dde094a7dfd33ccf595c92905c8f8f4fdc18d06fb1037718a"
[[package]]
name = "bitflags"
-version = "2.3.1"
+version = "2.6.0"
source = "registry+https://github.com/rust-lang/crates.io-index"
-checksum = "6776fc96284a0bb647b615056fc496d1fe1644a7ab01829818a6d91cae888b84"
+checksum = "b048fb63fd8b5923fc5aa7b340d8e156aec7ec02f0c78fa8a6ddc2613f6f71de"
[[package]]
name = "bstr"
@@ -154,6 +154,12 @@ dependencies = [
"syn 2.0.18",
]
+[[package]]
+name = "dunce"
+version = "1.0.5"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "92773504d58c093f6de2459af4af33faa518c13451eb8f2b5698ed3d36e7c813"
+
[[package]]
name = "either"
version = "1.8.1"
@@ -196,6 +202,83 @@ version = "1.0.7"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "3f9eec918d3f24069decb9af1554cad7c880e2da24a9afd88aca000531ab82c1"
+[[package]]
+name = "futures"
+version = "0.3.30"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "645c6916888f6cb6350d2550b80fb63e734897a8498abe35cfb732b6487804b0"
+dependencies = [
+ "futures-channel",
+ "futures-core",
+ "futures-executor",
+ "futures-io",
+ "futures-sink",
+ "futures-task",
+ "futures-util",
+]
+
+[[package]]
+name = "futures-channel"
+version = "0.3.30"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "eac8f7d7865dcb88bd4373ab671c8cf4508703796caa2b1985a9ca867b3fcb78"
+dependencies = [
+ "futures-core",
+ "futures-sink",
+]
+
+[[package]]
+name = "futures-core"
+version = "0.3.30"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "dfc6580bb841c5a68e9ef15c77ccc837b40a7504914d52e47b8b0e9bbda25a1d"
+
+[[package]]
+name = "futures-executor"
+version = "0.3.30"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "a576fc72ae164fca6b9db127eaa9a9dda0d61316034f33a0a0d4eda41f02b01d"
+dependencies = [
+ "futures-core",
+ "futures-task",
+ "futures-util",
+]
+
+[[package]]
+name = "futures-io"
+version = "0.3.30"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "a44623e20b9681a318efdd71c299b6b222ed6f231972bfe2f224ebad6311f0c1"
+
+[[package]]
+name = "futures-sink"
+version = "0.3.30"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "9fb8e00e87438d937621c1c6269e53f536c14d3fbd6a042bb24879e57d474fb5"
+
+[[package]]
+name = "futures-task"
+version = "0.3.30"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "38d84fa142264698cdce1a9f9172cf383a0c82de1bddcf3092901442c4097004"
+
+[[package]]
+name = "futures-util"
+version = "0.3.30"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "3d6401deb83407ab3da39eba7e33987a73c3df0c82b4bb5813ee871c19c41d48"
+dependencies = [
+ "futures-channel",
+ "futures-core",
+ "futures-io",
+ "futures-sink",
+ "futures-task",
+ "memchr",
+ "pin-project-lite",
+ "pin-utils",
+ "slab",
+]
+
[[package]]
name = "fxhash"
version = "0.2.1"
@@ -205,6 +288,12 @@ dependencies = [
"byteorder",
]
+[[package]]
+name = "glob-match"
+version = "0.2.1"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "9985c9503b412198aa4197559e9a318524ebc4519c229bfa05a535828c950b9d"
+
[[package]]
name = "globset"
version = "0.4.10"
@@ -309,6 +398,16 @@ version = "0.3.8"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "ef53942eb7bf7ff43a617b3e2c1c4a5ecf5944a7c1bc12d7ee39bbb15e5c1519"
+[[package]]
+name = "lock_api"
+version = "0.4.12"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "07af8b9cdd281b7915f413fa73f29ebd5d55d0d3f0155584dade1ff18cea1b17"
+dependencies = [
+ "autocfg",
+ "scopeguard",
+]
+
[[package]]
name = "log"
version = "0.4.18"
@@ -345,7 +444,7 @@ version = "2.13.1"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "d7f0a2e93526dd9c8c522d72a4d0c88678be8966fabe9fb8f2947fde6339b682"
dependencies = [
- "bitflags 2.3.1",
+ "bitflags 2.6.0",
"ctor",
"napi-derive",
"napi-sys",
@@ -418,9 +517,9 @@ dependencies = [
[[package]]
name = "once_cell"
-version = "1.17.2"
+version = "1.19.0"
source = "registry+https://github.com/rust-lang/crates.io-index"
-checksum = "9670a07f94779e00908f3e686eab508878ebb390ba6e604d3a284c00e8d0487b"
+checksum = "3fdb12b2476b595f9358c5161aa467c2438859caa136dec86c26fdd2efe17b92"
[[package]]
name = "overload"
@@ -428,6 +527,29 @@ version = "0.1.1"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "b15813163c1d831bf4a13c3610c05c0d03b39feb07f7e09fa234dac9b15aaf39"
+[[package]]
+name = "parking_lot"
+version = "0.12.3"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "f1bf18183cf54e8d6059647fc3063646a1801cf30896933ec2311622cc4b9a27"
+dependencies = [
+ "lock_api",
+ "parking_lot_core",
+]
+
+[[package]]
+name = "parking_lot_core"
+version = "0.9.10"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "1e401f977ab385c9e4e3ab30627d6f26d00e2c73eef317493c4ec6d468726cf8"
+dependencies = [
+ "cfg-if",
+ "libc",
+ "redox_syscall 0.5.3",
+ "smallvec",
+ "windows-targets 0.52.6",
+]
+
[[package]]
name = "pin-project-lite"
version = "0.2.9"
@@ -435,10 +557,16 @@ source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "e0a7ae3ac2f1173085d398531c705756c94a4c56843785df85a60c1a0afac116"
[[package]]
-name = "proc-macro2"
-version = "1.0.59"
+name = "pin-utils"
+version = "0.1.0"
source = "registry+https://github.com/rust-lang/crates.io-index"
-checksum = "6aeca18b86b413c660b781aa319e4e2648a3e6f9eadc9b47e9038e6fe9f3451b"
+checksum = "8b870d8c151b6f2fb93e84a13146138f05d02ed11c7e7c54f8826aaaf7c9f184"
+
+[[package]]
+name = "proc-macro2"
+version = "1.0.86"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "5e719e8df665df0d1c8fbfd238015744736151d4445ec0836b8e628aae103b77"
dependencies = [
"unicode-ident",
]
@@ -483,6 +611,15 @@ dependencies = [
"bitflags 1.3.2",
]
+[[package]]
+name = "redox_syscall"
+version = "0.5.3"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "2a908a6e00f1fdd0dfd9c0eb08ce85126f6d8bbda50017e74bc4a4b7d4a926a4"
+dependencies = [
+ "bitflags 2.6.0",
+]
+
[[package]]
name = "regex"
version = "1.8.3"
@@ -538,12 +675,27 @@ dependencies = [
"winapi-util",
]
+[[package]]
+name = "scc"
+version = "2.1.6"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "05ccfb12511cdb770157ace92d7dda771e498445b78f9886e8cdbc5140a4eced"
+dependencies = [
+ "sdd",
+]
+
[[package]]
name = "scopeguard"
version = "1.1.0"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "d29ab0c6d3fc0ee92fe66e2d99f700eab17a8d57d1c1d3b748380fb20baa78cd"
+[[package]]
+name = "sdd"
+version = "2.1.0"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "177258b64c0faaa9ffd3c65cd3262c2bc7e2588dbbd9c1641d0346145c1bbda8"
+
[[package]]
name = "semver"
version = "1.0.17"
@@ -556,6 +708,31 @@ version = "1.0.163"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "2113ab51b87a539ae008b5c6c02dc020ffa39afd2d83cffcb3f4eb2722cebec2"
+[[package]]
+name = "serial_test"
+version = "3.1.1"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "4b4b487fe2acf240a021cf57c6b2b4903b1e78ca0ecd862a71b71d2a51fed77d"
+dependencies = [
+ "futures",
+ "log",
+ "once_cell",
+ "parking_lot",
+ "scc",
+ "serial_test_derive",
+]
+
+[[package]]
+name = "serial_test_derive"
+version = "3.1.1"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "82fe9db325bcef1fbcde82e078a5cc4efdf787e96b3b9cf45b50b529f2083d67"
+dependencies = [
+ "proc-macro2",
+ "quote",
+ "syn 2.0.18",
+]
+
[[package]]
name = "sharded-slab"
version = "0.1.4"
@@ -565,6 +742,15 @@ dependencies = [
"lazy_static",
]
+[[package]]
+name = "slab"
+version = "0.4.9"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "8f92a496fb766b417c996b9c5e57daf2f7ad3b0bebe1ccfca4856390e3d3bb67"
+dependencies = [
+ "autocfg",
+]
+
[[package]]
name = "smallvec"
version = "1.10.0"
@@ -610,12 +796,15 @@ version = "0.1.0"
dependencies = [
"bstr",
"crossbeam",
+ "dunce",
"fxhash",
+ "glob-match",
"globwalk",
"ignore",
"lazy_static",
"log",
"rayon",
+ "serial_test",
"tempfile",
"tracing",
"tracing-subscriber",
@@ -630,7 +819,7 @@ checksum = "b9fbec84f381d5795b08656e4912bec604d162bff9291d6189a78f4c8ab87998"
dependencies = [
"cfg-if",
"fastrand",
- "redox_syscall",
+ "redox_syscall 0.3.5",
"rustix",
"windows-sys 0.45.0",
]
@@ -814,6 +1003,22 @@ dependencies = [
"windows_x86_64_msvc 0.48.0",
]
+[[package]]
+name = "windows-targets"
+version = "0.52.6"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "9b724f72796e036ab90c1021d4780d4d3d648aca59e491e6b98e725b84e99973"
+dependencies = [
+ "windows_aarch64_gnullvm 0.52.6",
+ "windows_aarch64_msvc 0.52.6",
+ "windows_i686_gnu 0.52.6",
+ "windows_i686_gnullvm",
+ "windows_i686_msvc 0.52.6",
+ "windows_x86_64_gnu 0.52.6",
+ "windows_x86_64_gnullvm 0.52.6",
+ "windows_x86_64_msvc 0.52.6",
+]
+
[[package]]
name = "windows_aarch64_gnullvm"
version = "0.42.2"
@@ -826,6 +1031,12 @@ version = "0.48.0"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "91ae572e1b79dba883e0d315474df7305d12f569b400fcf90581b06062f7e1bc"
+[[package]]
+name = "windows_aarch64_gnullvm"
+version = "0.52.6"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "32a4622180e7a0ec044bb555404c800bc9fd9ec262ec147edd5989ccd0c02cd3"
+
[[package]]
name = "windows_aarch64_msvc"
version = "0.42.2"
@@ -838,6 +1049,12 @@ version = "0.48.0"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "b2ef27e0d7bdfcfc7b868b317c1d32c641a6fe4629c171b8928c7b08d98d7cf3"
+[[package]]
+name = "windows_aarch64_msvc"
+version = "0.52.6"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "09ec2a7bb152e2252b53fa7803150007879548bc709c039df7627cabbd05d469"
+
[[package]]
name = "windows_i686_gnu"
version = "0.42.2"
@@ -850,6 +1067,18 @@ version = "0.48.0"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "622a1962a7db830d6fd0a69683c80a18fda201879f0f447f065a3b7467daa241"
+[[package]]
+name = "windows_i686_gnu"
+version = "0.52.6"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "8e9b5ad5ab802e97eb8e295ac6720e509ee4c243f69d781394014ebfe8bbfa0b"
+
+[[package]]
+name = "windows_i686_gnullvm"
+version = "0.52.6"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "0eee52d38c090b3caa76c563b86c3a4bd71ef1a819287c19d586d7334ae8ed66"
+
[[package]]
name = "windows_i686_msvc"
version = "0.42.2"
@@ -862,6 +1091,12 @@ version = "0.48.0"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "4542c6e364ce21bf45d69fdd2a8e455fa38d316158cfd43b3ac1c5b1b19f8e00"
+[[package]]
+name = "windows_i686_msvc"
+version = "0.52.6"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "240948bc05c5e7c6dabba28bf89d89ffce3e303022809e73deaefe4f6ec56c66"
+
[[package]]
name = "windows_x86_64_gnu"
version = "0.42.2"
@@ -874,6 +1109,12 @@ version = "0.48.0"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "ca2b8a661f7628cbd23440e50b05d705db3686f894fc9580820623656af974b1"
+[[package]]
+name = "windows_x86_64_gnu"
+version = "0.52.6"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "147a5c80aabfbf0c7d901cb5895d1de30ef2907eb21fbbab29ca94c5b08b1a78"
+
[[package]]
name = "windows_x86_64_gnullvm"
version = "0.42.2"
@@ -886,6 +1127,12 @@ version = "0.48.0"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "7896dbc1f41e08872e9d5e8f8baa8fdd2677f29468c4e156210174edc7f7b953"
+[[package]]
+name = "windows_x86_64_gnullvm"
+version = "0.52.6"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "24d5b23dc417412679681396f2b49f3de8c1473deb516bd34410872eff51ed0d"
+
[[package]]
name = "windows_x86_64_msvc"
version = "0.42.2"
@@ -897,3 +1144,9 @@ name = "windows_x86_64_msvc"
version = "0.48.0"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "1a515f5799fe4961cb532f983ce2b23082366b898e52ffbce459c86f67c8378a"
+
+[[package]]
+name = "windows_x86_64_msvc"
+version = "0.52.6"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "589f6da84c646204747d1270a2a5661ea66ed1cced2631d546fdfb155959f9ec"
diff --git a/crates/node/src/lib.rs b/crates/node/src/lib.rs
index a7bfe0532..54860da97 100644
--- a/crates/node/src/lib.rs
+++ b/crates/node/src/lib.rs
@@ -1,5 +1,5 @@
use napi::bindgen_prelude::{FromNapiValue, ToNapiValue};
-use std::path::PathBuf;
+use std::{collections::HashSet, path::PathBuf};
#[macro_use]
extern crate napi_derive;
@@ -22,50 +22,104 @@ impl From<ChangedContent> for tailwindcss_oxide::ChangedContent {
}
#[derive(Debug, Clone)]
-#[napi(object)]
+#[napi]
pub struct ScanResult {
+ // Private information necessary for incremental rebuilds. Note: these fields are not exposed
+ // to JS
+ base: Option<String>,
+ sources: Vec<GlobEntry>,
+
+ // Public API:
pub globs: Vec<GlobEntry>,
pub files: Vec<String>,
pub candidates: Vec<String>,
}
+#[napi]
+impl ScanResult {
+ #[napi]
+ pub fn scan_files(&self, input: Vec<ChangedContent>) -> Vec<String> {
+ let result = tailwindcss_oxide::scan_dir(tailwindcss_oxide::ScanOptions {
+ base: self.base.clone(),
+ sources: self.sources.clone().into_iter().map(Into::into).collect(),
+ });
+
+ let mut unique_candidates: HashSet<String> = HashSet::from_iter(result.candidates);
+ let candidates_from_files: HashSet<String> = HashSet::from_iter(tailwindcss_oxide::scan_files(
+ input.into_iter().map(Into::into).collect(),
+ IO::Parallel as u8 | Parsing::Parallel as u8,
+ ));
+
+ unique_candidates.extend(candidates_from_files);
+
+ unique_candidates
+ .into_iter()
+ .map(|x| x.to_string())
+ .collect()
+ }
+}
+
#[derive(Debug, Clone)]
#[napi(object)]
pub struct GlobEntry {
pub base: String,
- pub glob: String,
+ pub pattern: String,
+}
+
+impl From<GlobEntry> for tailwindcss_oxide::GlobEntry {
+ fn from(glob: GlobEntry) -> Self {
+ tailwindcss_oxide::GlobEntry {
+ base: glob.base,
+ pattern: glob.pattern,
+ }
+ }
+}
+
+impl From<tailwindcss_oxide::GlobEntry> for GlobEntry {
+ fn from(glob: tailwindcss_oxide::GlobEntry) -> Self {
+ GlobEntry {
+ base: glob.base,
+ pattern: glob.pattern,
+ }
+ }
}
#[derive(Debug, Clone)]
#[napi(object)]
pub struct ScanOptions {
- pub base: String,
- pub globs: Option<bool>,
+ /// Base path to start scanning from
+ pub base: Option<String>,
+ /// Glob sources
+ pub sources: Option<Vec<GlobEntry>>,
}
#[napi]
pub fn clear_cache() {
- tailwindcss_oxide::clear_cache();
+ tailwindcss_oxide::clear_cache();
}
#[napi]
pub fn scan_dir(args: ScanOptions) -> ScanResult {
let result = tailwindcss_oxide::scan_dir(tailwindcss_oxide::ScanOptions {
- base: args.base,
- globs: args.globs.unwrap_or(false),
+ base: args.base.clone(),
+ sources: args
+ .sources
+ .clone()
+ .unwrap_or_default()
+ .into_iter()
+ .map(Into::into)
+ .collect(),
});
ScanResult {
+ // Private
+ base: args.base,
+ sources: args.sources.unwrap_or_default(),
+
+ // Public
files: result.files,
candidates: result.candidates,
- globs: result
- .globs
- .into_iter()
- .map(|g| GlobEntry {
- base: g.base,
- glob: g.glob,
- })
- .collect(),
+ globs: result.globs.into_iter().map(Into::into).collect(),
}
}
diff --git a/crates/oxide/Cargo.toml b/crates/oxide/Cargo.toml
index 3629f57f5..be6525056 100644
--- a/crates/oxide/Cargo.toml
+++ b/crates/oxide/Cargo.toml
@@ -15,6 +15,9 @@ tracing-subscriber = { version = "0.3.16", features = ["env-filter"] }
walkdir = "2.3.3"
ignore = "0.4.20"
lazy_static = "1.4.0"
+glob-match = "0.2.1"
+serial_test = "3.1.1"
+dunce = "1.0.5"
[dev-dependencies]
tempfile = "3.5.0"
diff --git a/crates/oxide/src/glob.rs b/crates/oxide/src/glob.rs
index 6d688f421..d74b60b4d 100644
--- a/crates/oxide/src/glob.rs
+++ b/crates/oxide/src/glob.rs
@@ -1,11 +1,13 @@
+use glob_match::glob_match;
use std::iter;
use std::path::{Path, PathBuf};
+use crate::GlobEntry;
+
pub fn fast_glob(
- base_path: &Path,
- patterns: &Vec<String>,
+ patterns: &Vec<GlobEntry>,
) -> Result<impl Iterator<Item = PathBuf>, std::io::Error> {
- Ok(get_fast_patterns(base_path, patterns)
+ Ok(get_fast_patterns(patterns)
.into_iter()
.flat_map(|(base_path, patterns)| {
globwalk::GlobWalkerBuilder::from_patterns(base_path, &patterns)
@@ -40,10 +42,13 @@ pub fn fast_glob(
/// tailwind --pwd ./project/pages --content "**/*.js"
/// tailwind --pwd ./project/components --content "**/*.js"
/// ```
-fn get_fast_patterns(base_path: &Path, patterns: &Vec<String>) -> Vec<(PathBuf, Vec<String>)> {
+pub fn get_fast_patterns(patterns: &Vec<GlobEntry>) -> Vec<(PathBuf, Vec<String>)> {
let mut optimized_patterns: Vec<(PathBuf, Vec<String>)> = vec![];
for pattern in patterns {
+ let base_path = PathBuf::from(&pattern.base);
+ let pattern = &pattern.pattern;
+
let is_negated = pattern.starts_with('!');
let mut pattern = pattern.clone();
if is_negated {
@@ -54,13 +59,13 @@ fn get_fast_patterns(base_path: &Path, patterns: &Vec<String>) -> Vec<(PathBuf,
if folders.len() <= 1 {
// No paths we can simplify, so let's use it as-is.
- optimized_patterns.push((base_path.to_path_buf(), vec![pattern]));
+ optimized_patterns.push((base_path, vec![pattern]));
} else {
// We do have folders because `/` exists. Let's try to simplify the globs!
// Safety: We know that the length is greater than 1, so we can safely unwrap.
let file_pattern = folders.pop().unwrap();
let all_folders = folders.clone();
- let mut temp_paths = vec![base_path.to_path_buf()];
+ let mut temp_paths = vec![base_path];
let mut bail = false;
@@ -131,6 +136,14 @@ fn get_fast_patterns(base_path: &Path, patterns: &Vec<String>) -> Vec<(PathBuf,
optimized_patterns
}
+pub fn path_matches_globs(path: &Path, globs: &[GlobEntry]) -> bool {
+ let path = path.to_string_lossy();
+
+ globs
+ .iter()
+ .any(|g| glob_match(&format!("{}/{}", g.base, g.pattern), &path))
+}
+
/// Given this input: a-{b,c}-d-{e,f}
/// We will get:
/// [
@@ -228,11 +241,15 @@ fn expand_braces(input: &str) -> Vec<String> {
#[cfg(test)]
mod tests {
use super::get_fast_patterns;
+ use crate::GlobEntry;
use std::path::PathBuf;
#[test]
fn it_should_keep_globs_that_start_with_file_wildcards_as_is() {
- let actual = get_fast_patterns(&PathBuf::from("/projects"), &vec!["*.html".to_string()]);
+ let actual = get_fast_patterns(&vec![GlobEntry {
+ base: "/projects".to_string(),
+ pattern: "*.html".to_string(),
+ }]);
let expected = vec![(PathBuf::from("/projects"), vec!["*.html".to_string()])];
assert_eq!(actual, expected,);
@@ -240,7 +257,11 @@ mod tests {
#[test]
fn it_should_keep_globs_that_start_with_folder_wildcards_as_is() {
- let actual = get_fast_patterns(&PathBuf::from("/projects"), &vec!["**/*.html".to_string()]);
+ let actual = get_fast_patterns(&vec![GlobEntry {
+ base: "/projects".to_string(),
+ pattern: "**/*.html".to_string(),
+ }]);
+
let expected = vec![(PathBuf::from("/projects"), vec!["**/*.html".to_string()])];
assert_eq!(actual, expected,);
@@ -248,10 +269,10 @@ mod tests {
#[test]
fn it_should_move_the_starting_folder_to_the_path() {
- let actual = get_fast_patterns(
- &PathBuf::from("/projects"),
- &vec!["example/*.html".to_string()],
- );
+ let actual = get_fast_patterns(&vec![GlobEntry {
+ base: "/projects".to_string(),
+ pattern: "example/*.html".to_string(),
+ }]);
let expected = vec![(
PathBuf::from("/projects/example"),
vec!["*.html".to_string()],
@@ -262,10 +283,10 @@ mod tests {
#[test]
fn it_should_move_the_starting_folders_to_the_path() {
- let actual = get_fast_patterns(
- &PathBuf::from("/projects"),
- &vec!["example/other/*.html".to_string()],
- );
+ let actual = get_fast_patterns(&vec![GlobEntry {
+ base: "/projects".to_string(),
+ pattern: "example/other/*.html".to_string(),
+ }]);
let expected = vec![(
PathBuf::from("/projects/example/other"),
vec!["*.html".to_string()],
@@ -276,10 +297,11 @@ mod tests {
#[test]
fn it_should_branch_expandable_folders() {
- let actual = get_fast_patterns(
- &PathBuf::from("/projects"),
- &vec!["{foo,bar}/*.html".to_string()],
- );
+ let actual = get_fast_patterns(&vec![GlobEntry {
+ base: "/projects".to_string(),
+ pattern: "{foo,bar}/*.html".to_string(),
+ }]);
+
let expected = vec![
(PathBuf::from("/projects/foo"), vec!["*.html".to_string()]),
(PathBuf::from("/projects/bar"), vec!["*.html".to_string()]),
@@ -290,10 +312,10 @@ mod tests {
#[test]
fn it_should_expand_multiple_expansions_in_the_same_folder() {
- let actual = get_fast_patterns(
- &PathBuf::from("/projects"),
- &vec!["a-{b,c}-d-{e,f}-g/*.html".to_string()],
- );
+ let actual = get_fast_patterns(&vec![GlobEntry {
+ base: "/projects".to_string(),
+ pattern: "a-{b,c}-d-{e,f}-g/*.html".to_string(),
+ }]);
let expected = vec![
(
PathBuf::from("/projects/a-b-d-e-g"),
@@ -318,10 +340,10 @@ mod tests {
#[test]
fn multiple_expansions_per_folder_starting_at_the_root() {
- let actual = get_fast_patterns(
- &PathBuf::from("/projects"),
- &vec!["{a,b}-c-{d,e}-f/{b,c}-d-{e,f}-g/*.html".to_string()],
- );
+ let actual = get_fast_patterns(&vec![GlobEntry {
+ base: "/projects".to_string(),
+ pattern: "{a,b}-c-{d,e}-f/{b,c}-d-{e,f}-g/*.html".to_string(),
+ }]);
let expected = vec![
(
PathBuf::from("/projects/a-c-d-f/b-d-e-g"),
@@ -394,10 +416,11 @@ mod tests {
#[test]
fn it_should_stop_expanding_once_we_hit_a_wildcard() {
- let actual = get_fast_patterns(
- &PathBuf::from("/projects"),
- &vec!["{foo,bar}/example/**/{baz,qux}/*.html".to_string()],
- );
+ let actual = get_fast_patterns(&vec![GlobEntry {
+ base: "/projects".to_string(),
+ pattern: "{foo,bar}/example/**/{baz,qux}/*.html".to_string(),
+ }]);
+
let expected = vec![
(
PathBuf::from("/projects/foo/example"),
@@ -414,10 +437,10 @@ mod tests {
#[test]
fn it_should_keep_the_negation_symbol_for_all_new_patterns() {
- let actual = get_fast_patterns(
- &PathBuf::from("/projects"),
- &vec!["!{foo,bar}/*.html".to_string()],
- );
+ let actual = get_fast_patterns(&vec![GlobEntry {
+ base: "/projects".to_string(),
+ pattern: "!{foo,bar}/*.html".to_string(),
+ }]);
let expected = vec![
(PathBuf::from("/projects/foo"), vec!["!*.html".to_string()]),
(PathBuf::from("/projects/bar"), vec!["!*.html".to_string()]),
@@ -428,10 +451,10 @@ mod tests {
#[test]
fn it_should_expand_a_complex_example() {
- let actual = get_fast_patterns(
- &PathBuf::from("/projects"),
- &vec!["a/{b,c}/d/{e,f}/g/*.html".to_string()],
- );
+ let actual = get_fast_patterns(&vec![GlobEntry {
+ base: "/projects".to_string(),
+ pattern: "a/{b,c}/d/{e,f}/g/*.html".to_string(),
+ }]);
let expected = vec![
(
PathBuf::from("/projects/a/b/d/e/g"),
diff --git a/crates/oxide/src/lib.rs b/crates/oxide/src/lib.rs
index ae49d0c91..ca8593e35 100644
--- a/crates/oxide/src/lib.rs
+++ b/crates/oxide/src/lib.rs
@@ -2,6 +2,8 @@ use crate::parser::Extractor;
use bstr::ByteSlice;
use cache::Cache;
use fxhash::FxHashSet;
+use glob::fast_glob;
+use glob::get_fast_patterns;
use ignore::DirEntry;
use ignore::WalkBuilder;
use lazy_static::lazy_static;
@@ -39,8 +41,10 @@ pub struct ChangedContent {
#[derive(Debug, Clone)]
pub struct ScanOptions {
- pub base: String,
- pub globs: bool,
+ /// Base path to start scanning from
+ pub base: Option<String>,
+ /// Glob sources
+ pub sources: Vec<GlobEntry>,
}
#[derive(Debug, Clone)]
@@ -53,7 +57,7 @@ pub struct ScanResult {
#[derive(Debug, Clone)]
pub struct GlobEntry {
pub base: String,
- pub glob: String,
+ pub pattern: String,
}
pub fn clear_cache() {
@@ -64,16 +68,61 @@ pub fn clear_cache() {
pub fn scan_dir(opts: ScanOptions) -> ScanResult {
init_tracing();
- let root = Path::new(&opts.base);
+ let (mut files, mut globs) = match opts.base {
+ Some(base) => {
+ // Only enable auto content detection when `base` is provided.
+ let base = Path::new(&base);
+ let (files, dirs) = resolve_files(base);
+ let globs = resolve_globs(base, dirs);
- let (files, dirs) = resolve_files(root);
-
- let globs = if opts.globs {
- resolve_globs(root, dirs)
- } else {
- vec![]
+ (files, globs)
+ }
+ None => (vec![], vec![]),
};
+ // If we have additional sources, then we have to resolve them as well.
+ if !opts.sources.is_empty() {
+ let resolved_files: Vec<_> = match fast_glob(&opts.sources) {
+ Ok(matches) => matches
+ .filter_map(|x| dunce::canonicalize(&x).ok())
+ .collect(),
+ Err(err) => {
+ event!(tracing::Level::ERROR, "Failed to resolve glob: {:?}", err);
+ vec![]
+ }
+ };
+
+ files.extend(resolved_files);
+
+ let optimized_incoming_globs = get_fast_patterns(&opts.sources)
+ .iter()
+ .flat_map(|(root, globs)| {
+ globs.iter().filter_map(|glob| {
+ let root = match dunce::canonicalize(root.clone()) {
+ Ok(root) => root,
+ Err(error) => {
+ event!(
+ tracing::Level::ERROR,
+ "Failed to canonicalize base path {:?}",
+ error
+ );
+ return None;
+ }
+ };
+
+ let base = root.display().to_string();
+ let glob = glob.to_string();
+ Some(GlobEntry {
+ base,
+ pattern: glob,
+ })
+ })
+ })
+ .collect::<Vec<_>>();
+
+ globs.extend(optimized_incoming_globs);
+ }
+
let mut cache = GLOBAL_CACHE.lock().unwrap();
let modified_files = cache.find_modified_files(&files);
@@ -259,12 +308,12 @@ fn resolve_globs(root: &Path, dirs: Vec<PathBuf>) -> Vec<GlobEntry> {
// Build the globs for all globable directories.
let shallow_globs = shallow_globable_directories.iter().map(|path| GlobEntry {
base: path.display().to_string(),
- glob: format!("*/*.{{{}}}", extension_list),
+ pattern: format!("*/*.{{{}}}", extension_list),
});
let deep_globs = deep_globable_directories.iter().map(|path| GlobEntry {
base: path.display().to_string(),
- glob: format!("**/*.{{{}}}", extension_list),
+ pattern: format!("**/*.{{{}}}", extension_list),
});
shallow_globs.chain(deep_globs).collect::<Vec<_>>()
diff --git a/crates/oxide/tests/auto_content.rs b/crates/oxide/tests/scan_dir.rs
similarity index 86%
rename from crates/oxide/tests/auto_content.rs
rename to crates/oxide/tests/scan_dir.rs
index 68fb809f9..7f875ea4a 100644
--- a/crates/oxide/tests/auto_content.rs
+++ b/crates/oxide/tests/scan_dir.rs
@@ -1,12 +1,19 @@
#[cfg(test)]
-mod auto_content {
+mod scan_dir {
+ use serial_test::serial;
use std::process::Command;
use std::{fs, path};
use tailwindcss_oxide::*;
use tempfile::tempdir;
- fn scan(paths_with_content: &[(&str, Option<&str>)]) -> (Vec<String>, Vec<String>) {
+ fn scan_with_globs(
+ paths_with_content: &[(&str, Option<&str>)],
+ globs: Vec<&str>,
+ ) -> (Vec<String>, Vec<String>) {
+ // Ensure that every test truly runs in isolation without any cache
+ clear_cache();
+
// Create a temporary working directory
let dir = tempdir().unwrap().into_path();
@@ -15,8 +22,8 @@ mod auto_content {
// Create the necessary files
for (path, contents) in paths_with_content {
- // Ensure we use the right path seperator for the current platform
- let path = dir.join(path.replace("/", path::MAIN_SEPARATOR.to_string().as_str()));
+ // Ensure we use the right path separator for the current platform
+ let path = dir.join(path.replace('/', path::MAIN_SEPARATOR.to_string().as_str()));
let parent = path.parent().unwrap();
if !parent.exists() {
fs::create_dir_all(parent).unwrap();
@@ -32,8 +39,14 @@ mod auto_content {
// Resolve all content paths for the (temporary) current working directory
let result = scan_dir(ScanOptions {
- base: base.clone(),
- globs: true,
+ base: Some(base.clone()),
+ sources: globs
+ .iter()
+ .map(|x| GlobEntry {
+ base: base.clone(),
+ pattern: x.to_string(),
+ })
+ .collect(),
});
let mut paths: Vec<_> = result
@@ -47,7 +60,7 @@ mod auto_content {
"{}{}{}",
glob.base,
path::MAIN_SEPARATOR,
- glob.glob
+ glob.pattern
));
}
@@ -57,7 +70,7 @@ mod auto_content {
let parent_dir = format!("{}{}", &base.to_string(), path::MAIN_SEPARATOR);
x.replace(&parent_dir, "")
// Normalize paths to use unix style separators
- .replace("\\", "/")
+ .replace('\\', "/")
})
.collect();
@@ -68,11 +81,16 @@ mod auto_content {
(paths, result.candidates)
}
+ fn scan(paths_with_content: &[(&str, Option<&str>)]) -> (Vec<String>, Vec<String>) {
+ scan_with_globs(paths_with_content, vec![])
+ }
+
fn test(paths_with_content: &[(&str, Option<&str>)]) -> Vec<String> {
scan(paths_with_content).0
}
#[test]
+ #[serial]
fn it_should_work_with_a_set_of_root_files() {
let globs = test(&[
("index.html", None),
@@ -84,6 +102,7 @@ mod auto_content {
}
#[test]
+ #[serial]
fn it_should_work_with_a_set_of_root_files_and_ignore_ignored_files() {
let globs = test(&[
(".gitignore", Some("b.html")),
@@ -96,6 +115,7 @@ mod auto_content {
}
#[test]
+ #[serial]
fn it_should_list_all_files_in_the_public_folder_explicitly() {
let globs = test(&[
("index.html", None),
@@ -115,6 +135,7 @@ mod auto_content {
}
#[test]
+ #[serial]
fn it_should_list_nested_folders_explicitly_in_the_public_folder() {
let globs = test(&[
("index.html", None),
@@ -144,6 +165,7 @@ mod auto_content {
}
#[test]
+ #[serial]
fn it_should_list_all_files_in_the_public_folder_explicitly_except_ignored_files() {
let globs = test(&[
(".gitignore", Some("public/b.html\na.html")),
@@ -156,6 +178,7 @@ mod auto_content {
}
#[test]
+ #[serial]
fn it_should_use_a_glob_for_top_level_folders() {
let globs = test(&[
("index.html", None),
@@ -173,6 +196,7 @@ mod auto_content {
}
#[test]
+ #[serial]
fn it_should_ignore_binary_files() {
let globs = test(&[
("index.html", None),
@@ -184,6 +208,7 @@ mod auto_content {
}
#[test]
+ #[serial]
fn it_should_ignore_known_extensions() {
let globs = test(&[
("index.html", None),
@@ -195,6 +220,7 @@ mod auto_content {
}
#[test]
+ #[serial]
fn it_should_ignore_known_files() {
let globs = test(&[
("index.html", None),
@@ -205,6 +231,7 @@ mod auto_content {
}
#[test]
+ #[serial]
fn it_should_ignore_and_expand_nested_ignored_folders() {
let globs = test(&[
// Explicitly listed root files
@@ -291,6 +318,7 @@ mod auto_content {
}
#[test]
+ #[serial]
fn it_should_scan_for_utilities() {
let mut ignores = String::new();
ignores.push_str("# md:font-bold\n");
@@ -315,4 +343,37 @@ mod auto_content {
vec!["condition", "div", "font-bold", "md:flex", "px-4"]
);
}
+
+ #[test]
+ #[serial]
+ fn it_should_scan_content_paths() {
+ let candidates = scan_with_globs(
+ &[
+ // We know that `.styl` extensions are ignored, so they are not covered by auto content
+ // detection.
+ ("foo.styl", Some("content-['foo.styl']")),
+ ],
+ vec!["*.styl"],
+ )
+ .1;
+
+ assert_eq!(candidates, vec!["content-['foo.styl']"]);
+ }
+
+ #[test]
+ #[serial]
+ fn it_should_scan_content_paths_even_when_they_are_git_ignored() {
+ let candidates = scan_with_globs(
+ &[
+ (".gitignore", Some("foo.styl")),
+ // We know that `.styl` extensions are ignored, so they are not covered by auto content
+ // detection.
+ ("foo.styl", Some("content-['foo.styl']")),
+ ],
+ vec!["*.styl"],
+ )
+ .1;
+
+ assert_eq!(candidates, vec!["content-['foo.styl']"]);
+ }
}
diff --git a/integrations/cli/index.test.ts b/integrations/cli/index.test.ts
new file mode 100644
index 000000000..7ee7ea689
--- /dev/null
+++ b/integrations/cli/index.test.ts
@@ -0,0 +1,142 @@
+import path from 'node:path'
+import { candidate, css, html, js, json, test, yaml } from '../utils'
+
+test(
+ 'production build',
+ {
+ fs: {
+ 'package.json': json`{}`,
+ 'pnpm-workspace.yaml': yaml`
+ #
+ packages:
+ - project-a
+ `,
+ 'project-a/package.json': json`
+ {
+ "dependencies": {
+ "tailwindcss": "workspace:^",
+ "@tailwindcss/cli": "workspace:^"
+ }
+ }
+ `,
+ 'project-a/index.html': html`
+
+ `,
+ 'project-a/plugin.js': js`
+ module.exports = function ({ addVariant }) {
+ addVariant('inverted', '@media (inverted-colors: inverted)')
+ addVariant('hocus', ['&:focus', '&:hover'])
+ }
+ `,
+ 'project-a/src/index.css': css`
+ @import 'tailwindcss/utilities';
+ @source '../../project-b/src/**/*.js';
+ @plugin '../plugin.js';
+ `,
+ 'project-a/src/index.js': js`
+ const className = "content-['project-a/src/index.js']"
+ module.exports = { className }
+ `,
+ 'project-b/src/index.js': js`
+ const className = "content-['project-b/src/index.js']"
+ module.exports = { className }
+ `,
+ },
+ },
+ async ({ root, fs, exec }) => {
+ await exec('pnpm tailwindcss --input src/index.css --output dist/out.css', {
+ cwd: path.join(root, 'project-a'),
+ })
+
+ await fs.expectFileToContain('project-a/dist/out.css', [
+ candidate`underline`,
+ candidate`content-['project-a/src/index.js']`,
+ candidate`content-['project-b/src/index.js']`,
+ candidate`inverted:flex`,
+ candidate`hocus:underline`,
+ ])
+ },
+)
+
+test(
+ 'watch mode',
+ {
+ fs: {
+ 'package.json': json`{}`,
+ 'pnpm-workspace.yaml': yaml`
+ #
+ packages:
+ - project-a
+ `,
+ 'project-a/package.json': json`
+ {
+ "dependencies": {
+ "tailwindcss": "workspace:^",
+ "@tailwindcss/cli": "workspace:^"
+ }
+ }
+ `,
+ 'project-a/index.html': html`
+
+ `,
+ 'project-a/plugin.js': js`
+ module.exports = function ({ addVariant }) {
+ addVariant('inverted', '@media (inverted-colors: inverted)')
+ addVariant('hocus', ['&:focus', '&:hover'])
+ }
+ `,
+ 'project-a/src/index.css': css`
+ @import 'tailwindcss/utilities';
+ @source '../../project-b/src/**/*.js';
+ @plugin '../plugin.js';
+ `,
+ 'project-a/src/index.js': js`
+ const className = "content-['project-a/src/index.js']"
+ module.exports = { className }
+ `,
+ 'project-b/src/index.js': js`
+ const className = "content-['project-b/src/index.js']"
+ module.exports = { className }
+ `,
+ },
+ },
+ async ({ root, fs, spawn }) => {
+ await spawn('pnpm tailwindcss --input src/index.css --output dist/out.css --watch', {
+ cwd: path.join(root, 'project-a'),
+ })
+
+ await fs.expectFileToContain('project-a/dist/out.css', [
+ candidate`underline`,
+ candidate`content-['project-a/src/index.js']`,
+ candidate`content-['project-b/src/index.js']`,
+ candidate`inverted:flex`,
+ candidate`hocus:underline`,
+ ])
+
+ await fs.write(
+ 'project-a/src/index.js',
+ js`
+ const className = "[.changed_&]:content-['project-a/src/index.js']"
+ module.exports = { className }
+ `,
+ )
+ await fs.expectFileToContain('project-a/dist/out.css', [
+ candidate`[.changed_&]:content-['project-a/src/index.js']`,
+ ])
+
+ await fs.write(
+ 'project-b/src/index.js',
+ js`
+ const className = "[.changed_&]:content-['project-b/src/index.js']"
+ module.exports = { className }
+ `,
+ )
+ await fs.expectFileToContain('project-a/dist/out.css', [
+ candidate`[.changed_&]:content-['project-b/src/index.js']`,
+ ])
+ },
+)
diff --git a/integrations/postcss/index.test.ts b/integrations/postcss/index.test.ts
new file mode 100644
index 000000000..6ec7c749f
--- /dev/null
+++ b/integrations/postcss/index.test.ts
@@ -0,0 +1,164 @@
+import path from 'node:path'
+import { candidate, css, html, js, json, test, yaml } from '../utils'
+
+test(
+ 'production build',
+ {
+ fs: {
+ 'package.json': json`{}`,
+ 'pnpm-workspace.yaml': yaml`
+ #
+ packages:
+ - project-a
+ `,
+ 'project-a/package.json': json`
+ {
+ "dependencies": {
+ "postcss": "^8",
+ "postcss-cli": "^10",
+ "tailwindcss": "workspace:^",
+ "@tailwindcss/postcss": "workspace:^"
+ }
+ }
+ `,
+ 'project-a/postcss.config.js': js`
+ module.exports = {
+ plugins: {
+ '@tailwindcss/postcss': {},
+ },
+ }
+ `,
+ 'project-a/index.html': html`
+
+ `,
+ 'project-a/plugin.js': js`
+ module.exports = function ({ addVariant }) {
+ addVariant('inverted', '@media (inverted-colors: inverted)')
+ addVariant('hocus', ['&:focus', '&:hover'])
+ }
+ `,
+ 'project-a/src/index.css': css`
+ @import 'tailwindcss/utilities';
+ @source '../../project-b/src/**/*.js';
+ @plugin '../plugin.js';
+ `,
+ 'project-a/src/index.js': js`
+ const className = "content-['a/src/index.js']"
+ module.exports = { className }
+ `,
+ 'project-b/src/index.js': js`
+ const className = "content-['b/src/index.js']"
+ module.exports = { className }
+ `,
+ },
+ },
+ async ({ root, fs, exec }) => {
+ await exec('pnpm postcss src/index.css --output dist/out.css', {
+ cwd: path.join(root, 'project-a'),
+ })
+
+ await fs.expectFileToContain('project-a/dist/out.css', [
+ candidate`underline`,
+ candidate`content-['a/src/index.js']`,
+ candidate`content-['b/src/index.js']`,
+ candidate`inverted:flex`,
+ candidate`hocus:underline`,
+ ])
+ },
+)
+
+test(
+ 'watch mode',
+ {
+ fs: {
+ 'package.json': json`{}`,
+ 'pnpm-workspace.yaml': yaml`
+ #
+ packages:
+ - project-a
+ `,
+ 'project-a/package.json': json`
+ {
+ "dependencies": {
+ "postcss": "^8",
+ "postcss-cli": "^10",
+ "tailwindcss": "workspace:^",
+ "@tailwindcss/postcss": "workspace:^"
+ }
+ }
+ `,
+ 'project-a/postcss.config.js': js`
+ module.exports = {
+ plugins: {
+ '@tailwindcss/postcss': {},
+ },
+ }
+ `,
+ 'project-a/index.html': html`
+
+ `,
+ 'project-a/plugin.js': js`
+ module.exports = function ({ addVariant }) {
+ addVariant('inverted', '@media (inverted-colors: inverted)')
+ addVariant('hocus', ['&:focus', '&:hover'])
+ }
+ `,
+ 'project-a/src/index.css': css`
+ @import 'tailwindcss/utilities';
+ @source '../../project-b/src/**/*.js';
+ @plugin '../plugin.js';
+ `,
+ 'project-a/src/index.js': js`
+ const className = "content-['a/src/index.js']"
+ module.exports = { className }
+ `,
+ 'project-b/src/index.js': js`
+ const className = "content-['b/src/index.js']"
+ module.exports = { className }
+ `,
+ },
+ },
+ async ({ root, fs, spawn }) => {
+ let process = await spawn(
+ 'pnpm postcss src/index.css --output dist/out.css --watch --verbose',
+ { cwd: path.join(root, 'project-a') },
+ )
+ await process.onStderr((message) => message.includes('Waiting for file changes...'))
+
+ await fs.expectFileToContain('project-a/dist/out.css', [
+ candidate`underline`,
+ candidate`content-['a/src/index.js']`,
+ candidate`content-['b/src/index.js']`,
+ candidate`inverted:flex`,
+ candidate`hocus:underline`,
+ ])
+
+ await fs.write(
+ 'project-a/src/index.js',
+ js`
+ const className = "[.changed_&]:content-['project-a/src/index.js']"
+ module.exports = { className }
+ `,
+ )
+
+ await fs.expectFileToContain('project-a/dist/out.css', [
+ candidate`[.changed_&]:content-['project-a/src/index.js']`,
+ ])
+
+ await fs.write(
+ 'project-b/src/index.js',
+ js`
+ const className = "[.changed_&]:content-['project-b/src/index.js']"
+ module.exports = { className }
+ `,
+ )
+
+ await fs.expectFileToContain('project-a/dist/out.css', [
+ candidate`[.changed_&]:content-['project-b/src/index.js']`,
+ ])
+ },
+)
diff --git a/integrations/utils.ts b/integrations/utils.ts
index e601353b1..52ae3572a 100644
--- a/integrations/utils.ts
+++ b/integrations/utils.ts
@@ -6,14 +6,12 @@ import fs from 'node:fs/promises'
import net from 'node:net'
import { homedir, platform, tmpdir } from 'node:os'
import path from 'node:path'
-import { test as defaultTest } from 'vitest'
-
-export let css = dedent
-export let html = dedent
-export let ts = dedent
-export let json = dedent
+import { test as defaultTest, expect } from 'vitest'
const REPO_ROOT = path.join(__dirname, '..')
+const PUBLIC_PACKAGES = (await fs.readdir(path.join(REPO_ROOT, 'dist'))).map((name) =>
+ name.replace('tailwindcss-', '@tailwindcss/').replace('.tgz', ''),
+)
interface SpawnedProcess {
dispose: () => void
@@ -21,216 +19,267 @@ interface SpawnedProcess {
onStderr: (predicate: (message: string) => boolean) => Promise<void>
}
+interface ChildProcessOptions {
+ cwd?: string
+}
+
interface TestConfig {
fs: {
[filePath: string]: string
}
}
interface TestContext {
- exec(command: string): Promise<string>
- spawn(command: string): Promise<SpawnedProcess>
+ root: string
+ exec(command: string, options?: ChildProcessOptions): Promise<string>
+ spawn(command: string, options?: ChildProcessOptions): Promise<SpawnedProcess>
getFreePort(): Promise<number>
fs: {
write(filePath: string, content: string): Promise<void>
+ read(filePath: string): Promise<string>
glob(pattern: string): Promise<[string, string][]>
+ expectFileToContain(filePath: string, contents: string | string[]): Promise<void>
}
}
type TestCallback = (context: TestContext) => Promise<void> | void
type SpawnActor = { predicate: (message: string) => boolean; resolve: () => void }
+const TEST_TIMEOUT = 30000
+const ASSERTION_TIMEOUT = 5000
+
export function test(
name: string,
config: TestConfig,
testCallback: TestCallback,
{ only = false } = {},
) {
- return (only ? defaultTest.only : defaultTest)(name, { timeout: 30000 }, async (options) => {
- let root = await fs.mkdtemp(
- // On Windows CI, tmpdir returns a path containing a weird RUNNER~1 folder
- // that apparently causes the vite builds to not work.
- path.join(
- process.env.CI && platform() === 'win32' ? homedir() : tmpdir(),
- 'tailwind-integrations',
- ),
- )
+ return (only ? defaultTest.only : defaultTest)(
+ name,
+ { timeout: TEST_TIMEOUT },
+ async (options) => {
+ let root = await fs.mkdtemp(
+ // On Windows CI, tmpdir returns a path containing a weird RUNNER~1 folder
+ // that apparently causes the vite builds to not work.
+ path.join(
+ process.env.CI && platform() === 'win32' ? homedir() : tmpdir(),
+ 'tailwind-integrations',
+ ),
+ )
- async function write(filename: string, content: string): Promise<void> {
- let full = path.join(root, filename)
+ async function write(filename: string, content: string): Promise<void> {
+ let full = path.join(root, filename)
- if (filename.endsWith('package.json')) {
- content = overwriteVersionsInPackageJson(content)
- }
-
- // Ensure that files written on Windows use \r\n line ending
- if (platform() === 'win32') {
- content = content.replace(/\n/g, '\r\n')
- }
-
- let dir = path.dirname(full)
- await fs.mkdir(dir, { recursive: true })
- await fs.writeFile(full, content)
- }
-
- for (let [filename, content] of Object.entries(config.fs)) {
- await write(filename, content)
- }
-
- try {
- execSync('pnpm install', { cwd: root })
- } catch (error: any) {
- console.error(error.stdout.toString())
- console.error(error.stderr.toString())
- throw error
- }
-
- let disposables: (() => Promise<void>)[] = []
- async function dispose() {
- await Promise.all(disposables.map((dispose) => dispose()))
- await fs.rm(root, { recursive: true, maxRetries: 3, force: true })
- }
- options.onTestFinished(dispose)
-
- let context = {
- async exec(command: string) {
- return execSync(command, { cwd: root }).toString()
- },
- async spawn(command: string) {
- let resolveDisposal: (() => void) | undefined
- let rejectDisposal: ((error: Error) => void) | undefined
- let disposePromise = new Promise((resolve, reject) => {
- resolveDisposal = resolve
- rejectDisposal = reject
- })
-
- let child = spawn(command, {
- cwd: root,
- shell: true,
- env: {
- ...process.env,
- },
- })
-
- function dispose() {
- child.kill()
-
- let timer = setTimeout(
- () => rejectDisposal?.(new Error(`spawned process (${command}) did not exit in time`)),
- 1000,
- )
- disposePromise.finally(() => clearTimeout(timer))
- return disposePromise
- }
- disposables.push(dispose)
- function onExit() {
- resolveDisposal?.()
+ if (filename.endsWith('package.json')) {
+ content = await overwriteVersionsInPackageJson(content)
}
- let stdoutMessages: string[] = []
- let stderrMessages: string[] = []
+ // Ensure that files written on Windows use \r\n line ending
+ if (platform() === 'win32') {
+ content = content.replace(/\n/g, '\r\n')
+ }
- let stdoutActors: SpawnActor[] = []
- let stderrActors: SpawnActor[] = []
+ let dir = path.dirname(full)
+ await fs.mkdir(dir, { recursive: true })
+ await fs.writeFile(full, content)
+ }
- function notifyNext(actors: SpawnActor[], messages: string[]) {
- if (actors.length <= 0) return
- let [next] = actors
+ for (let [filename, content] of Object.entries(config.fs)) {
+ await write(filename, content)
+ }
- for (let [idx, message] of messages.entries()) {
- if (next.predicate(message)) {
- messages.splice(0, idx + 1)
- let actorIdx = actors.indexOf(next)
- actors.splice(actorIdx, 1)
- next.resolve()
- break
+ try {
+ execSync('pnpm install', { cwd: root })
+ } catch (error: any) {
+ console.error(error.stdout.toString())
+ console.error(error.stderr.toString())
+ throw error
+ }
+
+ let disposables: (() => Promise<void>)[] = []
+
+ async function dispose() {
+ await Promise.all(disposables.map((dispose) => dispose()))
+ try {
+ await fs.rm(root, { recursive: true, maxRetries: 5, force: true })
+ } catch (err) {
+ if (!process.env.CI) {
+ throw err
+ }
+ }
+ }
+
+ options.onTestFinished(dispose)
+
+ let context = {
+ root,
+ async exec(command: string, childProcessOptions: ChildProcessOptions = {}) {
+ return execSync(command, {
+ cwd: root,
+ stdio: 'pipe',
+ ...childProcessOptions,
+ }).toString()
+ },
+ async spawn(command: string, childProcessOptions: ChildProcessOptions = {}) {
+ let resolveDisposal: (() => void) | undefined
+ let rejectDisposal: ((error: Error) => void) | undefined
+ let disposePromise = new Promise((resolve, reject) => {
+ resolveDisposal = resolve
+ rejectDisposal = reject
+ })
+
+ let child = spawn(command, {
+ cwd: root,
+ shell: true,
+ env: {
+ ...process.env,
+ },
+ ...childProcessOptions,
+ })
+
+ function dispose() {
+ child.kill()
+
+ let timer = setTimeout(
+ () =>
+ rejectDisposal?.(new Error(`spawned process (${command}) did not exit in time`)),
+ ASSERTION_TIMEOUT,
+ )
+ disposePromise.finally(() => clearTimeout(timer))
+ return disposePromise
+ }
+ disposables.push(dispose)
+ function onExit() {
+ resolveDisposal?.()
+ }
+
+ let stdoutMessages: string[] = []
+ let stderrMessages: string[] = []
+
+ let stdoutActors: SpawnActor[] = []
+ let stderrActors: SpawnActor[] = []
+
+ function notifyNext(actors: SpawnActor[], messages: string[]) {
+ if (actors.length <= 0) return
+ let [next] = actors
+
+ for (let [idx, message] of messages.entries()) {
+ if (next.predicate(message)) {
+ messages.splice(0, idx + 1)
+ let actorIdx = actors.indexOf(next)
+ actors.splice(actorIdx, 1)
+ next.resolve()
+ break
+ }
}
}
- }
- child.stdout.on('data', (result) => {
- stdoutMessages.push(result.toString())
- notifyNext(stdoutActors, stdoutMessages)
- })
- child.stderr.on('data', (result) => {
- stderrMessages.push(result.toString())
- notifyNext(stderrActors, stderrMessages)
- })
- child.on('exit', onExit)
- child.on('error', (error) => {
- if (error.name !== 'AbortError') {
- throw error
- }
- })
+ let combined: ['stdout' | 'stderr', string][] = []
- options.onTestFailed(() => {
- stdoutMessages.map((message) => console.log(message))
- stderrMessages.map((message) => console.error(message))
- })
+ child.stdout.on('data', (result) => {
+ let content = result.toString()
+ combined.push(['stdout', content])
+ stdoutMessages.push(content)
+ notifyNext(stdoutActors, stdoutMessages)
+ })
+ child.stderr.on('data', (result) => {
+ let content = result.toString()
+ combined.push(['stderr', content])
+ stderrMessages.push(content)
+ notifyNext(stderrActors, stderrMessages)
+ })
+ child.on('exit', onExit)
+ child.on('error', (error) => {
+ if (error.name !== 'AbortError') {
+ throw error
+ }
+ })
- return {
- dispose,
- onStdout(predicate: (message: string) => boolean) {
- return new Promise((resolve) => {
- stdoutActors.push({ predicate, resolve })
- notifyNext(stdoutActors, stdoutMessages)
- })
- },
- onStderr(predicate: (message: string) => boolean) {
- return new Promise((resolve) => {
- stderrActors.push({ predicate, resolve })
- notifyNext(stderrActors, stderrMessages)
- })
- },
- }
- },
- async getFreePort(): Promise<number> {
- return new Promise((resolve, reject) => {
- let server = net.createServer()
- server.listen(0, () => {
- let address = server.address()
- let port = address === null || typeof address === 'string' ? null : address.port
-
- server.close(() => {
- if (port === null) {
- reject(new Error(`Failed to get a free port: address is ${address}`))
+ options.onTestFailed(() => {
+ for (let [type, message] of combined) {
+ if (type === 'stdout') {
+ console.log(message)
} else {
- disposables.push(async () => {
- // Wait for 10ms in case the process was just killed
- await new Promise((resolve) => setTimeout(resolve, 10))
-
- // kill-port uses `lsof` on macOS which is expensive and can
- // block for multiple seconds. In order to avoid that for a
- // server that is no longer running, we check if the port is
- // still in use first.
- let isPortTaken = await testIfPortTaken(port)
- if (!isPortTaken) {
- return
- }
-
- await killPort(port)
- })
- resolve(port)
+ console.error(message)
}
+ }
+ })
+
+ return {
+ dispose,
+ onStdout(predicate: (message: string) => boolean) {
+ return new Promise((resolve) => {
+ stdoutActors.push({ predicate, resolve })
+ notifyNext(stdoutActors, stdoutMessages)
+ })
+ },
+ onStderr(predicate: (message: string) => boolean) {
+ return new Promise((resolve) => {
+ stderrActors.push({ predicate, resolve })
+ notifyNext(stderrActors, stderrMessages)
+ })
+ },
+ }
+ },
+ async getFreePort(): Promise<number> {
+ return new Promise((resolve, reject) => {
+ let server = net.createServer()
+ server.listen(0, () => {
+ let address = server.address()
+ let port = address === null || typeof address === 'string' ? null : address.port
+
+ server.close(() => {
+ if (port === null) {
+ reject(new Error(`Failed to get a free port: address is ${address}`))
+ } else {
+ disposables.push(async () => {
+ // Wait for 10ms in case the process was just killed
+ await new Promise((resolve) => setTimeout(resolve, 10))
+
+ // kill-port uses `lsof` on macOS which is expensive and can
+ // block for multiple seconds. In order to avoid that for a
+ // server that is no longer running, we check if the port is
+ // still in use first.
+ let isPortTaken = await testIfPortTaken(port)
+ if (!isPortTaken) {
+ return
+ }
+
+ await killPort(port)
+ })
+ resolve(port)
+ }
+ })
})
})
- })
- },
- fs: {
- write,
- async glob(pattern: string) {
- let files = await fastGlob(pattern, { cwd: root })
- return Promise.all(
- files.map(async (file) => {
- let content = await fs.readFile(path.join(root, file), 'utf8')
- return [file, content]
- }),
- )
},
- },
- } satisfies TestContext
+ fs: {
+ write,
+ read(filePath: string) {
+ return fs.readFile(path.resolve(root, filePath), 'utf8')
+ },
+ async glob(pattern: string) {
+ let files = await fastGlob(pattern, { cwd: root })
+ return Promise.all(
+ files.map(async (file) => {
+ let content = await fs.readFile(path.join(root, file), 'utf8')
+ return [file, content]
+ }),
+ )
+ },
+ async expectFileToContain(filePath, contents) {
+ return retryUntil(async () => {
+ let fileContent = await this.read(filePath)
+ for (let content of contents) {
+ expect(fileContent).toContain(content)
+ }
+ })
+ },
+ },
+ } satisfies TestContext
- await testCallback(context)
- })
+ await testCallback(context)
+ },
+ )
}
test.only = (name: string, config: TestConfig, testCallback: TestCallback) => {
return test(name, config, testCallback, { only: true })
@@ -242,18 +291,20 @@ function pkgToFilename(name: string) {
return `${name.replace('@', '').replace('/', '-')}.tgz`
}
-function overwriteVersionsInPackageJson(content: string): string {
+async function overwriteVersionsInPackageJson(content: string): Promise<string> {
let json = JSON.parse(content)
// Resolve all workspace:^ versions to local tarballs
- ;['dependencies', 'devDependencies', 'peerDependencies'].forEach((key) => {
- let dependencies = json[key] || {}
- for (let dependency in dependencies) {
- if (dependencies[dependency] === 'workspace:^') {
- dependencies[dependency] = resolveVersion(dependency)
+ ;['dependencies', 'devDependencies', 'peerDependencies', 'optionalDependencies'].forEach(
+ (key) => {
+ let dependencies = json[key] || {}
+ for (let dependency in dependencies) {
+ if (dependencies[dependency] === 'workspace:^') {
+ dependencies[dependency] = resolveVersion(dependency)
+ }
}
- }
- })
+ },
+ )
// Inject transitive dependency overwrite. This is necessary because
// @tailwindcss/vite internally depends on a specific version of
@@ -261,7 +312,9 @@ function overwriteVersionsInPackageJson(content: string): string {
// version.
json.pnpm ||= {}
json.pnpm.overrides ||= {}
- json.pnpm.overrides['@tailwindcss/oxide'] = resolveVersion('@tailwindcss/oxide')
+ for (let pkg of PUBLIC_PACKAGES) {
+ json.pnpm.overrides[pkg] = resolveVersion(pkg)
+ }
return JSON.stringify(json, null, 2)
}
@@ -293,3 +346,114 @@ function testIfPortTaken(port: number): Promise<boolean> {
client.connect({ port: port, host: 'localhost' })
})
}
+
+export let css = dedent
+export let html = dedent
+export let ts = dedent
+export let js = dedent
+export let json = dedent
+export let yaml = dedent
+export let txt = dedent
+
+export function candidate(strings: TemplateStringsArray, ...values: any[]) {
+ let output: string[] = []
+ for (let i = 0; i < strings.length; i++) {
+ output.push(strings[i])
+ if (i < values.length) {
+ output.push(values[i])
+ }
+ }
+
+ return `.${escape(output.join('').trim())}`
+}
+
+// https://drafts.csswg.org/cssom/#serialize-an-identifier
+export function escape(value: string) {
+ if (arguments.length == 0) {
+ throw new TypeError('`CSS.escape` requires an argument.')
+ }
+ var string = String(value)
+ var length = string.length
+ var index = -1
+ var codeUnit
+ var result = ''
+ var firstCodeUnit = string.charCodeAt(0)
+
+ if (
+ // If the character is the first character and is a `-` (U+002D), and
+ // there is no second character, […]
+ length == 1 &&
+ firstCodeUnit == 0x002d
+ ) {
+ return '\\' + string
+ }
+
+ while (++index < length) {
+ codeUnit = string.charCodeAt(index)
+ // Note: there’s no need to special-case astral symbols, surrogate
+ // pairs, or lone surrogates.
+
+ // If the character is NULL (U+0000), then the REPLACEMENT CHARACTER
+ // (U+FFFD).
+ if (codeUnit == 0x0000) {
+ result += '\uFFFD'
+ continue
+ }
+
+ if (
+ // If the character is in the range [\1-\1F] (U+0001 to U+001F) or is
+ // U+007F, […]
+ (codeUnit >= 0x0001 && codeUnit <= 0x001f) ||
+ codeUnit == 0x007f ||
+ // If the character is the first character and is in the range [0-9]
+ // (U+0030 to U+0039), […]
+ (index == 0 && codeUnit >= 0x0030 && codeUnit <= 0x0039) ||
+ // If the character is the second character and is in the range [0-9]
+ // (U+0030 to U+0039) and the first character is a `-` (U+002D), […]
+ (index == 1 && codeUnit >= 0x0030 && codeUnit <= 0x0039 && firstCodeUnit == 0x002d)
+ ) {
+ // https://drafts.csswg.org/cssom/#escape-a-character-as-code-point
+ result += '\\' + codeUnit.toString(16) + ' '
+ continue
+ }
+
+ // If the character is not handled by one of the above rules and is
+ // greater than or equal to U+0080, is `-` (U+002D) or `_` (U+005F), or
+ // is in one of the ranges [0-9] (U+0030 to U+0039), [A-Z] (U+0041 to
+ // U+005A), or [a-z] (U+0061 to U+007A), […]
+ if (
+ codeUnit >= 0x0080 ||
+ codeUnit == 0x002d ||
+ codeUnit == 0x005f ||
+ (codeUnit >= 0x0030 && codeUnit <= 0x0039) ||
+ (codeUnit >= 0x0041 && codeUnit <= 0x005a) ||
+ (codeUnit >= 0x0061 && codeUnit <= 0x007a)
+ ) {
+ // the character itself
+ result += string.charAt(index)
+ continue
+ }
+
+ // Otherwise, the escaped character.
+ // https://drafts.csswg.org/cssom/#escape-a-character
+ result += '\\' + string.charAt(index)
+ }
+ return result
+}
+
+async function retryUntil(
+ fn: () => Promise<void>,
+ { timeout = ASSERTION_TIMEOUT, delay = 5 }: { timeout?: number; delay?: number } = {},
+) {
+ let end = Date.now() + timeout
+ let error: any
+ while (Date.now() < end) {
+ try {
+ return await fn()
+ } catch (err) {
+ error = err
+ await new Promise((resolve) => setTimeout(resolve, delay))
+ }
+ }
+ throw error
+}
diff --git a/integrations/vite/index.test.ts b/integrations/vite/index.test.ts
index 56552b0ad..ab636eb46 100644
--- a/integrations/vite/index.test.ts
+++ b/integrations/vite/index.test.ts
@@ -1,5 +1,6 @@
+import path from 'node:path'
import { expect } from 'vitest'
-import { css, html, json, stripTailwindComment, test, ts } from '../utils'
+import { candidate, css, html, js, json, test, ts, yaml } from '../utils'
async function fetchCSS(pathname: string, port: number) {
// We need to fetch the main index.html file to populate the list of
@@ -17,10 +18,16 @@ async function fetchCSS(pathname: string, port: number) {
}
test(
- 'works with production builds',
+ 'production build',
{
fs: {
- 'package.json': json`
+ 'package.json': json`{}`,
+ 'pnpm-workspace.yaml': yaml`
+ #
+ packages:
+ - project-a
+ `,
+ 'project-a/package.json': json`
{
"type": "module",
"dependencies": {
@@ -32,7 +39,7 @@ test(
}
}
`,
- 'vite.config.ts': ts`
+ 'project-a/vite.config.ts': ts`
import tailwindcss from '@tailwindcss/vite'
import { defineConfig } from 'vite'
@@ -41,7 +48,7 @@ test(
plugins: [tailwindcss()],
})
`,
- 'index.html': html`
+ 'project-a/index.html': html`
@@ -49,37 +56,43 @@ test(
Hello, world!
`,
- 'src/index.css': css`
- @import 'tailwindcss/theme' reference;
+ 'project-a/src/index.css': css`
+ @import 'tailwindcss/theme' theme(reference);
@import 'tailwindcss/utilities';
+ @source '../../project-b/src/**/*.js';
+ `,
+ 'project-b/src/index.js': js`
+ const className = "content-['project-b/src/index.js']"
+ module.exports = { className }
`,
},
},
- async ({ fs, exec }) => {
- await exec('pnpm vite build')
+ async ({ root, fs, exec }) => {
+ await exec('pnpm vite build', { cwd: path.join(root, 'project-a') })
- let files = await fs.glob('dist/**/*.css')
+ let files = await fs.glob('project-a/dist/**/*.css')
expect(files).toHaveLength(1)
- let [, content] = files[0]
- expect(stripTailwindComment(content)).toMatchInlineSnapshot(
- `
- ".m-2 {
- margin: var(--spacing-2, .5rem);
- }
+ let [filename] = files[0]
- .underline {
- text-decoration-line: underline;
- }"
- `,
- )
+ await fs.expectFileToContain(filename, [
+ candidate`underline`,
+ candidate`m-2`,
+ candidate`content-['project-b/src/index.js']`,
+ ])
},
)
test(
- 'works with dev builds and live reloads',
+ 'dev mode',
{
fs: {
- 'package.json': json`
+ 'package.json': json`{}`,
+ 'pnpm-workspace.yaml': yaml`
+ #
+ packages:
+ - project-a
+ `,
+ 'project-a/package.json': json`
{
"type": "module",
"dependencies": {
@@ -91,7 +104,7 @@ test(
}
}
`,
- 'vite.config.ts': ts`
+ 'project-a/vite.config.ts': ts`
import tailwindcss from '@tailwindcss/vite'
import { defineConfig } from 'vite'
@@ -100,7 +113,7 @@ test(
plugins: [tailwindcss()],
})
`,
- 'index.html': html`
+ 'project-a/index.html': html`
@@ -108,29 +121,30 @@ test(
Hello, world!
`,
- 'src/index.css': css`
- @import 'tailwindcss/theme' reference;
+ 'project-a/src/index.css': css`
+ @import 'tailwindcss/theme' theme(reference);
@import 'tailwindcss/utilities';
+ @source '../../project-b/src/**/*.js';
+ `,
+ 'project-b/src/index.js': js`
+ const className = "content-['project-b/src/index.js']"
+ module.exports = { className }
`,
},
},
- async ({ spawn, getFreePort, fs }) => {
+ async ({ root, spawn, getFreePort, fs }) => {
let port = await getFreePort()
- let process = await spawn(`pnpm vite dev --port ${port}`)
+ let process = await spawn(`pnpm vite dev --port ${port}`, {
+ cwd: path.join(root, 'project-a'),
+ })
await process.onStdout((message) => message.includes('ready in'))
let css = await fetchCSS('/src/index.css', port)
- expect(stripTailwindComment(css)).toMatchInlineSnapshot(
- `
- ".underline {
- text-decoration-line: underline;
- }"
- `,
- )
+ expect(css).toContain(candidate`underline`)
await fs.write(
- 'index.html',
+ 'project-a/index.html',
html`
@@ -143,15 +157,18 @@ test(
await process.onStdout((message) => message.includes('page reload'))
css = await fetchCSS('/src/index.css', port)
- expect(stripTailwindComment(css)).toMatchInlineSnapshot(
- `
- ".m-2 {
- margin: var(--spacing-2, 0.5rem);
- }
- .underline {
- text-decoration-line: underline;
- }"
- `,
+ expect(css).toContain(candidate`m-2`)
+
+ await fs.write(
+ 'project-b/src/index.js',
+ js`
+ const className = "[.changed_&]:content-['project-b/src/index.js']"
+ module.exports = { className }
+ `,
)
+ await process.onStdout((message) => message.includes('page reload'))
+
+ css = await fetchCSS('/src/index.css', port)
+ expect(css).toContain(candidate`[.changed_&]:content-['project-b/src/index.js']`)
},
)
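The interesting part of both tests is the `@source` rule reaching outside the Vite root. Under the hood the data flow is roughly the following sketch (paths are illustrative; the shapes of `compile` and `scanDir` match the CLI and PostCSS changes later in this diff):

import { scanDir } from '@tailwindcss/oxide'
import { compile } from 'tailwindcss'

let compiler = compile(`
  @tailwind utilities;
  @source '../../project-b/src/**/*.js';
`)
// compiler.globs === ['../../project-b/src/**/*.js']

let { candidates } = scanDir({
  // Globs are resolved relative to the directory of the input CSS file.
  sources: compiler.globs.map((pattern) => ({ base: '/repo/project-a/src', pattern })),
})

console.log(compiler.build(candidates))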
diff --git a/packages/@tailwindcss-cli/package.json b/packages/@tailwindcss-cli/package.json
index 53344e308..0c77e1bba 100644
--- a/packages/@tailwindcss-cli/package.json
+++ b/packages/@tailwindcss-cli/package.json
@@ -12,7 +12,7 @@
"homepage": "https://tailwindcss.com",
"scripts": {
"lint": "tsc --noEmit",
- "build": "tsup-node ./src/index.ts --format esm --minify --clean",
+ "build": "tsup-node",
"dev": "pnpm run build -- --watch"
},
"bin": {
@@ -39,6 +39,7 @@
"tailwindcss": "workspace:^"
},
"devDependencies": {
- "@types/postcss-import": "^14.0.3"
+ "@types/postcss-import": "^14.0.3",
+ "internal-postcss-fix-relative-paths": "workspace:^"
}
}
diff --git a/packages/@tailwindcss-cli/src/commands/build/index.ts b/packages/@tailwindcss-cli/src/commands/build/index.ts
index 62084ec45..33202859d 100644
--- a/packages/@tailwindcss-cli/src/commands/build/index.ts
+++ b/packages/@tailwindcss-cli/src/commands/build/index.ts
@@ -1,6 +1,8 @@
import watcher from '@parcel/watcher'
-import { IO, Parsing, scanDir, scanFiles, type ChangedContent } from '@tailwindcss/oxide'
+import { clearCache, scanDir, type ChangedContent } from '@tailwindcss/oxide'
+import fixRelativePathsPlugin from 'internal-postcss-fix-relative-paths'
import { Features, transform } from 'lightningcss'
+import { createRequire } from 'module'
import { existsSync } from 'node:fs'
import fs from 'node:fs/promises'
import path from 'node:path'
@@ -8,6 +10,7 @@ import postcss from 'postcss'
import atImport from 'postcss-import'
import * as tailwindcss from 'tailwindcss'
import type { Arg, Result } from '../../utils/args'
+import { Disposables } from '../../utils/disposables'
import {
eprintln,
formatDuration,
@@ -18,6 +21,7 @@ import {
} from '../../utils/renderer'
import { resolve } from '../../utils/resolve'
import { drainStdin, outputFile } from './utils'
+const require = createRequire(import.meta.url)
const css = String.raw
@@ -79,7 +83,6 @@ export async function handle(args: Result<ReturnType<typeof options>>) {
}
let start = process.hrtime.bigint()
- let { candidates } = scanDir({ base })
// Resolve the input
let [input, cssImportPaths] = await handleImports(
@@ -125,14 +128,13 @@ export async function handle(args: Result<ReturnType<typeof options>>) {
}
let inputFile = args['--input'] && args['--input'] !== '-' ? args['--input'] : process.cwd()
-
- let basePath = path.dirname(path.resolve(inputFile))
+ let inputBasePath = path.dirname(path.resolve(inputFile))
function compile(css: string) {
return tailwindcss.compile(css, {
loadPlugin: (pluginPath) => {
if (pluginPath[0] === '.') {
- return require(path.resolve(basePath, pluginPath))
+ return require(path.resolve(inputBasePath, pluginPath))
}
return require(pluginPath)
@@ -141,100 +143,119 @@ export async function handle(args: Result<ReturnType<typeof options>>) {
}
// Compile the input
- let { build } = compile(input)
-
- await write(build(candidates), args)
-
- let end = process.hrtime.bigint()
- eprintln(header())
- eprintln()
- eprintln(`Done in ${formatDuration(end - start)}`)
+ let compiler = compile(input)
+ let scanDirResult = scanDir({
+ base, // Root directory, mainly used for auto content detection
+ sources: compiler.globs.map((pattern) => ({
+ base: inputBasePath, // Globs are relative to the input.css file
+ pattern,
+ })),
+ })
// Watch for changes
if (args['--watch']) {
- await watcher.subscribe(base, async (err, events) => {
- if (err) {
- console.error(err)
- return
- }
+ let cleanupWatchers = await createWatchers(
+ watchDirectories(base, scanDirResult),
+ async function handle(files) {
+ try {
+ // If the only change happened to the output file, then we don't want to
+ // trigger a rebuild because that will result in an infinite loop.
+ if (files.length === 1 && files[0] === args['--output']) return
- try {
- // If the only change happened to the output file, then we don't want to
- // trigger a rebuild because that will result in an infinite loop.
- if (events.length === 1 && events[0].path === args['--output']) return
+ let changedFiles: ChangedContent[] = []
+ let rebuildStrategy: 'incremental' | 'full' = 'incremental'
- let changedFiles: ChangedContent[] = []
- let rebuildStrategy: 'incremental' | 'full' = 'incremental'
+ for (let file of files) {
+ // If one of the changed files is related to the input CSS files, then
+ // we need to do a full rebuild because the theme might have changed.
+ if (cssImportPaths.includes(file)) {
+ rebuildStrategy = 'full'
- for (let event of events) {
- // Track new and updated files for incremental rebuilds.
- if (event.type === 'create' || event.type === 'update') {
+ // No need to check the rest of the events, because we already know we
+ // need to do a full rebuild.
+ break
+ }
+
+ // Track new and updated files for incremental rebuilds.
changedFiles.push({
- file: event.path,
- extension: path.extname(event.path).slice(1),
+ file,
+ extension: path.extname(file).slice(1),
} satisfies ChangedContent)
}
- // If one of the changed files is related to the input CSS files, then
- // we need to do a full rebuild because the theme might have changed.
- if (cssImportPaths.includes(event.path)) {
- rebuildStrategy = 'full'
+ // Re-compile the input
+ let start = process.hrtime.bigint()
- // No need to check the rest of the events, because we already know we
- // need to do a full rebuild.
- break
+ // Track the compiled CSS
+ let compiledCss = ''
+
+ // Scan the entire `base` directory for full rebuilds.
+ if (rebuildStrategy === 'full') {
+ // Clear all watchers
+ cleanupWatchers()
+
+ // Clear cached candidates
+ clearCache()
+
+ // Collect the new `input` and `cssImportPaths`.
+ ;[input, cssImportPaths] = await handleImports(
+ args['--input']
+ ? await fs.readFile(args['--input'], 'utf-8')
+ : css`
+ @import '${resolve('tailwindcss/index.css')}';
+ `,
+ args['--input'] ?? base,
+ )
+
+ // Create a new compiler, given the new `input`
+ compiler = compile(input)
+
+ // Re-scan the directory to get the new `candidates`
+ scanDirResult = scanDir({
+ base, // Root directory, mainly used for auto content detection
+ sources: compiler.globs.map((pattern) => ({
+ base: inputBasePath, // Globs are relative to the input.css file
+ pattern,
+ })),
+ })
+
+ // Setup new watchers
+ cleanupWatchers = await createWatchers(watchDirectories(base, scanDirResult), handle)
+
+ // Re-compile the CSS
+ compiledCss = compiler.build(scanDirResult.candidates)
+ }
+
+ // Scan changed files only for incremental rebuilds.
+ else if (rebuildStrategy === 'incremental') {
+ let candidates = scanDirResult.scanFiles(changedFiles)
+
+ // No candidates found which means we don't need to rebuild. This can
+ // happen if a file is detected but doesn't match any of the globs.
+ if (candidates.length === 0) return
+
+ compiledCss = compiler.build(candidates)
+ }
+
+ await write(compiledCss, args)
+
+ let end = process.hrtime.bigint()
+ eprintln(`Done in ${formatDuration(end - start)}`)
+ } catch (err) {
+ // Catch any errors and print them to stderr, but don't exit the process
+ // and keep watching.
+ if (err instanceof Error) {
+ eprintln(err.toString())
}
}
-
- // Re-compile the input
- let start = process.hrtime.bigint()
-
- // Track the compiled CSS
- let compiledCss = ''
-
- // Scan the entire `base` directory for full rebuilds.
- if (rebuildStrategy === 'full') {
- // Re-scan the directory to get the new `candidates`.
- candidates = scanDir({ base }).candidates
-
- // Collect the new `input` and `cssImportPaths`.
- ;[input, cssImportPaths] = await handleImports(
- args['--input']
- ? await fs.readFile(args['--input'], 'utf-8')
- : css`
- @import '${resolve('tailwindcss/index.css')}';
- `,
- args['--input'] ?? base,
- )
-
- build = compile(input).build
- compiledCss = build(candidates)
- }
-
- // Scan changed files only for incremental rebuilds.
- else if (rebuildStrategy === 'incremental') {
- let newCandidates = scanFiles(changedFiles, IO.Sequential | Parsing.Sequential)
-
- compiledCss = build(newCandidates)
- }
-
- await write(compiledCss, args)
-
- let end = process.hrtime.bigint()
- eprintln(`Done in ${formatDuration(end - start)}`)
- } catch (err) {
- // Catch any errors and print them to stderr, but don't exit the process
- // and keep watching.
- if (err instanceof Error) {
- eprintln(err.toString())
- }
- }
- })
+ },
+ )
// Abort the watcher if `stdin` is closed to avoid zombie processes. You can
// disable this behavior with `--watch=always`.
if (args['--watch'] !== 'always') {
process.stdin.on('end', () => {
+ cleanupWatchers()
process.exit(0)
})
}
@@ -242,6 +263,98 @@ export async function handle(args: Result<ReturnType<typeof options>>) {
// Keep the process running
process.stdin.resume()
}
+
+ await write(compiler.build(scanDirResult.candidates), args)
+
+ let end = process.hrtime.bigint()
+ eprintln(header())
+ eprintln()
+ eprintln(`Done in ${formatDuration(end - start)}`)
+}
+
+function watchDirectories(base: string, scanDirResult: ReturnType<typeof scanDir>) {
+ return [base].concat(
+ scanDirResult.globs.flatMap((globEntry) => {
+ // We don't want a watcher for negated globs.
+ if (globEntry.pattern[0] === '!') return []
+
+ // We don't want a watcher for nested directories, these will be covered
+ // by the `base` directory already.
+ if (globEntry.base.startsWith(base)) return []
+
+ return globEntry.base
+ }),
+ )
+}
+
+async function createWatchers(dirs: string[], cb: (files: string[]) => void) {
+ // Track all Parcel watchers for each glob.
+ //
+ // When we encounter a change in a CSS file, we need to setup new watchers and
+ // we want to cleanup the old ones we captured here.
+ let watchers = new Disposables()
+
+ // Track all files that were added or changed.
+  let files = new Set<string>()
+
+ // Keep track of the debounce queue to avoid multiple rebuilds.
+ let debounceQueue = new Disposables()
+
+ // A changed file can be watched by multiple watchers, but we only want to
+ // handle the file once. We debounce the handle function with the collected
+ // files to handle them in a single batch and to avoid multiple rebuilds.
+ function enqueueCallback() {
+ // Dispose all existing macrotask.
+ debounceQueue.dispose()
+
+ // Setup a new macrotask to handle the files in batch.
+ debounceQueue.queueMacrotask(() => {
+ cb(Array.from(files))
+ files.clear()
+ })
+ }
+
+ // Setup a watcher for every directory.
+ for (let dir of dirs) {
+ let { unsubscribe } = await watcher.subscribe(dir, async (err, events) => {
+ // Whenever an error occurs we want to let the user know about it but we
+ // want to keep watching for changes.
+ if (err) {
+ console.error(err)
+ return
+ }
+
+ await Promise.all(
+ events.map(async (event) => {
+ // We currently don't handle deleted files because it doesn't influence
+ // the CSS output. This is because we currently keep all scanned
+ // candidates in a cache for performance reasons.
+ if (event.type === 'delete') return
+
+ // Ignore directory changes. We only care about file changes
+ let stats = await fs.lstat(event.path)
+ if (stats.isDirectory()) {
+ return
+ }
+
+ // Track the changed file.
+ files.add(event.path)
+ }),
+ )
+
+ // Handle the tracked files at some point in the future.
+ enqueueCallback()
+ })
+
+ // Ensure we cleanup the watcher when we're done.
+ watchers.add(unsubscribe)
+ }
+
+ // Cleanup
+ return () => {
+ watchers.dispose()
+ debounceQueue.dispose()
+ }
}
function handleImports(
@@ -259,6 +372,7 @@ function handleImports(
return postcss()
.use(atImport())
+ .use(fixRelativePathsPlugin())
.process(input, { from: file })
.then((result) => [
result.css,
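To make the new watcher setup concrete, this is the filtering `watchDirectories` performs, inlined with assumed paths (the real function receives the full `scanDir` result):

// Only globs that live outside the base directory get their own watcher.
let base = '/repo/project-a'
let globs = [
  { base: '/repo/project-a/src', pattern: '**/*.html' },  // nested inside `base` → already covered
  { base: '/repo/project-b/src', pattern: '**/*.js' },    // outside `base` → extra watcher
  { base: '/repo/project-b/src', pattern: '!**/*.snap' }, // negated → never watched
]
let watched = [base].concat(
  globs.flatMap((glob) => {
    if (glob.pattern[0] === '!') return []
    if (glob.base.startsWith(base)) return []
    return glob.base
  }),
)
console.log(watched) // ['/repo/project-a', '/repo/project-b/src']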
diff --git a/packages/@tailwindcss-cli/src/utils/disposables.ts b/packages/@tailwindcss-cli/src/utils/disposables.ts
new file mode 100644
index 000000000..cb0d982c1
--- /dev/null
+++ b/packages/@tailwindcss-cli/src/utils/disposables.ts
@@ -0,0 +1,45 @@
+/**
+ * Disposables allow you to manage resources that can be cleaned up. Each helper
+ * function returns a dispose function to clean up the resource.
+ *
+ * The `dispose` method can be called to clean up all resources at once.
+ */
+export class Disposables {
+ // Track all disposables
+  #disposables = new Set<() => void>([])
+
+ /**
+ * Enqueue a callback in the macrotasks queue.
+ */
+ queueMacrotask(cb: () => void) {
+ let timer = setTimeout(cb, 0)
+
+ return this.add(() => {
+ clearTimeout(timer)
+ })
+ }
+
+ /**
+ * General purpose disposable function that can be cleaned up.
+ */
+ add(dispose: () => void) {
+ this.#disposables.add(dispose)
+
+ return () => {
+ this.#disposables.delete(dispose)
+
+ dispose()
+ }
+ }
+
+ /**
+ * Dispose all disposables at once.
+ */
+ dispose() {
+ for (let dispose of this.#disposables) {
+ dispose()
+ }
+
+ this.#disposables.clear()
+ }
+}
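A small usage sketch for the new `Disposables` helper, mirroring how `createWatchers` debounces its callback (the import path assumes the file location above):

import { Disposables } from './disposables'

let queue = new Disposables()

function schedule(cb: () => void) {
  // Cancel whatever was queued before…
  queue.dispose()
  // …and queue the new callback as a macrotask.
  queue.queueMacrotask(cb)
}

schedule(() => console.log('never runs'))
schedule(() => console.log('runs exactly once'))

// Individual disposables can also be removed and run eagerly:
let stop = queue.add(() => console.log('watcher closed'))
stop()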
diff --git a/packages/@tailwindcss-cli/tsup.config.ts b/packages/@tailwindcss-cli/tsup.config.ts
new file mode 100644
index 000000000..236281270
--- /dev/null
+++ b/packages/@tailwindcss-cli/tsup.config.ts
@@ -0,0 +1,9 @@
+import { defineConfig } from 'tsup'
+
+export default defineConfig({
+ format: ['esm'],
+ clean: true,
+ minify: true,
+ entry: ['src/index.ts'],
+ noExternal: ['internal-postcss-fix-relative-paths'],
+})
diff --git a/packages/@tailwindcss-postcss/package.json b/packages/@tailwindcss-postcss/package.json
index a5cc2100e..f287d2f6c 100644
--- a/packages/@tailwindcss-postcss/package.json
+++ b/packages/@tailwindcss-postcss/package.json
@@ -12,7 +12,7 @@
"homepage": "https://tailwindcss.com",
"scripts": {
"lint": "tsc --noEmit",
- "build": "tsup-node ./src/index.ts --format cjs,esm --dts --cjsInterop --splitting --minify --clean",
+ "build": "tsup-node",
"dev": "pnpm run build -- --watch"
},
"files": [
@@ -39,6 +39,7 @@
"@types/node": "catalog:",
"@types/postcss-import": "^14.0.3",
"postcss": "8.4.24",
- "internal-example-plugin": "workspace:*"
+ "internal-example-plugin": "workspace:*",
+ "internal-postcss-fix-relative-paths": "workspace:^"
}
}
diff --git a/packages/@tailwindcss-postcss/src/fixtures/example-project/src/relative-import.css b/packages/@tailwindcss-postcss/src/fixtures/example-project/src/relative-import.css
new file mode 100644
index 000000000..48a30ab4d
--- /dev/null
+++ b/packages/@tailwindcss-postcss/src/fixtures/example-project/src/relative-import.css
@@ -0,0 +1 @@
+@plugin '../plugin.js';
diff --git a/packages/@tailwindcss-postcss/src/index.test.ts b/packages/@tailwindcss-postcss/src/index.test.ts
index 7aeae5c6c..ce4699586 100644
--- a/packages/@tailwindcss-postcss/src/index.test.ts
+++ b/packages/@tailwindcss-postcss/src/index.test.ts
@@ -13,7 +13,7 @@ const INPUT_CSS_PATH = `${__dirname}/fixtures/example-project/input.css`
const css = String.raw
beforeEach(async () => {
- const { clearCache } = await import('@tailwindcss/oxide')
+ let { clearCache } = await import('@tailwindcss/oxide')
clearCache()
})
@@ -144,7 +144,7 @@ describe('plugins', () => {
let result = await processor.process(
css`
@import 'tailwindcss/utilities';
- @plugin 'internal-example-plugin';
+ @plugin './plugin.js';
`,
{ from: INPUT_CSS_PATH },
)
@@ -166,6 +166,36 @@ describe('plugins', () => {
`)
})
+ test('local CJS plugin from `@import`-ed file', async () => {
+ let processor = postcss([
+ tailwindcss({ base: `${__dirname}/fixtures/example-project`, optimize: { minify: false } }),
+ ])
+
+ let result = await processor.process(
+ css`
+ @import 'tailwindcss/utilities';
+ @import '../example-project/src/relative-import.css';
+ `,
+ { from: `${__dirname}/fixtures/another-project/input.css` },
+ )
+
+ expect(result.css.trim()).toMatchInlineSnapshot(`
+ ".underline {
+ text-decoration-line: underline;
+ }
+
+ @media (inverted-colors: inverted) {
+ .inverted\\:flex {
+ display: flex;
+ }
+ }
+
+ .hocus\\:underline:focus, .hocus\\:underline:hover {
+ text-decoration-line: underline;
+ }"
+ `)
+ })
+
test('published CJS plugin', async () => {
let processor = postcss([
tailwindcss({ base: `${__dirname}/fixtures/example-project`, optimize: { minify: false } }),
diff --git a/packages/@tailwindcss-postcss/src/index.ts b/packages/@tailwindcss-postcss/src/index.ts
index 30a0828a2..700250527 100644
--- a/packages/@tailwindcss-postcss/src/index.ts
+++ b/packages/@tailwindcss-postcss/src/index.ts
@@ -1,8 +1,9 @@
import { scanDir } from '@tailwindcss/oxide'
import fs from 'fs'
+import fixRelativePathsPlugin from 'internal-postcss-fix-relative-paths'
import { Features, transform } from 'lightningcss'
import path from 'path'
-import postcss, { type AcceptedPlugin, type PluginCreator } from 'postcss'
+import postcss, { AtRule, type AcceptedPlugin, type PluginCreator } from 'postcss'
import postcssImport from 'postcss-import'
import { compile } from 'tailwindcss'
@@ -42,120 +43,137 @@ function tailwindcss(opts: PluginOptions = {}): AcceptedPlugin {
let cache = new DefaultMap(() => {
return {
mtimes: new Map(),
-      build: null as null | ReturnType<typeof compile>['build'],
+      compiler: null as null | ReturnType<typeof compile>,
css: '',
optimizedCss: '',
}
})
+ let hasApply: boolean, hasTailwind: boolean
+
return {
postcssPlugin: '@tailwindcss/postcss',
plugins: [
// We need to run `postcss-import` first to handle `@import` rules.
postcssImport(),
+ fixRelativePathsPlugin(),
- (root, result) => {
- let inputFile = result.opts.from ?? ''
- let context = cache.get(inputFile)
-
- let rebuildStrategy: 'full' | 'incremental' = 'incremental'
-
- // Track file modification times to CSS files
- {
- let files = result.messages.flatMap((message) => {
- if (message.type !== 'dependency') return []
- return message.file
- })
- files.push(inputFile)
- for (let file of files) {
- let changedTime = fs.statSync(file, { throwIfNoEntry: false })?.mtimeMs ?? null
- if (changedTime === null) {
- if (file === inputFile) {
- rebuildStrategy = 'full'
- }
- continue
- }
-
- let prevTime = context.mtimes.get(file)
- if (prevTime === changedTime) continue
-
- rebuildStrategy = 'full'
- context.mtimes.set(file, changedTime)
- }
- }
-
- let hasApply = false
- let hasTailwind = false
-
- root.walkAtRules((rule) => {
+ {
+ postcssPlugin: 'tailwindcss',
+ Once() {
+ // Reset some state between builds
+ hasApply = false
+ hasTailwind = false
+ },
+ AtRule(rule: AtRule) {
if (rule.name === 'apply') {
hasApply = true
} else if (rule.name === 'tailwind') {
hasApply = true
hasTailwind = true
- // If we've found `@tailwind` then we already
- // know we have to run a "full" build
- return false
}
- })
+ },
+ OnceExit(root, { result }) {
+ let inputFile = result.opts.from ?? ''
+ let context = cache.get(inputFile)
+ let inputBasePath = path.dirname(path.resolve(inputFile))
- // Do nothing if neither `@tailwind` nor `@apply` is used
- if (!hasTailwind && !hasApply) return
+ function createCompiler() {
+ return compile(root.toString(), {
+ loadPlugin: (pluginPath) => {
+ if (pluginPath[0] === '.') {
+ return require(path.resolve(inputBasePath, pluginPath))
+ }
- let css = ''
+ return require(pluginPath)
+ },
+ })
+ }
- // Look for candidates used to generate the CSS
- let { candidates, files, globs } = scanDir({ base, globs: true })
+ // Setup the compiler if it doesn't exist yet. This way we can
+ // guarantee a `build()` function is available.
+ context.compiler ??= createCompiler()
- // Add all found files as direct dependencies
- for (let file of files) {
- result.messages.push({
- type: 'dependency',
- plugin: '@tailwindcss/postcss',
- file,
- parent: result.opts.from,
- })
- }
+ let rebuildStrategy: 'full' | 'incremental' = 'incremental'
- // Register dependencies so changes in `base` cause a rebuild while
- // giving tools like Vite or Parcel a glob that can be used to limit
- // the files that cause a rebuild to only those that match it.
- for (let { base, glob } of globs) {
- result.messages.push({
- type: 'dir-dependency',
- plugin: '@tailwindcss/postcss',
- dir: base,
- glob,
- parent: result.opts.from,
- })
- }
-
- if (rebuildStrategy === 'full') {
- let basePath = path.dirname(path.resolve(inputFile))
- let { build } = compile(root.toString(), {
- loadPlugin: (pluginPath) => {
- if (pluginPath[0] === '.') {
- return require(path.resolve(basePath, pluginPath))
+ // Track file modification times to CSS files
+ {
+ let files = result.messages.flatMap((message) => {
+ if (message.type !== 'dependency') return []
+ return message.file
+ })
+ files.push(inputFile)
+ for (let file of files) {
+ let changedTime = fs.statSync(file, { throwIfNoEntry: false })?.mtimeMs ?? null
+ if (changedTime === null) {
+ if (file === inputFile) {
+ rebuildStrategy = 'full'
+ }
+ continue
}
- return require(pluginPath)
- },
- })
- context.build = build
- css = build(hasTailwind ? candidates : [])
- } else if (rebuildStrategy === 'incremental') {
- css = context.build!(candidates)
- }
+ let prevTime = context.mtimes.get(file)
+ if (prevTime === changedTime) continue
- // Replace CSS
- if (css !== context.css && optimize) {
- context.optimizedCss = optimizeCss(css, {
- minify: typeof optimize === 'object' ? optimize.minify : true,
+ rebuildStrategy = 'full'
+ context.mtimes.set(file, changedTime)
+ }
+ }
+
+ // Do nothing if neither `@tailwind` nor `@apply` is used
+ if (!hasTailwind && !hasApply) return
+
+ let css = ''
+
+ // Look for candidates used to generate the CSS
+ let scanDirResult = scanDir({
+ base, // Root directory, mainly used for auto content detection
+ sources: context.compiler.globs.map((pattern) => ({
+ base: inputBasePath, // Globs are relative to the input.css file
+ pattern,
+ })),
})
- }
- context.css = css
- root.removeAll()
- root.append(postcss.parse(optimize ? context.optimizedCss : context.css, result.opts))
+
+ // Add all found files as direct dependencies
+ for (let file of scanDirResult.files) {
+ result.messages.push({
+ type: 'dependency',
+ plugin: '@tailwindcss/postcss',
+ file,
+ parent: result.opts.from,
+ })
+ }
+
+ // Register dependencies so changes in `base` cause a rebuild while
+ // giving tools like Vite or Parcel a glob that can be used to limit
+ // the files that cause a rebuild to only those that match it.
+ for (let { base, pattern } of scanDirResult.globs) {
+ result.messages.push({
+ type: 'dir-dependency',
+ plugin: '@tailwindcss/postcss',
+ dir: base,
+ glob: pattern,
+ parent: result.opts.from,
+ })
+ }
+
+ if (rebuildStrategy === 'full') {
+ context.compiler = createCompiler()
+ css = context.compiler.build(hasTailwind ? scanDirResult.candidates : [])
+ } else if (rebuildStrategy === 'incremental') {
+ css = context.compiler.build!(scanDirResult.candidates)
+ }
+
+ // Replace CSS
+ if (css !== context.css && optimize) {
+ context.optimizedCss = optimizeCss(css, {
+ minify: typeof optimize === 'object' ? optimize.minify : true,
+ })
+ }
+ context.css = css
+ root.removeAll()
+ root.append(postcss.parse(optimize ? context.optimizedCss : context.css, result.opts))
+ },
},
],
}
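For reference, a typical way to wire the reworked plugin up, using the two options this diff actually reads (`base` and `optimize`); that the package's default export is the `tailwindcss` factory shown above is an assumption of this sketch:

// postcss.config.mjs (illustrative)
import tailwindcss from '@tailwindcss/postcss'

export default {
  plugins: [
    tailwindcss({
      base: process.cwd(),         // root used for automatic content detection
      optimize: { minify: false }, // run Lightning CSS but skip minification
    }),
  ],
}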
diff --git a/packages/@tailwindcss-postcss/tsup.config.ts b/packages/@tailwindcss-postcss/tsup.config.ts
new file mode 100644
index 000000000..08ec8092a
--- /dev/null
+++ b/packages/@tailwindcss-postcss/tsup.config.ts
@@ -0,0 +1,12 @@
+import { defineConfig } from 'tsup'
+
+export default defineConfig({
+ format: ['esm', 'cjs'],
+ clean: true,
+ minify: true,
+ splitting: true,
+ cjsInterop: true,
+ dts: true,
+ entry: ['src/index.ts'],
+ noExternal: ['internal-postcss-fix-relative-paths'],
+})
diff --git a/packages/@tailwindcss-vite/package.json b/packages/@tailwindcss-vite/package.json
index 020a557c6..acbf1a152 100644
--- a/packages/@tailwindcss-vite/package.json
+++ b/packages/@tailwindcss-vite/package.json
@@ -11,7 +11,7 @@
"bugs": "https://github.com/tailwindlabs/tailwindcss/issues",
"homepage": "https://tailwindcss.com",
"scripts": {
- "build": "tsup-node ./src/index.ts --format esm --dts --minify --clean",
+ "build": "tsup-node",
"dev": "pnpm run build -- --watch"
},
"files": [
@@ -30,11 +30,13 @@
"dependencies": {
"@tailwindcss/oxide": "workspace:^",
"lightningcss": "^1.25.1",
+ "postcss-load-config": "^6.0.1",
"tailwindcss": "workspace:^"
},
"devDependencies": {
"@types/node": "catalog:",
- "vite": "catalog:"
+ "vite": "catalog:",
+ "internal-postcss-fix-relative-paths": "workspace:^"
},
"peerDependencies": {
"vite": "^5.2.0"
diff --git a/packages/@tailwindcss-vite/src/index.ts b/packages/@tailwindcss-vite/src/index.ts
index d8a0924c5..e0bd810c0 100644
--- a/packages/@tailwindcss-vite/src/index.ts
+++ b/packages/@tailwindcss-vite/src/index.ts
@@ -1,12 +1,18 @@
-import { IO, Parsing, scanFiles } from '@tailwindcss/oxide'
+import { scanDir } from '@tailwindcss/oxide'
+import fixRelativePathsPlugin, { normalizePath } from 'internal-postcss-fix-relative-paths'
import { Features, transform } from 'lightningcss'
import path from 'path'
+import postcssrc from 'postcss-load-config'
import { compile } from 'tailwindcss'
-import type { Plugin, Rollup, Update, ViteDevServer } from 'vite'
+import type { Plugin, ResolvedConfig, Rollup, Update, ViteDevServer } from 'vite'
export default function tailwindcss(): Plugin[] {
let server: ViteDevServer | null = null
+ let config: ResolvedConfig | null = null
  let candidates = new Set<string>()
+  let scanDirResult: ReturnType<typeof scanDir> | null = null
+ let changedContent: { content: string; extension: string }[] = []
+
// In serve mode this is treated as a set — the content doesn't matter.
// In build mode, we store file contents to use them in renderChunk.
let cssModules: Record<
@@ -56,11 +62,14 @@ export default function tailwindcss(): Plugin[] {
function scan(src: string, extension: string) {
let updated = false
+
+ if (scanDirResult === null) {
+ changedContent.push({ content: src, extension })
+ return updated
+ }
+
// Parse all candidates given the resolved files
- for (let candidate of scanFiles(
- [{ content: src, extension }],
- IO.Sequential | Parsing.Sequential,
- )) {
+ for (let candidate of scanDirResult?.scanFiles([{ content: src, extension }]) ?? []) {
// On an initial or full build, updated becomes true immediately so we
// won't be making extra checks.
if (!updated) {
@@ -72,22 +81,62 @@ export default function tailwindcss(): Plugin[] {
return updated
}
- function generateCss(css: string, inputPath: string) {
- let basePath = path.dirname(path.resolve(inputPath))
-
- return compile(css, {
+ function generateCss(css: string, inputPath: string, addWatchFile: (file: string) => void) {
+ let inputBasePath = path.dirname(path.resolve(inputPath))
+ let { build, globs } = compile(css, {
loadPlugin: (pluginPath) => {
if (pluginPath[0] === '.') {
- return require(path.resolve(basePath, pluginPath))
+ return require(path.resolve(inputBasePath, pluginPath))
}
return require(pluginPath)
},
- }).build(Array.from(candidates))
+ })
+
+ scanDirResult = scanDir({
+ sources: globs.map((pattern) => ({
+ base: inputBasePath, // Globs are relative to the input.css file
+ pattern,
+ })),
+ })
+
+ if (changedContent.length > 0) {
+ scanDirResult.candidates = scanDirResult.scanFiles(changedContent.splice(0))
+ }
+
+ for (let candidate of scanDirResult.candidates) {
+ candidates.add(candidate)
+ }
+
+ // Watch individual files
+ for (let file of scanDirResult.files) {
+ addWatchFile(file)
+ }
+
+ // Watch globs
+ for (let glob of scanDirResult.globs) {
+ if (glob.pattern[0] === '!') continue
+
+ let relative = path.relative(config!.root, glob.base)
+ if (relative[0] !== '.') {
+ relative = './' + relative
+ }
+ // Ensure relative is a posix style path since we will merge it with
+ // the glob.
+ relative = normalizePath(relative)
+
+ addWatchFile(path.posix.join(relative, glob.pattern))
+ }
+
+ return build(Array.from(candidates))
}
- function generateOptimizedCss(css: string, inputPath: string) {
- return optimizeCss(generateCss(css, inputPath), { minify })
+ function generateOptimizedCss(
+ css: string,
+ inputPath: string,
+ addWatchFile: (file: string) => void,
+ ) {
+ return optimizeCss(generateCss(css, inputPath, addWatchFile), { minify })
}
// Manually run the transform functions of non-Tailwind plugins on the given CSS
@@ -101,7 +150,7 @@ export default function tailwindcss(): Plugin[] {
for (let plugin of cssPlugins) {
if (!plugin.transform) continue
- const transformHandler =
+ let transformHandler =
'handler' in plugin.transform! ? plugin.transform.handler : plugin.transform!
try {
@@ -133,7 +182,8 @@ export default function tailwindcss(): Plugin[] {
server = _server
},
- async configResolved(config) {
+ async configResolved(_config) {
+ config = _config
minify = config.build.cssMinify !== false
isSSR = config.build.ssr !== false && config.build.ssr !== undefined
@@ -152,6 +202,55 @@ export default function tailwindcss(): Plugin[] {
})
},
+ // Append the postcss-fix-relative-paths plugin
+ async config(config) {
+ let postcssConfig = config.css?.postcss
+
+ if (typeof postcssConfig === 'string') {
+ // We expand string configs to their PostCSS config object similar to
+ // how Vite does it.
+ // See: https://github.com/vitejs/vite/blob/440783953a55c6c63cd09ec8d13728dc4693073d/packages/vite/src/node/plugins/css.ts#L1580
+ let searchPath = typeof postcssConfig === 'string' ? postcssConfig : config.root
+ let parsedConfig = await postcssrc({}, searchPath).catch((e: Error) => {
+ if (!e.message.includes('No PostCSS Config found')) {
+ if (e instanceof Error) {
+ let { name, message, stack } = e
+ e.name = 'Failed to load PostCSS config'
+ e.message = `Failed to load PostCSS config (searchPath: ${searchPath}): [${name}] ${message}\n${stack}`
+ e.stack = '' // add stack to message to retain stack
+ throw e
+ } else {
+ throw new Error(`Failed to load PostCSS config: ${e}`)
+ }
+ }
+ return null
+ })
+ if (parsedConfig !== null) {
+ postcssConfig = {
+ options: parsedConfig.options,
+ plugins: parsedConfig.plugins,
+ } as any
+ } else {
+ postcssConfig = {}
+ }
+ config.css = { postcss: postcssConfig }
+ }
+
+ // postcssConfig is no longer a string after the above. This test is to
+ // avoid TypeScript errors below.
+ if (typeof postcssConfig === 'string') {
+ return
+ }
+
+ if (!postcssConfig || !postcssConfig?.plugins) {
+ config.css = config.css || {}
+ config.css.postcss = postcssConfig || {}
+ config.css.postcss.plugins = [fixRelativePathsPlugin() as any]
+ } else {
+ postcssConfig.plugins.push(fixRelativePathsPlugin() as any)
+ }
+ },
+
// Scan index.html for candidates
transformIndexHtml(html) {
let updated = scan(html, 'html')
@@ -199,7 +298,11 @@ export default function tailwindcss(): Plugin[] {
await server?.waitForRequestsIdle?.(id)
}
- let code = await transformWithPlugins(this, id, generateCss(src, id))
+ let code = await transformWithPlugins(
+ this,
+ id,
+ generateCss(src, id, (file) => this.addWatchFile(file)),
+ )
return { code }
},
},
@@ -223,7 +326,7 @@ export default function tailwindcss(): Plugin[] {
continue
}
- let css = generateOptimizedCss(file.content, id)
+ let css = generateOptimizedCss(file.content, id, (file) => this.addWatchFile(file))
// These plugins have side effects which, during build, results in CSS
// being written to the output dir. We need to run them here to ensure
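The watch-glob registration at the end of `generateCss` boils down to the following path handling (paths illustrative); `normalizePath` keeps the base POSIX-style so it can be joined with the glob pattern:

import path from 'node:path'
import { normalizePath } from 'internal-postcss-fix-relative-paths'

// With config.root = '/repo/project-a' and a scanned glob entry of:
let glob = { base: '/repo/project-b/src', pattern: '**/*.js' }

let relative = path.relative('/repo/project-a', glob.base) // '../project-b/src'
if (relative[0] !== '.') relative = './' + relative
relative = normalizePath(relative) // forward slashes, even on Windows

// This is what ends up being passed to `this.addWatchFile(...)`:
console.log(path.posix.join(relative, glob.pattern)) // '../project-b/src/**/*.js'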
diff --git a/packages/@tailwindcss-vite/tsup.config.ts b/packages/@tailwindcss-vite/tsup.config.ts
new file mode 100644
index 000000000..eaf99e82a
--- /dev/null
+++ b/packages/@tailwindcss-vite/tsup.config.ts
@@ -0,0 +1,10 @@
+import { defineConfig } from 'tsup'
+
+export default defineConfig({
+ format: ['esm'],
+ clean: true,
+ minify: true,
+ dts: true,
+ entry: ['src/index.ts'],
+ noExternal: ['internal-postcss-fix-relative-paths'],
+})
diff --git a/packages/internal-postcss-fix-relative-paths/package.json b/packages/internal-postcss-fix-relative-paths/package.json
new file mode 100644
index 000000000..2ffb3c7dd
--- /dev/null
+++ b/packages/internal-postcss-fix-relative-paths/package.json
@@ -0,0 +1,27 @@
+{
+ "name": "internal-postcss-fix-relative-paths",
+ "version": "0.0.0",
+ "private": true,
+ "scripts": {
+ "lint": "tsc --noEmit",
+ "build": "tsup-node ./src/index.ts --format cjs,esm --dts --cjsInterop --splitting --minify --clean",
+ "dev": "pnpm run build -- --watch"
+ },
+ "files": [
+ "dist/"
+ ],
+ "exports": {
+ ".": {
+ "types": "./dist/index.d.ts",
+ "import": "./dist/index.mjs",
+ "require": "./dist/index.js"
+ }
+ },
+ "dependencies": {},
+ "devDependencies": {
+ "@types/node": "^20.12.12",
+ "@types/postcss-import": "^14.0.3",
+ "postcss": "8.4.24",
+ "postcss-import": "^16.1.0"
+ }
+}
diff --git a/packages/internal-postcss-fix-relative-paths/src/fixtures/example-project/src/index.css b/packages/internal-postcss-fix-relative-paths/src/fixtures/example-project/src/index.css
new file mode 100644
index 000000000..b8c4fc5f1
--- /dev/null
+++ b/packages/internal-postcss-fix-relative-paths/src/fixtures/example-project/src/index.css
@@ -0,0 +1,4 @@
+@source "./**/*.ts";
+@source "!./**/*.ts";
+@plugin "./plugin.js";
+@plugin "./what\"s-this.js";
diff --git a/packages/internal-postcss-fix-relative-paths/src/fixtures/example-project/src/invalid.css b/packages/internal-postcss-fix-relative-paths/src/fixtures/example-project/src/invalid.css
new file mode 100644
index 000000000..9eeb9353e
--- /dev/null
+++ b/packages/internal-postcss-fix-relative-paths/src/fixtures/example-project/src/invalid.css
@@ -0,0 +1,4 @@
+@plugin "/absolute/paths";
+@plugin "C:\Program Files\HAL 9000";
+@plugin "\\Media\Pictures\Worth\1000 words";
+@plugin "some-node-dep";
diff --git a/packages/internal-postcss-fix-relative-paths/src/fixtures/external-import/src/index.css b/packages/internal-postcss-fix-relative-paths/src/fixtures/external-import/src/index.css
new file mode 100644
index 000000000..2c014767b
--- /dev/null
+++ b/packages/internal-postcss-fix-relative-paths/src/fixtures/external-import/src/index.css
@@ -0,0 +1 @@
+@import '../../example-project/src/index.css';
diff --git a/packages/internal-postcss-fix-relative-paths/src/fixtures/external-import/src/invalid.css b/packages/internal-postcss-fix-relative-paths/src/fixtures/external-import/src/invalid.css
new file mode 100644
index 000000000..b69d455c0
--- /dev/null
+++ b/packages/internal-postcss-fix-relative-paths/src/fixtures/external-import/src/invalid.css
@@ -0,0 +1 @@
+@import '../../example-project/src/invalid.css';
diff --git a/packages/internal-postcss-fix-relative-paths/src/fixtures/external-import/src/plugins-in-root.css b/packages/internal-postcss-fix-relative-paths/src/fixtures/external-import/src/plugins-in-root.css
new file mode 100644
index 000000000..d6d5f082c
--- /dev/null
+++ b/packages/internal-postcss-fix-relative-paths/src/fixtures/external-import/src/plugins-in-root.css
@@ -0,0 +1,5 @@
+@import './plugins-in-sibling.css';
+
+@plugin './plugin-in-root.ts';
+@plugin '../plugin-in-root.ts';
+@plugin 'plugin-in-root';
diff --git a/packages/internal-postcss-fix-relative-paths/src/fixtures/external-import/src/plugins-in-sibling.css b/packages/internal-postcss-fix-relative-paths/src/fixtures/external-import/src/plugins-in-sibling.css
new file mode 100644
index 000000000..5df3cb061
--- /dev/null
+++ b/packages/internal-postcss-fix-relative-paths/src/fixtures/external-import/src/plugins-in-sibling.css
@@ -0,0 +1,3 @@
+@plugin './plugin-in-sibling.ts';
+@plugin '../plugin-in-sibling.ts';
+@plugin 'plugin-in-sibling';
diff --git a/packages/internal-postcss-fix-relative-paths/src/index.test.ts b/packages/internal-postcss-fix-relative-paths/src/index.test.ts
new file mode 100644
index 000000000..d2f72f664
--- /dev/null
+++ b/packages/internal-postcss-fix-relative-paths/src/index.test.ts
@@ -0,0 +1,58 @@
+import fs from 'node:fs'
+import path from 'node:path'
+import postcss from 'postcss'
+import atImport from 'postcss-import'
+import { describe, expect, test } from 'vitest'
+import fixRelativePathsPlugin from '.'
+
+describe('fixRelativePathsPlugin', () => {
+ test('rewrites @source and @plugin to be relative to the initial css file', async () => {
+ let cssPath = path.join(__dirname, 'fixtures', 'external-import', 'src', 'index.css')
+ let css = fs.readFileSync(cssPath, 'utf-8')
+
+ let processor = postcss([atImport(), fixRelativePathsPlugin()])
+
+ let result = await processor.process(css, { from: cssPath })
+
+ expect(result.css.trim()).toMatchInlineSnapshot(`
+ "@source "../../example-project/src/**/*.ts";
+ @source "!../../example-project/src/**/*.ts";
+ @plugin "../../example-project/src/plugin.js";
+ @plugin "../../example-project/src/what\\"s-this.js";"
+ `)
+ })
+
+ test('should not rewrite non-relative paths', async () => {
+ let cssPath = path.join(__dirname, 'fixtures', 'external-import', 'src', 'invalid.css')
+ let css = fs.readFileSync(cssPath, 'utf-8')
+
+ let processor = postcss([atImport(), fixRelativePathsPlugin()])
+
+ let result = await processor.process(css, { from: cssPath })
+
+ expect(result.css.trim()).toMatchInlineSnapshot(`
+ "@plugin "/absolute/paths";
+ @plugin "C:\\Program Files\\HAL 9000";
+ @plugin "\\\\Media\\Pictures\\Worth\\1000 words";
+ @plugin "some-node-dep";"
+ `)
+ })
+
+ test('should return relative paths even if the file is resolved in the same basedir as the root stylesheet', async () => {
+ let cssPath = path.join(__dirname, 'fixtures', 'external-import', 'src', 'plugins-in-root.css')
+ let css = fs.readFileSync(cssPath, 'utf-8')
+
+ let processor = postcss([atImport(), fixRelativePathsPlugin()])
+
+ let result = await processor.process(css, { from: cssPath })
+
+ expect(result.css.trim()).toMatchInlineSnapshot(`
+ "@plugin './plugin-in-sibling.ts';
+ @plugin '../plugin-in-sibling.ts';
+ @plugin 'plugin-in-sibling';
+ @plugin './plugin-in-root.ts';
+ @plugin '../plugin-in-root.ts';
+ @plugin 'plugin-in-root';"
+ `)
+ })
+})
diff --git a/packages/internal-postcss-fix-relative-paths/src/index.ts b/packages/internal-postcss-fix-relative-paths/src/index.ts
new file mode 100644
index 000000000..3f35f4f9f
--- /dev/null
+++ b/packages/internal-postcss-fix-relative-paths/src/index.ts
@@ -0,0 +1,77 @@
+import path from 'node:path'
+import type { AtRule, Plugin } from 'postcss'
+import { normalizePath } from './normalize-path'
+
+const SINGLE_QUOTE = "'"
+const DOUBLE_QUOTE = '"'
+
+export { normalizePath }
+
+export default function fixRelativePathsPlugin(): Plugin {
+ // Retain a list of touched at-rules to avoid infinite loops
+  let touched: WeakSet<AtRule> = new WeakSet()
+
+ function fixRelativePath(atRule: AtRule) {
+ let rootPath = atRule.root().source?.input.file
+ if (!rootPath) {
+ return
+ }
+
+ let inputFilePath = atRule.source?.input.file
+ if (!inputFilePath) {
+ return
+ }
+
+ if (touched.has(atRule)) {
+ return
+ }
+
+ let value = atRule.params[0]
+
+ let quote =
+ value[0] === DOUBLE_QUOTE && value[value.length - 1] === DOUBLE_QUOTE
+ ? DOUBLE_QUOTE
+ : value[0] === SINGLE_QUOTE && value[value.length - 1] === SINGLE_QUOTE
+ ? SINGLE_QUOTE
+ : null
+ if (!quote) {
+ return
+ }
+ let glob = atRule.params.slice(1, -1)
+
+ // Handle eventual negative rules. We only support one level of negation.
+ let negativePrefix = ''
+ if (glob.startsWith('!')) {
+ glob = glob.slice(1)
+ negativePrefix = '!'
+ }
+
+ // We only want to rewrite relative paths.
+ if (!glob.startsWith('./') && !glob.startsWith('../')) {
+ return
+ }
+
+ let absoluteGlob = path.posix.join(normalizePath(path.dirname(inputFilePath)), glob)
+ let absoluteRootPosixPath = path.posix.dirname(normalizePath(rootPath))
+
+ let relative = path.posix.relative(absoluteRootPosixPath, absoluteGlob)
+
+ // If the path points to a file in the same directory, `path.relative` will
+ // remove the leading `./` and we need to add it back in order to still
+ // consider the path relative
+ if (!relative.startsWith('.')) {
+ relative = './' + relative
+ }
+
+ atRule.params = quote + negativePrefix + relative + quote
+ touched.add(atRule)
+ }
+
+ return {
+ postcssPlugin: 'tailwindcss-postcss-fix-relative-paths',
+ AtRule: {
+ source: fixRelativePath,
+ plugin: fixRelativePath,
+ },
+ }
+}
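To make the rewrite concrete, this is the arithmetic `fixRelativePath` performs when `external-import/src/index.css` imports `example-project/src/index.css` (absolute paths shortened; `normalizePath` is omitted because the sample paths are already POSIX):

import path from 'node:path'

let rootPath = '/fixtures/external-import/src/index.css'      // root stylesheet being processed
let inputFilePath = '/fixtures/example-project/src/index.css' // file that declared the @source
let glob = './**/*.ts'

let absoluteGlob = path.posix.join(path.posix.dirname(inputFilePath), glob)
// '/fixtures/example-project/src/**/*.ts'
let relative = path.posix.relative(path.posix.dirname(rootPath), absoluteGlob)
// '../../example-project/src/**/*.ts', which is exactly what the snapshot test above expects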
diff --git a/packages/internal-postcss-fix-relative-paths/src/normalize-path.ts b/packages/internal-postcss-fix-relative-paths/src/normalize-path.ts
new file mode 100644
index 000000000..a8184ef23
--- /dev/null
+++ b/packages/internal-postcss-fix-relative-paths/src/normalize-path.ts
@@ -0,0 +1,47 @@
+// Inlined version of `normalize-path`
+// Copyright (c) 2014-2018, Jon Schlinkert.
+// Released under the MIT License.
+function normalizePathBase(path: string, stripTrailing?: boolean) {
+ if (typeof path !== 'string') {
+ throw new TypeError('expected path to be a string')
+ }
+
+ if (path === '\\' || path === '/') return '/'
+
+ var len = path.length
+ if (len <= 1) return path
+
+ // ensure that win32 namespaces has two leading slashes, so that the path is
+ // handled properly by the win32 version of path.parse() after being normalized
+ // https://msdn.microsoft.com/library/windows/desktop/aa365247(v=vs.85).aspx#namespaces
+ var prefix = ''
+ if (len > 4 && path[3] === '\\') {
+ var ch = path[2]
+ if ((ch === '?' || ch === '.') && path.slice(0, 2) === '\\\\') {
+ path = path.slice(2)
+ prefix = '//'
+ }
+ }
+
+ var segs = path.split(/[/\\]+/)
+ if (stripTrailing !== false && segs[segs.length - 1] === '') {
+ segs.pop()
+ }
+ return prefix + segs.join('/')
+}
+
+export function normalizePath(originalPath: string) {
+ let normalized = normalizePathBase(originalPath)
+
+ // Make sure Windows network share paths are normalized properly
+ // They have to begin with two slashes or they won't resolve correctly
+ if (
+ originalPath.startsWith('\\\\') &&
+ normalized.startsWith('/') &&
+ !normalized.startsWith('//')
+ ) {
+ return `/${normalized}`
+ }
+
+ return normalized
+}
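A few input/output pairs for `normalizePath`, following the rules above (the relative import assumes this file's location):

import { normalizePath } from './normalize-path'

normalizePath('C:\\Users\\project\\src')   // 'C:/Users/project/src'
normalizePath('src\\components\\')         // 'src/components' (trailing separator stripped)
normalizePath('\\\\server\\share\\styles') // '//server/share/styles' (UNC path keeps two leading slashes)
normalizePath('./already/posix/')          // './already/posix'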
diff --git a/packages/internal-postcss-fix-relative-paths/tsconfig.json b/packages/internal-postcss-fix-relative-paths/tsconfig.json
new file mode 100644
index 000000000..6ae022f65
--- /dev/null
+++ b/packages/internal-postcss-fix-relative-paths/tsconfig.json
@@ -0,0 +1,3 @@
+{
+ "extends": "../tsconfig.base.json",
+}
diff --git a/packages/tailwindcss/src/candidate.bench.ts b/packages/tailwindcss/src/candidate.bench.ts
index f46dec7ec..10700b3a3 100644
--- a/packages/tailwindcss/src/candidate.bench.ts
+++ b/packages/tailwindcss/src/candidate.bench.ts
@@ -8,7 +8,7 @@ import { Theme } from './theme'
const root = process.env.FOLDER || process.cwd()
// Auto content detection
-const result = scanDir({ base: root, globs: true })
+const result = scanDir({ base: root })
const designSystem = buildDesignSystem(new Theme())
diff --git a/packages/tailwindcss/src/index.bench.ts b/packages/tailwindcss/src/index.bench.ts
index 6ec69f2f6..e90f49e15 100644
--- a/packages/tailwindcss/src/index.bench.ts
+++ b/packages/tailwindcss/src/index.bench.ts
@@ -7,7 +7,7 @@ const root = process.env.FOLDER || process.cwd()
const css = String.raw
bench('compile', async () => {
- let { candidates } = scanDir({ base: root, globs: true })
+ let { candidates } = scanDir({ base: root })
compile(css`
@tailwind utilities;
diff --git a/packages/tailwindcss/src/index.test.ts b/packages/tailwindcss/src/index.test.ts
index 70d239deb..2601edf56 100644
--- a/packages/tailwindcss/src/index.test.ts
+++ b/packages/tailwindcss/src/index.test.ts
@@ -1514,6 +1514,25 @@ describe('plugins', () => {
})
})
+describe('@source', () => {
+ test('emits @source files', () => {
+ let { globs } = compile(css`
+ @source "./foo/bar/*.ts";
+ `)
+
+ expect(globs).toEqual(['./foo/bar/*.ts'])
+ })
+
+ test('emits multiple @source files', () => {
+ let { globs } = compile(css`
+ @source "./foo/**/*.ts";
+ @source "./php/secr3t/smarty.php";
+ `)
+
+ expect(globs).toEqual(['./foo/**/*.ts', './php/secr3t/smarty.php'])
+ })
+})
+
describe('@variant', () => {
test('@variant must be top-level and cannot be nested', () => {
expect(() =>
diff --git a/packages/tailwindcss/src/index.ts b/packages/tailwindcss/src/index.ts
index 8b61e65b2..a542c72ca 100644
--- a/packages/tailwindcss/src/index.ts
+++ b/packages/tailwindcss/src/index.ts
@@ -52,6 +52,7 @@ export function compile(
css: string,
{ loadPlugin = throwOnPlugin }: CompileOptions = {},
): {
+ globs: string[]
build(candidates: string[]): string
} {
let ast = CSS.parse(css)
@@ -73,6 +74,7 @@ export function compile(
let customUtilities: ((designSystem: DesignSystem) => void)[] = []
let firstThemeRule: Rule | null = null
let keyframesRules: Rule[] = []
+ let globs: string[] = []
walk(ast, (node, { parent, replaceWith }) => {
if (node.kind !== 'rule') return
@@ -119,6 +121,29 @@ export function compile(
return
}
+ // Collect paths from `@source` at-rules
+ if (node.selector.startsWith('@source ')) {
+ if (node.nodes.length > 0) {
+ throw new Error('`@source` cannot have a body.')
+ }
+
+ if (parent !== null) {
+ throw new Error('`@source` cannot be nested.')
+ }
+
+ let path = node.selector.slice(8)
+ if (
+ (path[0] === '"' && path[path.length - 1] !== '"') ||
+ (path[0] === "'" && path[path.length - 1] !== "'") ||
+ (path[0] !== "'" && path[0] !== '"')
+ ) {
+ throw new Error('`@source` paths must be quoted.')
+ }
+ globs.push(path.slice(1, -1))
+ replaceWith([])
+ return
+ }
+
// Register custom variants from `@variant` at-rules
if (node.selector.startsWith('@variant ')) {
if (parent !== null) {
@@ -374,6 +399,7 @@ export function compile(
let previousAstNodeCount = 0
return {
+ globs,
build(newRawCandidates: string[]) {
let didChange = false
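For completeness, a sketch of the inputs the new `@source` validation rejects; each `compile` call below throws with the message noted in the comment:

import { compile } from 'tailwindcss'

let invalid = [
  `@source "./foo/bar.ts" { color: red; }`,    // `@source` cannot have a body.
  `@media screen { @source "./foo/bar.ts"; }`, // `@source` cannot be nested.
  `@source ./foo/bar.ts;`,                     // `@source` paths must be quoted.
]

for (let css of invalid) {
  try {
    compile(css)
  } catch (err) {
    console.error((err as Error).message)
  }
}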
diff --git a/pnpm-lock.yaml b/pnpm-lock.yaml
index 888eee519..f722556ed 100644
--- a/pnpm-lock.yaml
+++ b/pnpm-lock.yaml
@@ -152,6 +152,9 @@ importers:
'@types/postcss-import':
specifier: ^14.0.3
version: 14.0.3
+ internal-postcss-fix-relative-paths:
+ specifier: workspace:^
+ version: link:../internal-postcss-fix-relative-paths
packages/@tailwindcss-postcss:
dependencies:
@@ -177,6 +180,9 @@ importers:
internal-example-plugin:
specifier: workspace:*
version: link:../internal-example-plugin
+ internal-postcss-fix-relative-paths:
+ specifier: workspace:^
+ version: link:../internal-postcss-fix-relative-paths
postcss:
specifier: 8.4.24
version: 8.4.24
@@ -189,6 +195,9 @@ importers:
lightningcss:
specifier: ^1.25.1
version: 1.25.1
+ postcss-load-config:
+ specifier: ^6.0.1
+ version: 6.0.1(postcss@8.4.40)
tailwindcss:
specifier: workspace:^
version: link:../tailwindcss
@@ -196,12 +205,30 @@ importers:
'@types/node':
specifier: 'catalog:'
version: 20.14.13
+ internal-postcss-fix-relative-paths:
+ specifier: workspace:^
+ version: link:../internal-postcss-fix-relative-paths
vite:
specifier: 'catalog:'
version: 5.3.5(@types/node@20.14.13)(lightningcss@1.25.1)
packages/internal-example-plugin: {}
+ packages/internal-postcss-fix-relative-paths:
+ devDependencies:
+ '@types/node':
+ specifier: ^20.12.12
+ version: 20.14.13
+ '@types/postcss-import':
+ specifier: ^14.0.3
+ version: 14.0.3
+ postcss:
+ specifier: 8.4.24
+ version: 8.4.24
+ postcss-import:
+ specifier: ^16.1.0
+ version: 16.1.0(postcss@8.4.24)
+
packages/tailwindcss:
devDependencies:
'@tailwindcss/oxide':
@@ -4948,6 +4975,12 @@ snapshots:
optionalDependencies:
postcss: 8.4.24
+ postcss-load-config@6.0.1(postcss@8.4.40):
+ dependencies:
+ lilconfig: 3.1.2
+ optionalDependencies:
+ postcss: 8.4.40
+
postcss-value-parser@4.2.0: {}
postcss@8.4.24:
diff --git a/scripts/pack-packages.mjs b/scripts/pack-packages.mjs
index 8791f39c4..aa845c3e8 100644
--- a/scripts/pack-packages.mjs
+++ b/scripts/pack-packages.mjs
@@ -24,6 +24,18 @@ for (let path of paths) {
workspaces.set(pkg.name, { version: pkg.version ?? '', dir: dirname(path) })
}
+// Move napi artifacts into sub packages
+const tailwindcssOxideRoot = path.join(root, 'crates', 'node')
+for (let file of await fs.readdir(tailwindcssOxideRoot)) {
+ if (file.startsWith('tailwindcss-oxide.') && file.endsWith('.node')) {
+ let target = file.split('.')[1]
+ await fs.cp(
+ path.join(tailwindcssOxideRoot, file),
+ path.join(tailwindcssOxideRoot, 'npm', target, file),
+ )
+ }
+}
+
// Clean dist folder
await fs.rm(path.join(root, 'dist'), { recursive: true, force: true })