mirror of
https://github.com/tailwindlabs/tailwindcss.git
synced 2025-12-08 21:36:08 +00:00
Fix new file detection in PostCSS plugin (#14829)
We broke this at some point — probably when we tried to optimize rebuilds in PostCSS by no longer performing a full auto-source-detection scan on every build. This PR addresses the problem by: 1. storing the list of directories found during source detection, 2. comparing each directory's modification time on every scan, and 3. when a modification time has changed, scanning that directory for new files, which are then stored and scanned for candidates.
This commit is contained in:
parent
94ea5e225b
commit
3b2ca85138
@ -7,7 +7,9 @@ and this project adheres to [Semantic Versioning](https://semver.org/spec/v2.0.0
|
||||
|
||||
## [Unreleased]
|
||||
|
||||
- Nothing yet!
|
||||
### Fixed
|
||||
|
||||
- Detect classes in new files when using `@tailwindcss/postcss` ([#14829](https://github.com/tailwindlabs/tailwindcss/pull/14829))
|
||||
|
||||
## [4.0.0-alpha.31] - 2024-10-29
|
||||
|
||||
|
||||
@ -9,6 +9,7 @@ use glob::optimize_patterns;
|
||||
use glob_match::glob_match;
|
||||
use paths::Path;
|
||||
use rayon::prelude::*;
|
||||
use scanner::allowed_paths::read_dir;
|
||||
use std::fs;
|
||||
use std::path::PathBuf;
|
||||
use std::sync;
|
||||
@ -77,6 +78,9 @@ pub struct Scanner {
|
||||
/// All files that we have to scan
|
||||
files: Vec<PathBuf>,
|
||||
|
||||
/// All directories, sub-directories, etc… we saw during source detection
|
||||
dirs: Vec<PathBuf>,
|
||||
|
||||
/// All generated globs
|
||||
globs: Vec<GlobEntry>,
|
||||
|
||||
@ -98,7 +102,7 @@ impl Scanner {
|
||||
pub fn scan(&mut self) -> Vec<String> {
|
||||
init_tracing();
|
||||
self.prepare();
|
||||
|
||||
self.check_for_new_files();
|
||||
self.compute_candidates();
|
||||
|
||||
let mut candidates: Vec<String> = self.candidates.clone().into_iter().collect();
|
||||
@ -213,6 +217,62 @@ impl Scanner {
|
||||
self.ready = true;
|
||||
}
|
||||
|
||||
#[tracing::instrument(skip_all)]
|
||||
fn check_for_new_files(&mut self) {
|
||||
let mut modified_dirs: Vec<PathBuf> = vec![];
|
||||
|
||||
// Check all directories to see if they were modified
|
||||
for path in &self.dirs {
|
||||
let current_time = fs::metadata(path)
|
||||
.and_then(|m| m.modified())
|
||||
.unwrap_or(SystemTime::now());
|
||||
|
||||
let previous_time = self.mtimes.insert(path.clone(), current_time);
|
||||
|
||||
let should_scan = match previous_time {
|
||||
// Time has changed, so we need to re-scan the file
|
||||
Some(prev) if prev != current_time => true,
|
||||
|
||||
// File was in the cache, no need to re-scan
|
||||
Some(_) => false,
|
||||
|
||||
// File didn't exist before, so we need to scan it
|
||||
None => true,
|
||||
};
|
||||
|
||||
if should_scan {
|
||||
modified_dirs.push(path.clone());
|
||||
}
|
||||
}
|
||||
|
||||
// Scan all modified directories for their immediate files
|
||||
let mut known = FxHashSet::from_iter(self.files.iter().chain(self.dirs.iter()).cloned());
|
||||
|
||||
while !modified_dirs.is_empty() {
|
||||
let new_entries = modified_dirs
|
||||
.iter()
|
||||
.flat_map(|dir| read_dir(dir, Some(1)))
|
||||
.map(|entry| entry.path().to_owned())
|
||||
.filter(|path| !known.contains(path))
|
||||
.collect::<Vec<_>>();
|
||||
|
||||
modified_dirs.clear();
|
||||
|
||||
for path in new_entries {
|
||||
if path.is_file() {
|
||||
known.insert(path.clone());
|
||||
self.files.push(path);
|
||||
} else if path.is_dir() {
|
||||
known.insert(path.clone());
|
||||
self.dirs.push(path.clone());
|
||||
|
||||
// Recursively scan the new directory for files
|
||||
modified_dirs.push(path);
|
||||
}
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
#[tracing::instrument(skip_all)]
|
||||
fn scan_sources(&mut self) {
|
||||
let Some(sources) = &self.sources else {
|
||||
@ -282,9 +342,10 @@ impl Scanner {
|
||||
// Detect all files/folders in the directory
|
||||
let detect_sources = DetectSources::new(path);
|
||||
|
||||
let (files, globs) = detect_sources.detect();
|
||||
let (files, globs, dirs) = detect_sources.detect();
|
||||
self.files.extend(files);
|
||||
self.globs.extend(globs);
|
||||
self.dirs.extend(dirs);
|
||||
}
|
||||
|
||||
// Turn `Vec<&GlobEntry>` in `Vec<GlobEntry>`
|
||||
|
||||
@ -27,9 +27,25 @@ static IGNORED_CONTENT_DIRS: sync::LazyLock<Vec<&'static str>> =
|
||||
|
||||
/// Walk `root` recursively with no depth limit and yield every entry that is
/// allowed for content scanning (filtering rules live in `read_dir`, which
/// skips the well-known ignored content directories).
#[tracing::instrument(skip(root))]
pub fn resolve_allowed_paths(root: &Path) -> impl Iterator<Item = DirEntry> {
    // Read the directory recursively with no depth limit
    read_dir(root, None)
}
|
||||
|
||||
#[tracing::instrument(skip(root))]
|
||||
pub fn resolve_paths(root: &Path) -> impl Iterator<Item = DirEntry> {
|
||||
WalkBuilder::new(root)
|
||||
.hidden(false)
|
||||
.require_git(false)
|
||||
.build()
|
||||
.filter_map(Result::ok)
|
||||
}
|
||||
|
||||
#[tracing::instrument(skip(root))]
|
||||
pub fn read_dir(root: &Path, depth: Option<usize>) -> impl Iterator<Item = DirEntry> {
|
||||
WalkBuilder::new(root)
|
||||
.hidden(false)
|
||||
.require_git(false)
|
||||
.max_depth(depth)
|
||||
.filter_entry(move |entry| match entry.file_type() {
|
||||
Some(file_type) if file_type.is_dir() => match entry.file_name().to_str() {
|
||||
Some(dir) => !IGNORED_CONTENT_DIRS.contains(&dir),
|
||||
@ -44,15 +60,6 @@ pub fn resolve_allowed_paths(root: &Path) -> impl Iterator<Item = DirEntry> {
|
||||
.filter_map(Result::ok)
|
||||
}
|
||||
|
||||
#[tracing::instrument(skip(root))]
|
||||
pub fn resolve_paths(root: &Path) -> impl Iterator<Item = DirEntry> {
|
||||
WalkBuilder::new(root)
|
||||
.hidden(false)
|
||||
.require_git(false)
|
||||
.build()
|
||||
.filter_map(Result::ok)
|
||||
}
|
||||
|
||||
pub fn is_allowed_content_path(path: &Path) -> bool {
|
||||
// Skip known ignored files
|
||||
if path
|
||||
|
||||
@ -27,11 +27,11 @@ impl DetectSources {
|
||||
Self { base }
|
||||
}
|
||||
|
||||
/// Detect all files, globs, and directories under `self.base`.
///
/// Returns `(files, globs, dirs)`:
/// - `files`: individually tracked files to scan for candidates
/// - `globs`: glob entries generated from the detected directories
/// - `dirs`: every directory seen during detection — the scanner keeps these
///   around so it can later watch them for newly created files
pub fn detect(&self) -> (Vec<PathBuf>, Vec<GlobEntry>, Vec<PathBuf>) {
    let (files, dirs) = self.resolve_files();
    let globs = self.resolve_globs(&dirs);

    (files, globs, dirs)
}
|
||||
|
||||
fn resolve_files(&self) -> (Vec<PathBuf>, Vec<PathBuf>) {
|
||||
|
||||
@ -1,13 +1,29 @@
|
||||
#[cfg(test)]
|
||||
mod scanner {
|
||||
use std::process::Command;
|
||||
use std::thread::sleep;
|
||||
use std::time::Duration;
|
||||
use std::{fs, path};
|
||||
|
||||
use tailwindcss_oxide::*;
|
||||
use tempfile::tempdir;
|
||||
|
||||
// Write every `(relative_path, contents)` pair into `dir`, creating any
// missing parent directories along the way.
fn create_files_in(dir: &path::PathBuf, paths: &[(&str, &str)]) {
    // Hoist the platform separator out of the loop; `/` in the fixture paths
    // is normalized to it so the helper works on Windows too.
    let separator = path::MAIN_SEPARATOR.to_string();

    for (relative_path, contents) in paths {
        let file = dir.join(relative_path.replace('/', separator.as_str()));

        // Make sure the parent directory exists before writing the file.
        let parent = file.parent().unwrap();
        if !parent.exists() {
            fs::create_dir_all(parent).unwrap();
        }

        fs::write(file, contents).unwrap()
    }
}
|
||||
|
||||
fn scan_with_globs(
|
||||
paths_with_content: &[(&str, Option<&str>)],
|
||||
paths_with_content: &[(&str, &str)],
|
||||
globs: Vec<&str>,
|
||||
) -> (Vec<String>, Vec<String>) {
|
||||
// Create a temporary working directory
|
||||
@ -17,19 +33,7 @@ mod scanner {
|
||||
let _ = Command::new("git").arg("init").current_dir(&dir).output();
|
||||
|
||||
// Create the necessary files
|
||||
for (path, contents) in paths_with_content {
|
||||
// Ensure we use the right path separator for the current platform
|
||||
let path = dir.join(path.replace('/', path::MAIN_SEPARATOR.to_string().as_str()));
|
||||
let parent = path.parent().unwrap();
|
||||
if !parent.exists() {
|
||||
fs::create_dir_all(parent).unwrap();
|
||||
}
|
||||
|
||||
match contents {
|
||||
Some(contents) => fs::write(path, contents).unwrap(),
|
||||
None => fs::write(path, "").unwrap(),
|
||||
}
|
||||
}
|
||||
self::create_files_in(&dir, paths_with_content);
|
||||
|
||||
let base = format!("{}", dir.display()).replace('\\', "/");
|
||||
|
||||
@ -75,21 +79,21 @@ mod scanner {
|
||||
(paths, candidates)
|
||||
}
|
||||
|
||||
fn scan(paths_with_content: &[(&str, Option<&str>)]) -> (Vec<String>, Vec<String>) {
|
||||
fn scan(paths_with_content: &[(&str, &str)]) -> (Vec<String>, Vec<String>) {
|
||||
scan_with_globs(paths_with_content, vec![])
|
||||
}
|
||||
|
||||
fn test(paths_with_content: &[(&str, Option<&str>)]) -> Vec<String> {
|
||||
fn test(paths_with_content: &[(&str, &str)]) -> Vec<String> {
|
||||
scan(paths_with_content).0
|
||||
}
|
||||
|
||||
#[test]
|
||||
fn it_should_work_with_a_set_of_root_files() {
|
||||
let globs = test(&[
|
||||
("index.html", None),
|
||||
("a.html", None),
|
||||
("b.html", None),
|
||||
("c.html", None),
|
||||
("index.html", ""),
|
||||
("a.html", ""),
|
||||
("b.html", ""),
|
||||
("c.html", ""),
|
||||
]);
|
||||
assert_eq!(globs, vec!["*", "a.html", "b.html", "c.html", "index.html"]);
|
||||
}
|
||||
@ -97,11 +101,11 @@ mod scanner {
|
||||
#[test]
|
||||
fn it_should_work_with_a_set_of_root_files_and_ignore_ignored_files() {
|
||||
let globs = test(&[
|
||||
(".gitignore", Some("b.html")),
|
||||
("index.html", None),
|
||||
("a.html", None),
|
||||
("b.html", None),
|
||||
("c.html", None),
|
||||
(".gitignore", "b.html"),
|
||||
("index.html", ""),
|
||||
("a.html", ""),
|
||||
("b.html", ""),
|
||||
("c.html", ""),
|
||||
]);
|
||||
assert_eq!(globs, vec!["*", "a.html", "c.html", "index.html"]);
|
||||
}
|
||||
@ -109,10 +113,10 @@ mod scanner {
|
||||
#[test]
|
||||
fn it_should_list_all_files_in_the_public_folder_explicitly() {
|
||||
let globs = test(&[
|
||||
("index.html", None),
|
||||
("public/a.html", None),
|
||||
("public/b.html", None),
|
||||
("public/c.html", None),
|
||||
("index.html", ""),
|
||||
("public/a.html", ""),
|
||||
("public/b.html", ""),
|
||||
("public/c.html", ""),
|
||||
]);
|
||||
assert_eq!(
|
||||
globs,
|
||||
@ -129,15 +133,15 @@ mod scanner {
|
||||
#[test]
|
||||
fn it_should_list_nested_folders_explicitly_in_the_public_folder() {
|
||||
let globs = test(&[
|
||||
("index.html", None),
|
||||
("public/a.html", None),
|
||||
("public/b.html", None),
|
||||
("public/c.html", None),
|
||||
("public/nested/a.html", None),
|
||||
("public/nested/b.html", None),
|
||||
("public/nested/c.html", None),
|
||||
("public/nested/again/a.html", None),
|
||||
("public/very/deeply/nested/a.html", None),
|
||||
("index.html", ""),
|
||||
("public/a.html", ""),
|
||||
("public/b.html", ""),
|
||||
("public/c.html", ""),
|
||||
("public/nested/a.html", ""),
|
||||
("public/nested/b.html", ""),
|
||||
("public/nested/c.html", ""),
|
||||
("public/nested/again/a.html", ""),
|
||||
("public/very/deeply/nested/a.html", ""),
|
||||
]);
|
||||
assert_eq!(
|
||||
globs,
|
||||
@ -159,11 +163,11 @@ mod scanner {
|
||||
#[test]
|
||||
fn it_should_list_all_files_in_the_public_folder_explicitly_except_ignored_files() {
|
||||
let globs = test(&[
|
||||
(".gitignore", Some("public/b.html\na.html")),
|
||||
("index.html", None),
|
||||
("public/a.html", None),
|
||||
("public/b.html", None),
|
||||
("public/c.html", None),
|
||||
(".gitignore", "public/b.html\na.html"),
|
||||
("index.html", ""),
|
||||
("public/a.html", ""),
|
||||
("public/b.html", ""),
|
||||
("public/c.html", ""),
|
||||
]);
|
||||
assert_eq!(globs, vec!["*", "index.html", "public/c.html",]);
|
||||
}
|
||||
@ -171,10 +175,10 @@ mod scanner {
|
||||
#[test]
|
||||
fn it_should_use_a_glob_for_top_level_folders() {
|
||||
let globs = test(&[
|
||||
("index.html", None),
|
||||
("src/a.html", None),
|
||||
("src/b.html", None),
|
||||
("src/c.html", None),
|
||||
("index.html", ""),
|
||||
("src/a.html", ""),
|
||||
("src/b.html", ""),
|
||||
("src/c.html", ""),
|
||||
]);
|
||||
assert_eq!(globs, vec!["*",
|
||||
"index.html",
|
||||
@ -188,10 +192,10 @@ mod scanner {
|
||||
#[test]
|
||||
fn it_should_ignore_binary_files() {
|
||||
let globs = test(&[
|
||||
("index.html", None),
|
||||
("a.mp4", None),
|
||||
("b.png", None),
|
||||
("c.lock", None),
|
||||
("index.html", ""),
|
||||
("a.mp4", ""),
|
||||
("b.png", ""),
|
||||
("c.lock", ""),
|
||||
]);
|
||||
assert_eq!(globs, vec!["*", "index.html"]);
|
||||
}
|
||||
@ -199,10 +203,10 @@ mod scanner {
|
||||
#[test]
|
||||
fn it_should_ignore_known_extensions() {
|
||||
let globs = test(&[
|
||||
("index.html", None),
|
||||
("a.css", None),
|
||||
("b.sass", None),
|
||||
("c.less", None),
|
||||
("index.html", ""),
|
||||
("a.css", ""),
|
||||
("b.sass", ""),
|
||||
("c.less", ""),
|
||||
]);
|
||||
assert_eq!(globs, vec!["*", "index.html"]);
|
||||
}
|
||||
@ -210,9 +214,9 @@ mod scanner {
|
||||
#[test]
|
||||
fn it_should_ignore_known_files() {
|
||||
let globs = test(&[
|
||||
("index.html", None),
|
||||
("package-lock.json", None),
|
||||
("yarn.lock", None),
|
||||
("index.html", ""),
|
||||
("package-lock.json", ""),
|
||||
("yarn.lock", ""),
|
||||
]);
|
||||
assert_eq!(globs, vec!["*", "index.html"]);
|
||||
}
|
||||
@ -221,45 +225,45 @@ mod scanner {
|
||||
fn it_should_ignore_and_expand_nested_ignored_folders() {
|
||||
let globs = test(&[
|
||||
// Explicitly listed root files
|
||||
("foo.html", None),
|
||||
("bar.html", None),
|
||||
("baz.html", None),
|
||||
("foo.html", ""),
|
||||
("bar.html", ""),
|
||||
("baz.html", ""),
|
||||
// Nested folder A, using glob
|
||||
("nested-a/foo.html", None),
|
||||
("nested-a/bar.html", None),
|
||||
("nested-a/baz.html", None),
|
||||
("nested-a/foo.html", ""),
|
||||
("nested-a/bar.html", ""),
|
||||
("nested-a/baz.html", ""),
|
||||
// Nested folder B, with deeply nested files, using glob
|
||||
("nested-b/deeply-nested/foo.html", None),
|
||||
("nested-b/deeply-nested/bar.html", None),
|
||||
("nested-b/deeply-nested/baz.html", None),
|
||||
("nested-b/deeply-nested/foo.html", ""),
|
||||
("nested-b/deeply-nested/bar.html", ""),
|
||||
("nested-b/deeply-nested/baz.html", ""),
|
||||
// Nested folder C, with ignored sub-folder
|
||||
("nested-c/foo.html", None),
|
||||
("nested-c/bar.html", None),
|
||||
("nested-c/baz.html", None),
|
||||
("nested-c/foo.html", ""),
|
||||
("nested-c/bar.html", ""),
|
||||
("nested-c/baz.html", ""),
|
||||
// Ignored folder
|
||||
("nested-c/.gitignore", Some("ignored-folder/")),
|
||||
("nested-c/ignored-folder/foo.html", None),
|
||||
("nested-c/ignored-folder/bar.html", None),
|
||||
("nested-c/ignored-folder/baz.html", None),
|
||||
("nested-c/.gitignore", "ignored-folder/"),
|
||||
("nested-c/ignored-folder/foo.html", ""),
|
||||
("nested-c/ignored-folder/bar.html", ""),
|
||||
("nested-c/ignored-folder/baz.html", ""),
|
||||
// Deeply nested, without issues
|
||||
("nested-c/sibling-folder/foo.html", None),
|
||||
("nested-c/sibling-folder/bar.html", None),
|
||||
("nested-c/sibling-folder/baz.html", None),
|
||||
("nested-c/sibling-folder/foo.html", ""),
|
||||
("nested-c/sibling-folder/bar.html", ""),
|
||||
("nested-c/sibling-folder/baz.html", ""),
|
||||
// Nested folder D, with deeply nested ignored folder
|
||||
("nested-d/foo.html", None),
|
||||
("nested-d/bar.html", None),
|
||||
("nested-d/baz.html", None),
|
||||
("nested-d/.gitignore", Some("deep/")),
|
||||
("nested-d/very/deeply/nested/deep/foo.html", None),
|
||||
("nested-d/very/deeply/nested/deep/bar.html", None),
|
||||
("nested-d/very/deeply/nested/deep/baz.html", None),
|
||||
("nested-d/very/deeply/nested/foo.html", None),
|
||||
("nested-d/very/deeply/nested/bar.html", None),
|
||||
("nested-d/very/deeply/nested/baz.html", None),
|
||||
("nested-d/very/deeply/nested/directory/foo.html", None),
|
||||
("nested-d/very/deeply/nested/directory/bar.html", None),
|
||||
("nested-d/very/deeply/nested/directory/baz.html", None),
|
||||
("nested-d/very/deeply/nested/directory/again/foo.html", None),
|
||||
("nested-d/foo.html", ""),
|
||||
("nested-d/bar.html", ""),
|
||||
("nested-d/baz.html", ""),
|
||||
("nested-d/.gitignore", "deep/"),
|
||||
("nested-d/very/deeply/nested/deep/foo.html", ""),
|
||||
("nested-d/very/deeply/nested/deep/bar.html", ""),
|
||||
("nested-d/very/deeply/nested/deep/baz.html", ""),
|
||||
("nested-d/very/deeply/nested/foo.html", ""),
|
||||
("nested-d/very/deeply/nested/bar.html", ""),
|
||||
("nested-d/very/deeply/nested/baz.html", ""),
|
||||
("nested-d/very/deeply/nested/directory/foo.html", ""),
|
||||
("nested-d/very/deeply/nested/directory/bar.html", ""),
|
||||
("nested-d/very/deeply/nested/directory/baz.html", ""),
|
||||
("nested-d/very/deeply/nested/directory/again/foo.html", ""),
|
||||
]);
|
||||
|
||||
assert_eq!(
|
||||
@ -312,15 +316,15 @@ mod scanner {
|
||||
|
||||
let candidates = scan(&[
|
||||
// The gitignore file is used to filter out files but not scanned for candidates
|
||||
(".gitignore", Some(&ignores)),
|
||||
(".gitignore", &ignores),
|
||||
// A file that should definitely be scanned
|
||||
("index.html", Some("font-bold md:flex")),
|
||||
("index.html", "font-bold md:flex"),
|
||||
// A file that should definitely not be scanned
|
||||
("foo.jpg", Some("xl:font-bold")),
|
||||
("foo.jpg", "xl:font-bold"),
|
||||
// A file that is ignored
|
||||
("foo.html", Some("lg:font-bold")),
|
||||
("foo.html", "lg:font-bold"),
|
||||
// A svelte file with `class:foo="bar"` syntax
|
||||
("index.svelte", Some("<div class:px-4='condition'></div>")),
|
||||
("index.svelte", "<div class:px-4='condition'></div>"),
|
||||
])
|
||||
.1;
|
||||
|
||||
@ -336,7 +340,7 @@ mod scanner {
|
||||
&[
|
||||
// We know that `.styl` extensions are ignored, so they are not covered by auto content
|
||||
// detection.
|
||||
("foo.styl", Some("content-['foo.styl']")),
|
||||
("foo.styl", "content-['foo.styl']"),
|
||||
],
|
||||
vec!["*.styl"],
|
||||
)
|
||||
@ -349,10 +353,10 @@ mod scanner {
|
||||
fn it_should_scan_content_paths_even_when_they_are_git_ignored() {
|
||||
let candidates = scan_with_globs(
|
||||
&[
|
||||
(".gitignore", Some("foo.styl")),
|
||||
(".gitignore", "foo.styl"),
|
||||
// We know that `.styl` extensions are ignored, so they are not covered by auto content
|
||||
// detection.
|
||||
("foo.styl", Some("content-['foo.styl']")),
|
||||
("foo.styl", "content-['foo.styl']"),
|
||||
],
|
||||
vec!["foo.styl"],
|
||||
)
|
||||
@ -360,4 +364,141 @@ mod scanner {
|
||||
|
||||
assert_eq!(candidates, vec!["content-['foo.styl']"]);
|
||||
}
|
||||
|
||||
// Regression test for #14829: the scanner must pick up files and folders
// created *after* the initial scan by comparing directory mtimes between
// scans (see `Scanner::check_for_new_files`).
#[test]
fn it_should_pick_up_new_files() {
    // Create a temporary working directory
    let dir = tempdir().unwrap().into_path();

    // Initialize this directory as a git repository
    let _ = Command::new("git").arg("init").current_dir(&dir).output();

    // Create files
    create_files_in(
        &dir,
        &[
            ("project-a/index.html", "content-['project-a/index.html']"),
            ("project-b/index.html", "content-['project-b/index.html']"),
        ],
    );

    // Two explicit sources, so both project folders are watched.
    let sources = vec![
        GlobEntry {
            base: dir.join("project-a").to_string_lossy().to_string(),
            pattern: "**/*".to_owned(),
        },
        GlobEntry {
            base: dir.join("project-b").to_string_lossy().to_string(),
            pattern: "**/*".to_owned(),
        },
    ];

    let mut scanner = Scanner::new(Some(sources));
    let candidates = scanner.scan();

    // We've done the initial scan and found the files
    assert_eq!(
        candidates,
        vec![
            "content-['project-a/index.html']".to_owned(),
            "content-['project-b/index.html']".to_owned(),
        ]
    );

    // We have to sleep because it might run too fast (seriously) and the
    // mtimes of the directories end up being the same as the last time we
    // checked them
    sleep(Duration::from_millis(100));

    // Create files
    create_files_in(
        &dir,
        &[
            ("project-a/new.html", "content-['project-a/new.html']"),
            ("project-b/new.html", "content-['project-b/new.html']"),
        ],
    );

    let candidates = scanner.scan();

    // New files in already-known directories are picked up on re-scan.
    assert_eq!(
        candidates,
        vec![
            "content-['project-a/index.html']".to_owned(),
            "content-['project-a/new.html']".to_owned(),
            "content-['project-b/index.html']".to_owned(),
            "content-['project-b/new.html']".to_owned(),
        ]
    );

    // We have to sleep because it might run too fast (seriously) and the
    // mtimes of the directories end up being the same as the last time we
    // checked them
    sleep(Duration::from_millis(100));

    // Create folders
    create_files_in(
        &dir,
        &[
            (
                "project-a/sub1/sub2/index.html",
                "content-['project-a/sub1/sub2/index.html']",
            ),
            (
                "project-b/sub1/sub2/index.html",
                "content-['project-b/sub1/sub2/index.html']",
            ),
        ],
    );

    let candidates = scanner.scan();

    // Entirely new (nested) folders are discovered recursively.
    assert_eq!(
        candidates,
        vec![
            "content-['project-a/index.html']".to_owned(),
            "content-['project-a/new.html']".to_owned(),
            "content-['project-a/sub1/sub2/index.html']".to_owned(),
            "content-['project-b/index.html']".to_owned(),
            "content-['project-b/new.html']".to_owned(),
            "content-['project-b/sub1/sub2/index.html']".to_owned(),
        ]
    );

    // We have to sleep because it might run too fast (seriously) and the
    // mtimes of the directories end up being the same as the last time we
    // checked them
    sleep(Duration::from_millis(100));

    // Create folders
    create_files_in(
        &dir,
        &[
            (
                "project-a/sub1/sub2/new.html",
                "content-['project-a/sub1/sub2/new.html']",
            ),
            (
                "project-b/sub1/sub2/new.html",
                "content-['project-b/sub1/sub2/new.html']",
            ),
        ],
    );

    let candidates = scanner.scan();

    // New files inside the freshly-discovered nested folders are also found.
    assert_eq!(
        candidates,
        vec![
            "content-['project-a/index.html']".to_owned(),
            "content-['project-a/new.html']".to_owned(),
            "content-['project-a/sub1/sub2/index.html']".to_owned(),
            "content-['project-a/sub1/sub2/new.html']".to_owned(),
            "content-['project-b/index.html']".to_owned(),
            "content-['project-b/new.html']".to_owned(),
            "content-['project-b/sub1/sub2/index.html']".to_owned(),
            "content-['project-b/sub1/sub2/new.html']".to_owned(),
        ]
    );
}
|
||||
}
|
||||
|
||||
@ -947,32 +947,42 @@ test(
|
||||
])
|
||||
|
||||
// Creating new files in the "root" of auto source detected folders
|
||||
// await fs.write(
|
||||
// 'project-b/new-file.html',
|
||||
// html`<div class="[.created_&]:content-['project-b/new-file.html']"></div>`,
|
||||
// )
|
||||
// await fs.write(
|
||||
// 'project-b/new-folder/new-file.html',
|
||||
// html`<div class="[.created_&]:content-['project-b/new-folder/new-file.html']"></div>`,
|
||||
// )
|
||||
// await fs.write(
|
||||
// 'project-c/new-file.html',
|
||||
// html`<div class="[.created_&]:content-['project-c/new-file.html']"></div>`,
|
||||
// )
|
||||
// await fs.write(
|
||||
// 'project-c/new-folder/new-file.html',
|
||||
// html`<div class="[.created_&]:content-['project-c/new-folder/new-file.html']"></div>`,
|
||||
// )
|
||||
// We need to create the files and *then* update them because postcss-cli
|
||||
// does not pick up new files — only changes to existing files.
|
||||
await fs.create([
|
||||
'project-b/new-file.html',
|
||||
'project-b/new-folder/new-file.html',
|
||||
'project-c/new-file.html',
|
||||
'project-c/new-folder/new-file.html',
|
||||
])
|
||||
|
||||
// await fs.write('project-a/src/index.css', await fs.read('project-a/src/index.css'))
|
||||
// await new Promise((resolve) => setTimeout(resolve, 1000))
|
||||
// If we don't wait writes will be coalesced into a "add" event which
|
||||
// isn't picked up by postcss-cli.
|
||||
await new Promise((resolve) => setTimeout(resolve, 100))
|
||||
|
||||
// await fs.expectFileToContain('./project-a/dist/out.css', [
|
||||
// candidate`[.created_&]:content-['project-b/new-file.html']`,
|
||||
// candidate`[.created_&]:content-['project-b/new-folder/new-file.html']`,
|
||||
// candidate`[.created_&]:content-['project-c/new-file.html']`,
|
||||
// candidate`[.created_&]:content-['project-c/new-folder/new-file.html']`,
|
||||
// ])
|
||||
await fs.write(
|
||||
'project-b/new-file.html',
|
||||
html`<div class="[.created_&]:content-['project-b/new-file.html']"></div>`,
|
||||
)
|
||||
await fs.write(
|
||||
'project-b/new-folder/new-file.html',
|
||||
html`<div class="[.created_&]:content-['project-b/new-folder/new-file.html']"></div>`,
|
||||
)
|
||||
await fs.write(
|
||||
'project-c/new-file.html',
|
||||
html`<div class="[.created_&]:content-['project-c/new-file.html']"></div>`,
|
||||
)
|
||||
await fs.write(
|
||||
'project-c/new-folder/new-file.html',
|
||||
html`<div class="[.created_&]:content-['project-c/new-folder/new-file.html']"></div>`,
|
||||
)
|
||||
|
||||
await fs.expectFileToContain('./project-a/dist/out.css', [
|
||||
candidate`[.created_&]:content-['project-b/new-file.html']`,
|
||||
candidate`[.created_&]:content-['project-b/new-folder/new-file.html']`,
|
||||
candidate`[.created_&]:content-['project-c/new-file.html']`,
|
||||
candidate`[.created_&]:content-['project-c/new-folder/new-file.html']`,
|
||||
])
|
||||
},
|
||||
)
|
||||
|
||||
|
||||
@ -39,6 +39,7 @@ interface TestContext {
|
||||
getFreePort(): Promise<number>
|
||||
fs: {
|
||||
write(filePath: string, content: string): Promise<void>
|
||||
create(filePaths: string[]): Promise<void>
|
||||
read(filePath: string): Promise<string>
|
||||
glob(pattern: string): Promise<[string, string][]>
|
||||
dumpFiles(pattern: string): Promise<string>
|
||||
@ -294,6 +295,17 @@ export function test(
|
||||
await fs.mkdir(dir, { recursive: true })
|
||||
await fs.writeFile(full, content)
|
||||
},
|
||||
|
||||
async create(filenames: string[]): Promise<void> {
|
||||
for (let filename of filenames) {
|
||||
let full = path.join(root, filename)
|
||||
|
||||
let dir = path.dirname(full)
|
||||
await fs.mkdir(dir, { recursive: true })
|
||||
await fs.writeFile(full, '')
|
||||
}
|
||||
},
|
||||
|
||||
async read(filePath: string) {
|
||||
let content = await fs.readFile(path.resolve(root, filePath), 'utf8')
|
||||
|
||||
|
||||
Loading…
x
Reference in New Issue
Block a user