diff --git a/crates/node-file-trace/src/lib.rs b/crates/node-file-trace/src/lib.rs
index 34e21c08c5702..abf8d45c2ce4e 100644
--- a/crates/node-file-trace/src/lib.rs
+++ b/crates/node-file-trace/src/lib.rs
@@ -188,7 +188,7 @@ impl Args {
 }
 
 async fn create_fs(name: &str, root: &str, watch: bool) -> Result>> {
-    let fs = DiskFileSystem::new(name.to_string(), root.to_string(), vec![]);
+    let fs = DiskFileSystem::new(name.to_string().into(), root.to_string().into(), vec![]);
     if watch {
         fs.await?.start_watching()?;
     } else {
@@ -232,17 +232,18 @@ async fn add_glob_results(
 #[turbo_tasks::function]
 async fn input_to_modules(
     fs: Vc>,
-    input: Vec,
+    input: Vec>,
     exact: bool,
-    process_cwd: Option,
-    context_directory: String,
+    process_cwd: Option>,
+    context_directory: Arc,
     module_options: TransientInstance,
     resolve_options: TransientInstance,
 ) -> Result> {
     let root = fs.root();
     let process_cwd = process_cwd
         .clone()
-        .map(|p| format!("/ROOT{}", p.trim_start_matches(&context_directory)));
+        .map(|p| format!("/ROOT{}", p.trim_start_matches(&*context_directory)))
+        .map(Arc::new);
 
     let asset_context: Vc> = Vc::upcast(create_module_asset(
         root,
@@ -283,7 +284,7 @@ fn process_context(dir: &Path, context_directory: Option<&String>) -> Result Result {
+fn make_relative_path(dir: &Path, context_directory: &str, input: &str) -> Result> {
     let mut input = PathBuf::from(input);
     if !input.is_absolute() {
         input = dir.join(input);
     }
@@ -299,10 +300,15 @@ fn make_relative_path(dir: &Path, context_directory: &str, input: &str) -> Resul
     Ok(input
         .to_str()
         .ok_or_else(|| anyhow!("input contains invalid characters"))?
-        .replace('\\', "/"))
+        .replace('\\', "/")
+        .into())
 }
 
-fn process_input(dir: &Path, context_directory: &str, input: &[String]) -> Result> {
+fn process_input(
+    dir: &Path,
+    context_directory: &str,
+    input: &[String],
+) -> Result>> {
     input
         .iter()
         .map(|input| make_relative_path(dir, context_directory, input))
@@ -537,8 +543,8 @@ async fn main_operation(
         fs,
         input,
         exact,
-        process_cwd.clone(),
-        context_directory,
+        process_cwd.clone().map(Arc::new),
+        context_directory.into(),
         module_options,
         resolve_options,
     )
@@ -563,8 +569,8 @@ async fn main_operation(
         fs,
         input,
         exact,
-        process_cwd.clone(),
-        context_directory,
+        process_cwd.clone().map(Arc::new),
+        context_directory.into(),
         module_options,
         resolve_options,
     )
@@ -573,7 +579,7 @@ async fn main_operation(
     {
         let nft_asset = NftJsonAsset::new(*module);
         let path = nft_asset.ident().path().await?.path.clone();
-        output_nft_assets.push(path);
+        output_nft_assets.push((*path).clone());
         emits.push(emit_asset(Vc::upcast(nft_asset)));
     }
     // Wait for all files to be emitted
@@ -596,8 +602,8 @@ async fn main_operation(
         fs,
         input,
         exact,
-        process_cwd.clone(),
-        context_directory,
+        process_cwd.clone().map(Arc::new),
+        context_directory.into(),
         module_options,
         resolve_options,
     )
@@ -620,13 +626,13 @@
 #[turbo_tasks::function]
 async fn create_module_asset(
     root: Vc,
-    process_cwd: Option,
+    process_cwd: Option>,
     module_options: TransientInstance,
     resolve_options: TransientInstance,
 ) -> Result> {
     let env = Environment::new(Value::new(ExecutionEnvironment::NodeJsLambda(
         NodeJsEnvironment {
-            cwd: Vc::cell(process_cwd),
+            cwd: Vc::cell(process_cwd.as_deref().cloned()),
             ..Default::default()
         }
         .into(),
     )));
     let glob_mappings = vec![
         (
             root,
-            Glob::new("**/*/next/dist/server/next.js".to_string()),
+            Glob::new("**/*/next/dist/server/next.js".to_string().into()),
             ImportMapping::Ignore.into(),
         ),
         (
             root,
-            Glob::new("**/*/next/dist/bin/next".to_string()),
+            Glob::new("**/*/next/dist/bin/next".to_string().into()),
             ImportMapping::Ignore.into(),
         ),
     ];
diff --git a/crates/node-file-trace/src/nft_json.rs b/crates/node-file-trace/src/nft_json.rs
index 0d800b417a427..1e57d80a5d5f2 100644
--- a/crates/node-file-trace/src/nft_json.rs
+++ b/crates/node-file-trace/src/nft_json.rs
@@ -29,7 +29,9 @@ impl OutputAsset for NftJsonAsset {
     async fn ident(&self) -> Result> {
         let path = self.entry.ident().path().await?;
         Ok(AssetIdent::from_path(
-            path.fs.root().join(format!("{}.nft.json", path.path)),
+            path.fs
+                .root()
+                .join(format!("{}.nft.json", path.path).into()),
         ))
     }
 }
diff --git a/crates/turbo-tasks-env/src/custom.rs b/crates/turbo-tasks-env/src/custom.rs
index f24f63e499fad..03e275e437a55 100644
--- a/crates/turbo-tasks-env/src/custom.rs
+++ b/crates/turbo-tasks-env/src/custom.rs
@@ -1,3 +1,5 @@
+use std::sync::Arc;
+
 use anyhow::Result;
 use turbo_tasks::Vc;
 
@@ -32,7 +34,7 @@ impl ProcessEnv for CustomProcessEnv {
     }
 
     #[turbo_tasks::function]
-    async fn read(&self, name: String) -> Result>> {
+    async fn read(&self, name: Arc) -> Result>> {
         let custom = case_insensitive_read(self.custom, name.clone());
         match &*custom.await? {
             Some(_) => Ok(custom),
diff --git a/crates/turbo-tasks-env/src/filter.rs b/crates/turbo-tasks-env/src/filter.rs
index bd8fb1f86abe8..7486add44bdd7 100644
--- a/crates/turbo-tasks-env/src/filter.rs
+++ b/crates/turbo-tasks-env/src/filter.rs
@@ -1,3 +1,5 @@
+use std::sync::Arc;
+
 use anyhow::Result;
 use indexmap::IndexMap;
 use turbo_tasks::Vc;
@@ -15,7 +17,7 @@ pub struct FilterProcessEnv {
 #[turbo_tasks::value_impl]
 impl FilterProcessEnv {
     #[turbo_tasks::function]
-    pub fn new(prior: Vc>, filters: Vec) -> Vc {
+    pub fn new(prior: Vc>, filters: Vec>) -> Vc {
         FilterProcessEnv {
             prior,
             filters: filters.into_iter().map(|f| f.to_uppercase()).collect(),
@@ -43,7 +45,7 @@ impl ProcessEnv for FilterProcessEnv {
     }
 
     #[turbo_tasks::function]
-    fn read(&self, name: String) -> Vc> {
+    fn read(&self, name: Arc) -> Vc> {
         for filter in &self.filters {
             if name.to_uppercase().starts_with(filter) {
                 return self.prior.read(name);
diff --git a/crates/turbo-tasks-env/src/lib.rs b/crates/turbo-tasks-env/src/lib.rs
index db35acfd2d90b..0fa95a22e033a 100644
--- a/crates/turbo-tasks-env/src/lib.rs
+++ b/crates/turbo-tasks-env/src/lib.rs
@@ -5,7 +5,10 @@ mod custom;
 mod dotenv;
 mod filter;
 
-use std::{env, sync::Mutex};
+use std::{
+    env,
+    sync::{Arc, Mutex},
+};
 
 use anyhow::Result;
 use indexmap::IndexMap;
@@ -35,7 +38,7 @@ impl ProcessEnv for EnvMap {
     }
 
     #[turbo_tasks::function]
-    async fn read(self: Vc, name: String) -> Vc> {
+    async fn read(self: Vc, name: Arc) -> Vc> {
         case_insensitive_read(self, name)
     }
 }
@@ -51,7 +54,7 @@ pub trait ProcessEnv {
     fn read_all(self: Vc) -> Vc;
 
     /// Reads a single env variable. Ignores casing.
-    fn read(self: Vc, name: String) -> Vc> {
+    fn read(self: Vc, name: Arc) -> Vc> {
         case_insensitive_read(self.read_all(), name)
     }
 }
@@ -63,7 +66,10 @@ pub fn sorted_env_vars() -> IndexMap {
 }
 
 #[turbo_tasks::function]
-pub async fn case_insensitive_read(map: Vc, name: String) -> Result>> {
+pub async fn case_insensitive_read(
+    map: Vc,
+    name: Arc,
+) -> Result>> {
     Ok(Vc::cell(
         to_uppercase_map(map)
             .await?
diff --git a/crates/turbo-tasks-fetch/tests/fetch.rs b/crates/turbo-tasks-fetch/tests/fetch.rs index 0ccdcabce5168..48fa395ba9e4d 100644 --- a/crates/turbo-tasks-fetch/tests/fetch.rs +++ b/crates/turbo-tasks-fetch/tests/fetch.rs @@ -141,5 +141,5 @@ async fn errors_on_404() { } fn get_issue_context() -> Vc { - DiskFileSystem::new("root".to_owned(), "/".to_owned(), vec![]).root() + DiskFileSystem::new("root".to_owned().into(), "/".to_owned().into(), vec![]).root() } diff --git a/crates/turbo-tasks-fs/examples/hash_directory.rs b/crates/turbo-tasks-fs/examples/hash_directory.rs index 032e4f4ae667c..c69c7bf27aba4 100644 --- a/crates/turbo-tasks-fs/examples/hash_directory.rs +++ b/crates/turbo-tasks-fs/examples/hash_directory.rs @@ -30,12 +30,12 @@ async fn main() -> Result<()> { let task = tt.spawn_root_task(|| { Box::pin(async { let root = current_dir().unwrap().to_str().unwrap().to_string(); - let disk_fs = DiskFileSystem::new("project".to_string(), root, vec![]); + let disk_fs = DiskFileSystem::new("project".to_string().into(), root.into(), vec![]); disk_fs.await?.start_watching()?; // Smart Pointer cast let fs: Vc> = Vc::upcast(disk_fs); - let input = fs.root().join("demo".to_string()); + let input = fs.root().join("demo".to_string().into()); let dir_hash = hash_directory(input); print_hash(dir_hash).await?; Ok::, _>(Default::default()) diff --git a/crates/turbo-tasks-fs/examples/hash_glob.rs b/crates/turbo-tasks-fs/examples/hash_glob.rs index 28e3231682d3f..061cebfee138a 100644 --- a/crates/turbo-tasks-fs/examples/hash_glob.rs +++ b/crates/turbo-tasks-fs/examples/hash_glob.rs @@ -27,13 +27,13 @@ async fn main() -> Result<()> { let task = tt.spawn_root_task(|| { Box::pin(async { let root = current_dir().unwrap().to_str().unwrap().to_string(); - let disk_fs = DiskFileSystem::new("project".to_string(), root, vec![]); + let disk_fs = DiskFileSystem::new("project".to_string().into(), root.into(), vec![]); disk_fs.await?.start_watching()?; // Smart Pointer cast let fs: Vc> = Vc::upcast(disk_fs); - let input = fs.root().join("crates".to_string()); - let glob = Glob::new("**/*.rs".to_string()); + let input = fs.root().join("crates".to_string().into()); + let glob = Glob::new("**/*.rs".to_string().into()); let glob_result = input.read_glob(glob, true); let dir_hash = hash_glob_result(glob_result); print_hash(dir_hash).await?; diff --git a/crates/turbo-tasks-fs/src/attach.rs b/crates/turbo-tasks-fs/src/attach.rs index 7fc1af31ec9e1..339bf12695c9c 100644 --- a/crates/turbo-tasks-fs/src/attach.rs +++ b/crates/turbo-tasks-fs/src/attach.rs @@ -1,3 +1,5 @@ +use std::sync::Arc; + use anyhow::{bail, Result}; use auto_hash_map::AutoMap; use turbo_tasks::{Completion, ValueToString, Vc}; @@ -16,7 +18,7 @@ pub struct AttachedFileSystem { root_fs: Vc>, // we turn this into a string because creating a FileSystemPath requires the filesystem which // we are creating (circular reference) - child_path: String, + child_path: Arc, child_fs: Vc>, } @@ -112,7 +114,7 @@ impl AttachedFileSystem { .root() .resolve() .await? 
- .join(inner_path.to_string()) + .join(inner_path.to_string().into()) } else { this.root_fs.root().resolve().await?.join(path.path.clone()) }) diff --git a/crates/turbo-tasks-fs/src/embed/dir.rs b/crates/turbo-tasks-fs/src/embed/dir.rs index 90e34fc5b86e7..48601e0fba7e2 100644 --- a/crates/turbo-tasks-fs/src/embed/dir.rs +++ b/crates/turbo-tasks-fs/src/embed/dir.rs @@ -2,14 +2,14 @@ pub use ::include_dir::{ include_dir, {self}, }; use anyhow::Result; -use turbo_tasks::{TransientInstance, Vc}; +use turbo_tasks::{RcStr, TransientInstance, Vc}; use crate::{embed::EmbeddedFileSystem, DiskFileSystem, FileSystem}; #[turbo_tasks::function] pub async fn directory_from_relative_path( - name: String, - path: String, + name: RcStr, + path: RcStr, ) -> Result>> { let disk_fs = DiskFileSystem::new(name, path, vec![]); disk_fs.await?.start_watching()?; @@ -19,7 +19,7 @@ pub async fn directory_from_relative_path( #[turbo_tasks::function] pub async fn directory_from_include_dir( - name: String, + name: RcStr, dir: TransientInstance<&'static include_dir::Dir<'static>>, ) -> Result>> { Ok(Vc::upcast(EmbeddedFileSystem::new(name, dir))) @@ -72,7 +72,7 @@ macro_rules! embed_directory_internal { static dir: include_dir::Dir<'static> = turbo_tasks_fs::embed::include_dir!($path); turbo_tasks_fs::embed::directory_from_include_dir( - $name.to_string(), + $name.to_string().into(), turbo_tasks::TransientInstance::new(&dir), ) }}; diff --git a/crates/turbo-tasks-fs/src/embed/file.rs b/crates/turbo-tasks-fs/src/embed/file.rs index 6dcc925f13ae1..df02e2042f8c6 100644 --- a/crates/turbo-tasks-fs/src/embed/file.rs +++ b/crates/turbo-tasks-fs/src/embed/file.rs @@ -1,36 +1,33 @@ -use std::path::PathBuf; +use std::{path::Path, sync::Arc}; use anyhow::{Context, Result}; use dunce::canonicalize; -use turbo_tasks::Vc; +use turbo_tasks::{RcStr, Vc}; use crate::{DiskFileSystem, File, FileContent, FileSystem}; #[turbo_tasks::function] pub async fn content_from_relative_path( - package_path: String, - path: String, + package_path: Arc, + path: Arc, ) -> Result> { - let package_path = PathBuf::from(package_path); - let resolved_path = package_path.join(path); + let package_path = Path::new(&**package_path); + let resolved_path = package_path.join(&**path); let resolved_path = canonicalize(&resolved_path).context("failed to canonicalize embedded file path")?; let root_path = resolved_path.parent().unwrap(); let path = resolved_path.file_name().unwrap().to_str().unwrap(); - let disk_fs = DiskFileSystem::new( - root_path.to_string_lossy().to_string(), - root_path.to_string_lossy().to_string(), - vec![], - ); + let root_path_str = Arc::new(root_path.to_string_lossy().to_string()); + let disk_fs = DiskFileSystem::new(root_path_str.clone(), root_path_str, vec![]); disk_fs.await?.start_watching()?; - let fs_path = disk_fs.root().join(path.to_string()); + let fs_path = disk_fs.root().join(path.to_string().into()); Ok(fs_path.read()) } #[turbo_tasks::function] -pub async fn content_from_str(string: String) -> Result> { +pub async fn content_from_str(string: RcStr) -> Result> { Ok(File::from(string).into()) } @@ -57,7 +54,9 @@ macro_rules! embed_file { macro_rules! 
embed_file { ($path:expr) => { turbo_tasks_fs::embed::content_from_str( - include_str!(concat!(env!("CARGO_MANIFEST_DIR"), "/", $path)).to_string(), + include_str!(concat!(env!("CARGO_MANIFEST_DIR"), "/", $path)) + .to_string() + .into(), ) }; } diff --git a/crates/turbo-tasks-fs/src/embed/fs.rs b/crates/turbo-tasks-fs/src/embed/fs.rs index 7e5862a71427b..ebb26f549ee8b 100644 --- a/crates/turbo-tasks-fs/src/embed/fs.rs +++ b/crates/turbo-tasks-fs/src/embed/fs.rs @@ -1,6 +1,8 @@ +use std::sync::Arc; + use anyhow::{bail, Result}; use include_dir::{Dir, DirEntry}; -use turbo_tasks::{Completion, TransientInstance, ValueToString, Vc}; +use turbo_tasks::{Completion, RcStr, TransientInstance, ValueToString, Vc}; use crate::{ DirectoryContent, DirectoryEntry, File, FileContent, FileMeta, FileSystem, FileSystemPath, @@ -9,7 +11,7 @@ use crate::{ #[turbo_tasks::value(serialization = "none")] pub struct EmbeddedFileSystem { - name: String, + name: RcStr, #[turbo_tasks(trace_ignore)] dir: TransientInstance<&'static Dir<'static>>, } @@ -18,7 +20,7 @@ pub struct EmbeddedFileSystem { impl EmbeddedFileSystem { #[turbo_tasks::function] pub(super) fn new( - name: String, + name: RcStr, dir: TransientInstance<&'static Dir<'static>>, ) -> Vc { EmbeddedFileSystem { name, dir }.cell() @@ -29,7 +31,7 @@ impl EmbeddedFileSystem { impl FileSystem for EmbeddedFileSystem { #[turbo_tasks::function] async fn read(&self, path: Vc) -> Result> { - let file = match self.dir.get_file(&path.await?.path) { + let file = match self.dir.get_file(&*path.await?.path) { Some(file) => file, None => return Ok(FileContent::NotFound.cell()), }; @@ -45,7 +47,7 @@ impl FileSystem for EmbeddedFileSystem { #[turbo_tasks::function] async fn read_dir(&self, path: Vc) -> Result> { let path_str = &path.await?.path; - let dir = match (path_str.as_str(), self.dir.get_dir(path_str)) { + let dir = match (path_str.as_str(), self.dir.get_dir(&**path_str)) { ("", _) => *self.dir, (_, Some(dir)) => dir, (_, None) => return Ok(DirectoryContent::NotFound.cell()), @@ -55,8 +57,14 @@ impl FileSystem for EmbeddedFileSystem { .entries() .iter() .map(|e| { - let entry_name = e.path().file_name().unwrap_or_default().to_string_lossy(); - let entry_path = path.join(entry_name.to_string()); + let entry_name = Arc::new( + e.path() + .file_name() + .unwrap_or_default() + .to_string_lossy() + .to_string(), + ); + let entry_path = path.join(entry_name.clone()); ( entry_name.to_string(), @@ -96,7 +104,7 @@ impl FileSystem for EmbeddedFileSystem { #[turbo_tasks::function] async fn metadata(&self, path: Vc) -> Result> { - if self.dir.get_entry(&path.await?.path).is_none() { + if self.dir.get_entry(&*path.await?.path).is_none() { bail!("path not found, can't read metadata"); } @@ -108,6 +116,6 @@ impl FileSystem for EmbeddedFileSystem { impl ValueToString for EmbeddedFileSystem { #[turbo_tasks::function] fn to_string(&self) -> Vc { - Vc::cell(self.name.clone()) + Vc::cell(self.name.to_string()) } } diff --git a/crates/turbo-tasks-fs/src/glob.rs b/crates/turbo-tasks-fs/src/glob.rs index f68be03ca9ec0..cd9412edbe989 100644 --- a/crates/turbo-tasks-fs/src/glob.rs +++ b/crates/turbo-tasks-fs/src/glob.rs @@ -1,4 +1,4 @@ -use std::mem::take; +use std::{mem::take, sync::Arc}; use anyhow::{anyhow, bail, Context, Result}; use serde::{Deserialize, Serialize}; @@ -395,7 +395,7 @@ impl TryFrom<&str> for Glob { #[turbo_tasks::value_impl] impl Glob { #[turbo_tasks::function] - pub fn new(glob: String) -> Result> { + pub fn new(glob: Arc) -> Result> { 
Ok(Self::cell(Glob::try_from(glob.as_str())?)) } diff --git a/crates/turbo-tasks-fs/src/invalidation.rs b/crates/turbo-tasks-fs/src/invalidation.rs index cbb46ddd2659d..665e134d914dc 100644 --- a/crates/turbo-tasks-fs/src/invalidation.rs +++ b/crates/turbo-tasks-fs/src/invalidation.rs @@ -1,4 +1,7 @@ -use std::fmt::{Display, Formatter}; +use std::{ + fmt::{Display, Formatter}, + sync::Arc, +}; use indexmap::IndexSet; use turbo_tasks::{util::StaticOrArc, InvalidationReason, InvalidationReasonKind}; @@ -50,7 +53,7 @@ impl InvalidationReasonKind for WatchChangeKind { /// before. #[derive(PartialEq, Eq, Hash)] pub struct WatchStart { - pub name: String, + pub name: Arc, } impl InvalidationReason for WatchStart { diff --git a/crates/turbo-tasks-fs/src/lib.rs b/crates/turbo-tasks-fs/src/lib.rs index e828fbe69ce90..d57a5bebb93de 100644 --- a/crates/turbo-tasks-fs/src/lib.rs +++ b/crates/turbo-tasks-fs/src/lib.rs @@ -57,7 +57,7 @@ use tokio::{ }; use tracing::Instrument; use turbo_tasks::{ - mark_stateful, trace::TraceRawVcs, Completion, InvalidationReason, Invalidator, ReadRef, + mark_stateful, trace::TraceRawVcs, Completion, InvalidationReason, Invalidator, RcStr, ReadRef, ValueToString, Vc, }; use turbo_tasks_hash::{hash_xxh3_hash64, DeterministicHash, DeterministicHasher}; @@ -76,7 +76,7 @@ use crate::{ pub trait FileSystem: ValueToString { /// Returns the path to the root of the file system. fn root(self: Vc) -> Vc { - FileSystemPath::new_normalized(self, String::new()) + FileSystemPath::new_normalized(self, Arc::default()) } fn read(self: Vc, fs_path: Vc) -> Vc; fn read_link(self: Vc, fs_path: Vc) -> Vc; @@ -97,8 +97,8 @@ pub trait FileSystem: ValueToString { #[turbo_tasks::value(cell = "new", eq = "manual")] pub struct DiskFileSystem { - pub name: String, - pub root: String, + pub name: RcStr, + pub root: RcStr, #[turbo_tasks(debug_ignore, trace_ignore)] #[serde(skip)] mutex_map: MutexMap, @@ -118,7 +118,7 @@ pub struct DiskFileSystem { impl DiskFileSystem { /// Returns the root as Path fn root_path(&self) -> &Path { - simplified(Path::new(&self.root)) + simplified(Path::new(&*self.root)) } /// registers the path as an invalidator for the current task, @@ -287,11 +287,7 @@ impl DiskFileSystem { /// root & project dir is different and requires to ignore specific /// subpaths from each. 
#[turbo_tasks::function] - pub async fn new( - name: String, - root: String, - ignored_subpaths: Vec, - ) -> Result> { + pub async fn new(name: RcStr, root: RcStr, ignored_subpaths: Vec) -> Result> { mark_stateful(); // create the directory for the filesystem on disk, if it doesn't exist fs::create_dir_all(&root).await?; @@ -304,7 +300,10 @@ impl DiskFileSystem { invalidator_map: Arc::new(InvalidatorMap::new()), dir_invalidator_map: Arc::new(InvalidatorMap::new()), watcher: Arc::new(DiskWatcher::new( - ignored_subpaths.into_iter().map(PathBuf::from).collect(), + ignored_subpaths + .into_iter() + .map(|v| PathBuf::from(&*v)) + .collect(), )), }; @@ -383,9 +382,11 @@ impl FileSystem for DiskFileSystem { // we filter out any non unicode names and paths without the same root here let file_name = path.file_name()?.to_str()?.to_string(); - let path_to_root = sys_to_unix(path.strip_prefix(&self.root).ok()?.to_str()?); + let path_to_root = Arc::new( + sys_to_unix(path.strip_prefix(&*self.root).ok()?.to_str()?).to_string(), + ); - let fs_path = FileSystemPath::new_normalized(fs_path.fs, path_to_root.to_string()); + let fs_path = FileSystemPath::new_normalized(fs_path.fs, path_to_root); let entry = match e.file_type() { Ok(t) if t.is_file() => DirectoryEntry::File(fs_path), @@ -447,7 +448,7 @@ impl FileSystem for DiskFileSystem { // // we use `dunce::simplify` to strip a potential UNC prefix on windows, on any // other OS this gets compiled away - let result = simplified(&file).strip_prefix(simplified(Path::new(&self.root))); + let result = simplified(&file).strip_prefix(simplified(Path::new(&*self.root))); let relative_to_root_path = match result { Ok(file) => PathBuf::from(sys_to_unix(&file.to_string_lossy()).as_ref()), @@ -455,7 +456,7 @@ impl FileSystem for DiskFileSystem { }; let (target, file_type) = if is_link_absolute { - let target_string = relative_to_root_path.to_string_lossy().to_string(); + let target_string = Arc::new(relative_to_root_path.to_string_lossy().to_string()); ( target_string.clone(), FileSystemPath::new_normalized(fs_path.fs(), target_string) @@ -464,14 +465,10 @@ impl FileSystem for DiskFileSystem { ) } else { let link_path_string_cow = link_path.to_string_lossy(); - let link_path_unix = sys_to_unix(&link_path_string_cow); + let link_path_unix = Arc::new(sys_to_unix(&link_path_string_cow).to_string()); ( - link_path_unix.to_string(), - fs_path - .parent() - .join(link_path_unix.to_string()) - .get_type() - .await?, + link_path_unix.clone(), + fs_path.parent().join(link_path_unix).get_type().await?, ) }; @@ -653,7 +650,7 @@ impl FileSystem for DiskFileSystem { LinkContent::Link { target, link_type } => { let link_type = *link_type; let target_path = if link_type.contains(LinkType::ABSOLUTE) { - Path::new(&self.root).join(unix_to_sys(target).as_ref()) + Path::new(&*self.root).join(unix_to_sys(target).as_ref()) } else { PathBuf::from(unix_to_sys(target).as_ref()) }; @@ -721,7 +718,7 @@ impl FileSystem for DiskFileSystem { impl ValueToString for DiskFileSystem { #[turbo_tasks::function] fn to_string(&self) -> Vc { - Vc::cell(self.name.clone()) + Vc::cell(self.name.to_string()) } } @@ -729,12 +726,12 @@ impl ValueToString for DiskFileSystem { #[derive(Debug, Clone)] pub struct FileSystemPath { pub fs: Vc>, - pub path: String, + pub path: Arc, } impl FileSystemPath { pub fn is_inside_ref(&self, other: &FileSystemPath) -> bool { - if self.fs == other.fs && self.path.starts_with(&other.path) { + if self.fs == other.fs && self.path.starts_with(&*other.path) { if 
other.path.is_empty() { true } else { @@ -746,7 +743,7 @@ impl FileSystemPath { } pub fn is_inside_or_equal_ref(&self, other: &FileSystemPath) -> bool { - if self.fs == other.fs && self.path.starts_with(&other.path) { + if self.fs == other.fs && self.path.starts_with(&*other.path) { if other.path.is_empty() { true } else { @@ -771,7 +768,7 @@ impl FileSystemPath { if self.fs != inner.fs { return None; } - let path = inner.path.strip_prefix(&self.path)?; + let path = inner.path.strip_prefix(&*self.path)?; if self.path.is_empty() { Some(path) } else if let Some(stripped) = path.strip_prefix('/') { @@ -889,7 +886,7 @@ impl FileSystemPath { /// /-separated path is expected to be already normalized (this is asserted /// in dev mode). #[turbo_tasks::function] - fn new_normalized(fs: Vc>, path: String) -> Vc { + fn new_normalized(fs: Vc>, path: Arc) -> Vc { // On Windows, the path must be converted to a unix path before creating. But on // Unix, backslashes are a valid char in file names, and the path can be // provided by the user, so we allow it. @@ -910,10 +907,10 @@ impl FileSystemPath { /// contain ".." or "." seqments, but it must not leave the root of the /// filesystem. #[turbo_tasks::function] - pub async fn join(self: Vc, path: String) -> Result> { + pub async fn join(self: Vc, path: Arc) -> Result> { let this = self.await?; if let Some(path) = join_path(&this.path, &path) { - Ok(Self::new_normalized(this.fs, path)) + Ok(Self::new_normalized(this.fs, path.into())) } else { bail!( "Vc(\"{}\").join(\"{}\") leaves the filesystem root", @@ -925,7 +922,7 @@ impl FileSystemPath { /// Adds a suffix to the filename. [path] must not contain `/`. #[turbo_tasks::function] - pub async fn append(self: Vc, path: String) -> Result> { + pub async fn append(self: Vc, path: Arc) -> Result> { let this = self.await?; if path.contains('/') { bail!( @@ -936,14 +933,14 @@ impl FileSystemPath { } Ok(Self::new_normalized( this.fs, - format!("{}{}", this.path, path), + format!("{}{}", this.path, path).into(), )) } /// Adds a suffix to the basename of the filename. [appending] must not /// contain `/`. Extension will stay intact. #[turbo_tasks::function] - pub async fn append_to_stem(self: Vc, appending: String) -> Result> { + pub async fn append_to_stem(self: Vc, appending: Arc) -> Result> { let this = self.await?; if appending.contains('/') { bail!( @@ -955,23 +952,23 @@ impl FileSystemPath { if let (path, Some(ext)) = this.split_extension() { return Ok(Self::new_normalized( this.fs, - format!("{}{}.{}", path, appending, ext), + format!("{}{}.{}", path, appending, ext).into(), )); } Ok(Self::new_normalized( this.fs, - format!("{}{}", this.path, appending), + format!("{}{}", this.path, appending).into(), )) } /// Similar to [FileSystemPath::join], but returns an Option that will be /// None when the joined path would leave the filesystem root. #[turbo_tasks::function] - pub async fn try_join(self: Vc, path: String) -> Result> { + pub async fn try_join(self: Vc, path: Arc) -> Result> { let this = self.await?; if let Some(path) = join_path(&this.path, &path) { Ok(Vc::cell(Some( - Self::new_normalized(this.fs, path).resolve().await?, + Self::new_normalized(this.fs, path.into()).resolve().await?, ))) } else { Ok(FileSystemPathOption::none()) @@ -981,12 +978,15 @@ impl FileSystemPath { /// Similar to [FileSystemPath::join], but returns an Option that will be /// None when the joined path would leave the current path. 
#[turbo_tasks::function] - pub async fn try_join_inside(self: Vc, path: String) -> Result> { + pub async fn try_join_inside( + self: Vc, + path: Arc, + ) -> Result> { let this = self.await?; if let Some(path) = join_path(&this.path, &path) { - if path.starts_with(&this.path) { + if path.starts_with(&*this.path) { return Ok(Vc::cell(Some( - Self::new_normalized(this.fs, path).resolve().await?, + Self::new_normalized(this.fs, path.into()).resolve().await?, ))); } } @@ -1031,7 +1031,10 @@ impl FileSystemPath { /// Creates a new [`Vc`] like `self` but with the given /// extension. #[turbo_tasks::function] - pub async fn with_extension(self: Vc, extension: String) -> Result> { + pub async fn with_extension( + self: Vc, + extension: Arc, + ) -> Result> { let this = self.await?; let (path_without_extension, _) = this.split_extension(); Ok(Self::new_normalized( @@ -1039,8 +1042,8 @@ impl FileSystemPath { // Like `Path::with_extension` and `PathBuf::set_extension`, if the extension is empty, // we remove the extension altogether. match extension.is_empty() { - true => path_without_extension.to_string(), - false => format!("{path_without_extension}.{extension}"), + true => path_without_extension.to_string().into(), + false => format!("{path_without_extension}.{extension}").into(), }, )) } @@ -1085,7 +1088,7 @@ pub async fn rebase( if new_base.path.is_empty() { new_path = fs_path.path.clone(); } else { - new_path = [new_base.path.as_str(), "/", &fs_path.path].concat(); + new_path = [new_base.path.as_str(), "/", &fs_path.path].concat().into(); } } else { let base_path = [&old_base.path, "/"].concat(); @@ -1098,9 +1101,11 @@ pub async fn rebase( ); } if new_base.path.is_empty() { - new_path = [&fs_path.path[base_path.len()..]].concat(); + new_path = [&fs_path.path[base_path.len()..]].concat().into(); } else { - new_path = [new_base.path.as_str(), &fs_path.path[old_base.path.len()..]].concat(); + new_path = [new_base.path.as_str(), &fs_path.path[old_base.path.len()..]] + .concat() + .into(); } } Ok(new_base.fs.root().join(new_path)) @@ -1167,8 +1172,8 @@ impl FileSystemPath { return Ok(self); } let p = match str::rfind(path, '/') { - Some(index) => path[..index].to_string(), - None => "".to_string(), + Some(index) => Arc::new(path[..index].to_string()), + None => Arc::default(), }; Ok(FileSystemPath::new_normalized(this.fs, p)) } @@ -1223,7 +1228,7 @@ impl FileSystemPath { let real_self = if parent_result.path != parent { parent_result .path - .join(basename.to_string()) + .join(basename.to_string().into()) .resolve() .await? 
} else { @@ -1238,7 +1243,7 @@ impl FileSystemPath { } else { result.path } - .join(target.to_string()) + .join(target.to_string().into()) .resolve() .await?; return Ok(result.cell()); @@ -1428,7 +1433,10 @@ pub enum LinkContent { // normalized, which means in `fn write_link` we couldn't restore the raw value of the file // link because there is only **dist** path in `fn write_link`, and we need the raw path if // we want to restore the link value in `fn write_link` - Link { target: String, link_type: LinkType }, + Link { + target: Arc, + link_type: LinkType, + }, Invalid, NotFound, } @@ -1510,6 +1518,12 @@ impl From> for File { } } +impl From> for File { + fn from(s: Arc) -> Self { + File::from_bytes(s.as_bytes().to_vec()) + } +} + impl From<&str> for File { fn from(s: &str) -> Self { File::from_bytes(s.as_bytes().to_vec()) @@ -1950,26 +1964,28 @@ mod tests { turbo_tasks_testing::VcStorage::with(async { let fs = Vc::upcast(VirtualFileSystem::new()); - let path_txt = FileSystemPath::new_normalized(fs, "foo/bar.txt".into()); + let path_txt = FileSystemPath::new_normalized(fs, "foo/bar.txt".to_string().into()); - let path_json = path_txt.with_extension("json".to_string()); + let path_json = path_txt.with_extension("json".to_string().into()); assert_eq!(&*path_json.await.unwrap().path, "foo/bar.json"); - let path_no_ext = path_txt.with_extension("".to_string()); + let path_no_ext = path_txt.with_extension("".to_string().into()); assert_eq!(&*path_no_ext.await.unwrap().path, "foo/bar"); - let path_new_ext = path_no_ext.with_extension("json".to_string()); + let path_new_ext = path_no_ext.with_extension("json".to_string().into()); assert_eq!(&*path_new_ext.await.unwrap().path, "foo/bar.json"); - let path_no_slash_txt = FileSystemPath::new_normalized(fs, "bar.txt".into()); + let path_no_slash_txt = + FileSystemPath::new_normalized(fs, "bar.txt".to_string().into()); - let path_no_slash_json = path_no_slash_txt.with_extension("json".to_string()); + let path_no_slash_json = path_no_slash_txt.with_extension("json".to_string().into()); assert_eq!(path_no_slash_json.await.unwrap().path.as_str(), "bar.json"); - let path_no_slash_no_ext = path_no_slash_txt.with_extension("".to_string()); + let path_no_slash_no_ext = path_no_slash_txt.with_extension("".to_string().into()); assert_eq!(path_no_slash_no_ext.await.unwrap().path.as_str(), "bar"); - let path_no_slash_new_ext = path_no_slash_no_ext.with_extension("json".to_string()); + let path_no_slash_new_ext = + path_no_slash_no_ext.with_extension("json".to_string().into()); assert_eq!( path_no_slash_new_ext.await.unwrap().path.as_str(), "bar.json" @@ -1988,19 +2004,19 @@ mod tests { turbo_tasks_testing::VcStorage::with(async { let fs = Vc::upcast::>(VirtualFileSystem::new()); - let path = FileSystemPath::new_normalized(fs, "".into()); + let path = FileSystemPath::new_normalized(fs, "".to_string().into()); assert_eq!(path.file_stem().await.unwrap().as_deref(), None); - let path = FileSystemPath::new_normalized(fs, "foo/bar.txt".into()); + let path = FileSystemPath::new_normalized(fs, "foo/bar.txt".to_string().into()); assert_eq!(path.file_stem().await.unwrap().as_deref(), Some("bar")); - let path = FileSystemPath::new_normalized(fs, "bar.txt".into()); + let path = FileSystemPath::new_normalized(fs, "bar.txt".to_string().into()); assert_eq!(path.file_stem().await.unwrap().as_deref(), Some("bar")); - let path = FileSystemPath::new_normalized(fs, "foo/bar".into()); + let path = FileSystemPath::new_normalized(fs, "foo/bar".to_string().into()); 
assert_eq!(path.file_stem().await.unwrap().as_deref(), Some("bar")); - let path = FileSystemPath::new_normalized(fs, "foo/.bar".into()); + let path = FileSystemPath::new_normalized(fs, "foo/.bar".to_string().into()); assert_eq!(path.file_stem().await.unwrap().as_deref(), Some(".bar")); anyhow::Ok(()) diff --git a/crates/turbo-tasks-fs/src/read_glob.rs b/crates/turbo-tasks-fs/src/read_glob.rs index 35bffa372dc0a..0939712c37963 100644 --- a/crates/turbo-tasks-fs/src/read_glob.rs +++ b/crates/turbo-tasks-fs/src/read_glob.rs @@ -1,4 +1,4 @@ -use std::collections::HashMap; +use std::{collections::HashMap, sync::Arc}; use anyhow::Result; use turbo_tasks::Vc; @@ -27,7 +27,7 @@ pub async fn read_glob( #[turbo_tasks::function] async fn read_glob_inner( - prefix: String, + prefix: Arc, directory: Vc, glob: Vc, include_dot_files: bool, @@ -59,7 +59,12 @@ async fn read_glob_internal( if glob_value.execute(&full_path_prefix) { result.inner.insert( full_path, - read_glob_inner(full_path_prefix, *path, glob, include_dot_files), + read_glob_inner( + full_path_prefix.into(), + *path, + glob, + include_dot_files, + ), ); } } diff --git a/crates/turbo-tasks-fs/src/watcher.rs b/crates/turbo-tasks-fs/src/watcher.rs index 6af75e426bca6..eedee8ac564d1 100644 --- a/crates/turbo-tasks-fs/src/watcher.rs +++ b/crates/turbo-tasks-fs/src/watcher.rs @@ -124,9 +124,9 @@ impl DiskWatcher { /// - Doesn't emit Modify events after a Create event pub(crate) fn start_watching( self: Arc, - name: String, + name: Arc, root_path: PathBuf, - report_invalidation_reason: Option<(String, PathBuf)>, + report_invalidation_reason: Option<(Arc, PathBuf)>, invalidation_lock: Arc>, invalidator_map: Arc, dir_invalidator_map: Arc, @@ -209,7 +209,7 @@ impl DiskWatcher { &self, rx: Receiver, root_path: PathBuf, - report_invalidation_reason: Option<(String, PathBuf)>, + report_invalidation_reason: Option<(Arc, PathBuf)>, invalidation_lock: Arc>, invalidator_map: Arc, dir_invalidator_map: Arc, @@ -428,7 +428,7 @@ impl DiskWatcher { #[instrument(parent = None, level = "info", name = "DiskFileSystem file change", skip_all, fields(name = display(path.display())))] fn invalidate( - report_invalidation_reason: &Option<(String, PathBuf)>, + report_invalidation_reason: &Option<(Arc, PathBuf)>, path: &Path, invalidator: Invalidator, ) { @@ -442,7 +442,7 @@ fn invalidate( } fn invalidate_path( - report_invalidation_reason: &Option<(String, PathBuf)>, + report_invalidation_reason: &Option<(Arc, PathBuf)>, invalidator_map: &mut HashMap>, paths: impl Iterator, ) { @@ -457,7 +457,7 @@ fn invalidate_path( } fn invalidate_path_and_children_execute( - report_invalidation_reason: &Option<(String, PathBuf)>, + report_invalidation_reason: &Option<(Arc, PathBuf)>, invalidator_map: &mut HashMap>, paths: impl Iterator, ) { diff --git a/crates/turbo-tasks-hash/src/deterministic_hash.rs b/crates/turbo-tasks-hash/src/deterministic_hash.rs index 7d378108eeaea..9390280d126a8 100644 --- a/crates/turbo-tasks-hash/src/deterministic_hash.rs +++ b/crates/turbo-tasks-hash/src/deterministic_hash.rs @@ -1,4 +1,4 @@ -use std::mem::Discriminant; +use std::{mem::Discriminant, sync::Arc}; pub use turbo_tasks_macros::DeterministicHash; @@ -116,6 +116,12 @@ impl DeterministicHash for String { } } +impl DeterministicHash for Arc { + fn deterministic_hash(&self, state: &mut H) { + (**self).deterministic_hash(state) + } +} + impl DeterministicHash for &str { fn deterministic_hash(&self, state: &mut H) { state.write_usize(self.len()); diff --git 
a/crates/turbo-tasks-memory/tests/collectibles.rs b/crates/turbo-tasks-memory/tests/collectibles.rs index 35d48f7f42fb7..19899b094a3dd 100644 --- a/crates/turbo-tasks-memory/tests/collectibles.rs +++ b/crates/turbo-tasks-memory/tests/collectibles.rs @@ -1,18 +1,18 @@ #![feature(arbitrary_self_types)] -use std::{collections::HashSet, time::Duration}; +use std::{collections::HashSet, sync::Arc, time::Duration}; use anyhow::Result; use auto_hash_map::AutoSet; use tokio::time::sleep; -use turbo_tasks::{emit, CollectiblesSource, ValueToString, Vc}; +use turbo_tasks::{emit, CollectiblesSource, RcStr, ValueToString, Vc}; use turbo_tasks_testing::{register, run}; register!(); #[tokio::test] async fn transitive_emitting() { run! { - let result = my_transitive_emitting_function("".to_string(), "".to_string()); + let result = my_transitive_emitting_function("".to_string().into(), "".to_string().into()); result.strongly_consistent().await?; let list = result.peek_collectibles::>(); assert_eq!(list.len(), 2); @@ -27,8 +27,8 @@ async fn transitive_emitting() { #[tokio::test] async fn transitive_emitting_indirect() { run! { - let result = my_transitive_emitting_function("".to_string(), "".to_string()); - let collectibles = my_transitive_emitting_function_collectibles("".to_string(), "".to_string()); + let result = my_transitive_emitting_function("".to_string().into(), "".to_string().into()); + let collectibles = my_transitive_emitting_function_collectibles("".to_string().into(), "".to_string().into()); let list = collectibles.strongly_consistent().await?; assert_eq!(list.len(), 2); let mut expected = ["123", "42"].into_iter().collect::>(); @@ -82,31 +82,31 @@ async fn taking_collectibles_extra_layer() { #[tokio::test] async fn taking_collectibles_parallel() { run! 
{ - let result = my_transitive_emitting_function("".to_string(), "a".to_string()); + let result = my_transitive_emitting_function("".to_string().into(), "a".to_string().into()); result.strongly_consistent().await?; let list = result.take_collectibles::>(); assert_eq!(list.len(), 2); assert_eq!(result.await?.0, 0); - let result = my_transitive_emitting_function("".to_string(), "b".to_string()); + let result = my_transitive_emitting_function("".to_string().into(), "b".to_string().into()); result.strongly_consistent().await?; let list = result.take_collectibles::>(); assert_eq!(list.len(), 2); assert_eq!(result.await?.0, 0); - let result = my_transitive_emitting_function_with_child_scope("".to_string(), "b".to_string(), "1".to_string()); + let result = my_transitive_emitting_function_with_child_scope("".to_string().into(), "b".to_string().into(), "1".to_string().into()); result.strongly_consistent().await?; let list = result.take_collectibles::>(); assert_eq!(list.len(), 2); assert_eq!(result.await?.0, 0); - let result = my_transitive_emitting_function_with_child_scope("".to_string(), "b".to_string(), "2".to_string()); + let result = my_transitive_emitting_function_with_child_scope("".to_string().into(), "b".to_string().into(), "2".to_string().into()); result.strongly_consistent().await?; let list = result.take_collectibles::>(); assert_eq!(list.len(), 2); assert_eq!(result.await?.0, 0); - let result = my_transitive_emitting_function_with_child_scope("".to_string(), "c".to_string(), "3".to_string()); + let result = my_transitive_emitting_function_with_child_scope("".to_string().into(), "c".to_string().into(), "3".to_string().into()); result.strongly_consistent().await?; let list = result.take_collectibles::>(); assert_eq!(list.len(), 2); @@ -119,7 +119,7 @@ struct Collectibles(AutoSet>>); #[turbo_tasks::function] async fn my_collecting_function() -> Result> { - let result = my_transitive_emitting_function("".to_string(), "".to_string()); + let result = my_transitive_emitting_function("".to_string().into(), "".to_string().into()); result.take_collectibles::>(); Ok(result) } @@ -137,32 +137,29 @@ async fn my_collecting_function_indirect() -> Result> { #[turbo_tasks::function] async fn my_multi_emitting_function() -> Result> { - my_transitive_emitting_function("".to_string(), "a".to_string()).await?; - my_transitive_emitting_function("".to_string(), "b".to_string()).await?; - my_emitting_function("".to_string()).await?; + my_transitive_emitting_function("".to_string().into(), "a".to_string().into()).await?; + my_transitive_emitting_function("".to_string().into(), "b".to_string().into()).await?; + my_emitting_function("".to_string().into()).await?; Ok(Thing::cell(Thing(0))) } #[turbo_tasks::function] -async fn my_transitive_emitting_function(key: String, _key2: String) -> Result> { +async fn my_transitive_emitting_function(key: RcStr, _key2: Arc) -> Result> { my_emitting_function(key).await?; Ok(Thing::cell(Thing(0))) } #[turbo_tasks::function] -async fn my_transitive_emitting_function_collectibles( - key: String, - key2: String, -) -> Vc { +async fn my_transitive_emitting_function_collectibles(key: RcStr, key2: RcStr) -> Vc { let result = my_transitive_emitting_function(key, key2); Vc::cell(result.peek_collectibles::>()) } #[turbo_tasks::function] async fn my_transitive_emitting_function_with_child_scope( - key: String, - key2: String, - _key3: String, + key: RcStr, + key2: RcStr, + _key3: RcStr, ) -> Result> { let thing = my_transitive_emitting_function(key, key2); 
thing.strongly_consistent().await?; @@ -172,7 +169,7 @@ async fn my_transitive_emitting_function_with_child_scope( } #[turbo_tasks::function] -async fn my_emitting_function(_key: String) -> Result<()> { +async fn my_emitting_function(_key: Arc) -> Result<()> { sleep(Duration::from_millis(100)).await; emit(Vc::upcast::>(Thing::new(123))); emit(Vc::upcast::>(Thing::new(42))); diff --git a/crates/turbo-tasks/src/lib.rs b/crates/turbo-tasks/src/lib.rs index 93bda5364a01f..ea3985b68c8a4 100644 --- a/crates/turbo-tasks/src/lib.rs +++ b/crates/turbo-tasks/src/lib.rs @@ -99,7 +99,7 @@ pub use read_ref::ReadRef; pub use state::State; pub use task::{ concrete_task_input::{ConcreteTaskInput, SharedReference, SharedValue}, - task_input::TaskInput, + task_input::{RcStr, TaskInput}, }; pub use trait_ref::{IntoTraitRef, TraitRef}; pub use turbo_tasks_macros::{function, value, value_impl, value_trait, TaskInput}; diff --git a/crates/turbo-tasks/src/task/concrete_task_input.rs b/crates/turbo-tasks/src/task/concrete_task_input.rs index c591d652bc658..08154ec1c29b4 100644 --- a/crates/turbo-tasks/src/task/concrete_task_input.rs +++ b/crates/turbo-tasks/src/task/concrete_task_input.rs @@ -11,6 +11,7 @@ use std::{ use anyhow::Result; use serde::{ser::SerializeTuple, Deserialize, Serialize}; +use super::task_input::RcStr; use crate::{ backend::CellContent, id::{FunctionId, TraitTypeId}, @@ -330,7 +331,7 @@ pub enum ConcreteTaskInput { TaskOutput(TaskId), TaskCell(TaskId, CellId), List(Vec), - String(String), + String(RcStr), Bool(bool), Usize(usize), I8(i8), diff --git a/crates/turbo-tasks/src/task/function.rs b/crates/turbo-tasks/src/task/function.rs index b753f468158b8..401726066d8c1 100644 --- a/crates/turbo-tasks/src/task/function.rs +++ b/crates/turbo-tasks/src/task/function.rs @@ -326,7 +326,7 @@ task_inputs_impl! { A1 A2 A3 A4 A5 A6 A7 A8 A9 A10 A11 A12 A13 A14 A15 A16 A17 } #[cfg(test)] mod tests { use super::*; - use crate::{VcCellNewMode, VcDefaultRead}; + use crate::{RcStr, VcCellNewMode, VcDefaultRead}; #[test] fn test_task_fn() { @@ -350,18 +350,15 @@ mod tests { todo!() } - fn with_recv_and_str(_a: &i32, _s: String) -> crate::Vc { + fn with_recv_and_str(_a: &i32, _s: RcStr) -> crate::Vc { todo!() } - async fn async_with_recv_and_str(_a: &i32, _s: String) -> crate::Vc { + async fn async_with_recv_and_str(_a: &i32, _s: RcStr) -> crate::Vc { todo!() } - async fn async_with_recv_and_str_and_result( - _a: &i32, - _s: String, - ) -> Result> { + async fn async_with_recv_and_str_and_result(_a: &i32, _s: RcStr) -> Result> { todo!() } diff --git a/crates/turbo-tasks/src/task/task_input.rs b/crates/turbo-tasks/src/task/task_input.rs index 9d1e2ffc40ea3..c4123045fbeb6 100644 --- a/crates/turbo-tasks/src/task/task_input.rs +++ b/crates/turbo-tasks/src/task/task_input.rs @@ -1,10 +1,13 @@ use std::{ any::{type_name, Any}, marker::PhantomData, + ops::Deref, + path::Path, sync::Arc, }; use anyhow::{anyhow, bail, Result}; +use serde::{Deserialize, Serialize}; use super::concrete_task_input::TransientSharedValue; use crate::{ @@ -31,7 +34,43 @@ impl TaskInput for ConcreteTaskInput { } } -impl TaskInput for String { +/// This type exists to allow swapping out the underlying string type easily. 
+#[derive(Debug, Clone, PartialEq, Eq, PartialOrd, Ord, Hash, Serialize, Deserialize)] +pub struct RcStr(Arc); + +impl Deref for RcStr { + type Target = str; + + fn deref(&self) -> &Self::Target { + self.0.as_str() + } +} + +impl From> for RcStr { + fn from(s: Arc) -> Self { + RcStr(s) + } +} + +impl From for RcStr { + fn from(s: String) -> Self { + RcStr(Arc::new(s)) + } +} + +impl From<&'_ str> for RcStr { + fn from(s: &str) -> Self { + RcStr(Arc::new(s.to_string())) + } +} + +impl AsRef for RcStr { + fn as_ref(&self) -> &Path { + (*self.0).as_ref() + } +} + +impl TaskInput for RcStr { fn try_from_concrete(input: &ConcreteTaskInput) -> Result { match input { ConcreteTaskInput::String(s) => Ok(s.clone()), @@ -406,7 +445,7 @@ mod tests { #[test] fn test_multiple_unnamed_fields() -> Result<()> { #[derive(Clone, TaskInput, Eq, PartialEq, Debug)] - struct MultipleUnnamedFields(u32, String); + struct MultipleUnnamedFields(u32, RcStr); test_conversion!(MultipleUnnamedFields(42, "42".into())); Ok(()) @@ -428,12 +467,12 @@ mod tests { #[derive(Clone, TaskInput, Eq, PartialEq, Debug)] struct MultipleNamedFields { named: u32, - other: String, + other: RcStr, } test_conversion!(MultipleNamedFields { named: 42, - other: "42".into() + other: "42".to_string().into() }); Ok(()) } @@ -444,7 +483,7 @@ mod tests { struct GenericField(T); test_conversion!(GenericField(42)); - test_conversion!(GenericField("42".to_string())); + test_conversion!(GenericField(RcStr::from("42"))); Ok(()) } @@ -485,8 +524,8 @@ mod tests { Variant1, Variant2(u32), Variant3 { named: u32 }, - Variant4(u32, String), - Variant5 { named: u32, other: String }, + Variant4(u32, RcStr), + Variant5 { named: u32, other: RcStr }, } #[test] @@ -505,18 +544,18 @@ mod tests { Variant1, Variant2(MultipleVariantsAndHeterogeneousFields), Variant3 { named: OneVariant }, - Variant4(OneVariant, String), - Variant5 { named: OneVariant, other: String }, + Variant4(OneVariant, RcStr), + Variant5 { named: OneVariant, other: RcStr }, } test_conversion!(NestedVariants::Variant5 { named: OneVariant::Variant, - other: "42".into() + other: "42".to_string().into() }); test_conversion!(NestedVariants::Variant2( MultipleVariantsAndHeterogeneousFields::Variant5 { named: 42, - other: "42".into() + other: "42".to_string().into() } )); Ok(()) diff --git a/crates/turbo-tasks/src/trace.rs b/crates/turbo-tasks/src/trace.rs index da7000c8501cd..17fa1c185a8a1 100644 --- a/crates/turbo-tasks/src/trace.rs +++ b/crates/turbo-tasks/src/trace.rs @@ -9,7 +9,7 @@ use std::{ use auto_hash_map::{AutoMap, AutoSet}; use indexmap::{IndexMap, IndexSet}; -use crate::RawVc; +use crate::{RawVc, RcStr}; pub struct TraceRawVcsContext { list: Vec, @@ -70,7 +70,7 @@ ignore!( AtomicBool, AtomicUsize ); -ignore!((), String, Duration, anyhow::Error); +ignore!((), String, Duration, anyhow::Error, RcStr); ignore!(Path, PathBuf); ignore!(serde_json::Value); diff --git a/crates/turbopack-browser/src/chunking_context.rs b/crates/turbopack-browser/src/chunking_context.rs index 1827006f21e45..38f541166c547 100644 --- a/crates/turbopack-browser/src/chunking_context.rs +++ b/crates/turbopack-browser/src/chunking_context.rs @@ -1,3 +1,5 @@ +use std::sync::Arc; + use anyhow::{bail, Context, Result}; use tracing::Instrument; use turbo_tasks::{Value, ValueToString, Vc}; @@ -266,11 +268,11 @@ impl ChunkingContext for BrowserChunkingContext { async fn chunk_path( &self, ident: Vc, - extension: String, + extension: Arc, ) -> Result> { let root_path = self.chunk_root_path; let name = 
ident.output_name(self.context_path, extension).await?; - Ok(root_path.join(name.clone_value())) + Ok(root_path.join(name.clone_value().into())) } #[turbo_tasks::function] @@ -313,7 +315,7 @@ impl ChunkingContext for BrowserChunkingContext { #[turbo_tasks::function] async fn asset_path( &self, - content_hash: String, + content_hash: Arc, original_asset_ident: Vc, ) -> Result> { let source_path = original_asset_ident.path().await?; @@ -329,7 +331,7 @@ impl ChunkingContext for BrowserChunkingContext { content_hash = &content_hash[..8] ), }; - Ok(self.asset_root_path.join(asset_path)) + Ok(self.asset_root_path.join(asset_path.into())) } #[turbo_tasks::function] diff --git a/crates/turbopack-browser/src/ecmascript/chunk.rs b/crates/turbopack-browser/src/ecmascript/chunk.rs index 7a1007d4600a4..f9c2b50d8f6a0 100644 --- a/crates/turbopack-browser/src/ecmascript/chunk.rs +++ b/crates/turbopack-browser/src/ecmascript/chunk.rs @@ -1,3 +1,5 @@ +use std::sync::Arc; + use anyhow::Result; use indexmap::IndexSet; use turbo_tasks::{ValueToString, Vc}; @@ -80,7 +82,10 @@ impl OutputAsset for EcmascriptDevChunk { #[turbo_tasks::function] fn ident(&self) -> Vc { let ident = self.chunk.ident().with_modifier(modifier()); - AssetIdent::from_path(self.chunking_context.chunk_path(ident, ".js".to_string())) + AssetIdent::from_path( + self.chunking_context + .chunk_path(ident, ".js".to_string().into()), + ) } #[turbo_tasks::function] @@ -125,7 +130,7 @@ impl GenerateSourceMap for EcmascriptDevChunk { } #[turbo_tasks::function] - fn by_section(self: Vc, section: String) -> Vc { + fn by_section(self: Vc, section: Arc) -> Vc { self.own_content().by_section(section) } } diff --git a/crates/turbopack-browser/src/ecmascript/content.rs b/crates/turbopack-browser/src/ecmascript/content.rs index b3273c608a3b0..6fdf011d5c804 100644 --- a/crates/turbopack-browser/src/ecmascript/content.rs +++ b/crates/turbopack-browser/src/ecmascript/content.rs @@ -1,4 +1,4 @@ -use std::io::Write; +use std::{io::Write, sync::Arc}; use anyhow::{bail, Result}; use indoc::writedoc; @@ -159,7 +159,7 @@ impl GenerateSourceMap for EcmascriptDevChunkContent { } #[turbo_tasks::function] - async fn by_section(&self, section: String) -> Result> { + async fn by_section(&self, section: Arc) -> Result> { // Weirdly, the ContentSource will have already URL decoded the ModuleId, and we // can't reparse that via serde. 
if let Ok(id) = ModuleId::parse(§ion) { diff --git a/crates/turbopack-browser/src/ecmascript/evaluate/chunk.rs b/crates/turbopack-browser/src/ecmascript/evaluate/chunk.rs index 46372b4f8b201..5bc3942792678 100644 --- a/crates/turbopack-browser/src/ecmascript/evaluate/chunk.rs +++ b/crates/turbopack-browser/src/ecmascript/evaluate/chunk.rs @@ -221,7 +221,8 @@ impl OutputAsset for EcmascriptDevEvaluateChunk { let ident = AssetIdent::new(Value::new(ident)); Ok(AssetIdent::from_path( - self.chunking_context.chunk_path(ident, ".js".to_string()), + self.chunking_context + .chunk_path(ident, ".js".to_string().into()), )) } diff --git a/crates/turbopack-browser/src/ecmascript/list/asset.rs b/crates/turbopack-browser/src/ecmascript/list/asset.rs index cae5cb3eee07e..9c76ed5e8691e 100644 --- a/crates/turbopack-browser/src/ecmascript/list/asset.rs +++ b/crates/turbopack-browser/src/ecmascript/list/asset.rs @@ -111,7 +111,8 @@ impl OutputAsset for EcmascriptDevChunkList { let ident = AssetIdent::new(Value::new(ident)); Ok(AssetIdent::from_path( - self.chunking_context.chunk_path(ident, ".js".to_string()), + self.chunking_context + .chunk_path(ident, ".js".to_string().into()), )) } diff --git a/crates/turbopack-browser/src/react_refresh.rs b/crates/turbopack-browser/src/react_refresh.rs index 54e2a64e4e9b0..41934294f533f 100644 --- a/crates/turbopack-browser/src/react_refresh.rs +++ b/crates/turbopack-browser/src/react_refresh.rs @@ -12,12 +12,16 @@ use turbopack_resolve::{ #[turbo_tasks::function] fn react_refresh_request() -> Vc { - Request::parse_string("@next/react-refresh-utils/dist/runtime".to_string()) + Request::parse_string("@next/react-refresh-utils/dist/runtime".to_string().into()) } #[turbo_tasks::function] fn react_refresh_request_in_next() -> Vc { - Request::parse_string("next/dist/compiled/@next/react-refresh-utils/dist/runtime".to_string()) + Request::parse_string( + "next/dist/compiled/@next/react-refresh-utils/dist/runtime" + .to_string() + .into(), + ) } #[turbo_tasks::value] diff --git a/crates/turbopack-cli/src/build/mod.rs b/crates/turbopack-cli/src/build/mod.rs index ff5ee97fa7bd5..1e2906822f2e5 100644 --- a/crates/turbopack-cli/src/build/mod.rs +++ b/crates/turbopack-cli/src/build/mod.rs @@ -111,8 +111,8 @@ impl TurbopackBuildBuilder { pub async fn build(self) -> Result<()> { let task = self.turbo_tasks.spawn_once_task::<(), _>(async move { let build_result = build_internal( - self.project_dir.clone(), - self.root_dir, + self.project_dir.clone().into(), + self.root_dir.into(), EntryRequests( self.entry_requests .iter() @@ -121,7 +121,7 @@ impl TurbopackBuildBuilder { .collect(), ) .cell(), - self.browserslist_query, + self.browserslist_query.into(), self.minify_type, ); @@ -157,10 +157,10 @@ impl TurbopackBuildBuilder { #[turbo_tasks::function] async fn build_internal( - project_dir: String, - root_dir: String, + project_dir: Arc, + root_dir: Arc, entry_requests: Vc, - browserslist_query: String, + browserslist_query: Arc, minify_type: MinifyType, ) -> Result> { let env = Environment::new(Value::new(ExecutionEnvironment::Browser( @@ -168,7 +168,7 @@ async fn build_internal( dom: true, web_worker: false, service_worker: false, - browserslist_query: browserslist_query.clone(), + browserslist_query: browserslist_query.to_string(), } .into(), ))); @@ -341,7 +341,7 @@ pub async fn build(args: &BuildArguments) -> Result<()> { .show_all(args.common.show_all); for entry in normalize_entries(&args.common.entries) { - builder = builder.entry_request(EntryRequest::Relative(entry)); + 
builder = builder.entry_request(EntryRequest::Relative(entry.into())); } builder.build().await?; diff --git a/crates/turbopack-cli/src/dev/mod.rs b/crates/turbopack-cli/src/dev/mod.rs index 17695ec11651f..c10a9153c9acf 100644 --- a/crates/turbopack-cli/src/dev/mod.rs +++ b/crates/turbopack-cli/src/dev/mod.rs @@ -291,8 +291,8 @@ async fn source( browserslist_query, ); let static_source = Vc::upcast(StaticAssetsContentSource::new( - String::new(), - project_path.join("public".to_string()), + Arc::default(), + project_path.join("public".to_string().into()), )); let main_source = CombinedContentSource::new(vec![static_source, web_source]); let introspect = Vc::upcast( diff --git a/crates/turbopack-cli/src/dev/web_entry_source.rs b/crates/turbopack-cli/src/dev/web_entry_source.rs index 73a752d5cffed..4934d50f1c37d 100644 --- a/crates/turbopack-cli/src/dev/web_entry_source.rs +++ b/crates/turbopack-cli/src/dev/web_entry_source.rs @@ -41,8 +41,8 @@ pub fn get_client_chunking_context( project_path, server_root, server_root, - server_root.join("/_chunks".to_string()), - server_root.join("/_assets".to_string()), + server_root.join("/_chunks".to_string().into()), + server_root.join("/_assets".to_string().into()), environment, RuntimeType::Development, ) @@ -101,7 +101,7 @@ pub async fn create_web_entry_source( let runtime_entries = entries.resolve_entries(asset_context); - let origin = PlainResolveOrigin::new(asset_context, project_path.join("_".to_string())); + let origin = PlainResolveOrigin::new(asset_context, project_path.join("_".to_string().into())); let entries = entry_requests .into_iter() .map(|request| async move { diff --git a/crates/turbopack-cli/src/embed_js.rs b/crates/turbopack-cli/src/embed_js.rs index 793fe320013ae..474267da4d43e 100644 --- a/crates/turbopack-cli/src/embed_js.rs +++ b/crates/turbopack-cli/src/embed_js.rs @@ -1,3 +1,5 @@ +use std::sync::Arc; + use turbo_tasks::Vc; use turbo_tasks_fs::{embed_directory, FileContent, FileSystem, FileSystemPath}; @@ -7,11 +9,11 @@ fn embed_fs() -> Vc> { } #[turbo_tasks::function] -pub(crate) fn embed_file(path: String) -> Vc { +pub(crate) fn embed_file(path: Arc) -> Vc { embed_fs().root().join(path).read() } #[turbo_tasks::function] -pub(crate) fn embed_file_path(path: String) -> Vc { +pub(crate) fn embed_file_path(path: Arc) -> Vc { embed_fs().root().join(path) } diff --git a/crates/turbopack-cli/src/util.rs b/crates/turbopack-cli/src/util.rs index 1c230e22d5be0..185e9f0af55b7 100644 --- a/crates/turbopack-cli/src/util.rs +++ b/crates/turbopack-cli/src/util.rs @@ -11,8 +11,8 @@ pub struct EntryRequests(pub Vec>); #[turbo_tasks::value(shared)] #[derive(Clone)] pub enum EntryRequest { - Relative(String), - Module(String, String), + Relative(Arc), + Module(Arc, Arc), } pub struct NormalizedDirs { @@ -60,14 +60,14 @@ pub fn normalize_entries(entries: &Option>) -> Vec { } #[turbo_tasks::function] -pub async fn project_fs(project_dir: String) -> Result>> { +pub async fn project_fs(project_dir: Arc) -> Result>> { let disk_fs = DiskFileSystem::new("project".to_string(), project_dir.to_string(), vec![]); disk_fs.await?.start_watching()?; Ok(Vc::upcast(disk_fs)) } #[turbo_tasks::function] -pub async fn output_fs(project_dir: String) -> Result>> { +pub async fn output_fs(project_dir: Arc) -> Result>> { let disk_fs = DiskFileSystem::new("output".to_string(), project_dir.to_string(), vec![]); disk_fs.await?.start_watching()?; Ok(Vc::upcast(disk_fs)) diff --git a/crates/turbopack-core/src/issue/unsupported_module.rs 
b/crates/turbopack-core/src/issue/unsupported_module.rs index 4d2b9e9bb4815..8c45853c57ad5 100644 --- a/crates/turbopack-core/src/issue/unsupported_module.rs +++ b/crates/turbopack-core/src/issue/unsupported_module.rs @@ -1,3 +1,5 @@ +use std::sync::Arc; + use anyhow::Result; use turbo_tasks::Vc; use turbo_tasks_fs::FileSystemPath; @@ -7,7 +9,7 @@ use super::{Issue, IssueSeverity, IssueStage, OptionStyledString, StyledString}; #[turbo_tasks::value(shared)] pub struct UnsupportedModuleIssue { pub file_path: Vc, - pub package: String, + pub package: Arc, pub package_path: Option, } diff --git a/crates/turbopack-core/src/reference_type.rs b/crates/turbopack-core/src/reference_type.rs index 57a0a6722bcd2..5946a6e405909 100644 --- a/crates/turbopack-core/src/reference_type.rs +++ b/crates/turbopack-core/src/reference_type.rs @@ -55,9 +55,9 @@ pub enum EcmaScriptModulesReferenceSubType { #[derive(Debug)] #[turbo_tasks::value(shared)] pub struct ImportAttributes { - pub layer: Option, - pub supports: Option, - pub media: Option, + pub layer: Option>, + pub supports: Option>, + pub media: Option>, } /// The accumulated list of conditions that should be applied to this module diff --git a/crates/turbopack-core/src/resolve/mod.rs b/crates/turbopack-core/src/resolve/mod.rs index 84c324a53a3aa..94f9f18bf8b1b 100644 --- a/crates/turbopack-core/src/resolve/mod.rs +++ b/crates/turbopack-core/src/resolve/mod.rs @@ -65,7 +65,7 @@ use crate::{error::PrettyPrintError, issue::IssueSeverity}; pub enum ModuleResolveResultItem { Module(Vc>), OutputAsset(Vc>), - External(String, ExternalType), + External(Arc, ExternalType), Ignore, Error(Vc), Empty, @@ -415,7 +415,7 @@ impl Display for ExternalType { #[derive(Clone, Debug)] pub enum ResolveResultItem { Source(Vc>), - External(String, ExternalType), + External(Arc, ExternalType), Ignore, Error(Vc), Empty, @@ -430,7 +430,7 @@ pub enum ResolveResultItem { #[derive(Clone, Debug, Default, Hash, Ord, PartialOrd)] #[turbo_tasks::value(serialization = "auto_for_input")] pub struct RequestKey { - pub request: Option, + pub request: Option>, pub conditions: BTreeMap, } @@ -456,7 +456,7 @@ impl Display for RequestKey { } impl RequestKey { - pub fn new(request: String) -> Self { + pub fn new(request: Arc) -> Self { RequestKey { request: Some(request), ..Default::default() @@ -712,7 +712,7 @@ impl ResolveResult { /// Returns a new [ResolveResult] where all [RequestKey]s are set to the /// passed `request`. - pub fn with_request_ref(&self, request: String) -> Self { + pub fn with_request_ref(&self, request: Arc) -> Self { let new_primary = self .primary .iter() @@ -896,7 +896,7 @@ impl ResolveResult { #[turbo_tasks::function] pub async fn with_replaced_request_key( self: Vc, - old_request_key: String, + old_request_key: Arc, request_key: Value, ) -> Result> { let this = self.await?; @@ -905,13 +905,13 @@ impl ResolveResult { .primary .iter() .filter_map(|(k, v)| { - let remaining = k.request.as_ref()?.strip_prefix(&old_request_key)?; + let remaining = k.request.as_ref()?.strip_prefix(&*old_request_key)?; Some(( RequestKey { request: request_key .request .as_ref() - .map(|r| format!("{}{}", r, remaining)), + .map(|r| format!("{}{}", r, remaining).into()), conditions: request_key.conditions.clone(), }, v.clone(), @@ -928,7 +928,7 @@ impl ResolveResult { /// Returns a new [ResolveResult] where all [RequestKey]s are set to the /// passed `request`. 
#[turbo_tasks::function] - pub async fn with_request(self: Vc, request: String) -> Result> { + pub async fn with_request(self: Vc, request: Arc) -> Result> { let this = self.await?; let new_primary = this .primary @@ -1258,7 +1258,7 @@ pub async fn resolve_raw( async fn to_result(request: &str, path: Vc) -> Result> { let RealPathResult { path, symlinks } = &*path.realpath_with_links().await?; Ok(ResolveResult::source_with_affecting_sources( - RequestKey::new(request.to_string()), + RequestKey::new(request.to_string().into()), Vc::upcast(FileSource::new(*path)), symlinks .iter() @@ -1279,7 +1279,8 @@ pub async fn resolve_raw( .and_then(|pat| pat.filter_could_not_match("/ROOT/fsd8nz8og54z")) { let path = Pattern::new(pat); - let matches = read_matches(lookup_dir.root(), "/ROOT/".to_string(), true, path).await?; + let matches = + read_matches(lookup_dir.root(), "/ROOT/".to_string().into(), true, path).await?; if matches.len() > 10000 { let path_str = path.to_string().await?; println!( @@ -1298,7 +1299,8 @@ pub async fn resolve_raw( } { - let matches = read_matches(lookup_dir, "".to_string(), force_in_lookup_dir, path).await?; + let matches = + read_matches(lookup_dir, "".to_string().into(), force_in_lookup_dir, path).await?; if matches.len() > 10000 { println!( "WARN: resolving pattern {} in {} leads to {} results", @@ -1592,7 +1594,7 @@ async fn resolve_internal_inline( let mut results = Vec::new(); let matches = read_matches( lookup_path, - "".to_string(), + "".to_string().into(), *force_in_lookup_dir, Pattern::new(path.clone()).resolve().await?, ) @@ -1764,7 +1766,7 @@ async fn resolve_internal_inline( query: _, fragment: _, } => { - let uri = format!("{}{}", protocol, remainder); + let uri = Arc::new(format!("{}{}", protocol, remainder)); ResolveResult::primary_with_key( RequestKey::new(uri.clone()), ResolveResultItem::External(uri, ExternalType::Url), @@ -1828,7 +1830,7 @@ async fn resolve_into_folder( match resolve_into_package { ResolveIntoPackage::MainField { field: name } => { if let Some(package_json) = &*read_package_json(package_json_path).await? { - if let Some(field_value) = package_json[name].as_str() { + if let Some(field_value) = package_json[&**name].as_str() { let normalized_request = normalize_request(field_value); if normalized_request.is_empty() || normalized_request == "." @@ -1850,7 +1852,7 @@ async fn resolve_into_folder( // we are not that strict when a main field fails to resolve // we continue to try other alternatives if !result.is_unresolveable_ref() { - let mut result = result.with_request_ref(".".to_string()); + let mut result = result.with_request_ref(".".to_string().into()); result.add_affecting_source_ref(Vc::upcast(FileSource::new( package_json_path, ))); @@ -1870,11 +1872,11 @@ async fn resolve_into_folder( // fall back to dir/index.[js,ts,...] let pattern = match &options_value.default_files[..] { [] => return Ok(ResolveResult::unresolveable().into()), - [file] => Pattern::Constant(format!("./{file}")), + [file] => Pattern::Constant(format!("./{file}").into()), files => Pattern::Alternatives( files .iter() - .map(|file| Pattern::Constant(format!("./{file}"))) + .map(|file| Pattern::Constant(format!("./{file}").into())) .collect(), ), }; @@ -1884,7 +1886,7 @@ async fn resolve_into_folder( Ok( resolve_internal_inline(package_path, request.resolve().await?, options) .await? 
- .with_request(".".to_string()), + .with_request(".".to_string().into()), ) } @@ -1909,7 +1911,7 @@ async fn resolve_relative_request( let request = path_pattern.as_string()?; let prefix_path = package_path.get_path_to(lookup_path_ref)?; let request = normalize_request(&format!("./{prefix_path}/{request}")); - Some(request) + Some(request.into()) }, query, fragment, @@ -1935,7 +1937,7 @@ async fn resolve_relative_request( // Add the extensions as alternatives to the path // read_matches keeps the order of alternatives intact new_path.push(Pattern::Alternatives( - once(Pattern::Constant("".to_string())) + once(Pattern::Constant("".to_string().into())) .chain( options_value .extensions @@ -1951,7 +1953,7 @@ async fn resolve_relative_request( let mut results = Vec::new(); let matches = read_matches( lookup_path, - "".to_string(), + "".to_string().into(), force_in_lookup_dir, Pattern::new(new_path).resolve().await?, ) @@ -1992,7 +1994,7 @@ async fn resolve_relative_request( if !pushed && path_pattern.is_match(matched_pattern) { results.push( resolved( - RequestKey::new(matched_pattern.to_string()), + RequestKey::new(matched_pattern.to_string().into()), *path, lookup_path, request, @@ -2062,7 +2064,7 @@ async fn apply_in_package( lookup_path: Vc, options: Vc, options_value: &ResolveOptions, - get_request: impl Fn(&FileSystemPath) -> Option, + get_request: impl Fn(&FileSystemPath) -> Option>, query: Vc, fragment: Vc, ) -> Result>> { @@ -2086,7 +2088,7 @@ async fn apply_in_package( continue; }; - let Some(field_value) = package_json[field].as_object() else { + let Some(field_value) = package_json[&**field].as_object() else { continue; }; @@ -2096,7 +2098,7 @@ async fn apply_in_package( continue; }; - let value = if let Some(value) = field_value.get(&request) { + let value = if let Some(value) = field_value.get(&*request) { value } else if let Some(request) = request.strip_prefix("./") { let Some(value) = field_value.get(request) else { @@ -2122,7 +2124,7 @@ async fn apply_in_package( } if let Some(value) = value.as_str() { - if value == request { + if value == &**request { // This would be a cycle, so we ignore it return Ok(None); } @@ -2134,7 +2136,7 @@ async fn apply_in_package( .with_fragment(fragment), options, ) - .with_replaced_request_key(value.to_string(), Value::new(request_key)) + .with_replaced_request_key(value.to_string().into(), Value::new(request_key)) .with_affecting_sources(refs), )); } @@ -2176,7 +2178,7 @@ async fn resolve_module_request( options_value, |_| { let full_pattern = Pattern::concat([module.to_string().into(), path.clone()]); - full_pattern.into_string() + full_pattern.into_string().map(Arc::new) }, query, fragment, @@ -2429,7 +2431,7 @@ async fn resolved( path.parent().resolve().await?, options, options_value, - |package_path| package_path.get_relative_path_to(path_ref), + |package_path| package_path.get_relative_path_to(path_ref).map(From::from), query, fragment, ) @@ -2509,9 +2511,11 @@ async fn handle_exports_imports_field( let request = Request::parse(Value::new(format!("./{}", result_path).into())); let resolve_result = resolve_internal_boxed(package_path, request, options).await?; if conditions.is_empty() { - resolved_results.push(resolve_result.with_request(path.to_string())); + resolved_results.push(resolve_result.with_request(path.to_string().into())); } else { - let mut resolve_result = resolve_result.await?.with_request_ref(path.to_string()); + let mut resolve_result = resolve_result + .await? 
+ .with_request_ref(path.to_string().into()); resolve_result.add_conditions(conditions); resolved_results.push(resolve_result.cell()); } @@ -2541,7 +2545,7 @@ async fn resolve_package_internal_with_imports_field( bail!("PackageInternal requests can only be Constant strings"); }; // https://github.com/nodejs/node/blob/1b177932/lib/internal/modules/esm/resolve.js#L615-L619 - if specifier == "#" || specifier.starts_with("#/") || specifier.ends_with('/') { + if &**specifier == "#" || specifier.starts_with("#/") || specifier.ends_with('/') { ResolvingIssue { severity: IssueSeverity::Error.cell(), file_path, @@ -2655,21 +2659,21 @@ impl ModulePart { ModulePart::Evaluation.cell() } #[turbo_tasks::function] - pub fn export(export: String) -> Vc { - ModulePart::Export(Vc::cell(export)).cell() + pub fn export(export: Arc) -> Vc { + ModulePart::Export(Vc::cell((*export).clone())).cell() } #[turbo_tasks::function] - pub fn renamed_export(original_export: String, export: String) -> Vc { + pub fn renamed_export(original_export: Arc, export: Arc) -> Vc { ModulePart::RenamedExport { - original_export: Vc::cell(original_export), - export: Vc::cell(export), + original_export: Vc::cell((*original_export).clone()), + export: Vc::cell((*export).clone()), } .cell() } #[turbo_tasks::function] - pub fn renamed_namespace(export: String) -> Vc { + pub fn renamed_namespace(export: Arc) -> Vc { ModulePart::RenamedNamespace { - export: Vc::cell(export), + export: Vc::cell((*export).clone()), } .cell() } diff --git a/crates/turbopack-core/src/resolve/node.rs b/crates/turbopack-core/src/resolve/node.rs index e0c07728c8e48..db13813f799b5 100644 --- a/crates/turbopack-core/src/resolve/node.rs +++ b/crates/turbopack-core/src/resolve/node.rs @@ -13,12 +13,16 @@ pub fn node_cjs_resolve_options(root: Vc) -> Vc ("require".to_string(), ConditionValue::Set), ] .into(); - let extensions = vec![".js".to_string(), ".json".to_string(), ".node".to_string()]; + let extensions = vec![ + ".js".to_string().into(), + ".json".to_string().into(), + ".node".to_string().into(), + ]; ResolveOptions { extensions, modules: vec![ResolveModules::Nested( root, - vec!["node_modules".to_string()], + vec!["node_modules".to_string().into()], )], into_package: vec![ ResolveIntoPackage::ExportsField { @@ -26,14 +30,14 @@ pub fn node_cjs_resolve_options(root: Vc) -> Vc unspecified_conditions: ConditionValue::Unset, }, ResolveIntoPackage::MainField { - field: "main".to_string(), + field: "main".to_string().into(), }, ], in_package: vec![ResolveInPackage::ImportsField { conditions, unspecified_conditions: ConditionValue::Unset, }], - default_files: vec!["index".to_string()], + default_files: vec!["index".to_string().into()], ..Default::default() } .cell() @@ -46,13 +50,17 @@ pub fn node_esm_resolve_options(root: Vc) -> Vc ("import".to_string(), ConditionValue::Set), ] .into(); - let extensions = vec![".js".to_string(), ".json".to_string(), ".node".to_string()]; + let extensions = vec![ + ".js".to_string().into(), + ".json".to_string().into(), + ".node".to_string().into(), + ]; ResolveOptions { fully_specified: true, extensions, modules: vec![ResolveModules::Nested( root, - vec!["node_modules".to_string()], + vec!["node_modules".to_string().into()], )], into_package: vec![ ResolveIntoPackage::ExportsField { @@ -60,14 +68,14 @@ pub fn node_esm_resolve_options(root: Vc) -> Vc unspecified_conditions: ConditionValue::Unset, }, ResolveIntoPackage::MainField { - field: "main".to_string(), + field: "main".to_string().into(), }, ], in_package: 
vec![ResolveInPackage::ImportsField { conditions, unspecified_conditions: ConditionValue::Unset, }], - default_files: vec!["index".to_string()], + default_files: vec!["index".to_string().into()], ..Default::default() } .cell() diff --git a/crates/turbopack-core/src/resolve/options.rs b/crates/turbopack-core/src/resolve/options.rs index fdcd931ad335b..d93b2466a5a6d 100644 --- a/crates/turbopack-core/src/resolve/options.rs +++ b/crates/turbopack-core/src/resolve/options.rs @@ -1,4 +1,4 @@ -use std::{collections::BTreeMap, future::Future, pin::Pin}; +use std::{collections::BTreeMap, future::Future, pin::Pin, sync::Arc}; use anyhow::{bail, Result}; use serde::{Deserialize, Serialize}; @@ -25,7 +25,7 @@ pub struct LockedVersions {} pub enum ResolveModules { /// when inside of path, use the list of directories to /// resolve inside these - Nested(Vc, Vec), + Nested(Vc, Vec>), /// look into that directory Path(Vc), /// lookup versions based on lockfile in the registry filesystem @@ -68,14 +68,14 @@ pub enum ResolveIntoPackage { /// [main]: https://nodejs.org/api/packages.html#main /// [module]: https://esbuild.github.io/api/#main-fields /// [browser]: https://esbuild.github.io/api/#main-fields - MainField { field: String }, + MainField { field: Arc }, } // The different ways to resolve a request withing a package #[derive(TraceRawVcs, Hash, PartialEq, Eq, Clone, Debug, Serialize, Deserialize)] pub enum ResolveInPackage { /// Using a alias field which allows to map requests - AliasField(String), + AliasField(Arc), /// Using the [imports] field. /// /// [imports]: https://nodejs.org/api/packages.html#imports @@ -88,13 +88,13 @@ pub enum ResolveInPackage { #[turbo_tasks::value(shared)] #[derive(Clone)] pub enum ImportMapping { - External(Option, ExternalType), + External(Option>, ExternalType), /// An already resolved result that will be returned directly. Direct(Vc), /// A request alias that will be resolved first, and fall back to resolving /// the original request if it fails. Useful for the tsconfig.json /// `compilerOptions.paths` option and Next aliases. 
- PrimaryAlternative(String, Option>), + PrimaryAlternative(Arc, Option>), Ignore, Empty, Alternatives(Vec>), @@ -103,7 +103,7 @@ pub enum ImportMapping { impl ImportMapping { pub fn primary_alternatives( - list: Vec, + list: Vec>, lookup_path: Option>, ) -> ImportMapping { if list.is_empty() { @@ -129,13 +129,19 @@ impl AliasTemplate for Vc { Ok(match this { ImportMapping::External(name, ty) => { if let Some(name) = name { - ImportMapping::External(Some(name.clone().replace('*', capture)), *ty) + ImportMapping::External( + Some(name.clone().replace('*', capture).into()), + *ty, + ) } else { ImportMapping::External(None, *ty) } } ImportMapping::PrimaryAlternative(name, context) => { - ImportMapping::PrimaryAlternative(name.clone().replace('*', capture), *context) + ImportMapping::PrimaryAlternative( + name.clone().replace('*', capture).into(), + *context, + ) } ImportMapping::Direct(_) | ImportMapping::Ignore | ImportMapping::Empty => { this.clone() @@ -148,7 +154,7 @@ impl AliasTemplate for Vc { .await?, ), ImportMapping::Dynamic(replacement) => { - (*replacement.replace(capture.to_string()).await?).clone() + (*replacement.replace(capture.to_string().into()).await?).clone() } } .cell()) @@ -220,11 +226,12 @@ impl ImportMap { prefix: impl Into + 'a, context_path: Vc, ) { - let prefix = prefix.into(); - let wildcard_prefix = prefix.clone() + "/"; - let wildcard_alias: String = prefix.clone() + "/*"; + let prefix: String = prefix.into(); + let prefix = Arc::new(prefix); + let wildcard_prefix = (*prefix).clone() + "/"; + let wildcard_alias = Arc::new((*prefix).clone() + "/*"); self.insert_exact_alias( - &prefix, + &*prefix, ImportMapping::PrimaryAlternative(prefix.clone(), Some(context_path)).cell(), ); self.insert_wildcard_alias( @@ -269,9 +276,9 @@ async fn import_mapping_to_result( ImportMapping::Direct(result) => ImportMapResult::Result(*result), ImportMapping::External(name, ty) => ImportMapResult::Result( ResolveResult::primary(if let Some(name) = name { - ResolveResultItem::External(name.to_string(), *ty) + ResolveResultItem::External(name.clone(), *ty) } else if let Some(request) = request.await?.request() { - ResolveResultItem::External(request, *ty) + ResolveResultItem::External(request.into(), *ty) } else { bail!("Cannot resolve external reference without request") }) @@ -420,7 +427,7 @@ pub struct ResolveOptions { /// request first. pub prefer_relative: bool, /// The extensions that should be added to a request when resolving. - pub extensions: Vec, + pub extensions: Vec>, /// The locations where to resolve modules. pub modules: Vec, /// How to resolve packages. @@ -428,7 +435,7 @@ pub struct ResolveOptions { /// How to resolve in packages. pub in_package: Vec, /// The default files to resolve in a folder. - pub default_files: Vec, + pub default_files: Vec>, /// An import map to use before resolving a request. pub import_map: Option>, /// An import map to use when a request is otherwise unresolveable. 
@@ -500,7 +507,7 @@ impl ResolveOptions { #[derive(Hash, Clone, Debug)] pub struct ResolveModulesOptions { pub modules: Vec, - pub extensions: Vec, + pub extensions: Vec>, } #[turbo_tasks::function] @@ -517,7 +524,7 @@ pub async fn resolve_modules_options( #[turbo_tasks::value_trait] pub trait ImportMappingReplacement { - fn replace(self: Vc, capture: String) -> Vc; + fn replace(self: Vc, capture: Arc) -> Vc; fn result( self: Vc, lookup_path: Vc, diff --git a/crates/turbopack-core/src/resolve/origin.rs b/crates/turbopack-core/src/resolve/origin.rs index 54c8bec9b0c18..59ff384cd2161 100644 --- a/crates/turbopack-core/src/resolve/origin.rs +++ b/crates/turbopack-core/src/resolve/origin.rs @@ -1,3 +1,5 @@ +use std::sync::Arc; + use anyhow::Result; use turbo_tasks::{Upcast, Value, Vc}; use turbo_tasks_fs::FileSystemPath; @@ -137,7 +139,7 @@ impl ResolveOrigin for PlainResolveOrigin { #[turbo_tasks::value] struct ResolveOriginWithTransition { previous: Vc>, - transition: String, + transition: Arc, } #[turbo_tasks::value_impl] diff --git a/crates/turbopack-core/src/resolve/parse.rs b/crates/turbopack-core/src/resolve/parse.rs index baf6256ce752d..5e2d70e148137 100644 --- a/crates/turbopack-core/src/resolve/parse.rs +++ b/crates/turbopack-core/src/resolve/parse.rs @@ -1,3 +1,5 @@ +use std::sync::Arc; + use anyhow::Result; use lazy_static::lazy_static; use regex::Regex; @@ -259,7 +261,7 @@ impl Request { } #[turbo_tasks::function] - pub fn parse_string(request: String) -> Vc { + pub fn parse_string(request: Arc) -> Vc { Self::cell(Request::parse_ref(request.into())) } @@ -325,18 +327,18 @@ impl Request { query: _, fragment: _, } => { - let mut pat = Pattern::Constant(format!("./{module}")); + let mut pat = Pattern::Constant(format!("./{module}").into()); pat.push(path.clone()); // TODO add query Self::parse(Value::new(pat)) } Request::PackageInternal { path } => { - let mut pat = Pattern::Constant("./".to_string()); + let mut pat = Pattern::Constant("./".to_string().into()); pat.push(path.clone()); Self::parse(Value::new(pat)) } Request::Unknown { path } => { - let mut pat = Pattern::Constant("./".to_string()); + let mut pat = Pattern::Constant("./".to_string().into()); pat.push(path.clone()); Self::parse(Value::new(pat)) } diff --git a/crates/turbopack-core/src/resolve/pattern.rs b/crates/turbopack-core/src/resolve/pattern.rs index 2cb128cb9b571..7620a55568df5 100644 --- a/crates/turbopack-core/src/resolve/pattern.rs +++ b/crates/turbopack-core/src/resolve/pattern.rs @@ -1,4 +1,4 @@ -use std::{collections::HashSet, fmt::Display, mem::take}; +use std::{collections::HashSet, fmt::Display, mem::take, sync::Arc}; use anyhow::Result; use lazy_static::lazy_static; @@ -13,7 +13,7 @@ use turbo_tasks_fs::{ #[turbo_tasks::value(serialization = "auto_for_input")] #[derive(PartialOrd, Ord, Hash, Clone, Debug, Default)] pub enum Pattern { - Constant(String), + Constant(Arc), #[default] Dynamic, Alternatives(Vec), @@ -715,6 +715,12 @@ enum NextConstantUntilResult<'a, 'b> { impl From for Pattern { fn from(s: String) -> Self { + Pattern::Constant(Arc::new(s)) + } +} + +impl From> for Pattern { + fn from(s: Arc) -> Self { Pattern::Constant(s) } } @@ -754,8 +760,8 @@ impl ValueToString for Pattern { #[derive(Debug, PartialEq, Eq, Clone, PartialOrd, Ord, TraceRawVcs, Serialize, Deserialize)] pub enum PatternMatch { - File(String, Vc), - Directory(String, Vc), + File(Arc, Vc), + Directory(Arc, Vc), } // TODO this isn't super efficient @@ -774,11 +780,11 @@ pub struct PatternMatches(Vec); #[turbo_tasks::function] pub 
async fn read_matches( lookup_dir: Vc, - prefix: String, + prefix: Arc, force_in_lookup_dir: bool, pattern: Vc, ) -> Result> { - let mut prefix = prefix; + let mut prefix = (*prefix).clone(); let pat = pattern.await?; let mut results = Vec::new(); let mut nested = Vec::new(); @@ -812,12 +818,12 @@ pub async fn read_matches( FileSystemEntryType::File => { results.push(( index, - PatternMatch::File(prefix.to_string(), fs_path), + PatternMatch::File(prefix.to_string().into(), fs_path), )); } FileSystemEntryType::Directory => results.push(( index, - PatternMatch::Directory(prefix.to_string(), fs_path), + PatternMatch::Directory(prefix.to_string().into(), fs_path), )), FileSystemEntryType::Symlink => { if let LinkContent::Link { link_type, .. } = @@ -827,14 +833,17 @@ pub async fn read_matches( results.push(( index, PatternMatch::Directory( - prefix.clone(), + prefix.clone().into(), fs_path, ), )); } else { results.push(( index, - PatternMatch::File(prefix.clone(), fs_path), + PatternMatch::File( + prefix.clone().into(), + fs_path, + ), )) } } @@ -849,9 +858,11 @@ pub async fn read_matches( let subpath = &str[..=str.rfind('/').unwrap()]; if handled.insert(subpath) { if let Some(fs_path) = &*if force_in_lookup_dir { - lookup_dir.try_join_inside(subpath.to_string()).await? + lookup_dir + .try_join_inside(subpath.to_string().into()) + .await? } else { - lookup_dir.try_join(subpath.to_string()).await? + lookup_dir.try_join(subpath.to_string().into()).await? } { let fs_path = fs_path.resolve().await?; let len = prefix.len(); @@ -860,7 +871,7 @@ pub async fn read_matches( 0, read_matches( fs_path, - prefix.to_string(), + prefix.to_string().into(), force_in_lookup_dir, pattern, ), @@ -888,7 +899,7 @@ pub async fn read_matches( if let Some(pos) = pat.match_position(&prefix) { results.push(( pos, - PatternMatch::Directory(prefix.clone(), lookup_dir.parent()), + PatternMatch::Directory(prefix.clone().into(), lookup_dir.parent()), )); } @@ -897,13 +908,13 @@ pub async fn read_matches( if let Some(pos) = pat.match_position(&prefix) { results.push(( pos, - PatternMatch::Directory(prefix.clone(), lookup_dir.parent()), + PatternMatch::Directory(prefix.clone().into(), lookup_dir.parent()), )); } if let Some(pos) = pat.could_match_position(&prefix) { nested.push(( pos, - read_matches(lookup_dir.parent(), prefix.clone(), false, pattern), + read_matches(lookup_dir.parent(), prefix.clone().into(), false, pattern), )); } prefix.pop(); @@ -914,18 +925,24 @@ pub async fn read_matches( prefix.push('.'); // {prefix}. 
if let Some(pos) = pat.match_position(&prefix) { - results.push((pos, PatternMatch::Directory(prefix.clone(), lookup_dir))); + results.push(( + pos, + PatternMatch::Directory(prefix.clone().into(), lookup_dir), + )); } prefix.pop(); } if prefix.is_empty() { if let Some(pos) = pat.match_position("./") { - results.push((pos, PatternMatch::Directory("./".to_string(), lookup_dir))); + results.push(( + pos, + PatternMatch::Directory("./".to_string().into(), lookup_dir), + )); } if let Some(pos) = pat.could_match_position("./") { nested.push(( pos, - read_matches(lookup_dir, "./".to_string(), false, pattern), + read_matches(lookup_dir, "./".to_string().into(), false, pattern), )); } } else { @@ -934,7 +951,7 @@ pub async fn read_matches( if let Some(pos) = pat.could_match_position(&prefix) { nested.push(( pos, - read_matches(lookup_dir, prefix.to_string(), false, pattern), + read_matches(lookup_dir, prefix.to_string().into(), false, pattern), )); } prefix.pop(); @@ -943,7 +960,7 @@ pub async fn read_matches( if let Some(pos) = pat.could_match_position(&prefix) { nested.push(( pos, - read_matches(lookup_dir, prefix.to_string(), false, pattern), + read_matches(lookup_dir, prefix.to_string().into(), false, pattern), )); } prefix.pop(); @@ -958,7 +975,10 @@ pub async fn read_matches( prefix.push_str(key); // {prefix}{key} if let Some(pos) = pat.match_position(&prefix) { - results.push((pos, PatternMatch::File(prefix.clone(), *path))); + results.push(( + pos, + PatternMatch::File(prefix.clone().into(), *path), + )); } prefix.truncate(len) } @@ -972,7 +992,7 @@ pub async fn read_matches( if let Some(pos) = pat.match_position(&prefix) { results.push(( pos, - PatternMatch::Directory(prefix.clone(), *path), + PatternMatch::Directory(prefix.clone().into(), *path), )); } prefix.push('/'); @@ -980,13 +1000,13 @@ pub async fn read_matches( if let Some(pos) = pat.match_position(&prefix) { results.push(( pos, - PatternMatch::Directory(prefix.clone(), *path), + PatternMatch::Directory(prefix.clone().into(), *path), )); } if let Some(pos) = pat.could_match_position(&prefix) { nested.push(( pos, - read_matches(*path, prefix.clone(), true, pattern), + read_matches(*path, prefix.clone().into(), true, pattern), )); } prefix.truncate(len) @@ -1005,12 +1025,15 @@ pub async fn read_matches( if link_type.contains(LinkType::DIRECTORY) { results.push(( pos, - PatternMatch::Directory(prefix.clone(), *fs_path), + PatternMatch::Directory( + prefix.clone().into(), + *fs_path, + ), )); } else { results.push(( pos, - PatternMatch::File(prefix.clone(), *fs_path), + PatternMatch::File(prefix.clone().into(), *fs_path), )); } } @@ -1023,7 +1046,10 @@ pub async fn read_matches( if link_type.contains(LinkType::DIRECTORY) { results.push(( pos, - PatternMatch::Directory(prefix.clone(), *fs_path), + PatternMatch::Directory( + prefix.clone().into(), + *fs_path, + ), )); } } @@ -1035,7 +1061,10 @@ pub async fn read_matches( if link_type.contains(LinkType::DIRECTORY) { results.push(( pos, - PatternMatch::Directory(prefix.clone(), *fs_path), + PatternMatch::Directory( + prefix.clone().into(), + *fs_path, + ), )); } } @@ -1075,10 +1104,10 @@ mod tests { #[test] fn normalize() { - let a = Pattern::Constant("a".to_string()); - let b = Pattern::Constant("b".to_string()); - let c = Pattern::Constant("c".to_string()); - let s = Pattern::Constant("/".to_string()); + let a = Pattern::Constant("a".to_string().into()); + let b = Pattern::Constant("b".to_string().into()); + let c = Pattern::Constant("c".to_string().into()); + let s = 
Pattern::Constant("/".to_string().into()); let d = Pattern::Dynamic; { let mut p = Pattern::Concatenation(vec![ @@ -1090,8 +1119,8 @@ mod tests { assert_eq!( p, Pattern::Alternatives(vec![ - Pattern::Constant("a/c".to_string()), - Pattern::Constant("b/c".to_string()), + Pattern::Constant("a/c".to_string().into()), + Pattern::Constant("b/c".to_string().into()), ]) ); } @@ -1108,29 +1137,29 @@ mod tests { assert_eq!( p, Pattern::Alternatives(vec![ - Pattern::Constant("a/b".to_string()), - Pattern::Constant("b/b".to_string()), + Pattern::Constant("a/b".to_string().into()), + Pattern::Constant("b/b".to_string().into()), Pattern::Concatenation(vec![ Pattern::Dynamic, - Pattern::Constant("/b".to_string()) + Pattern::Constant("/b".to_string().into()) ]), - Pattern::Constant("a/c".to_string()), - Pattern::Constant("b/c".to_string()), + Pattern::Constant("a/c".to_string().into()), + Pattern::Constant("b/c".to_string().into()), Pattern::Concatenation(vec![ Pattern::Dynamic, - Pattern::Constant("/c".to_string()) + Pattern::Constant("/c".to_string().into()) ]), Pattern::Concatenation(vec![ - Pattern::Constant("a/".to_string()), + Pattern::Constant("a/".to_string().into()), Pattern::Dynamic ]), Pattern::Concatenation(vec![ - Pattern::Constant("b/".to_string()), + Pattern::Constant("b/".to_string().into()), Pattern::Dynamic ]), Pattern::Concatenation(vec![ Pattern::Dynamic, - Pattern::Constant("/".to_string()), + Pattern::Constant("/".to_string().into()), Pattern::Dynamic ]), ]) @@ -1141,10 +1170,10 @@ mod tests { #[test] fn is_match() { let pat = Pattern::Concatenation(vec![ - Pattern::Constant(".".to_string()), - Pattern::Constant("/".to_string()), + Pattern::Constant(".".to_string().into()), + Pattern::Constant("/".to_string().into()), Pattern::Dynamic, - Pattern::Constant(".js".to_string()), + Pattern::Constant(".js".to_string().into()), ]); assert!(pat.could_match("")); assert!(pat.could_match("./")); @@ -1169,7 +1198,7 @@ mod tests { #[rstest] #[case::dynamic(Pattern::Dynamic)] - #[case::dynamic_concat(Pattern::Concatenation(vec![Pattern::Dynamic, Pattern::Constant(".js".to_string())]))] + #[case::dynamic_concat(Pattern::Concatenation(vec![Pattern::Dynamic, Pattern::Constant(".js".to_string().into())]))] fn dynamic_match(#[case] pat: Pattern) { assert!(pat.could_match("")); assert!(pat.is_match("index.js")); @@ -1223,7 +1252,7 @@ mod tests { fn dynamic_match2() { let pat = Pattern::Concatenation(vec![ Pattern::Dynamic, - Pattern::Constant("/".to_string()), + Pattern::Constant("/".to_string().into()), Pattern::Dynamic, ]); assert!(pat.could_match("dir")); @@ -1275,16 +1304,16 @@ mod tests { #[rstest] #[case::dynamic(Pattern::Dynamic)] - #[case::dynamic_concat(Pattern::Concatenation(vec![Pattern::Dynamic, Pattern::Constant(".js".to_string())]))] + #[case::dynamic_concat(Pattern::Concatenation(vec![Pattern::Dynamic, Pattern::Constant(".js".to_string().into())]))] #[case::dynamic_concat2(Pattern::Concatenation(vec![ Pattern::Dynamic, - Pattern::Constant("/".to_string()), + Pattern::Constant("/".to_string().into()), Pattern::Dynamic, ]))] #[case::dynamic_alt_concat(Pattern::alternatives(vec![ Pattern::Concatenation(vec![ Pattern::Dynamic, - Pattern::Constant("/".to_string()), + Pattern::Constant("/".to_string().into()), Pattern::Dynamic, ]), Pattern::Dynamic, @@ -1298,28 +1327,28 @@ mod tests { #[rstest] #[case::dynamic(Pattern::Dynamic, "feijf", None)] #[case::dynamic_concat( - Pattern::Concatenation(vec![Pattern::Dynamic, Pattern::Constant(".js".to_string())]), + 
Pattern::Concatenation(vec![Pattern::Dynamic, Pattern::Constant(".js".to_string().into())]), "hello.", None )] - #[case::constant(Pattern::Constant("Hello World".to_string()), "Hello ", Some(vec![("World", true)]))] + #[case::constant(Pattern::Constant("Hello World".to_string().into()), "Hello ", Some(vec![("World", true)]))] #[case::alternatives( Pattern::Alternatives(vec![ - Pattern::Constant("Hello World".to_string()), - Pattern::Constant("Hello All".to_string()) + Pattern::Constant("Hello World".to_string().into()), + Pattern::Constant("Hello All".to_string().into()) ]), "Hello ", Some(vec![("World", true), ("All", true)]) )] #[case::alternatives_non_end( Pattern::Alternatives(vec![ - Pattern::Constant("Hello World".to_string()), - Pattern::Constant("Hello All".to_string()), - Pattern::Concatenation(vec![Pattern::Constant("Hello more".to_string()), Pattern::Dynamic]) + Pattern::Constant("Hello World".to_string().into()), + Pattern::Constant("Hello All".to_string().into()), + Pattern::Concatenation(vec![Pattern::Constant("Hello more".to_string().into()), Pattern::Dynamic]) ]), "Hello ", Some(vec![("World", true), ("All", true), ("more", false)]) )] #[case::request_with_extensions( Pattern::Alternatives(vec![ - Pattern::Constant("./file.js".to_string()), - Pattern::Constant("./file.ts".to_string()), - Pattern::Constant("./file.cjs".to_string()), + Pattern::Constant("./file.js".to_string().into()), + Pattern::Constant("./file.ts".to_string().into()), + Pattern::Constant("./file.cjs".to_string().into()), ]), "./", Some(vec![("file.js", true), ("file.ts", true), ("file.cjs", true)]) )] fn next_constants( diff --git a/crates/turbopack-core/src/source_map/mod.rs b/crates/turbopack-core/src/source_map/mod.rs index 86c751187aa7c..9f9f1e8e78fd6 100644 --- a/crates/turbopack-core/src/source_map/mod.rs +++ b/crates/turbopack-core/src/source_map/mod.rs @@ -30,7 +30,7 @@ pub trait GenerateSourceMap { fn generate_source_map(self: Vc) -> Vc; /// Returns an individual section of the larger source map, if found. - fn by_section(self: Vc, _section: String) -> Vc { + fn by_section(self: Vc, _section: Arc) -> Vc { Vc::cell(None) } } @@ -396,7 +396,11 @@ impl SourceMap { origin: Vc, ) -> Result<(Arc, Arc)> { Ok( - if let Some(path) = *origin.parent().try_join(source_request.to_string()).await? { + if let Some(path) = *origin + .parent() + .try_join(source_request.to_string().into()) + .await? + { let path_str = path.to_string().await?; let source = format!("{SOURCE_MAP_PREFIX}{}", path_str); let source_content = if let Some(source_content) = source_content { @@ -533,7 +537,7 @@ impl GenerateSourceMap for SourceMap { } #[turbo_tasks::function] - fn by_section(&self, _section: String) -> Vc { + fn by_section(&self, _section: Arc) -> Vc { Vc::cell(None) } } diff --git a/crates/turbopack-core/src/source_map/source_map_asset.rs b/crates/turbopack-core/src/source_map/source_map_asset.rs index ab66b6f8a93ed..f9b45a64d41bc 100644 --- a/crates/turbopack-core/src/source_map/source_map_asset.rs +++ b/crates/turbopack-core/src/source_map/source_map_asset.rs @@ -32,7 +32,7 @@ impl OutputAsset for SourceMapAsset { // NOTE(alexkirsz) We used to include the asset's version id in the path, // but this caused `all_assets_map` to be recomputed on every change. 
Ok(AssetIdent::from_path( - self.asset.ident().path().append(".map".to_string()), + self.asset.ident().path().append(".map".to_string().into()), )) } } diff --git a/crates/turbopack-css/src/chunk/mod.rs b/crates/turbopack-css/src/chunk/mod.rs index ba8bd0b062f38..cbad99c6a1fdb 100644 --- a/crates/turbopack-css/src/chunk/mod.rs +++ b/crates/turbopack-css/src/chunk/mod.rs @@ -128,18 +128,41 @@ pub async fn write_import_context( if let Some(import_context) = import_context { let import_context = &*import_context.await?; if !&import_context.layers.is_empty() { - writeln!(body, "@layer {} {{", import_context.layers.join("."))?; + writeln!( + body, + "@layer {} {{", + import_context + .layers + .iter() + .map(|v| &***v) + .intersperse(".") + .collect::() + )?; close.push_str("\n}"); } if !&import_context.media.is_empty() { - writeln!(body, "@media {} {{", import_context.media.join(" and "))?; + writeln!( + body, + "@media {} {{", + import_context + .media + .iter() + .map(|v| &***v) + .intersperse(" and ") + .collect::() + )?; close.push_str("\n}"); } if !&import_context.supports.is_empty() { writeln!( body, "@supports {} {{", - import_context.supports.join(" and ") + import_context + .supports + .iter() + .map(|v| &***v) + .intersperse(" and ") + .collect::() )?; close.push_str("\n}"); } @@ -285,7 +308,7 @@ impl OutputAsset for CssChunk { Ok(AssetIdent::from_path(this.chunking_context.chunk_path( AssetIdent::new(Value::new(ident)), - ".css".to_string(), + ".css".to_string().into(), ))) } diff --git a/crates/turbopack-css/src/chunk/single_item_chunk/chunk.rs b/crates/turbopack-css/src/chunk/single_item_chunk/chunk.rs index c6575fa543c41..287b58cea8faf 100644 --- a/crates/turbopack-css/src/chunk/single_item_chunk/chunk.rs +++ b/crates/turbopack-css/src/chunk/single_item_chunk/chunk.rs @@ -106,7 +106,7 @@ impl OutputAsset for SingleItemCssChunk { self.item .asset_ident() .with_modifier(single_item_modifier()), - ".css".to_string(), + ".css".to_string().into(), ), )) } diff --git a/crates/turbopack-css/src/chunk/single_item_chunk/source_map.rs b/crates/turbopack-css/src/chunk/single_item_chunk/source_map.rs index 2706cac561f90..5f025d5298d4b 100644 --- a/crates/turbopack-css/src/chunk/single_item_chunk/source_map.rs +++ b/crates/turbopack-css/src/chunk/single_item_chunk/source_map.rs @@ -30,7 +30,7 @@ impl OutputAsset for SingleItemCssChunkSourceMapAsset { #[turbo_tasks::function] async fn ident(&self) -> Result> { Ok(AssetIdent::from_path( - self.chunk.path().append(".map".to_string()), + self.chunk.path().append(".map".to_string().into()), )) } } diff --git a/crates/turbopack-css/src/chunk/source_map.rs b/crates/turbopack-css/src/chunk/source_map.rs index 854dd0c2c869a..4e122d21a3639 100644 --- a/crates/turbopack-css/src/chunk/source_map.rs +++ b/crates/turbopack-css/src/chunk/source_map.rs @@ -30,7 +30,7 @@ impl OutputAsset for CssChunkSourceMapAsset { #[turbo_tasks::function] async fn ident(&self) -> Result> { Ok(AssetIdent::from_path( - self.chunk.path().append(".map".to_string()), + self.chunk.path().append(".map".to_string().into()), )) } } diff --git a/crates/turbopack-css/src/references/import.rs b/crates/turbopack-css/src/references/import.rs index 48607176cdc8d..b92809cd1ec03 100644 --- a/crates/turbopack-css/src/references/import.rs +++ b/crates/turbopack-css/src/references/import.rs @@ -1,3 +1,5 @@ +use std::sync::Arc; + use anyhow::Result; use lightningcss::{ media_query::MediaList, @@ -150,15 +152,15 @@ impl ImportAttributes { } => turbopack_core::reference_type::ImportAttributes { 
layer: layer_name .as_ref() - .map(|l| l.to_css_string(Default::default()).unwrap()), + .map(|l| l.to_css_string(Default::default()).unwrap().into()), supports: supports .as_ref() - .map(|s| s.to_css_string(Default::default()).unwrap()), + .map(|s| s.to_css_string(Default::default()).unwrap().into()), media: { if media.always_matches() { None } else { - Some(media.to_css_string(Default::default()).unwrap()) + Some(media.to_css_string(Default::default()).unwrap().into()) } }, }, @@ -167,11 +169,11 @@ impl ImportAttributes { supports, media, } => turbopack_core::reference_type::ImportAttributes { - layer: layer_name.as_ref().map(gen_swc_node), - supports: supports.as_ref().map(gen_swc_node), + layer: layer_name.as_ref().map(gen_swc_node).map(Arc::new), + supports: supports.as_ref().map(gen_swc_node).map(Arc::new), media: media .as_ref() - .map(|queries| queries.iter().map(gen_swc_node).collect()), + .map(|queries| queries.iter().map(gen_swc_node).collect::().into()), }, } } diff --git a/crates/turbopack-css/src/references/url.rs b/crates/turbopack-css/src/references/url.rs index 889c65b28c9f2..209294911ce6a 100644 --- a/crates/turbopack-css/src/references/url.rs +++ b/crates/turbopack-css/src/references/url.rs @@ -124,7 +124,7 @@ pub async fn resolve_url_reference( // currently works as all chunks are in the same directory. let chunk_path = chunking_context.chunk_path( AssetIdent::from_path(this.origin.origin_path()), - ".css".to_string(), + ".css".to_string().into(), ); let context_path = chunk_path.parent().await?; diff --git a/crates/turbopack-dev-server/src/html.rs b/crates/turbopack-dev-server/src/html.rs index ca54ad8bd9622..9f0bf8870d4b7 100644 --- a/crates/turbopack-dev-server/src/html.rs +++ b/crates/turbopack-dev-server/src/html.rs @@ -1,3 +1,5 @@ +use std::sync::Arc; + use anyhow::{anyhow, Result}; use mime_guess::mime::TEXT_HTML_UTF_8; use turbo_tasks::{ReadRef, TryJoinIterExt, Value, Vc}; @@ -31,7 +33,7 @@ type DevHtmlEntry = ( pub struct DevHtmlAsset { path: Vc, entries: Vec, - body: Option, + body: Option>, } #[turbo_tasks::function] @@ -80,7 +82,7 @@ impl DevHtmlAsset { pub fn new_with_body( path: Vc, entries: Vec, - body: String, + body: Arc, ) -> Vc { DevHtmlAsset { path, @@ -101,7 +103,7 @@ impl DevHtmlAsset { } #[turbo_tasks::function] - pub async fn with_body(self: Vc, body: String) -> Result> { + pub async fn with_body(self: Vc, body: Arc) -> Result> { let mut html: DevHtmlAsset = self.await?.clone_value(); html.body = Some(body); Ok(html.cell()) @@ -118,7 +120,7 @@ impl DevHtmlAsset { for chunk in &*self.chunks().await? 
{ let chunk_path = &*chunk.ident().path().await?; if let Some(relative_path) = context_path.get_path_to(chunk_path) { - chunk_paths.push(format!("/{relative_path}")); + chunk_paths.push(format!("/{relative_path}").into()); } } @@ -167,12 +169,12 @@ impl DevHtmlAsset { #[turbo_tasks::value] struct DevHtmlAssetContent { - chunk_paths: Vec, - body: Option, + chunk_paths: Vec>, + body: Option>, } impl DevHtmlAssetContent { - fn new(chunk_paths: Vec, body: Option) -> Vc { + fn new(chunk_paths: Vec>, body: Option>) -> Vc { DevHtmlAssetContent { chunk_paths, body }.cell() } } diff --git a/crates/turbopack-dev-server/src/http.rs b/crates/turbopack-dev-server/src/http.rs index 739bc152a76c4..d08bd348f3836 100644 --- a/crates/turbopack-dev-server/src/http.rs +++ b/crates/turbopack-dev-server/src/http.rs @@ -103,15 +103,15 @@ pub async fn process_request_with_content_source( for (header_name, header_value) in headers { header_map.append( - HeaderName::try_from(header_name.clone())?, + HeaderName::try_from((**header_name).clone())?, hyper::header::HeaderValue::try_from(header_value.as_str())?, ); } for (header_name, header_value) in header_overwrites.iter() { header_map.insert( - HeaderName::try_from(header_name.clone())?, - hyper::header::HeaderValue::try_from(header_value)?, + HeaderName::try_from((**header_name).clone())?, + hyper::header::HeaderValue::try_from(header_value.as_str())?, ); } diff --git a/crates/turbopack-dev-server/src/introspect/mod.rs b/crates/turbopack-dev-server/src/introspect/mod.rs index a1f97d72defe9..ff8d037d829dd 100644 --- a/crates/turbopack-dev-server/src/introspect/mod.rs +++ b/crates/turbopack-dev-server/src/introspect/mod.rs @@ -1,4 +1,4 @@ -use std::{borrow::Cow, collections::HashSet, fmt::Display}; +use std::{borrow::Cow, collections::HashSet, fmt::Display, sync::Arc}; use anyhow::Result; use turbo_tasks::{ReadRef, TryJoinIterExt, Vc}; @@ -89,7 +89,7 @@ impl GetContentSourceContent for IntrospectionSource { #[turbo_tasks::function] async fn get( self: Vc, - path: String, + path: Arc, _data: turbo_tasks::Value, ) -> Result> { // get last segment diff --git a/crates/turbopack-dev-server/src/source/asset_graph.rs b/crates/turbopack-dev-server/src/source/asset_graph.rs index cb8d174b30f03..6d3d29e9cbd06 100644 --- a/crates/turbopack-dev-server/src/source/asset_graph.rs +++ b/crates/turbopack-dev-server/src/source/asset_graph.rs @@ -1,6 +1,7 @@ use std::{ collections::{HashSet, VecDeque}, iter::once, + sync::Arc, }; use anyhow::Result; @@ -223,7 +224,7 @@ impl ContentSource for AssetGraphContentSource { RouteType::Exact, Vc::upcast(AssetGraphGetContentSourceContent::new( self, - path.to_string(), + path.to_string().into(), *asset, )), ) @@ -236,7 +237,7 @@ impl ContentSource for AssetGraphContentSource { #[turbo_tasks::value] struct AssetGraphGetContentSourceContent { source: Vc, - path: String, + path: Arc, asset: Vc>, } @@ -245,7 +246,7 @@ impl AssetGraphGetContentSourceContent { #[turbo_tasks::function] pub fn new( source: Vc, - path: String, + path: Arc, asset: Vc>, ) -> Vc { Self::cell(AssetGraphGetContentSourceContent { @@ -261,7 +262,7 @@ impl GetContentSourceContent for AssetGraphGetContentSourceContent { #[turbo_tasks::function] async fn get( self: Vc, - _path: String, + _path: Arc, _data: Value, ) -> Result> { let this = self.await?; diff --git a/crates/turbopack-dev-server/src/source/conditional.rs b/crates/turbopack-dev-server/src/source/conditional.rs index ff6f66fd4c623..c970c3bf52559 100644 --- a/crates/turbopack-dev-server/src/source/conditional.rs +++ 
b/crates/turbopack-dev-server/src/source/conditional.rs @@ -1,3 +1,5 @@ +use std::sync::Arc; + use anyhow::Result; use turbo_tasks::{Completion, State, Value, Vc}; use turbopack_core::introspect::{Introspectable, IntrospectableChildren}; @@ -164,7 +166,7 @@ impl GetContentSourceContent for ActivateOnGetContentSource { #[turbo_tasks::function] async fn get( self: Vc, - path: String, + path: Arc, data: Value, ) -> Result> { turbo_tasks::emit(Vc::upcast::>(self)); diff --git a/crates/turbopack-dev-server/src/source/issue_context.rs b/crates/turbopack-dev-server/src/source/issue_context.rs index b4d71bacad438..b4b4d26290c01 100644 --- a/crates/turbopack-dev-server/src/source/issue_context.rs +++ b/crates/turbopack-dev-server/src/source/issue_context.rs @@ -1,3 +1,5 @@ +use std::sync::Arc; + use anyhow::Result; use turbo_tasks::{Value, Vc}; use turbo_tasks_fs::FileSystemPath; @@ -15,7 +17,7 @@ use super::{ #[turbo_tasks::value] pub struct IssueFilePathContentSource { file_path: Option>, - description: String, + description: Arc, source: Vc>, } @@ -24,7 +26,7 @@ impl IssueFilePathContentSource { #[turbo_tasks::function] pub fn new_file_path( file_path: Vc, - description: String, + description: Arc, source: Vc>, ) -> Vc { IssueFilePathContentSource { @@ -36,7 +38,10 @@ impl IssueFilePathContentSource { } #[turbo_tasks::function] - pub fn new_description(description: String, source: Vc>) -> Vc { + pub fn new_description( + description: Arc, + source: Vc>, + ) -> Vc { IssueFilePathContentSource { file_path: None, description, @@ -54,7 +59,7 @@ impl ContentSource for IssueFilePathContentSource { let routes = this .source .get_routes() - .issue_file_path(this.file_path, &this.description) + .issue_file_path(this.file_path, &*this.description) .await?; Ok(routes.map_routes(Vc::upcast( IssueContextContentSourceMapper { source: self }.cell(), @@ -103,7 +108,7 @@ impl GetContentSourceContent for IssueContextGetContentSourceContent { let result = self .get_content .vary() - .issue_file_path(source.file_path, &source.description) + .issue_file_path(source.file_path, &*source.description) .await?; Ok(result) } @@ -111,14 +116,14 @@ impl GetContentSourceContent for IssueContextGetContentSourceContent { #[turbo_tasks::function] async fn get( &self, - path: String, + path: Arc, data: Value, ) -> Result> { let source = self.source.await?; let result = self .get_content .get(path, data) - .issue_file_path(source.file_path, &source.description) + .issue_file_path(source.file_path, &*source.description) .await?; Ok(result) } @@ -148,7 +153,7 @@ impl Introspectable for IssueFilePathContentSource { let title = source.title().await?; Vc::cell(format!("{}: {}", self.description, title)) } else { - Vc::cell(self.description.clone()) + Vc::cell((*self.description).clone()) }, ) } diff --git a/crates/turbopack-dev-server/src/source/mod.rs b/crates/turbopack-dev-server/src/source/mod.rs index 2b03c7dd95d50..067d21368850f 100644 --- a/crates/turbopack-dev-server/src/source/mod.rs +++ b/crates/turbopack-dev-server/src/source/mod.rs @@ -12,7 +12,7 @@ pub mod router; pub mod static_assets; pub mod wrapping_source; -use std::collections::BTreeSet; +use std::{collections::BTreeSet, sync::Arc}; use anyhow::Result; use futures::{stream::Stream as StreamTrait, TryStreamExt}; @@ -36,7 +36,7 @@ pub struct ProxyResult { /// The HTTP status code to return. pub status: u16, /// Headers arranged as contiguous (name, value) pairs. - pub headers: Vec<(String, String)>, + pub headers: Vec<(Arc, Arc)>, /// The body to return. 
pub body: Body, } @@ -71,7 +71,7 @@ pub trait GetContentSourceContent { /// Get the content fn get( self: Vc, - path: String, + path: Arc, data: Value, ) -> Vc; } @@ -111,7 +111,7 @@ impl GetContentSourceContent for ContentSourceContent { #[turbo_tasks::function] fn get( self: Vc, - _path: String, + _path: Arc, _data: Value, ) -> Vc { self @@ -158,12 +158,12 @@ impl ContentSourceContent { /// A list of headers arranged as contiguous (name, value) pairs. #[turbo_tasks::value(transparent)] -pub struct HeaderList(Vec<(String, String)>); +pub struct HeaderList(Vec<(Arc, Arc)>); #[turbo_tasks::value_impl] impl HeaderList { #[turbo_tasks::function] - pub fn new(headers: Vec<(String, String)>) -> Vc { + pub fn new(headers: Vec<(Arc, Arc)>) -> Vc { HeaderList(headers).cell() } @@ -423,7 +423,7 @@ pub trait ContentSourceExt: Send { fn issue_file_path( self: Vc, file_path: Vc, - description: String, + description: Arc, ) -> Vc>; } @@ -434,7 +434,7 @@ where fn issue_file_path( self: Vc, file_path: Vc, - description: String, + description: Arc, ) -> Vc> { Vc::upcast(IssueFilePathContentSource::new_file_path( file_path, @@ -480,7 +480,7 @@ pub enum RewriteType { Location { /// The new path and query used to lookup content. This _does not_ need /// to be the original path or query. - path_and_query: String, + path_and_query: Arc, }, ContentSource { /// [Vc>]s from which to restart the lookup @@ -489,7 +489,7 @@ pub enum RewriteType { source: Vc>, /// The new path and query used to lookup content. This _does not_ need /// to be the original path or query. - path_and_query: String, + path_and_query: Arc, }, Sources { /// [GetContentSourceContent]s from which to restart the lookup @@ -520,7 +520,7 @@ pub struct RewriteBuilder { } impl RewriteBuilder { - pub fn new(path_and_query: String) -> Self { + pub fn new(path_and_query: Arc) -> Self { Self { rewrite: Rewrite { ty: RewriteType::Location { path_and_query }, @@ -532,7 +532,7 @@ impl RewriteBuilder { pub fn new_source_with_path_and_query( source: Vc>, - path_and_query: String, + path_and_query: Arc, ) -> Self { Self { rewrite: Rewrite { diff --git a/crates/turbopack-dev-server/src/source/resolve.rs b/crates/turbopack-dev-server/src/source/resolve.rs index 88a4d3ab86371..9a0ce83e27bd0 100644 --- a/crates/turbopack-dev-server/src/source/resolve.rs +++ b/crates/turbopack-dev-server/src/source/resolve.rs @@ -1,6 +1,9 @@ use std::{ collections::btree_map::Entry, - sync::atomic::{AtomicU64, Ordering}, + sync::{ + atomic::{AtomicU64, Ordering}, + Arc, + }, }; use anyhow::Result; @@ -43,7 +46,7 @@ pub async fn resolve_source_request( ) -> Result> { let original_path = request.uri.path().to_string(); // Remove leading slash. - let mut current_asset_path = urlencoding::decode(&original_path[1..])?.into_owned(); + let mut current_asset_path = Arc::new(urlencoding::decode(&original_path[1..])?.into_owned()); let mut request_overwrites = (*request).clone(); let mut response_header_overwrites = Vec::new(); let mut route_tree = source.get_routes().resolve_strongly_consistent().await?; @@ -66,17 +69,18 @@ pub async fn resolve_source_request( request_overwrites.headers.clear(); for (name, value) in &*headers.await? 
{ request_overwrites.headers.insert( - HyperHeaderName::try_from(name)?, - HyperHeaderValue::try_from(value)?, + HyperHeaderName::try_from(&**name)?, + HyperHeaderValue::try_from(&**value)?, ); } } // do the rewrite match &rewrite.ty { RewriteType::Location { path_and_query } => { - let new_uri = Uri::try_from(path_and_query)?; - let new_asset_path = - urlencoding::decode(&new_uri.path()[1..])?.into_owned(); + let new_uri = Uri::try_from(&**path_and_query)?; + let new_asset_path = Arc::new( + urlencoding::decode(&new_uri.path()[1..])?.into_owned(), + ); request_overwrites.uri = new_uri; current_asset_path = new_asset_path; continue 'routes; @@ -85,9 +89,10 @@ pub async fn resolve_source_request( source, path_and_query, } => { - let new_uri = Uri::try_from(path_and_query)?; - let new_asset_path = - urlencoding::decode(&new_uri.path()[1..])?.into_owned(); + let new_uri = Uri::try_from(&**path_and_query)?; + let new_asset_path = Arc::new( + urlencoding::decode(&new_uri.path()[1..])?.into_owned(), + ); request_overwrites.uri = new_uri; current_asset_path = new_asset_path; route_tree = diff --git a/crates/turbopack-dev-server/src/source/route_tree.rs b/crates/turbopack-dev-server/src/source/route_tree.rs index 373f62a7814ef..b54dc645c4666 100644 --- a/crates/turbopack-dev-server/src/source/route_tree.rs +++ b/crates/turbopack-dev-server/src/source/route_tree.rs @@ -1,4 +1,4 @@ -use std::{fmt::Write, mem::replace}; +use std::{fmt::Write, mem::replace, sync::Arc}; use anyhow::Result; use indexmap::IndexMap; @@ -20,7 +20,7 @@ pub enum RouteType { /// Some normal segment of a route. #[derive(TaskInput, Clone, Debug, PartialEq, Eq, Serialize, Deserialize, TraceRawVcs)] pub enum BaseSegment { - Static(String), + Static(Arc), Dynamic, } @@ -28,7 +28,7 @@ impl BaseSegment { pub fn from_static_pathname(str: &str) -> impl Iterator + '_ { str.split('/') .filter(|s| !s.is_empty()) - .map(|s| BaseSegment::Static(s.to_string())) + .map(|s| BaseSegment::Static(s.to_string().into())) } } @@ -98,7 +98,7 @@ impl RouteTrees { pub struct RouteTree { base: Vec, sources: Vec>>, - static_segments: IndexMap>, + static_segments: IndexMap, Vc>, dynamic_segments: Vec>, catch_all_sources: Vec>>, fallback_sources: Vec>>, @@ -255,7 +255,7 @@ impl RouteTree { // TODO(WEB-1252) It's unneccesary to compute all [`GetContentSourceContent`]s at once, we could // return some lazy iterator to make it more efficient. #[turbo_tasks::function] - pub async fn get(self: Vc, path: String) -> Result> { + pub async fn get(self: Vc, path: Arc) -> Result> { let RouteTree { base, sources, @@ -279,7 +279,7 @@ impl RouteTree { }; match base { BaseSegment::Static(str) => { - if str != segment { + if &**str != segment { return Ok(Vc::cell(vec![])); } } @@ -290,12 +290,23 @@ impl RouteTree { } if let Some(segment) = segments.next() { + let segment = Arc::new(segment.to_owned()); let remainder = segments.remainder().unwrap_or(""); - if let Some(tree) = static_segments.get(segment) { - results.extend(tree.get(remainder.to_string()).await?.iter().copied()); + if let Some(tree) = static_segments.get(&segment) { + results.extend( + tree.get(remainder.to_string().into()) + .await? + .iter() + .copied(), + ); } for tree in dynamic_segments.iter() { - results.extend(tree.get(remainder.to_string()).await?.iter().copied()); + results.extend( + tree.get(remainder.to_string().into()) + .await? 
+ .iter() + .copied(), + ); } } else { results.extend(sources.iter().copied()); diff --git a/crates/turbopack-dev-server/src/source/router.rs b/crates/turbopack-dev-server/src/source/router.rs index e8cf233190635..c7f9fc2b2b2af 100644 --- a/crates/turbopack-dev-server/src/source/router.rs +++ b/crates/turbopack-dev-server/src/source/router.rs @@ -1,4 +1,6 @@ -use std::iter::once; +#![allow(clippy::type_complexity)] + +use std::{iter::once, sync::Arc}; use anyhow::Result; use turbo_tasks::{TryJoinIterExt, Value, Vc}; @@ -18,7 +20,7 @@ use crate::source::{route_tree::MapGetContentSourceContent, ContentSources}; #[turbo_tasks::value(shared)] pub struct PrefixedRouterContentSource { pub prefix: Vc, - pub routes: Vec<(String, Vc>)>, + pub routes: Vec<(Arc, Vc>)>, pub fallback: Vc>, } @@ -27,7 +29,7 @@ impl PrefixedRouterContentSource { #[turbo_tasks::function] pub async fn new( prefix: Vc, - routes: Vec<(String, Vc>)>, + routes: Vec<(Arc, Vc>)>, fallback: Vc>, ) -> Result> { Ok(PrefixedRouterContentSource { @@ -40,7 +42,7 @@ impl PrefixedRouterContentSource { } fn get_children( - routes: &[(String, Vc>)], + routes: &[(Arc, Vc>)], fallback: &Vc>, ) -> Vc { Vc::cell( @@ -53,18 +55,18 @@ fn get_children( } async fn get_introspection_children( - routes: &[(String, Vc>)], + routes: &[(Arc, Vc>)], fallback: &Vc>, ) -> Result> { Ok(Vc::cell( routes .iter() .cloned() - .chain(std::iter::once((String::new(), *fallback))) + .chain(std::iter::once((Arc::default(), *fallback))) .map(|(path, source)| async move { Ok(Vc::try_resolve_sidecast::>(source) .await? - .map(|i| (Vc::cell(path), i))) + .map(|i| (Vc::cell((*path).clone()), i))) }) .try_join() .await? @@ -120,7 +122,7 @@ impl ContentSource for PrefixedRouterContentSource { #[turbo_tasks::value] struct PrefixedRouterContentSourceMapper { prefix: Vc, - path: String, + path: Arc, } #[turbo_tasks::value_impl] @@ -156,17 +158,17 @@ impl GetContentSourceContent for PrefixedRouterGetContentSourceContent { #[turbo_tasks::function] async fn get( &self, - path: String, + path: Arc, data: Value, ) -> Result> { let prefix = self.mapper.await?.prefix.await?; if let Some(path) = path.strip_prefix(&*prefix) { if path.is_empty() { - return Ok(self.get_content.get("".to_string(), data)); + return Ok(self.get_content.get("".to_string().into(), data)); } else if prefix.is_empty() { - return Ok(self.get_content.get(path.to_string(), data)); + return Ok(self.get_content.get(path.to_string().into(), data)); } else if let Some(path) = path.strip_prefix('/') { - return Ok(self.get_content.get(path.to_string(), data)); + return Ok(self.get_content.get(path.to_string().into(), data)); } } Ok(ContentSourceContent::not_found()) diff --git a/crates/turbopack-dev-server/src/source/static_assets.rs b/crates/turbopack-dev-server/src/source/static_assets.rs index 0e938d7751d23..27b6eeb43463a 100644 --- a/crates/turbopack-dev-server/src/source/static_assets.rs +++ b/crates/turbopack-dev-server/src/source/static_assets.rs @@ -1,3 +1,5 @@ +use std::sync::Arc; + use anyhow::Result; use turbo_tasks::{Value, Vc}; use turbo_tasks_fs::{DirectoryContent, DirectoryEntry, FileSystemPath}; @@ -23,8 +25,8 @@ pub struct StaticAssetsContentSource { impl StaticAssetsContentSource { // TODO(WEB-1151): Remove this method and migrate users to `with_prefix`. 
#[turbo_tasks::function] - pub fn new(prefix: String, dir: Vc) -> Vc { - StaticAssetsContentSource::with_prefix(Vc::cell(prefix), dir) + pub fn new(prefix: Arc, dir: Vc) -> Vc { + StaticAssetsContentSource::with_prefix(Vc::cell((*prefix).clone()), dir) } #[turbo_tasks::function] @@ -54,14 +56,14 @@ async fn get_routes_from_directory(dir: Vc) -> Result { Some(RouteTree::new_route( - vec![BaseSegment::Static(name.clone())], + vec![BaseSegment::Static(name.clone().into())], RouteType::Exact, Vc::upcast(StaticAssetsContentSourceItem::new(*path)), )) } DirectoryEntry::Directory(path) => Some( get_routes_from_directory(*path) - .with_prepended_base(vec![BaseSegment::Static(name.clone())]), + .with_prepended_base(vec![BaseSegment::Static(name.clone().into())]), ), _ => None, }) @@ -95,7 +97,7 @@ impl StaticAssetsContentSourceItem { #[turbo_tasks::value_impl] impl GetContentSourceContent for StaticAssetsContentSourceItem { #[turbo_tasks::function] - fn get(&self, _path: String, _data: Value) -> Vc { + fn get(&self, _path: Arc, _data: Value) -> Vc { let content = Vc::upcast::>(FileSource::new(self.path)).content(); ContentSourceContent::static_content(content.versioned()) } diff --git a/crates/turbopack-dev-server/src/source/wrapping_source.rs b/crates/turbopack-dev-server/src/source/wrapping_source.rs index 55eebeaeec162..aad7e5baae7d5 100644 --- a/crates/turbopack-dev-server/src/source/wrapping_source.rs +++ b/crates/turbopack-dev-server/src/source/wrapping_source.rs @@ -1,3 +1,5 @@ +use std::sync::Arc; + use anyhow::Result; use turbo_tasks::{Value, Vc}; @@ -49,7 +51,7 @@ impl GetContentSourceContent for WrappedGetContentSourceContent { #[turbo_tasks::function] async fn get( &self, - path: String, + path: Arc, data: Value, ) -> Result> { let res = self.inner.get(path, data); diff --git a/crates/turbopack-dev-server/src/update/server.rs b/crates/turbopack-dev-server/src/update/server.rs index dbff868f5a20a..04e41444ef0e9 100644 --- a/crates/turbopack-dev-server/src/update/server.rs +++ b/crates/turbopack-dev-server/src/update/server.rs @@ -74,7 +74,7 @@ impl UpdateServer
{ ) } }; - match UpdateStream::new(resource.to_string(), TransientInstance::new(Box::new(get_content))).await { + match UpdateStream::new(resource.to_string().into(), TransientInstance::new(Box::new(get_content))).await { Ok(stream) => { streams.insert(resource, stream); } diff --git a/crates/turbopack-dev-server/src/update/stream.rs b/crates/turbopack-dev-server/src/update/stream.rs index af23e8f1598b3..ea8e1b554cffd 100644 --- a/crates/turbopack-dev-server/src/update/stream.rs +++ b/crates/turbopack-dev-server/src/update/stream.rs @@ -1,4 +1,4 @@ -use std::pin::Pin; +use std::{pin::Pin, sync::Arc}; use anyhow::Result; use futures::prelude::*; @@ -42,7 +42,7 @@ fn extend_issues(issues: &mut Vec>, new_issues: Vec, from: Vc, get_content: TransientInstance, ) -> Result> { @@ -55,7 +55,7 @@ async fn get_update_stream_item( Err(e) => { plain_issues.push( FatalStreamIssue { - resource: resource.to_string(), + resource, description: StyledString::Text(format!("{}", PrettyPrintError(&e))).cell(), } .cell() @@ -159,7 +159,7 @@ async fn get_update_stream_item( #[turbo_tasks::function] async fn compute_update_stream( - resource: String, + resource: Arc, from: Vc, get_content: TransientInstance, sender: TransientInstance>>>, @@ -181,7 +181,7 @@ pub(super) struct UpdateStream( impl UpdateStream { #[tracing::instrument(skip(get_content), name = "UpdateStream::new")] pub async fn new( - resource: String, + resource: Arc, get_content: TransientInstance, ) -> Result { let (sx, rx) = tokio::sync::mpsc::channel(32); @@ -281,7 +281,7 @@ pub enum UpdateStreamItem { #[turbo_tasks::value(serialization = "none")] struct FatalStreamIssue { description: Vc, - resource: String, + resource: Arc, } #[turbo_tasks::value_impl] diff --git a/crates/turbopack-ecmascript-runtime/src/build_runtime.rs b/crates/turbopack-ecmascript-runtime/src/build_runtime.rs index 2c7a309fbf63e..0b4c834514b7f 100644 --- a/crates/turbopack-ecmascript-runtime/src/build_runtime.rs +++ b/crates/turbopack-ecmascript-runtime/src/build_runtime.rs @@ -13,18 +13,20 @@ pub async fn get_nodejs_runtime_code(environment: Vc) -> Result "dev/runtime/none/runtime-backend-none.ts".to_string(), - ChunkLoading::NodeJs => "dev/runtime/nodejs/runtime-backend-nodejs.ts".to_string(), - ChunkLoading::Dom => "dev/runtime/dom/runtime-backend-dom.ts".to_string(), + ChunkLoading::None => "dev/runtime/none/runtime-backend-none.ts" + .to_string() + .into(), + ChunkLoading::NodeJs => "dev/runtime/nodejs/runtime-backend-nodejs.ts" + .to_string() + .into(), + ChunkLoading::Dom => "dev/runtime/dom/runtime-backend-dom.ts".to_string().into(), }, ); @@ -72,7 +76,7 @@ pub async fn get_browser_runtime_code( code.push_code( &*embed_static_code( asset_context, - "shared-node/base-externals-utils.ts".to_string(), + "shared-node/base-externals-utils.ts".to_string().into(), ) .await?, ); @@ -81,15 +85,18 @@ pub async fn get_browser_runtime_code( code.push_code( &*embed_static_code( asset_context, - "shared-node/node-externals-utils.ts".to_string(), + "shared-node/node-externals-utils.ts".to_string().into(), ) .await?, ); } if *environment.supports_wasm().await? 
{ code.push_code( - &*embed_static_code(asset_context, "shared-node/node-wasm-utils.ts".to_string()) - .await?, + &*embed_static_code( + asset_context, + "shared-node/node-wasm-utils.ts".to_string().into(), + ) + .await?, ); } diff --git a/crates/turbopack-ecmascript-runtime/src/embed_js.rs b/crates/turbopack-ecmascript-runtime/src/embed_js.rs index bd503baa98ba0..92d7c5ba91971 100644 --- a/crates/turbopack-ecmascript-runtime/src/embed_js.rs +++ b/crates/turbopack-ecmascript-runtime/src/embed_js.rs @@ -1,3 +1,5 @@ +use std::sync::Arc; + use turbo_tasks::Vc; use turbo_tasks_fs::{embed_directory, FileContent, FileSystem, FileSystemPath}; use turbopack_core::{code_builder::Code, context::AssetContext}; @@ -9,16 +11,16 @@ pub fn embed_fs() -> Vc> { } #[turbo_tasks::function] -pub fn embed_file(path: String) -> Vc { +pub fn embed_file(path: Arc) -> Vc { embed_fs().root().join(path).read() } #[turbo_tasks::function] -pub fn embed_file_path(path: String) -> Vc { +pub fn embed_file_path(path: Arc) -> Vc { embed_fs().root().join(path) } #[turbo_tasks::function] -pub fn embed_static_code(asset_context: Vc>, path: String) -> Vc { +pub fn embed_static_code(asset_context: Vc>, path: Arc) -> Vc { StaticEcmascriptCode::new(asset_context, embed_file_path(path)).code() } diff --git a/crates/turbopack-ecmascript/src/chunk/placeable.rs b/crates/turbopack-ecmascript/src/chunk/placeable.rs index c63ebf097dc8a..ecd2ced5248ac 100644 --- a/crates/turbopack-ecmascript/src/chunk/placeable.rs +++ b/crates/turbopack-ecmascript/src/chunk/placeable.rs @@ -50,9 +50,9 @@ async fn side_effects_from_package_json( .filter_map(|side_effect| { if let Some(side_effect) = side_effect.as_str() { if side_effect.contains('/') { - Some(Glob::new(side_effect.to_string())) + Some(Glob::new(side_effect.to_string().into())) } else { - Some(Glob::new(format!("**/{side_effect}"))) + Some(Glob::new(format!("**/{side_effect}").into())) } } else { SideEffectsInPackageJsonIssue { diff --git a/crates/turbopack-ecmascript/src/references/esm/base.rs b/crates/turbopack-ecmascript/src/references/esm/base.rs index 0c279eec9daaf..b1fdc3e88079d 100644 --- a/crates/turbopack-ecmascript/src/references/esm/base.rs +++ b/crates/turbopack-ecmascript/src/references/esm/base.rs @@ -1,3 +1,5 @@ +use std::sync::Arc; + use anyhow::{anyhow, bail, Result}; use lazy_static::lazy_static; use swc_core::{ @@ -36,7 +38,7 @@ use crate::{ #[turbo_tasks::value] pub enum ReferencedAsset { Some(Vc>), - External(String, ExternalType), + External(Arc, ExternalType), None, } @@ -107,7 +109,7 @@ impl EsmAssetReference { fn get_origin(&self) -> Vc> { let mut origin = self.origin; if let Some(transition) = self.annotations.transition() { - origin = origin.with_transition(transition.to_string()); + origin = origin.with_transition(transition.to_string().into()); } origin } @@ -291,7 +293,7 @@ impl CodeGenerateable for EsmAssetReference { request ); } - let request = request.clone(); + let request = (**request).clone(); visitors.push(create_visitor!(visit_mut_program(program: &mut Program) { let stmt = if import_externals { quote!( @@ -325,7 +327,7 @@ impl CodeGenerateable for EsmAssetReference { request ); } - let request = request.clone(); + let request = (**request).clone(); visitors.push(create_visitor!(visit_mut_program(program: &mut Program) { let stmt = quote!( "var $name = __turbopack_external_require__($id, true);" as Stmt, diff --git a/crates/turbopack-ecmascript/src/references/esm/binding.rs b/crates/turbopack-ecmascript/src/references/esm/binding.rs index 
f2f4df7d86bb5..28a70b744a69e 100644 --- a/crates/turbopack-ecmascript/src/references/esm/binding.rs +++ b/crates/turbopack-ecmascript/src/references/esm/binding.rs @@ -1,3 +1,5 @@ +use std::sync::Arc; + use anyhow::Result; use swc_core::{ common::{Span, SyntaxContext}, @@ -23,7 +25,7 @@ use crate::{ #[derive(Hash, Debug)] pub struct EsmBinding { pub reference: Vc, - pub export: Option, + pub export: Option>, pub ast_path: Vc, } @@ -32,7 +34,7 @@ impl EsmBinding { #[turbo_tasks::function] pub fn new( reference: Vc, - export: Option, + export: Option>, ast_path: Vc, ) -> Vc { EsmBinding { @@ -110,7 +112,7 @@ impl CodeGenerateable for EsmBinding { if let Some(imported_ident) = imported_module.as_deref() { *prop = Prop::KeyValue(KeyValueProp { key: PropName::Ident(ident.clone()), - value: Box::new(make_expr(imported_ident, this.export.as_deref(), ident.span, false)) + value: Box::new(make_expr(imported_ident, this.export.as_ref().map(|v| &***v), ident.span, false)) }); } } @@ -132,7 +134,7 @@ impl CodeGenerateable for EsmBinding { create_visitor!(exact ast_path, visit_mut_expr(expr: &mut Expr) { if let Some(ident) = imported_module.as_deref() { use swc_core::common::Spanned; - *expr = make_expr(ident, this.export.as_deref(), expr.span(), in_call); + *expr = make_expr(ident, this.export.as_ref().map(|v| &***v), expr.span(), in_call); } // If there's no identifier for the imported module, // resolution failed and will insert code that throws diff --git a/crates/turbopack-ecmascript/src/references/esm/export.rs b/crates/turbopack-ecmascript/src/references/esm/export.rs index 8338c61aa65fc..f5b62a04a7d7a 100644 --- a/crates/turbopack-ecmascript/src/references/esm/export.rs +++ b/crates/turbopack-ecmascript/src/references/esm/export.rs @@ -1,6 +1,7 @@ use std::{ collections::{BTreeMap, HashSet}, ops::ControlFlow, + sync::Arc, }; use anyhow::Result; @@ -60,14 +61,14 @@ pub enum FoundExportType { #[turbo_tasks::value] pub struct FollowExportsResult { pub module: Vc>, - pub export_name: Option, + pub export_name: Option>, pub ty: FoundExportType, } #[turbo_tasks::function] pub async fn follow_reexports( module: Vc>, - export_name: String, + export_name: Arc, side_effect_free_packages: Vc, ) -> Result> { if !*module @@ -110,7 +111,7 @@ pub async fn follow_reexports( } // Try to find the export in the star exports - if !exports_ref.star_exports.is_empty() && export_name != "default" { + if !exports_ref.star_exports.is_empty() && *export_name != "default" { let result = get_all_export_names(module).await?; if let Some(m) = result.esm_exports.get(&export_name) { module = *m; @@ -148,10 +149,11 @@ pub async fn follow_reexports( async fn handle_declared_export( module: Vc>, - export_name: String, + export_name: Arc, export: &EsmExport, side_effect_free_packages: Vc, -) -> Result>, String)>> { +) -> Result>, Arc)>> +{ match export { EsmExport::ImportedBinding(reference, name, _) => { if let ReferencedAsset::Some(module) = @@ -163,11 +165,11 @@ async fn handle_declared_export( { return Ok(ControlFlow::Break(FollowExportsResult { module, - export_name: Some(name.to_string()), + export_name: Some(name.clone()), ty: FoundExportType::SideEffects, })); } - return Ok(ControlFlow::Continue((module, name.to_string()))); + return Ok(ControlFlow::Continue((module, name.clone()))); } } EsmExport::ImportedNamespace(reference) => { @@ -205,7 +207,7 @@ async fn handle_declared_export( #[turbo_tasks::value] struct AllExportNamesResult { - esm_exports: IndexMap>>, + esm_exports: IndexMap, Vc>>, dynamic_exporting_modules: 
Vec>>, } @@ -263,7 +265,7 @@ async fn get_all_export_names( #[turbo_tasks::value] pub struct ExpandStarResult { - pub star_exports: Vec, + pub star_exports: Vec>, pub has_dynamic_exports: bool, } @@ -280,7 +282,13 @@ pub async fn expand_star_exports( match &*exports.await? { EcmascriptExports::EsmExports(exports) => { let exports = exports.await?; - set.extend(exports.exports.keys().filter(|n| *n != "default").cloned()); + set.extend( + exports + .exports + .keys() + .filter(|n| ***n != "default") + .cloned(), + ); for esm_ref in exports.star_exports.iter() { if let ReferencedAsset::Some(asset) = &*ReferencedAsset::from_resolve_result(esm_ref.resolve_reference()).await? @@ -352,7 +360,7 @@ fn emit_star_exports_issue(source_ident: Vc, message: String) { #[turbo_tasks::value(shared)] #[derive(Hash, Debug)] pub struct EsmExports { - pub exports: BTreeMap, + pub exports: BTreeMap, EsmExport>, pub star_exports: Vec>>, } diff --git a/crates/turbopack-env/src/asset.rs b/crates/turbopack-env/src/asset.rs index 0533718cbb832..a2f3de58db77a 100644 --- a/crates/turbopack-env/src/asset.rs +++ b/crates/turbopack-env/src/asset.rs @@ -34,7 +34,7 @@ impl ProcessEnvAsset { impl Source for ProcessEnvAsset { #[turbo_tasks::function] fn ident(&self) -> Vc { - AssetIdent::from_path(self.root.join(".env.js".to_string())) + AssetIdent::from_path(self.root.join(".env.js".to_string().into())) } } diff --git a/crates/turbopack-env/src/dotenv.rs b/crates/turbopack-env/src/dotenv.rs index e66925fa2528a..7a11862232534 100644 --- a/crates/turbopack-env/src/dotenv.rs +++ b/crates/turbopack-env/src/dotenv.rs @@ -12,7 +12,7 @@ use crate::TryDotenvProcessEnv; pub async fn load_env(project_path: Vc) -> Result>> { let env: Vc> = Vc::upcast(CommandLineProcessEnv::new()); - let node_env = env.read("NODE_ENV".to_string()).await?; + let node_env = env.read("NODE_ENV".to_string().into()).await?; let node_env = node_env.as_deref().unwrap_or("development"); let env = Vc::upcast(CustomProcessEnv::new( @@ -36,7 +36,7 @@ pub async fn load_env(project_path: Vc) -> Result Result>> { + async fn read(&self, name: Arc) -> Result>> { let prior = self.prior.read(name).await?; let encoded = prior.as_deref().map(|s| StringifyJs(s).to_string()); Ok(Vc::cell(encoded)) diff --git a/crates/turbopack-node/src/embed_js.rs b/crates/turbopack-node/src/embed_js.rs index a13d379e216da..b0abdb953da25 100644 --- a/crates/turbopack-node/src/embed_js.rs +++ b/crates/turbopack-node/src/embed_js.rs @@ -1,3 +1,5 @@ +use std::sync::Arc; + use turbo_tasks::Vc; use turbo_tasks_fs::{embed_directory, FileContent, FileSystem, FileSystemPath}; @@ -7,11 +9,11 @@ pub fn embed_fs() -> Vc> { } #[turbo_tasks::function] -pub(crate) fn embed_file(path: String) -> Vc { +pub(crate) fn embed_file(path: Arc) -> Vc { embed_fs().root().join(path).read() } #[turbo_tasks::function] -pub(crate) fn embed_file_path(path: String) -> Vc { +pub(crate) fn embed_file_path(path: Arc) -> Vc { embed_fs().root().join(path) } diff --git a/crates/turbopack-node/src/evaluate.rs b/crates/turbopack-node/src/evaluate.rs index ddecfdf496333..ee900e724d4ea 100644 --- a/crates/turbopack-node/src/evaluate.rs +++ b/crates/turbopack-node/src/evaluate.rs @@ -93,7 +93,7 @@ pub async fn get_evaluate_pool( let runtime_asset = asset_context .process( Vc::upcast(FileSource::new(embed_file_path( - "ipc/evaluate.ts".to_string(), + "ipc/evaluate.ts".to_string().into(), ))), Value::new(ReferenceType::Internal(InnerAssets::empty())), ) @@ -108,11 +108,16 @@ pub async fn get_evaluate_pool( } else { 
Cow::Owned(format!("{file_name}.js")) }; - let path = chunking_context.output_root().join(file_name.to_string()); + let path = chunking_context + .output_root() + .join(file_name.to_string().into()); let entry_module = asset_context .process( Vc::upcast(VirtualSource::new( - runtime_asset.ident().path().join("evaluate.js".to_string()), + runtime_asset + .ident() + .path() + .join("evaluate.js".to_string().into()), AssetContent::file( File::from("import { run } from 'RUNTIME'; run(() => import('INNER'))").into(), ), @@ -137,7 +142,9 @@ pub async fn get_evaluate_pool( let runtime_entries = { let globals_module = asset_context .process( - Vc::upcast(FileSource::new(embed_file_path("globals.ts".to_string()))), + Vc::upcast(FileSource::new(embed_file_path( + "globals.ts".to_string().into(), + ))), Value::new(ReferenceType::Internal(InnerAssets::empty())), ) .module(); diff --git a/crates/turbopack-node/src/lib.rs b/crates/turbopack-node/src/lib.rs index 92c77f407b5a9..488e5c27fd511 100644 --- a/crates/turbopack-node/src/lib.rs +++ b/crates/turbopack-node/src/lib.rs @@ -4,7 +4,7 @@ #![feature(arbitrary_self_types)] #![feature(extract_if)] -use std::{collections::HashMap, iter::once, thread::available_parallelism}; +use std::{collections::HashMap, iter::once, sync::Arc, thread::available_parallelism}; use anyhow::{bail, Result}; use indexmap::IndexSet; @@ -200,7 +200,7 @@ async fn separate_assets( fn emit_package_json(dir: Vc) -> Vc { emit( Vc::upcast(VirtualOutputAsset::new( - dir.join("package.json".to_string()), + dir.join("package.json".to_string().into()), AssetContent::file(File::from("{\"type\": \"commonjs\"}").into()), )), dir, @@ -266,7 +266,7 @@ pub async fn get_intermediate_asset( ) -> Result>> { Ok(Vc::upcast( NodeJsBootstrapAsset { - path: chunking_context.chunk_path(main_entry.ident(), ".js".to_string()), + path: chunking_context.chunk_path(main_entry.ident(), ".js".to_string().into()), chunking_context, evaluatable_assets: other_entries.with_entry(main_entry), } @@ -278,7 +278,7 @@ pub async fn get_intermediate_asset( #[turbo_tasks::value(shared)] pub struct ResponseHeaders { pub status: u16, - pub headers: Vec<(String, String)>, + pub headers: Vec<(Arc, Arc)>, } pub fn register() { diff --git a/crates/turbopack-node/src/render/error_page.rs b/crates/turbopack-node/src/render/error_page.rs index 05567b8363f34..9b04d4f7393fe 100644 --- a/crates/turbopack-node/src/render/error_page.rs +++ b/crates/turbopack-node/src/render/error_page.rs @@ -1,3 +1,5 @@ +use std::sync::Arc; + use anyhow::{Context, Result}; use turbo_tasks::Vc; use turbo_tasks_fs::embed_file; @@ -5,8 +7,8 @@ use turbo_tasks_fs::embed_file; #[turbo_tasks::function] pub(super) async fn error_html( status_code: u16, - title: String, - details: String, + title: Arc, + details: Arc, ) -> Result> { let html = create_html(status_code, title, details).await?; @@ -16,8 +18,8 @@ pub(super) async fn error_html( #[turbo_tasks::function] pub(super) async fn error_html_body( status_code: u16, - title: String, - details: String, + title: Arc, + details: Arc, ) -> Result> { let html = create_html(status_code, title, details).await?; @@ -27,7 +29,7 @@ pub(super) async fn error_html_body( Ok(Vc::cell(body.to_string())) } -async fn create_html(status_code: u16, title: String, details: String) -> Result { +async fn create_html(status_code: u16, title: Arc, details: Arc) -> Result { let file_content = embed_file!("src/render/error.html").await?; let file = file_content .as_content() diff --git a/crates/turbopack-node/src/render/mod.rs 
b/crates/turbopack-node/src/render/mod.rs index 1ccc03f3c0955..5dbdcf5e0ef3f 100644 --- a/crates/turbopack-node/src/render/mod.rs +++ b/crates/turbopack-node/src/render/mod.rs @@ -1,3 +1,5 @@ +use std::sync::Arc; + use indexmap::IndexMap; use serde::{Deserialize, Serialize}; use serde_json::Value as JsonValue; @@ -54,8 +56,8 @@ enum RenderStaticIncomingMessage { #[serde(rename_all = "camelCase")] Response { status_code: u16, - headers: Vec<(String, String)>, - body: String, + headers: Vec<(Arc, Arc)>, + body: Arc, }, Headers { data: ResponseHeaders, @@ -65,7 +67,7 @@ enum RenderStaticIncomingMessage { }, BodyEnd, Rewrite { - path: String, + path: Arc, }, Error(StructuredError), } diff --git a/crates/turbopack-node/src/render/node_api_source.rs b/crates/turbopack-node/src/render/node_api_source.rs index e179ff7027b36..8b108366d07c6 100644 --- a/crates/turbopack-node/src/render/node_api_source.rs +++ b/crates/turbopack-node/src/render/node_api_source.rs @@ -1,3 +1,5 @@ +use std::sync::Arc; + use anyhow::{anyhow, Result}; use indexmap::IndexSet; use serde_json::Value as JsonValue; @@ -109,7 +111,7 @@ impl GetContentSourceContent for NodeApiContentSource { #[turbo_tasks::function] async fn get( &self, - path: String, + path: Arc, data: Value, ) -> Result> { let Some(params) = &*self.route_match.params(path.clone()).await? else { diff --git a/crates/turbopack-node/src/render/render_proxy.rs b/crates/turbopack-node/src/render/render_proxy.rs index d22fe871b1e83..3f3f977d4037f 100644 --- a/crates/turbopack-node/src/render/render_proxy.rs +++ b/crates/turbopack-node/src/render/render_proxy.rs @@ -111,8 +111,10 @@ async fn proxy_error( let status_code = 500; let body = error_html( status_code, - "An error occurred while proxying the request to Node.js".to_string(), - format!("{message}\n\n{}", details.join("\n")), + "An error occurred while proxying the request to Node.js" + .to_string() + .into(), + format!("{message}\n\n{}", details.join("\n")).into(), ) .await? .clone_value(); @@ -289,8 +291,8 @@ async fn render_stream_internal( yield RenderItem::Headers(ResponseHeaders { status, headers: vec![( - "content-type".to_string(), - "text/html; charset=utf-8".to_string(), + "content-type".to_string().into(), + "text/html; charset=utf-8".to_string().into(), )], }); yield RenderItem::BodyChunk(body.into()); diff --git a/crates/turbopack-node/src/render/render_static.rs b/crates/turbopack-node/src/render/render_static.rs index 458abe38977b7..eb65087ea1bcf 100644 --- a/crates/turbopack-node/src/render/render_static.rs +++ b/crates/turbopack-node/src/render/render_static.rs @@ -160,9 +160,13 @@ async fn static_error( .to_string(); body.push_str( - error_html_body(500, "Error rendering page".to_string(), message) - .await? - .as_str(), + error_html_body( + 500, + "Error rendering page".to_string().into(), + message.into(), + ) + .await? 
+ .as_str(), ); let issue = RenderingIssue { @@ -173,7 +177,7 @@ async fn static_error( issue.cell().emit(); - let html = fallback_page.with_body(body); + let html = fallback_page.with_body(body.into()); Ok(html.content()) } diff --git a/crates/turbopack-node/src/render/rendered_source.rs b/crates/turbopack-node/src/render/rendered_source.rs index 51301c93a5d67..4be45f5bc0692 100644 --- a/crates/turbopack-node/src/render/rendered_source.rs +++ b/crates/turbopack-node/src/render/rendered_source.rs @@ -1,3 +1,5 @@ +use std::sync::Arc; + use anyhow::{anyhow, Result}; use indexmap::IndexSet; use serde_json::Value as JsonValue; @@ -167,7 +169,7 @@ impl GetContentSourceContent for NodeRenderContentSource { #[turbo_tasks::function] async fn get( &self, - path: String, + path: Arc, data: Value, ) -> Result> { let pathname = self.pathname.await?; diff --git a/crates/turbopack-node/src/route_matcher.rs b/crates/turbopack-node/src/route_matcher.rs index 4341da5d97a17..eb24590bed52e 100644 --- a/crates/turbopack-node/src/route_matcher.rs +++ b/crates/turbopack-node/src/route_matcher.rs @@ -1,3 +1,5 @@ +use std::sync::Arc; + use indexmap::IndexMap; use turbo_tasks::Vc; @@ -26,8 +28,8 @@ pub trait RouteMatcherRef { #[turbo_tasks::value_trait] pub trait RouteMatcher { /// Returns whether the given path is a match for the route. - fn matches(self: Vc, path: String) -> Vc; + fn matches(self: Vc, path: Arc) -> Vc; /// Returns the parameters extracted from the given path. - fn params(self: Vc, path: String) -> Vc; + fn params(self: Vc, path: Arc) -> Vc; } diff --git a/crates/turbopack-node/src/source_map/mod.rs b/crates/turbopack-node/src/source_map/mod.rs index 28484719207c1..fef96c7f425c2 100644 --- a/crates/turbopack-node/src/source_map/mod.rs +++ b/crates/turbopack-node/src/source_map/mod.rs @@ -225,7 +225,7 @@ async fn resolve_source_mapping( let Some(sm) = *generate_source_map.generate_source_map().await? else { return Ok(ResolvedSourceMapping::NoSourceMap); }; - let trace = SourceMapTrace::new(sm, line, column, name.map(|s| s.to_string())) + let trace = SourceMapTrace::new(sm, line, column, name.map(|s| s.to_string().into())) .trace() .await?; match &*trace { @@ -237,7 +237,7 @@ async fn resolve_source_mapping( PROJECT_FILESYSTEM_NAME, "]/" )) { - let fs_path = project_dir.join(project_path.to_string()); + let fs_path = project_dir.join(project_path.to_string().into()); if lib_code { return Ok(ResolvedSourceMapping::MappedLibrary { frame: frame.clone(), diff --git a/crates/turbopack-node/src/source_map/trace.rs b/crates/turbopack-node/src/source_map/trace.rs index 48e82ca7a5b50..1d4a09e562a26 100644 --- a/crates/turbopack-node/src/source_map/trace.rs +++ b/crates/turbopack-node/src/source_map/trace.rs @@ -1,4 +1,4 @@ -use std::{borrow::Cow, fmt::Display}; +use std::{borrow::Cow, fmt::Display, sync::Arc}; use anyhow::Result; use mime::APPLICATION_JSON; @@ -88,7 +88,7 @@ pub struct SourceMapTrace { map: Vc, line: usize, column: usize, - name: Option, + name: Option>, } /// The result of performing a source map trace. 
@@ -106,7 +106,7 @@ impl SourceMapTrace { map: Vc, line: usize, column: usize, - name: Option, + name: Option>, ) -> Vc { SourceMapTrace { map, @@ -140,7 +140,11 @@ impl SourceMapTrace { file: t.original_file.clone().into(), line: Some(t.original_line.saturating_add(1)), column: Some(t.original_column.saturating_add(1)), - name: t.name.clone().or_else(|| this.name.clone()).map(Into::into), + name: t + .name + .clone() + .or_else(|| this.name.as_deref().cloned()) + .map(Into::into), }), _ => TraceResult::NotFound, }; diff --git a/crates/turbopack-node/src/transforms/postcss.rs b/crates/turbopack-node/src/transforms/postcss.rs index 8b242611ad2e6..de9289a872df4 100644 --- a/crates/turbopack-node/src/transforms/postcss.rs +++ b/crates/turbopack-node/src/transforms/postcss.rs @@ -193,8 +193,8 @@ async fn extra_configs_changed( let parent_path = postcss_config_path.parent(); let config_paths = [ - parent_path.join("tailwind.config.js".to_string()), - parent_path.join("tailwind.config.ts".to_string()), + parent_path.join("tailwind.config.js".to_string().into()), + parent_path.join("tailwind.config.ts".to_string().into()), ]; let configs = config_paths @@ -264,7 +264,7 @@ pub(crate) async fn config_loader_source( }; Ok(Vc::upcast(VirtualSource::new( - postcss_config_path.append("_.loader.mjs".to_string()), + postcss_config_path.append("_.loader.mjs".to_string().into()), AssetContent::file(File::from(code).into()), ))) } @@ -284,8 +284,8 @@ async fn postcss_executor( Ok(asset_context.process( Vc::upcast(VirtualSource::new( - postcss_config_path.join("transform.ts".to_string()), - AssetContent::File(embed_file("transforms/postcss.ts".to_string())).cell(), + postcss_config_path.join("transform.ts".to_string().into()), + AssetContent::File(embed_file("transforms/postcss.ts".to_string().into())).cell(), )), Value::new(ReferenceType::Internal(Vc::cell(indexmap! 
{ "CONFIG".to_string() => config_asset diff --git a/crates/turbopack-node/src/transforms/util.rs b/crates/turbopack-node/src/transforms/util.rs index 39c22db52d85a..f3604bc3fd029 100644 --- a/crates/turbopack-node/src/transforms/util.rs +++ b/crates/turbopack-node/src/transforms/util.rs @@ -35,7 +35,7 @@ pub fn emitted_assets_to_virtual_sources( .map(|(file, (content, _source_map))| { // TODO handle SourceMap VirtualSource::new( - ServerFileSystem::new().root().join(file), + ServerFileSystem::new().root().join(file.into()), AssetContent::File(FileContent::Content(File::from(content)).cell()).cell(), ) }) diff --git a/crates/turbopack-node/src/transforms/webpack.rs b/crates/turbopack-node/src/transforms/webpack.rs index 745ca87cb0cfd..efe5c235a68ef 100644 --- a/crates/turbopack-node/src/transforms/webpack.rs +++ b/crates/turbopack-node/src/transforms/webpack.rs @@ -1,4 +1,4 @@ -use std::mem::take; +use std::{mem::take, sync::Arc}; use anyhow::{bail, Context, Result}; use async_trait::async_trait; @@ -78,7 +78,7 @@ pub struct WebpackLoaders { evaluate_context: Vc>, execution_context: Vc, loaders: Vc, - rename_as: Option, + rename_as: Option>, resolve_options_context: Vc, } @@ -89,7 +89,7 @@ impl WebpackLoaders { evaluate_context: Vc>, execution_context: Vc, loaders: Vc, - rename_as: Option, + rename_as: Option>, resolve_options_context: Vc, ) -> Vc { WebpackLoaders { @@ -129,7 +129,7 @@ impl Source for WebpackLoadersProcessedAsset { async fn ident(&self) -> Result> { Ok( if let Some(rename_as) = self.transform.await?.rename_as.as_deref() { - self.source.ident().rename_as(rename_as.to_string()) + self.source.ident().rename_as(rename_as.to_string().into()) } else { self.source.ident() }, @@ -164,7 +164,7 @@ struct ProcessWebpackLoadersResult { fn webpack_loaders_executor(evaluate_context: Vc>) -> Vc { evaluate_context.process( Vc::upcast(FileSource::new(embed_file_path( - "transforms/webpack-loaders.ts".to_string(), + "transforms/webpack-loaders.ts".to_string().into(), ))), Value::new(ReferenceType::Internal(InnerAssets::empty())), ) @@ -329,13 +329,13 @@ pub enum InfoMessage { #[serde(rename_all = "camelCase")] pub struct WebpackResolveOptions { - alias_fields: Option>, - condition_names: Option>, + alias_fields: Option>>, + condition_names: Option>>, no_package_json: bool, - extensions: Option>, - main_fields: Option>, + extensions: Option>>, + main_fields: Option>>, no_exports_field: bool, - main_files: Option>, + main_files: Option>>, no_modules: bool, prefer_relative: bool, } @@ -430,13 +430,13 @@ impl EvaluateContext for WebpackLoaderContext { // TODO We might miss some changes that happened during execution // Read dependencies to make them a dependencies of this task. This task will // execute again when they change. - self.cwd.join(path).read().await?; + self.cwd.join(path.into()).read().await?; } InfoMessage::BuildDependency { path } => { // TODO We might miss some changes that happened during execution BuildDependencyIssue { context_ident: self.context_ident_for_issue, - path: self.cwd.join(path), + path: self.cwd.join(path.into()), } .cell() .emit(); @@ -445,7 +445,12 @@ impl EvaluateContext for WebpackLoaderContext { // TODO We might miss some changes that happened during execution // Read dependencies to make them a dependencies of this task. This task will // execute again when they change. 
- dir_dependency(self.cwd.join(path).read_glob(Glob::new(glob), false)).await?; + dir_dependency( + self.cwd + .join(path.into()) + .read_glob(Glob::new(glob.into()), false), + ) + .await?; } InfoMessage::EmittedError { error, severity } => { EvaluateEmittedErrorIssue { @@ -481,7 +486,7 @@ impl EvaluateContext for WebpackLoaderContext { let Some(resolve_options_context) = self.resolve_options_context else { bail!("Resolve options are not available in this context"); }; - let lookup_path = self.cwd.join(lookup_path); + let lookup_path = self.cwd.join(lookup_path.into()); let request = Request::parse(Value::new(Pattern::Constant(request))); let options = resolve_options(lookup_path, resolve_options_context); @@ -566,7 +571,7 @@ async fn apply_webpack_resolve_options( .extract_if(|field| matches!(field, ResolveInPackage::AliasField(..))) .collect::>(); for field in alias_fields { - if field == "..." { + if &**field == "..." { resolve_options.in_package.extend(take(&mut old)); } else { resolve_options @@ -579,10 +584,10 @@ async fn apply_webpack_resolve_options( for conditions in get_condition_maps(&mut resolve_options) { let mut old = take(conditions); for name in &condition_names { - if name == "..." { + if &***name == "..." { conditions.extend(take(&mut old)); } else { - conditions.insert(name.clone(), ConditionValue::Set); + conditions.insert((**name).clone(), ConditionValue::Set); } } } @@ -596,7 +601,7 @@ async fn apply_webpack_resolve_options( }); } if let Some(mut extensions) = webpack_resolve_options.extensions { - if let Some(pos) = extensions.iter().position(|ext| ext == "...") { + if let Some(pos) = extensions.iter().position(|ext| &***ext == "...") { extensions.splice(pos..=pos, take(&mut resolve_options.extensions)); } resolve_options.extensions = extensions; @@ -607,7 +612,7 @@ async fn apply_webpack_resolve_options( .extract_if(|field| matches!(field, ResolveIntoPackage::MainField { .. })) .collect::>(); for field in main_fields { - if field == "..." { + if &**field == "..." 
{ resolve_options.into_package.extend(take(&mut old)); } else { resolve_options @@ -698,7 +703,7 @@ async fn dir_dependency_shallow(glob: Vc) -> Result { - dir_dependency(dir.read_glob(Glob::new("**".to_string()), false)).await?; + dir_dependency(dir.read_glob(Glob::new("**".to_string().into()), false)).await?; } DirectoryEntry::Symlink(symlink) => { symlink.read_link().await?; diff --git a/crates/turbopack-nodejs/src/chunking_context.rs b/crates/turbopack-nodejs/src/chunking_context.rs index 438c8dc7eb624..79767e3f8ea77 100644 --- a/crates/turbopack-nodejs/src/chunking_context.rs +++ b/crates/turbopack-nodejs/src/chunking_context.rs @@ -1,4 +1,4 @@ -use std::iter::once; +use std::{iter::once, sync::Arc}; use anyhow::{bail, Context, Result}; use tracing::Instrument; @@ -262,11 +262,11 @@ impl ChunkingContext for NodeJsChunkingContext { async fn chunk_path( &self, ident: Vc, - extension: String, + extension: Arc, ) -> Result> { let root_path = self.chunk_root_path; let name = ident.output_name(self.context_path, extension).await?; - Ok(root_path.join(name.clone_value())) + Ok(root_path.join(name.clone_value().into())) } #[turbo_tasks::function] @@ -277,7 +277,7 @@ impl ChunkingContext for NodeJsChunkingContext { #[turbo_tasks::function] async fn asset_path( &self, - content_hash: String, + content_hash: Arc, original_asset_ident: Vc, ) -> Result> { let source_path = original_asset_ident.path().await?; @@ -293,7 +293,7 @@ impl ChunkingContext for NodeJsChunkingContext { content_hash = &content_hash[..8] ), }; - Ok(self.asset_root_path.join(asset_path)) + Ok(self.asset_root_path.join(asset_path.into())) } #[turbo_tasks::function] diff --git a/crates/turbopack-nodejs/src/ecmascript/node/chunk.rs b/crates/turbopack-nodejs/src/ecmascript/node/chunk.rs index 034c971686157..684a18268eccc 100644 --- a/crates/turbopack-nodejs/src/ecmascript/node/chunk.rs +++ b/crates/turbopack-nodejs/src/ecmascript/node/chunk.rs @@ -69,7 +69,10 @@ impl OutputAsset for EcmascriptBuildNodeChunk { #[turbo_tasks::function] fn ident(&self) -> Vc { let ident = self.chunk.ident().with_modifier(modifier()); - AssetIdent::from_path(self.chunking_context.chunk_path(ident, ".js".to_string())) + AssetIdent::from_path( + self.chunking_context + .chunk_path(ident, ".js".to_string().into()), + ) } #[turbo_tasks::function] diff --git a/crates/turbopack-nodejs/src/ecmascript/node/entry/runtime.rs b/crates/turbopack-nodejs/src/ecmascript/node/entry/runtime.rs index 2bffe218d5986..ed71c0eb16c29 100644 --- a/crates/turbopack-nodejs/src/ecmascript/node/entry/runtime.rs +++ b/crates/turbopack-nodejs/src/ecmascript/node/entry/runtime.rs @@ -103,10 +103,13 @@ impl OutputAsset for EcmascriptBuildNodeRuntimeChunk { let ident = AssetIdent::from_path( turbopack_ecmascript_runtime::embed_fs() .root() - .join("runtime.js".to_string()), + .join("runtime.js".to_string().into()), ); - AssetIdent::from_path(self.chunking_context.chunk_path(ident, ".js".to_string())) + AssetIdent::from_path( + self.chunking_context + .chunk_path(ident, ".js".to_string().into()), + ) } #[turbo_tasks::function] diff --git a/crates/turbopack-resolve/src/node_native_binding.rs b/crates/turbopack-resolve/src/node_native_binding.rs index 52a0afaf6b2b2..bbf0636f734f1 100644 --- a/crates/turbopack-resolve/src/node_native_binding.rs +++ b/crates/turbopack-resolve/src/node_native_binding.rs @@ -1,3 +1,5 @@ +use std::sync::Arc; + use anyhow::Result; use indexmap::IndexMap; use lazy_static::lazy_static; @@ -109,7 +111,7 @@ pub async fn resolve_node_pre_gyp_files( let 
config_file_dir = config_file_path.parent(); let node_pre_gyp_config: NodePreGypConfigJson = parse_json_rope_with_source_context(config_file.content())?; - let mut sources: IndexMap>> = IndexMap::new(); + let mut sources: IndexMap, Vc>> = IndexMap::new(); for version in node_pre_gyp_config.binary.napi_versions.iter() { let native_binding_path = NAPI_VERSION_TEMPLATE.replace( node_pre_gyp_config.binary.module_path.as_str(), @@ -131,9 +133,9 @@ pub async fn resolve_node_pre_gyp_files( ); for (key, entry) in config_file_dir - .join(native_binding_path.to_string()) + .join(native_binding_path.to_string().into()) .read_glob( - Glob::new(format!("*.{}", compile_target.dylib_ext())), + Glob::new(format!("*.{}", compile_target.dylib_ext()).into()), false, ) .await? @@ -144,16 +146,16 @@ pub async fn resolve_node_pre_gyp_files( entry { sources.insert( - format!("{native_binding_path}/{key}"), + format!("{native_binding_path}/{key}").into(), Vc::upcast(FileSource::new(dylib)), ); } } - let node_file_path = format!( + let node_file_path = Arc::new(format!( "{}/{}.node", native_binding_path, node_pre_gyp_config.binary.module_name - ); + )); let resolved_file_vc = config_file_dir.join(node_file_path.clone()); sources.insert( node_file_path, @@ -163,8 +165,8 @@ pub async fn resolve_node_pre_gyp_files( for (key, entry) in config_file_dir // TODO // read the dependencies path from `bindings.gyp` - .join("deps/lib".to_string()) - .read_glob(Glob::new("*".to_string()), false) + .join("deps/lib".to_string().into()) + .read_glob(Glob::new("*".to_string().into()), false) .await? .results .iter() @@ -172,7 +174,7 @@ pub async fn resolve_node_pre_gyp_files( match *entry { DirectoryEntry::File(dylib) => { sources.insert( - format!("deps/lib/{key}"), + format!("deps/lib/{key}").into(), Vc::upcast(FileSource::new(dylib)), ); } @@ -182,7 +184,7 @@ pub async fn resolve_node_pre_gyp_files( affecting_paths.push(symlink); } sources.insert( - format!("deps/lib/{key}"), + format!("deps/lib/{key}").into(), Vc::upcast(FileSource::new(realpath_with_links.path)), ); } @@ -265,11 +267,11 @@ pub async fn resolve_node_gyp_build_files( if let Some(captured) = GYP_BUILD_TARGET_NAME.captures(&config_file.content().to_str()?) 
{ - let mut resolved: IndexMap>> = + let mut resolved: IndexMap, Vc>> = IndexMap::with_capacity(captured.len()); for found in captured.iter().skip(1).flatten() { let name = found.as_str(); - let target_path = context_dir.join("build/Release".to_string()); + let target_path = context_dir.join("build/Release".to_string().into()); let resolved_prebuilt_file = resolve_raw( target_path, Pattern::new(Pattern::Constant(format!("{}.node", name))), @@ -280,7 +282,7 @@ pub async fn resolve_node_gyp_build_files( resolved_prebuilt_file.primary.first() { resolved.insert( - format!("build/Release/{name}.node"), + format!("build/Release/{name}.node").into(), source.resolve().await?, ); merged_affecting_sources @@ -320,13 +322,13 @@ pub async fn resolve_node_gyp_build_files( #[derive(Hash, Clone, Debug)] pub struct NodeBindingsReference { pub context_dir: Vc, - pub file_name: String, + pub file_name: Arc, } #[turbo_tasks::value_impl] impl NodeBindingsReference { #[turbo_tasks::function] - pub fn new(context_dir: Vc, file_name: String) -> Vc { + pub fn new(context_dir: Vc, file_name: Arc) -> Vc { Self::cell(NodeBindingsReference { context_dir, file_name, @@ -356,7 +358,7 @@ impl ValueToString for NodeBindingsReference { #[turbo_tasks::function] pub async fn resolve_node_bindings_files( context_dir: Vc, - file_name: String, + file_name: Arc, ) -> Result> { lazy_static! { static ref BINDINGS_TRY: [&'static str; 5] = [ @@ -392,7 +394,7 @@ pub async fn resolve_node_bindings_files( root_context_dir = parent; } - let try_path = |sub_path: String| async move { + let try_path = |sub_path: Arc| async move { let path = root_context_dir.join(sub_path.clone()); Ok( if matches!(*path.get_type().await?, FileSystemEntryType::File) { @@ -408,7 +410,7 @@ pub async fn resolve_node_bindings_files( let modules = BINDINGS_TRY .iter() - .map(|try_dir| try_path(format!("{}/{}", try_dir, &file_name))) + .map(|try_dir| try_path(format!("{}/{}", try_dir, &file_name).into())) .try_flat_join() .await?; Ok(ModuleResolveResult::modules(modules).cell()) diff --git a/crates/turbopack-resolve/src/resolve.rs b/crates/turbopack-resolve/src/resolve.rs index f4e1c49e8451e..893ccaaeeb62e 100644 --- a/crates/turbopack-resolve/src/resolve.rs +++ b/crates/turbopack-resolve/src/resolve.rs @@ -1,3 +1,5 @@ +use std::sync::Arc; + use anyhow::Result; use turbo_tasks::Vc; use turbo_tasks_fs::{FileSystem, FileSystemPath}; @@ -106,7 +108,8 @@ async fn base_resolve_options( for req in EDGE_NODE_EXTERNALS { direct_mappings.insert( AliasPattern::exact(req), - ImportMapping::External(Some(format!("node:{req}")), ExternalType::CommonJs).into(), + ImportMapping::External(Some(format!("node:{req}").into()), ExternalType::CommonJs) + .into(), ); direct_mappings.insert( AliasPattern::exact(format!("node:{req}")), @@ -174,26 +177,32 @@ async fn base_resolve_options( let extensions = if let Some(custom_extension) = &opt.custom_extensions { custom_extension.clone() } else if let Some(environment) = emulating { - environment.resolve_extensions().await?.clone_value() + environment + .resolve_extensions() + .await? 
+ .iter() + .cloned() + .map(Arc::new) + .collect() } else { let mut ext = Vec::new(); if opt.enable_typescript && opt.enable_react { - ext.push(".tsx".to_string()); + ext.push(".tsx".to_string().into()); } if opt.enable_typescript { - ext.push(".ts".to_string()); + ext.push(".ts".to_string().into()); } if opt.enable_react { - ext.push(".jsx".to_string()); + ext.push(".jsx".to_string().into()); } - ext.push(".js".to_string()); + ext.push(".js".to_string().into()); if opt.enable_mjs_extension { - ext.push(".mjs".to_string()); + ext.push(".mjs".to_string().into()); } if opt.enable_node_native_modules { - ext.push(".node".to_string()); + ext.push(".node".to_string().into()); } - ext.push(".json".to_string()); + ext.push(".json".to_string().into()); ext }; Ok(ResolveOptions { @@ -202,7 +211,7 @@ async fn base_resolve_options( if *environment.resolve_node_modules().await? { vec![ResolveModules::Nested( root, - vec!["node_modules".to_string()], + vec!["node_modules".to_string().into()], )] } else { Vec::new() @@ -212,7 +221,7 @@ async fn base_resolve_options( if let Some(dir) = opt.enable_node_modules { mods.push(ResolveModules::Nested( dir, - vec!["node_modules".to_string()], + vec!["node_modules".to_string().into()], )); } mods @@ -224,16 +233,16 @@ async fn base_resolve_options( }]; if opt.browser { resolve_into.push(ResolveIntoPackage::MainField { - field: "browser".to_string(), + field: "browser".to_string().into(), }); } if opt.module { resolve_into.push(ResolveIntoPackage::MainField { - field: "module".to_string(), + field: "module".to_string().into(), }); } resolve_into.push(ResolveIntoPackage::MainField { - field: "main".to_string(), + field: "main".to_string().into(), }); resolve_into }, @@ -243,11 +252,11 @@ async fn base_resolve_options( unspecified_conditions: ConditionValue::Unset, }]; if opt.browser { - resolve_in.push(ResolveInPackage::AliasField("browser".to_string())); + resolve_in.push(ResolveInPackage::AliasField("browser".to_string().into())); } resolve_in }, - default_files: vec!["index".to_string()], + default_files: vec!["index".to_string().into()], import_map: Some(import_map), resolved_map: opt.resolved_map, plugins, diff --git a/crates/turbopack-resolve/src/resolve_options_context.rs b/crates/turbopack-resolve/src/resolve_options_context.rs index 6b38129b2bc25..b94ffcc352543 100644 --- a/crates/turbopack-resolve/src/resolve_options_context.rs +++ b/crates/turbopack-resolve/src/resolve_options_context.rs @@ -1,3 +1,5 @@ +use std::sync::Arc; + use anyhow::Result; use turbo_tasks::{ValueDefault, Vc}; use turbo_tasks_fs::FileSystemPath; @@ -44,9 +46,9 @@ pub struct ResolveOptionsContext { /// Enables the "module" field and export condition in package.json pub module: bool, #[serde(default)] - pub custom_conditions: Vec, + pub custom_conditions: Vec>, #[serde(default)] - pub custom_extensions: Option>, + pub custom_extensions: Option>>, #[serde(default)] /// An additional import map to use when resolving modules. 
/// diff --git a/crates/turbopack-resolve/src/typescript.rs b/crates/turbopack-resolve/src/typescript.rs index a781287fef8e5..579b513dab6d9 100644 --- a/crates/turbopack-resolve/src/typescript.rs +++ b/crates/turbopack-resolve/src/typescript.rs @@ -37,7 +37,7 @@ pub struct TsConfigIssue { #[turbo_tasks::function] async fn json_only(resolve_options: Vc) -> Result> { let mut opts = resolve_options.await?.clone_value(); - opts.extensions = vec![".json".to_string()]; + opts.extensions = vec![".json".to_string().into()]; Ok(opts.cell()) } @@ -113,7 +113,7 @@ async fn resolve_extends( resolve_options: Vc, ) -> Result> { let parent_dir = tsconfig.ident().path().parent(); - let request = Request::parse_string(extends.to_string()); + let request = Request::parse_string(extends.to_string().into()); // TS's resolution is weird, and has special behavior for different import // types. There might be multiple alternatives like @@ -139,7 +139,7 @@ async fn resolve_extends( // An empty extends is treated as "./tsconfig" Request::Empty => { - let request = Request::parse_string("./tsconfig".to_string()); + let request = Request::parse_string("./tsconfig".to_string().into()); Ok(resolve(parent_dir, Value::new(ReferenceType::TypeScript(TypeScriptReferenceSubType::Undefined)), request, resolve_options).first_source()) } @@ -149,7 +149,7 @@ async fn resolve_extends( _ => { let mut result = resolve(parent_dir, Value::new(ReferenceType::TypeScript(TypeScriptReferenceSubType::Undefined)), request, resolve_options).first_source(); if result.await?.is_none() { - let request = Request::parse_string(format!("{extends}/tsconfig")); + let request = Request::parse_string(format!("{extends}/tsconfig").into()); result = resolve(parent_dir, Value::new(ReferenceType::TypeScript(TypeScriptReferenceSubType::Undefined)), request, resolve_options).first_source(); } Ok(result) @@ -177,7 +177,7 @@ async fn resolve_extends_rooted_or_relative( // to try again with it. // https://github.com/microsoft/TypeScript/blob/611a912d/src/compiler/commandLineParser.ts#L3305 if !path.ends_with(".json") && result.await?.is_none() { - let request = Request::parse_string(format!("{path}.json")); + let request = Request::parse_string(format!("{path}.json").into()); result = resolve( lookup_path, Value::new(ReferenceType::TypeScript( @@ -245,7 +245,7 @@ pub async fn tsconfig_resolve_options( .ident() .path() .parent() - .try_join(base_url.to_string()) + .try_join(base_url.to_string().into()) }) }) .await? @@ -261,7 +261,9 @@ pub async fn tsconfig_resolve_options( if let JsonValue::Object(paths) = &json["compilerOptions"]["paths"] { let mut context_dir = source.ident().path().parent(); if let Some(base_url) = json["compilerOptions"]["baseUrl"].as_str() { - if let Some(new_context) = *context_dir.try_join(base_url.to_string()).await? { + if let Some(new_context) = + *context_dir.try_join(base_url.to_string().into()).await? + { context_dir = new_context; } }; @@ -279,9 +281,9 @@ pub async fn tsconfig_resolve_options( entry.map(|s| { // tsconfig paths are always relative requests if s.starts_with("./") || s.starts_with("../") { - s.to_string() + s.to_string().into() } else { - format!("./{s}") + format!("./{s}").into() } }) }) @@ -378,12 +380,12 @@ pub async fn type_resolve( } = &*request.await? 
{ let m = if let Some(stripped) = m.strip_prefix('@') { - stripped.replace('/', "__") + stripped.replace('/', "__").into() } else { m.clone() }; Some(Request::module( - format!("@types/{m}"), + format!("@types/{m}").into(), Value::new(p.clone()), Vc::::default(), Vc::::default(), @@ -458,7 +460,11 @@ async fn apply_typescript_types_options( resolve_options: Vc, ) -> Result> { let mut resolve_options = resolve_options.await?.clone_value(); - resolve_options.extensions = vec![".tsx".to_string(), ".ts".to_string(), ".d.ts".to_string()]; + resolve_options.extensions = vec![ + ".tsx".to_string().into(), + ".ts".to_string().into(), + ".d.ts".to_string().into(), + ]; resolve_options.into_package = resolve_options .into_package .drain(..) @@ -481,7 +487,7 @@ async fn apply_typescript_types_options( resolve_options .into_package .push(ResolveIntoPackage::MainField { - field: "types".to_string(), + field: "types".to_string().into(), }); for conditions in get_condition_maps(&mut resolve_options) { conditions.insert("types".to_string(), ConditionValue::Set); diff --git a/crates/turbopack-static/src/output_asset.rs b/crates/turbopack-static/src/output_asset.rs index 62b441d94be6b..d7a99a57b9ed2 100644 --- a/crates/turbopack-static/src/output_asset.rs +++ b/crates/turbopack-static/src/output_asset.rs @@ -45,7 +45,7 @@ impl OutputAsset for StaticAsset { let content_hash_b16 = turbo_tasks_hash::encode_hex(content_hash); let asset_path = self .chunking_context - .asset_path(content_hash_b16, self.source.ident()); + .asset_path(content_hash_b16.into(), self.source.ident()); Ok(AssetIdent::from_path(asset_path)) } } diff --git a/crates/turbopack-test-utils/src/snapshot.rs b/crates/turbopack-test-utils/src/snapshot.rs index c23f1dd12153b..8e102549f5fe4 100644 --- a/crates/turbopack-test-utils/src/snapshot.rs +++ b/crates/turbopack-test-utils/src/snapshot.rs @@ -48,7 +48,7 @@ pub async fn snapshot_issues>>( }; let hash = encode_hex(plain_issue.internal_hash_ref(true)); - let path = issues_path.join(format!("{title}-{}.txt", &hash[0..6])); + let path = issues_path.join(format!("{title}-{}.txt", &hash[0..6]).into()); if !seen.insert(path) { continue; } diff --git a/crates/turbopack-tests/tests/execution.rs b/crates/turbopack-tests/tests/execution.rs index 05ce103ee09ad..5d3ff28a53e57 100644 --- a/crates/turbopack-tests/tests/execution.rs +++ b/crates/turbopack-tests/tests/execution.rs @@ -3,7 +3,7 @@ mod util; -use std::{collections::HashMap, path::PathBuf}; +use std::{collections::HashMap, path::PathBuf, sync::Arc}; use anyhow::{Context, Result}; use dunce::canonicalize; @@ -158,7 +158,7 @@ async fn run(resource: PathBuf, snapshot_mode: IssueSnapshotMode) -> Result Result> { - let resource_path = canonicalize(&resource)?; +async fn prepare_test(resource: Arc) -> Result> { + let resource_path = canonicalize(&**resource)?; assert!(resource_path.exists(), "{} does not exist", resource); assert!( resource_path.is_dir(), @@ -194,21 +194,23 @@ async fn prepare_test(resource: String) -> Result> { resource_path.to_str().unwrap() ); - let root_fs = DiskFileSystem::new("workspace".to_string(), REPO_ROOT.clone(), vec![]); - let project_fs = DiskFileSystem::new("project".to_string(), REPO_ROOT.clone(), vec![]); + let root_fs = DiskFileSystem::new("workspace".to_string().into(), REPO_ROOT.clone(), vec![]); + let project_fs = DiskFileSystem::new("project".to_string().into(), REPO_ROOT.clone(), vec![]); let project_root = project_fs.root(); - let relative_path = resource_path.strip_prefix(&*REPO_ROOT).context(format!( + let 
relative_path = resource_path.strip_prefix(&**REPO_ROOT).context(format!( "stripping repo root {:?} from resource path {:?}", &*REPO_ROOT, resource_path.display() ))?; - let relative_path = sys_to_unix(relative_path.to_str().unwrap()); - let path = root_fs.root().join(relative_path.to_string()); - let project_path = project_root.join(relative_path.to_string()); - let tests_path = project_fs.root().join("crates/turbopack-tests".to_string()); + let relative_path = Arc::new(sys_to_unix(relative_path.to_str().unwrap()).to_string()); + let path = root_fs.root().join(relative_path.clone()); + let project_path = project_root.join(relative_path.clone()); + let tests_path = project_fs + .root() + .join("crates/turbopack-tests".to_string().into()); - let options_file = path.join("options.json".to_string()); + let options_file = path.join("options.json".to_string().into()); let mut options = TestOptions::default(); if matches!(*options_file.get_type().await?, FileSystemEntryType::File) { @@ -238,12 +240,12 @@ async fn run_test(prepared_test: Vc) -> Result> ref options, } = *prepared_test.await?; - let jest_runtime_path = tests_path.join("js/jest-runtime.ts".to_string()); - let jest_entry_path = tests_path.join("js/jest-entry.ts".to_string()); - let test_path = project_path.join("input/index.js".to_string()); + let jest_runtime_path = tests_path.join("js/jest-runtime.ts".to_string().into()); + let jest_entry_path = tests_path.join("js/jest-entry.ts".to_string().into()); + let test_path = project_path.join("input/index.js".to_string().into()); - let chunk_root_path = path.join("output".to_string()); - let static_root_path = path.join("static".to_string()); + let chunk_root_path = path.join("output".to_string().into()); + let static_root_path = path.join("static".to_string().into()); let env = Environment::new(Value::new(ExecutionEnvironment::NodeJsBuildTime( NodeJsEnvironment::default().into(), @@ -263,7 +265,8 @@ async fn run_test(prepared_test: Vc) -> Result> let mut import_map = ImportMap::empty(); import_map.insert_wildcard_alias( "esm-external/", - ImportMapping::External(Some("*".to_string()), ExternalType::EcmaScriptModule).cell(), + ImportMapping::External(Some("*".to_string().into()), ExternalType::EcmaScriptModule) + .cell(), ); let asset_context: Vc> = Vc::upcast(ModuleAssetContext::new( @@ -288,12 +291,12 @@ async fn run_test(prepared_test: Vc) -> Result> ResolveOptionsContext { enable_typescript: true, enable_node_modules: Some(project_root), - custom_conditions: vec!["development".to_string()], + custom_conditions: vec!["development".to_string().into()], rules: vec![( ContextCondition::InDirectory("node_modules".to_string()), ResolveOptionsContext { enable_node_modules: Some(project_root), - custom_conditions: vec!["development".to_string()], + custom_conditions: vec!["development".to_string().into()], browser: true, ..Default::default() } @@ -386,7 +389,7 @@ async fn snapshot_issues( turbopack_test_utils::snapshot::snapshot_issues( plain_issues, - path.join("issues".to_string()), + path.join("issues".to_string().into()), &REPO_ROOT, ) .await diff --git a/crates/turbopack-tests/tests/snapshot.rs b/crates/turbopack-tests/tests/snapshot.rs index 3b5de21750c11..ee21b87b6cd93 100644 --- a/crates/turbopack-tests/tests/snapshot.rs +++ b/crates/turbopack-tests/tests/snapshot.rs @@ -6,6 +6,7 @@ use std::{ collections::{HashMap, HashSet, VecDeque}, fs, path::PathBuf, + sync::Arc, }; use anyhow::{bail, Context, Result}; @@ -155,7 +156,7 @@ async fn run(resource: PathBuf) -> Result<()> { let tt = 
TurboTasks::new(MemoryBackend::default()); let task = tt.spawn_once_task(async move { - let out = run_test(resource.to_str().unwrap().to_string()); + let out = run_test(resource.to_str().unwrap().to_string().into()); let _ = out.resolve_strongly_consistent().await?; let captured_issues = out.peek_issues_with_path().await?; @@ -165,9 +166,13 @@ async fn run(resource: PathBuf) -> Result<()> { .try_join() .await?; - snapshot_issues(plain_issues, out.join("issues".to_string()), &REPO_ROOT) - .await - .context("Unable to handle issues")?; + snapshot_issues( + plain_issues, + out.join("issues".to_string().into()), + &REPO_ROOT, + ) + .await + .context("Unable to handle issues")?; Ok(Vc::<()>::default()) }); tt.wait_task_completion(task, true).await?; @@ -176,8 +181,8 @@ async fn run(resource: PathBuf) -> Result<()> { } #[turbo_tasks::function] -async fn run_test(resource: String) -> Result> { - let test_path = canonicalize(&resource)?; +async fn run_test(resource: Arc) -> Result> { + let test_path = canonicalize(&**resource)?; assert!(test_path.exists(), "{} does not exist", resource); assert!( test_path.is_dir(), @@ -190,16 +195,16 @@ async fn run_test(resource: String) -> Result> { Err(_) => SnapshotOptions::default(), Ok(options_str) => parse_json_with_source_context(&options_str).unwrap(), }; - let root_fs = DiskFileSystem::new("workspace".to_string(), REPO_ROOT.clone(), vec![]); - let project_fs = DiskFileSystem::new("project".to_string(), REPO_ROOT.clone(), vec![]); + let root_fs = DiskFileSystem::new("workspace".to_string().into(), REPO_ROOT.clone(), vec![]); + let project_fs = DiskFileSystem::new("project".to_string().into(), REPO_ROOT.clone(), vec![]); let project_root = project_fs.root(); - let relative_path = test_path.strip_prefix(&*REPO_ROOT)?; - let relative_path = sys_to_unix(relative_path.to_str().unwrap()); - let path = root_fs.root().join(relative_path.to_string()); - let project_path = project_root.join(relative_path.to_string()); + let relative_path = test_path.strip_prefix(&**REPO_ROOT)?; + let relative_path = Arc::new(sys_to_unix(relative_path.to_str().unwrap()).to_string()); + let path = root_fs.root().join(relative_path.clone()); + let project_path = project_root.join(relative_path.clone()); - let entry_asset = project_path.join(options.entry); + let entry_asset = project_path.join(options.entry.into()); let env = Environment::new(Value::new(match options.environment { SnapshotEnvironment::Browser => { @@ -286,12 +291,12 @@ async fn run_test(resource: String) -> Result> { enable_typescript: true, enable_react: true, enable_node_modules: Some(project_root), - custom_conditions: vec!["development".to_string()], + custom_conditions: vec!["development".to_string().into()], rules: vec![( ContextCondition::InDirectory("node_modules".to_string()), ResolveOptionsContext { enable_node_modules: Some(project_root), - custom_conditions: vec!["development".to_string()], + custom_conditions: vec!["development".to_string().into()], ..Default::default() } .cell(), @@ -306,8 +311,8 @@ async fn run_test(resource: String) -> Result> { .await? 
         .map(|asset| EvaluatableAssets::one(asset.to_evaluatable(asset_context)));
-    let chunk_root_path = path.join("output".to_string());
-    let static_root_path = path.join("static".to_string());
+    let chunk_root_path = path.join("output".to_string().into());
+    let static_root_path = path.join("static".to_string().into());
     let chunking_context: Vc> = match options.runtime {
         Runtime::Dev => Vc::upcast(
@@ -380,9 +385,10 @@ async fn run_test(resource: String) -> Result> {
                     .await?
                     .as_deref()
                     .unwrap()
-                    .to_string(),
+                    .to_string()
+                    .into(),
             )
-            .with_extension("entry.js".to_string()),
+            .with_extension("entry.js".to_string().into()),
             Vc::upcast(ecmascript),
             runtime_entries
                 .unwrap_or_else(EvaluatableAssets::empty)
@@ -466,7 +472,7 @@ async fn maybe_load_env(
     _context: Vc>,
     path: Vc,
 ) -> Result>>> {
-    let dotenv_path = path.join("input/.env".to_string());
+    let dotenv_path = path.join("input/.env".to_string().into());
     if !dotenv_path.read().await?.is_content() {
         return Ok(None);
diff --git a/crates/turbopack-tests/tests/util.rs b/crates/turbopack-tests/tests/util.rs
index 6723b8e7f6c93..e805d4d40d92c 100644
--- a/crates/turbopack-tests/tests/util.rs
+++ b/crates/turbopack-tests/tests/util.rs
@@ -1,15 +1,16 @@
-use std::path::PathBuf;
+use std::{path::PathBuf, sync::Arc};
 use dunce::canonicalize;
 use once_cell::sync::Lazy;
 /// The turbo repo root. Should be used as the root when building with turbopack
 /// against fixtures in this crate.
-pub static REPO_ROOT: Lazy = Lazy::new(|| {
+pub static REPO_ROOT: Lazy> = Lazy::new(|| {
     let package_root = PathBuf::from(env!("TURBO_PNPM_WORKSPACE_DIR"));
     canonicalize(package_root)
         .unwrap()
         .to_str()
         .unwrap()
         .to_string()
+        .into()
 });
diff --git a/crates/turbopack-wasm/src/loader.rs b/crates/turbopack-wasm/src/loader.rs
index ac81ebda5d5f5..6c1be25e4b507 100644
--- a/crates/turbopack-wasm/src/loader.rs
+++ b/crates/turbopack-wasm/src/loader.rs
@@ -52,7 +52,10 @@ pub(crate) async fn instantiating_loader_source(
     )?;
     Ok(Vc::upcast(VirtualSource::new(
-        source.ident().path().append("_.loader.mjs".to_string()),
+        source
+            .ident()
+            .path()
+            .append("_.loader.mjs".to_string().into()),
         AssetContent::file(File::from(code).into()),
     )))
 }
@@ -74,7 +77,10 @@ pub(crate) async fn compiling_loader_source(
     };
     Ok(Vc::upcast(VirtualSource::new(
-        source.ident().path().append("_.loader.mjs".to_string()),
+        source
+            .ident()
+            .path()
+            .append("_.loader.mjs".to_string().into()),
         AssetContent::file(File::from(code).into()),
     )))
 }
diff --git a/crates/turbopack-wasm/src/output_asset.rs b/crates/turbopack-wasm/src/output_asset.rs
index e799d52736db4..2f7fd0705e88e 100644
--- a/crates/turbopack-wasm/src/output_asset.rs
+++ b/crates/turbopack-wasm/src/output_asset.rs
@@ -43,7 +43,9 @@ impl OutputAsset for WebAssemblyAsset {
     async fn ident(&self) -> Result> {
         let ident = self.source.ident().with_modifier(modifier());
-        let asset_path = self.chunking_context.chunk_path(ident, ".wasm".to_string());
+        let asset_path = self
+            .chunking_context
+            .chunk_path(ident, ".wasm".to_string().into());
         Ok(AssetIdent::from_path(asset_path))
     }
diff --git a/crates/turbopack-wasm/src/source.rs b/crates/turbopack-wasm/src/source.rs
index 246a57d9548c6..a944817f9f383 100644
--- a/crates/turbopack-wasm/src/source.rs
+++ b/crates/turbopack-wasm/src/source.rs
@@ -52,10 +52,12 @@ impl Source for WebAssemblySource {
     fn ident(&self) -> Vc {
         match self.source_ty {
             WebAssemblySourceType::Binary => self.source.ident(),
-            WebAssemblySourceType::Text => self
-                .source
-                .ident()
-                .with_path(self.source.ident().path().append("_.wasm".to_string())),
+            WebAssemblySourceType::Text => self.source.ident().with_path(
+                self.source
+                    .ident()
+                    .path()
+                    .append("_.wasm".to_string().into()),
+            ),
         }
     }
 }
diff --git a/crates/turbopack/benches/node_file_trace.rs b/crates/turbopack/benches/node_file_trace.rs
index 0dd71a849bca4..64677b8e90833 100644
--- a/crates/turbopack/benches/node_file_trace.rs
+++ b/crates/turbopack/benches/node_file_trace.rs
@@ -72,8 +72,12 @@ fn bench_emit(b: &mut Bencher, bench_input: &BenchInput) {
         let input = bench_input.input.clone();
         async move {
             let task = tt.spawn_once_task(async move {
-                let input_fs = DiskFileSystem::new("tests".to_string(), tests_root.clone(), vec![]);
-                let input = input_fs.root().join(input.clone());
+                let input_fs = DiskFileSystem::new(
+                    "tests".to_string().into(),
+                    tests_root.clone().into(),
+                    vec![],
+                );
+                let input = input_fs.root().join(input.clone().into());
                 let input_dir = input.parent().parent();
                 let output_fs: Vc = NullFileSystem.into();
diff --git a/crates/turbopack/examples/turbopack.rs b/crates/turbopack/examples/turbopack.rs
index f73cb1cde4162..42142a1afbfbf 100644
--- a/crates/turbopack/examples/turbopack.rs
+++ b/crates/turbopack/examples/turbopack.rs
@@ -31,14 +31,18 @@ async fn main() -> Result<()> {
     let task = tt.spawn_root_task(|| {
         Box::pin(async {
             let root = current_dir().unwrap().to_str().unwrap().to_string();
-            let disk_fs = DiskFileSystem::new(PROJECT_FILESYSTEM_NAME.to_string(), root, vec![]);
+            let disk_fs = DiskFileSystem::new(
+                PROJECT_FILESYSTEM_NAME.to_string().into(),
+                root.into(),
+                vec![],
+            );
             disk_fs.await?.start_watching()?;
             // Smart Pointer cast
             let fs: Vc> = Vc::upcast(disk_fs);
-            let input = fs.root().join("demo".to_string());
-            let output = fs.root().join("out".to_string());
-            let entry = fs.root().join("demo/index.js".to_string());
+            let input = fs.root().join("demo".to_string().into());
+            let output = fs.root().join("out".to_string().into());
+            let entry = fs.root().join("demo/index.js".to_string().into());
             let source = FileSource::new(entry);
             let module_asset_context = turbopack::ModuleAssetContext::new(
@@ -51,7 +55,7 @@ async fn main() -> Result<()> {
                     enable_typescript: true,
                     enable_react: true,
                     enable_node_modules: Some(fs.root()),
-                    custom_conditions: vec!["development".to_string()],
+                    custom_conditions: vec!["development".to_string().into()],
                     ..Default::default()
                 }
                 .cell(),
diff --git a/crates/turbopack/src/evaluate_context.rs b/crates/turbopack/src/evaluate_context.rs
index 1d5d1ed39892e..50058a4f33b8a 100644
--- a/crates/turbopack/src/evaluate_context.rs
+++ b/crates/turbopack/src/evaluate_context.rs
@@ -1,3 +1,5 @@
+use std::sync::Arc;
+
 use anyhow::Result;
 use turbo_tasks::{Value, Vc};
 use turbo_tasks_env::ProcessEnv;
@@ -30,7 +32,7 @@ pub async fn node_evaluate_asset_context(
     execution_context: Vc,
     import_map: Option>,
     transitions: Option>,
-    layer: String,
+    layer: Arc,
 ) -> Result>> {
     let mut import_map = if let Some(import_map) = import_map {
         import_map.await?.clone_value()
@@ -40,18 +42,21 @@ pub async fn node_evaluate_asset_context(
     import_map.insert_wildcard_alias(
         "@vercel/turbopack-node/",
         ImportMapping::PrimaryAlternative(
-            "./*".to_string(),
+            "./*".to_string().into(),
             Some(turbopack_node::embed_js::embed_fs().root()),
         )
         .cell(),
     );
     let import_map = import_map.cell();
-    let node_env =
-        if let Some(node_env) = &*execution_context.env().read("NODE_ENV".to_string()).await?
-        {
-            node_env.clone()
-        } else {
-            "development".to_string()
-        };
+    let node_env = if let Some(node_env) = &*execution_context
+        .env()
+        .read("NODE_ENV".to_string().into())
+        .await?
+    {
+        node_env.clone()
+    } else {
+        "development".to_string()
+    };
     // base context used for node_modules (and context for app code will be derived
     // from this)
@@ -59,7 +64,7 @@ pub async fn node_evaluate_asset_context(
         enable_node_modules: Some(execution_context.project_path().root().resolve().await?),
         enable_node_externals: true,
         enable_node_native_modules: true,
-        custom_conditions: vec![node_env.clone(), "node".to_string()],
+        custom_conditions: vec![node_env.clone().into(), "node".to_string().into()],
         ..Default::default()
     };
     // app code context, includes a rule to switch to the node_modules context
@@ -93,6 +98,6 @@ pub async fn node_evaluate_asset_context(
         }
         .cell(),
         resolve_options_context,
-        Vc::cell(layer),
+        Vc::cell((*layer).clone()),
     )))
 }
diff --git a/crates/turbopack/src/lib.rs b/crates/turbopack/src/lib.rs
index 1cea6fdd3ce1c..098f701f561c4 100644
--- a/crates/turbopack/src/lib.rs
+++ b/crates/turbopack/src/lib.rs
@@ -17,6 +17,7 @@ pub(crate) mod unsupported_sass;
 use std::{
     collections::{HashMap, HashSet},
     mem::swap,
+    sync::Arc,
 };
 use anyhow::{bail, Result};
@@ -716,7 +717,7 @@ impl AssetContext for ModuleAssetContext {
     }
 #[turbo_tasks::function]
-    async fn with_transition(&self, transition: String) -> Result>> {
+    async fn with_transition(&self, transition: Arc) -> Result>> {
         Ok(
             if let Some(transition) = self.transitions.await?.get(&transition) {
                 Vc::upcast(ModuleAssetContext::new_transition(
diff --git a/crates/turbopack/src/module_options/mod.rs b/crates/turbopack/src/module_options/mod.rs
index 571490016cec0..19facb1129a97 100644
--- a/crates/turbopack/src/module_options/mod.rs
+++ b/crates/turbopack/src/module_options/mod.rs
@@ -3,6 +3,8 @@ pub mod module_options_context;
 pub mod module_rule;
 pub mod rule_condition;
+use std::sync::Arc;
+
 use anyhow::{Context, Result};
 pub use custom_module_type::CustomModuleType;
 pub use module_options_context::*;
@@ -25,7 +27,7 @@ use crate::{
 #[turbo_tasks::function]
 async fn package_import_map_from_import_mapping(
-    package_name: String,
+    package_name: Arc,
     package_mapping: Vc,
 ) -> Result> {
     let mut import_map = ImportMap::default();
@@ -38,7 +40,7 @@ async fn package_import_map_from_import_mapping(
 #[turbo_tasks::function]
 async fn package_import_map_from_context(
-    package_name: String,
+    package_name: Arc,
     context_path: Vc,
 ) -> Result> {
     let mut import_map = ImportMap::default();
@@ -415,9 +417,12 @@ impl ModuleOptions {
                 .context("execution_context is required for the postcss_transform")?;
             let import_map = if let Some(postcss_package) = options.postcss_package {
-                package_import_map_from_import_mapping("postcss".to_string(), postcss_package)
+                package_import_map_from_import_mapping(
+                    "postcss".to_string().into(),
+                    postcss_package,
+                )
             } else {
-                package_import_map_from_context("postcss".to_string(), path)
+                package_import_map_from_context("postcss".to_string().into(), path)
             };
             rules.push(ModuleRule::new(
@@ -428,7 +433,7 @@ impl ModuleOptions {
                         execution_context,
                         Some(import_map),
                         None,
-                        "postcss".to_string(),
+                        "postcss".to_string().into(),
                     ),
                     execution_context,
                     options.config_location,
@@ -542,23 +547,23 @@ impl ModuleOptions {
                 webpack_loaders_options.loader_runner_package
             {
                 package_import_map_from_import_mapping(
-                    "loader-runner".to_string(),
+                    "loader-runner".to_string().into(),
                     loader_runner_package,
                 )
             } else {
-                package_import_map_from_context("loader-runner".to_string(), path)
+                package_import_map_from_context("loader-runner".to_string().into(), path)
             };
             for (glob, rule) in webpack_loaders_options.rules.await?.iter() {
                 rules.push(ModuleRule::new(
                     ModuleRuleCondition::All(vec![
                         if !glob.contains('/') {
                             ModuleRuleCondition::ResourceBasePathGlob(
-                                Glob::new(glob.clone()).await?,
+                                Glob::new(glob.clone().into()).await?,
                             )
                         } else {
                             ModuleRuleCondition::ResourcePathGlob {
                                 base: execution_context.project_path().await?,
-                                glob: Glob::new(glob.clone()).await?,
+                                glob: Glob::new(glob.clone().into()).await?,
                             }
                         },
                         ModuleRuleCondition::not(ModuleRuleCondition::ResourceIsVirtualSource),
@@ -576,7 +581,7 @@ impl ModuleOptions {
                             execution_context,
                             Some(import_map),
                             None,
-                            "webpack_loaders".to_string(),
+                            "webpack_loaders".to_string().into(),
                         ),
                         execution_context,
                         rule.loaders,
diff --git a/crates/turbopack/src/module_options/module_options_context.rs b/crates/turbopack/src/module_options/module_options_context.rs
index f8b4e6b841dab..38d55c3720bc5 100644
--- a/crates/turbopack/src/module_options/module_options_context.rs
+++ b/crates/turbopack/src/module_options/module_options_context.rs
@@ -1,3 +1,5 @@
+use std::sync::Arc;
+
 use indexmap::IndexMap;
 use serde::{Deserialize, Serialize};
 use turbo_tasks::{trace::TraceRawVcs, ValueDefault, Vc};
@@ -16,7 +18,7 @@ use super::ModuleRule;
 #[derive(Clone, PartialEq, Eq, Debug, TraceRawVcs, Serialize, Deserialize)]
 pub struct LoaderRuleItem {
     pub loaders: Vc,
-    pub rename_as: Option,
+    pub rename_as: Option>,
 }
 #[derive(Default)]
diff --git a/crates/turbopack/src/transition/mod.rs b/crates/turbopack/src/transition/mod.rs
index 1a84e66fca005..e2c88dc13a8cb 100644
--- a/crates/turbopack/src/transition/mod.rs
+++ b/crates/turbopack/src/transition/mod.rs
@@ -1,7 +1,7 @@
 pub(crate) mod context_transition;
 pub(crate) mod full_context_transition;
-use std::collections::HashMap;
+use std::{collections::HashMap, sync::Arc};
 use anyhow::Result;
 pub use context_transition::ContextTransition;
@@ -100,7 +100,7 @@ pub trait Transition {
 }
 #[turbo_tasks::value(transparent)]
-pub struct TransitionsByName(HashMap>>);
+pub struct TransitionsByName(HashMap, Vc>>);
 #[turbo_tasks::value_impl]
 impl ValueDefault for TransitionsByName {
diff --git a/crates/turbopack/tests/node-file-trace.rs b/crates/turbopack/tests/node-file-trace.rs
index 5bda21adb9016..b6c7aa8a8d5e9 100644
--- a/crates/turbopack/tests/node-file-trace.rs
+++ b/crates/turbopack/tests/node-file-trace.rs
@@ -392,29 +392,32 @@ fn node_file_trace(
     for _ in 0..run_count {
         let bench_suites = bench_suites.clone();
-        let package_root = package_root.clone();
+        let package_root = Arc::new(package_root.clone());
         let input_string = input.clone();
-        let directory = directory.clone();
+        let directory = Arc::new(directory.clone());
         #[cfg(not(feature = "bench_against_node_nft"))]
-        let expected_stderr = expected_stderr.clone();
+        let expected_stderr = expected_stderr.clone().map(Arc::new);
         let task = async move {
             #[allow(unused)]
            let bench_suites = bench_suites.clone();
             #[cfg(feature = "bench_against_node_nft")]
             let before_start = Instant::now();
             let workspace_fs: Vc> = Vc::upcast(DiskFileSystem::new(
-                "workspace".to_string(),
-                package_root.clone(),
+                "workspace".to_string().into(),
+                package_root.clone().into(),
                 vec![],
             ));
             let input_dir = workspace_fs.root();
-            let input = input_dir.join(format!("tests/{input_string}"));
+            let input = input_dir.join(format!("tests/{input_string}").into());
             #[cfg(not(feature = "bench_against_node_nft"))]
             let original_output = exec_node(package_root, input);
-            let output_fs =
-                DiskFileSystem::new("output".to_string(), directory.clone(), vec![]);
+            let output_fs = DiskFileSystem::new(
+                "output".to_string().into(),
+                directory.clone().into(),
+                vec![],
+            );
             let output_dir = output_fs.root();
             let source = FileSource::new(input);
@@ -435,7 +438,7 @@ fn node_file_trace(
                 ResolveOptionsContext {
                     enable_node_native_modules: true,
                     enable_node_modules: Some(input_dir),
-                    custom_conditions: vec!["node".to_string()],
+                    custom_conditions: vec!["node".to_string().into()],
                     ..Default::default()
                 }
                 .cell(),
@@ -574,11 +577,11 @@ impl Display for CommandOutput {
 }
 #[turbo_tasks::function]
-async fn exec_node(directory: String, path: Vc) -> Result> {
+async fn exec_node(directory: Arc, path: Vc) -> Result> {
     let mut cmd = Command::new("node");
     let p = path.await?;
-    let f = Path::new(&directory).join(&p.path);
+    let f = Path::new(&*directory).join(&*p.path);
     let dir = f.parent().unwrap();
     println!("[CWD]: {}", dir.display());
     let label = path.to_string().await?;
@@ -673,15 +676,15 @@ fn diff(expected: &str, actual: &str) -> String {
 async fn assert_output(
     expected: Vc,
     actual: Vc,
-    expected_stderr: Option,
+    expected_stderr: Option>,
 ) -> Result> {
     let expected = expected.await?;
     let actual = actual.await?;
     Ok(CommandOutput::cell(CommandOutput {
         stdout: diff(&expected.stdout, &actual.stdout),
         stderr: if let Some(expected_stderr) = expected_stderr {
-            if actual.stderr.contains(&expected_stderr)
-                && expected.stderr.contains(&expected_stderr)
+            if actual.stderr.contains(&*expected_stderr)
+                && expected.stderr.contains(&*expected_stderr)
             {
                 String::new()
             } else {