Skip to content
New issue

Have a question about this project? Sign up for a free GitHub account to open an issue and contact its maintainers and the community.

By clicking “Sign up for GitHub”, you agree to our terms of service and privacy statement. We’ll occasionally send you account related emails.

Already on GitHub? Sign in to your account

Replace turbopack://[project]/... sourcemap uris with file://... in development #71489

Merged
merged 2 commits into from
Oct 24, 2024
Merged
Show file tree
Hide file tree
Changes from all commits
Commits
File filter

Filter by extension

Filter by extension


Conversations
Failed to load comments.
Loading
Jump to
Jump to file
Failed to load files.
Loading
Diff view
Diff view
4 changes: 2 additions & 2 deletions Cargo.lock

Some generated files are not rendered by default. Learn more about how customized files appear on GitHub.

3 changes: 3 additions & 0 deletions Cargo.toml
Original file line number Diff line number Diff line change
Expand Up @@ -216,3 +216,6 @@ unsize = "1.1.0"
url = "2.2.2"
urlencoding = "2.1.2"
webbrowser = "0.8.7"

[patch.crates-io]
sourcemap = { git = "https://github.com/wbinnssmith/rust-sourcemap", branch = "wbinnssmith/ignore-list" }
31 changes: 22 additions & 9 deletions crates/napi/src/next_api/project.rs
Original file line number Diff line number Diff line change
Expand Up @@ -18,12 +18,15 @@
TRACING_NEXT_OVERVIEW_TARGETS, TRACING_NEXT_TARGETS, TRACING_NEXT_TURBOPACK_TARGETS,
TRACING_NEXT_TURBO_TASKS_TARGETS,
};
use once_cell::sync::Lazy;
use rand::Rng;
use tokio::{io::AsyncWriteExt, time::Instant};
use tracing::Instrument;
use tracing_subscriber::{layer::SubscriberExt, util::SubscriberInitExt, EnvFilter, Registry};
use turbo_tasks::{Completion, RcStr, ReadRef, TransientInstance, UpdateInfo, Vc};
use turbo_tasks_fs::{DiskFileSystem, FileContent, FileSystem, FileSystemPath};
use turbo_tasks_fs::{
util::uri_from_file, DiskFileSystem, FileContent, FileSystem, FileSystemPath,
};
use turbopack_core::{
diagnostics::PlainDiagnostic,
error::PrettyPrintError,
Expand Down Expand Up @@ -52,6 +55,8 @@
/// Used by [`benchmark_file_io`]. This is a noisy benchmark, so set the
/// threshold high.
const SLOW_FILESYSTEM_THRESHOLD: Duration = Duration::from_millis(100);
/// Prefix used in source map URIs for files that live inside the project root,
/// e.g. `turbopack://[project]/`. Built once, lazily, from `SOURCE_MAP_PREFIX`.
static SOURCE_MAP_PREFIX_PROJECT: Lazy<String> =
Lazy::new(|| format!("{}[project]/", SOURCE_MAP_PREFIX));

#[napi(object)]
#[derive(Clone, Debug)]
Expand Down Expand Up @@ -929,7 +934,7 @@
}
}

/// Subscribes to lifecycle events of the compilation.

Check warning on line 937 in crates/napi/src/next_api/project.rs

View workflow job for this annotation

GitHub Actions / rustdoc check / build

public documentation for `project_update_info_subscribe` links to private item `UpdateMessage::Start`

Check warning on line 937 in crates/napi/src/next_api/project.rs

View workflow job for this annotation

GitHub Actions / rustdoc check / build

public documentation for `project_update_info_subscribe` links to private item `UpdateMessage::End`

Check warning on line 937 in crates/napi/src/next_api/project.rs

View workflow job for this annotation

GitHub Actions / rustdoc check / build

public documentation for `project_update_info_subscribe` links to private item `UpdateMessage::End`

Check warning on line 937 in crates/napi/src/next_api/project.rs

View workflow job for this annotation

GitHub Actions / rustdoc check / build

public documentation for `project_update_info_subscribe` links to private item `UpdateMessage::Start`
///
/// Emits an [UpdateMessage::Start] event when any computation starts.
/// Emits an [UpdateMessage::End] event when there was no computation for the
Expand Down Expand Up @@ -1085,7 +1090,7 @@

let (original_file, line, column, name) = match &*token {
Token::Original(token) => (
&token.original_file,
urlencoding::decode(&token.original_file)?.into_owned(),
// JS stack frames are 1-indexed, source map tokens are 0-indexed
Some(token.original_line as u32 + 1),
Some(token.original_column as u32 + 1),
Expand All @@ -1095,19 +1100,27 @@
let Some(file) = &token.guessed_original_file else {
return Ok(None);
};
(file, None, None, None)
(file.to_owned(), None, None, None)
}
};

let Some(source_file) = original_file.strip_prefix(SOURCE_MAP_PREFIX) else {
bail!("Original file ({}) outside project", original_file)
};

let project_path_uri =
uri_from_file(project.container.project().project_path(), None).await? + "/";
let (source_file, is_internal) =
if let Some(source_file) = source_file.strip_prefix("[project]/") {
if let Some(source_file) = original_file.strip_prefix(&project_path_uri) {
// Client code uses file://
(source_file, false)
} else {
} else if let Some(source_file) =
original_file.strip_prefix(&*SOURCE_MAP_PREFIX_PROJECT)
{
// Server code uses turbopack://[project]
// TODO should this also be file://?
(source_file, false)
} else if let Some(source_file) = original_file.strip_prefix(SOURCE_MAP_PREFIX) {
// All other code like turbopack://[turbopack] is internal code
(source_file, true)
} else {
bail!("Original file ({}) outside project", original_file)
};

Ok(Some(StackFrame {
Expand Down
2 changes: 1 addition & 1 deletion crates/next-core/src/next_client/context.rs
Original file line number Diff line number Diff line change
Expand Up @@ -375,7 +375,7 @@ pub async fn get_client_chunking_context(
.module_id_strategy(module_id_strategy);

if next_mode.is_development() {
builder = builder.hot_module_replacement();
builder = builder.hot_module_replacement().use_file_source_map_uris();
}

Ok(Vc::upcast(builder.build()))
Expand Down
1 change: 1 addition & 0 deletions turbopack/crates/turbo-tasks-fs/Cargo.toml
Original file line number Diff line number Diff line change
Expand Up @@ -49,6 +49,7 @@ tracing = { workspace = true }
turbo-tasks = { workspace = true }
turbo-tasks-hash = { workspace = true }
unicode-segmentation = { workspace = true }
urlencoding = { workspace = true }

[dev-dependencies]
criterion = { workspace = true, features = ["async_tokio"] }
Expand Down
30 changes: 29 additions & 1 deletion turbopack/crates/turbo-tasks-fs/src/util.rs
Original file line number Diff line number Diff line change
Expand Up @@ -4,7 +4,10 @@ use std::{
path::Path,
};

use anyhow::{anyhow, Result};
use anyhow::{anyhow, Context, Result};
use turbo_tasks::Vc;

use crate::{DiskFileSystem, FileSystemPath};

/// Joins two /-separated paths into a normalized path.
/// Paths are concatenated with /.
Expand Down Expand Up @@ -134,3 +137,28 @@ pub fn extract_disk_access<T>(value: io::Result<T>, path: &Path) -> Result<Optio
Err(e) => Err(anyhow!(e).context(format!("reading file {}", path.display()))),
}
}

/// Converts `path` (relative to `root`, or `root` itself when `path` is
/// `None`) into a percent-encoded `file://` URI.
///
/// Each path segment is percent-encoded individually so that the `/`
/// separators themselves are preserved in the resulting URI.
///
/// # Errors
///
/// Fails if `root`'s filesystem is not a [`DiskFileSystem`] (only disk paths
/// can be expressed as `file://` URIs), or if resolving the system path fails.
pub async fn uri_from_file(root: Vc<FileSystemPath>, path: Option<&str>) -> Result<String> {
    let root_fs = root.fs();
    let root_fs = &*Vc::try_resolve_downcast_type::<DiskFileSystem>(root_fs)
        .await?
        .context("Expected root to have a DiskFileSystem")?
        .await?;

    // Resolve to an absolute system path first, then normalize separators to
    // `/` before encoding.
    let sys_path = root_fs
        .to_sys_path(match path {
            Some(path) => root.join(path.into()),
            None => root,
        })
        .await?;

    // Percent-encode per segment (not the whole path) so `/` stays literal.
    let encoded = sys_to_unix(&sys_path.to_string_lossy())
        .split('/')
        .map(urlencoding::encode)
        .collect::<Vec<_>>()
        .join("/");

    Ok(format!("file://{encoded}"))
}
13 changes: 13 additions & 0 deletions turbopack/crates/turbopack-browser/src/chunking_context.rs
Original file line number Diff line number Diff line change
Expand Up @@ -43,6 +43,11 @@ impl BrowserChunkingContextBuilder {
self
}

/// Makes generated source maps reference original sources via absolute
/// `file://` URIs instead of `turbopack://[project]/...` URIs.
pub fn use_file_source_map_uris(mut self) -> Self {
self.chunking_context.should_use_file_source_map_uris = true;
self
}

pub fn asset_base_path(mut self, asset_base_path: Vc<Option<RcStr>>) -> Self {
self.chunking_context.asset_base_path = asset_base_path;
self
Expand Down Expand Up @@ -101,6 +106,8 @@ pub struct BrowserChunkingContext {
/// This path get stripped off of chunk paths before generating output asset
/// paths.
context_path: Vc<FileSystemPath>,
/// Whether to write file sources as file:// paths in source maps
should_use_file_source_map_uris: bool,
/// This path is used to compute the url to request chunks from
output_root: Vc<FileSystemPath>,
/// This path is used to compute the url to request assets from
Expand Down Expand Up @@ -150,6 +157,7 @@ impl BrowserChunkingContext {
output_root,
client_root,
chunk_root_path,
should_use_file_source_map_uris: false,
reference_chunk_source_maps: true,
reference_css_chunk_source_maps: true,
asset_root_path,
Expand Down Expand Up @@ -350,6 +358,11 @@ impl ChunkingContext for BrowserChunkingContext {
Vc::cell(self.enable_hot_module_replacement)
}

/// Whether original source paths in emitted source maps should be written
/// as `file://` URIs (set via the builder's `use_file_source_map_uris`).
#[turbo_tasks::function]
fn should_use_file_source_map_uris(&self) -> Vc<bool> {
Vc::cell(self.should_use_file_source_map_uris)
}

#[turbo_tasks::function]
async fn chunk_group(
self: Vc<Self>,
Expand Down
1 change: 1 addition & 0 deletions turbopack/crates/turbopack-cli/src/dev/web_entry_source.rs
Original file line number Diff line number Diff line change
Expand Up @@ -46,6 +46,7 @@ pub fn get_client_chunking_context(
RuntimeType::Development,
)
.hot_module_replacement()
.use_file_source_map_uris()
.build(),
)
}
Expand Down
2 changes: 2 additions & 0 deletions turbopack/crates/turbopack-core/src/chunk/chunking_context.rs
Original file line number Diff line number Diff line change
Expand Up @@ -51,6 +51,8 @@ pub struct EntryChunkGroupResult {
#[turbo_tasks::value_trait]
pub trait ChunkingContext {
fn name(self: Vc<Self>) -> Vc<RcStr>;
fn should_use_file_source_map_uris(self: Vc<Self>) -> Vc<bool>;
// Often the project root
fn context_path(self: Vc<Self>) -> Vc<FileSystemPath>;
fn output_root(self: Vc<Self>) -> Vc<FileSystemPath>;

Expand Down
82 changes: 79 additions & 3 deletions turbopack/crates/turbopack-core/src/code_builder.rs
Original file line number Diff line number Diff line change
Expand Up @@ -4,14 +4,20 @@ use std::{
ops,
};

use anyhow::Result;
use anyhow::{Context, Result};
use indexmap::{IndexMap, IndexSet};
use turbo_tasks::Vc;
use turbo_tasks_fs::rope::{Rope, RopeBuilder};
use turbo_tasks_fs::{
rope::{Rope, RopeBuilder},
util::uri_from_file,
DiskFileSystem, FileSystemPath,
};
use turbo_tasks_hash::hash_xxh3_hash64;

use crate::{
source_map::{GenerateSourceMap, OptionSourceMap, SourceMap, SourceMapSection},
source_pos::SourcePos,
SOURCE_MAP_PREFIX,
};

/// A mapping of byte-offset in the code string to an associated source map.
Expand Down Expand Up @@ -176,7 +182,31 @@ impl GenerateSourceMap for Code {
None => SourceMap::empty(),
Some(map) => match *map.generate_source_map().await? {
None => SourceMap::empty(),
Some(map) => map,
Some(map) => {
let map = &*map.await?;
let map = map.to_source_map().await?;
match map.as_regular_source_map() {
None => SourceMap::empty(),
Some(map) => {
let mut map = map.into_owned();
let mut ignored_ids = IndexSet::new();
for (src_id, src) in map.sources().enumerate() {
if src.starts_with("turbopack://[next]")
|| src.starts_with("turbopack://[turbopack]")
|| src.contains("/node_modules/")
{
ignored_ids.insert(src_id);
}
}

for ignored_id in ignored_ids {
map.add_to_ignore_list(ignored_id as _);
}

SourceMap::new_decoded(sourcemap::DecodedMap::Regular(map)).cell()
}
}
}
},
};

Expand All @@ -197,3 +227,49 @@ impl Code {
Vc::cell(hash)
}
}

/// Turns `turbopack://[project]/` references in sourcemap sources into
/// absolute `file://` uris. This is useful for debugging environments.
///
/// The prefix that gets rewritten is derived from the name of `context_path`'s
/// filesystem (usually `[project]`); sources outside that prefix are left
/// untouched. Returns `None` when there is no map or it cannot be flattened
/// into a regular (non-sectioned) source map.
#[turbo_tasks::function]
pub async fn fileify_source_map(
    map: Vc<OptionSourceMap>,
    context_path: Vc<FileSystemPath>,
) -> Result<Vc<OptionSourceMap>> {
    let Some(map) = &*map.await? else {
        return Ok(OptionSourceMap::none());
    };

    // Flatten any sectioned map into a single regular map we can rewrite.
    let flattened = map.await?.to_source_map().await?;
    let Some(flattened) = flattened.as_regular_source_map() else {
        return Ok(OptionSourceMap::none());
    };

    let context_fs = context_path.fs();
    let context_fs = &*Vc::try_resolve_downcast_type::<DiskFileSystem>(context_fs)
        .await?
        .context("Expected the chunking context to have a DiskFileSystem")?
        .await?;
    // e.g. "turbopack://[project]/"
    let prefix = format!("{}[{}]/", SOURCE_MAP_PREFIX, context_fs.name);

    let mut transformed = flattened.into_owned();

    // `sources()` borrows the map immutably, so collect replacements first and
    // apply them afterwards. Only sources matching the prefix need updating.
    let mut updates = IndexMap::new();
    for (src_id, src) in transformed.sources().enumerate() {
        if let Some(stripped) = src.strip_prefix(&prefix) {
            updates.insert(src_id, uri_from_file(context_path, Some(stripped)).await?);
        }
    }

    for (src_id, src) in updates {
        transformed.set_source(src_id as _, &src);
    }

    Ok(Vc::cell(Some(
        SourceMap::new_decoded(sourcemap::DecodedMap::Regular(transformed)).cell(),
    )))
}
29 changes: 21 additions & 8 deletions turbopack/crates/turbopack-core/src/source_map/mod.rs
Original file line number Diff line number Diff line change
@@ -1,6 +1,7 @@
use std::{borrow::Cow, io::Write, ops::Deref, sync::Arc};

use anyhow::Result;
use indexmap::IndexSet;
use once_cell::sync::Lazy;
use ref_cast::RefCast;
use regex::Regex;
Expand Down Expand Up @@ -406,18 +407,30 @@ impl SourceMap {
.collect::<Vec<_>>();
let mut new_sources = Vec::with_capacity(count);
let mut new_source_contents = Vec::with_capacity(count);
for (source, source_content) in sources.into_iter().zip(source_contents.into_iter()) {
let mut ignored_sources = IndexSet::new();
for (src_id, (source, source_content)) in sources
.into_iter()
.zip(source_contents.into_iter())
.enumerate()
{
let (source, name) = resolve_source(source, source_content, origin).await?;
if source.starts_with("turbopack://[next]")
|| source.starts_with("turbopack://[turbopack]")
|| source.contains("/node_modules/")
{
ignored_sources.insert(src_id);
}
new_sources.push(source);
new_source_contents.push(Some(name));
}
Ok(RegularMap::new(
file,
tokens,
names,
new_sources,
Some(new_source_contents),
))
let mut map =
RegularMap::new(file, tokens, names, new_sources, Some(new_source_contents));

for ignored_source in ignored_sources {
map.add_to_ignore_list(ignored_source as _);
}

Ok(map)
}
async fn decoded_map_with_resolved_sources(
map: &CrateMapWrapper,
Expand Down
21 changes: 19 additions & 2 deletions turbopack/crates/turbopack-css/src/chunk/mod.rs
Original file line number Diff line number Diff line change
Expand Up @@ -14,7 +14,7 @@ use turbopack_core::{
round_chunk_item_size, AsyncModuleInfo, Chunk, ChunkItem, ChunkItemWithAsyncModuleInfo,
ChunkType, ChunkableModule, ChunkingContext, ModuleId, OutputChunk, OutputChunkRuntimeInfo,
},
code_builder::{Code, CodeBuilder},
code_builder::{fileify_source_map, Code, CodeBuilder},
ident::AssetIdent,
introspect::{
module::IntrospectableModule,
Expand Down Expand Up @@ -78,7 +78,24 @@ impl CssChunk {
writeln!(body, "/* {} */", id)?;
let close = write_import_context(&mut body, content.import_context).await?;

body.push_source(&content.inner_code, content.source_map.map(Vc::upcast));
let source_map = if *self
.chunking_context()
.should_use_file_source_map_uris()
.await?
{
let source_map = content.source_map.map(|m| m.generate_source_map());
match source_map {
Some(map) => {
(*(fileify_source_map(map, self.chunking_context().context_path()).await?))
.map(Vc::upcast)
}
None => None,
}
} else {
content.source_map.map(Vc::upcast)
};

body.push_source(&content.inner_code, source_map);

writeln!(body, "{close}")?;
writeln!(body)?;
Expand Down
Loading
Loading