Better dotenv error messages #4464

Merged
merged 6 commits on Apr 6, 2023
46 changes: 28 additions & 18 deletions crates/turbo-tasks-env/src/dotenv.rs
@@ -22,21 +22,22 @@ impl DotenvProcessEnvVc {
     pub fn new(prior: Option<ProcessEnvVc>, path: FileSystemPathVc) -> Self {
         DotenvProcessEnv { prior, path }.cell()
     }
-}
 
-#[turbo_tasks::value_impl]
-impl ProcessEnv for DotenvProcessEnv {
     #[turbo_tasks::function]
-    async fn read_all(&self) -> Result<EnvMapVc> {
-        let prior = if let Some(p) = self.prior {
-            Some(p.read_all().await?)
-        } else {
-            None
-        };
-        let empty = IndexMap::new();
-        let prior = prior.as_deref().unwrap_or(&empty);
+    pub async fn read_prior(self) -> Result<EnvMapVc> {
+        let this = self.await?;
+        match this.prior {
+            None => Ok(EnvMapVc::empty()),
+            Some(p) => Ok(p.read_all()),
+        }
+    }
+
+    #[turbo_tasks::function]
+    pub async fn read_all_with_prior(self, prior: EnvMapVc) -> Result<EnvMapVc> {
+        let this = self.await?;
+        let prior = prior.await?;
 
-        let file = self.path.read().await?;
+        let file = this.path.read().await?;
         if let FileContent::Content(f) = &*file {
             let res;
             let vars;
@@ -48,7 +49,7 @@ impl ProcessEnv for DotenvProcessEnv {
                 // state.
                 let initial = env::vars().collect();
 
-                restore_env(&initial, prior, &lock);
+                restore_env(&initial, &prior, &lock);
 
                 // from_read will load parse and evalute the Read, and set variables
                 // into the global env. If a later dotenv defines an already defined
@@ -59,20 +60,29 @@ impl ProcessEnv for DotenvProcessEnv {
                 restore_env(&vars, &initial, &lock);
             }
 
-            if res.is_err() {
-                res.context(anyhow!(
+            if let Err(e) = res {
+                return Err(e).context(anyhow!(
                     "unable to read {} for env vars",
-                    self.path.to_string().await?
-                ))?;
+                    this.path.to_string().await?
+                ));
             }
 
             Ok(EnvMapVc::cell(vars))
         } else {
-            Ok(EnvMapVc::cell(prior.clone()))
+            Ok(EnvMapVc::cell(prior.clone_value()))
         }
     }
 }
 
+#[turbo_tasks::value_impl]
+impl ProcessEnv for DotenvProcessEnv {
+    #[turbo_tasks::function]
+    async fn read_all(self_vc: DotenvProcessEnvVc) -> Result<EnvMapVc> {
+        let prior = self_vc.read_prior();
+        Ok(self_vc.read_all_with_prior(prior))
+    }
+}
+
 /// Restores the global env variables to mirror `to`.
 fn restore_env(
     from: &IndexMap<String, String>,
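
The body of `restore_env` is collapsed in this diff. As a rough sketch of the behavior the call sites above imply (an assumption, not the actual implementation; the real function also takes a lock guard argument that serializes mutation of the global process env, omitted here):

```rust
use indexmap::IndexMap;
use std::env;

// Hypothetical sketch: make the global process env mirror `to`, assuming
// `from` is a snapshot of the current global state.
fn restore_env_sketch(from: &IndexMap<String, String>, to: &IndexMap<String, String>) {
    // Remove anything present in `from` that `to` does not define...
    for key in from.keys() {
        if !to.contains_key(key) {
            env::remove_var(key);
        }
    }
    // ...then (re)set every variable to its value in `to`.
    for (key, value) in to {
        env::set_var(key, value);
    }
}
```
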
7 changes: 7 additions & 0 deletions crates/turbo-tasks/src/util.rs
@@ -84,6 +84,13 @@ impl<'de> Deserialize<'de> for SharedError {
     }
 }
 
+impl Deref for SharedError {
+    type Target = Arc<Error>;
+    fn deref(&self) -> &Self::Target {
+        &self.inner
+    }
+}
+
 pub struct FormatDuration(pub Duration);
 
 impl Display for FormatDuration {
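
The `Deref` impl presumably exists so that code holding a `SharedError` can call `anyhow::Error` methods through auto-deref (`SharedError` to `Arc<Error>` to `Error`) instead of reaching into the inner `Arc`. A minimal usage sketch, assuming `SharedError` is reachable at `turbo_tasks::util::SharedError`:

```rust
use turbo_tasks::util::SharedError;

// root_cause() is an anyhow::Error method; it resolves on SharedError only
// because of the Deref chain added above.
fn describe(err: &SharedError) -> String {
    err.root_cause().to_string()
}
```
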
2 changes: 1 addition & 1 deletion crates/turbopack-cli-utils/src/issue.rs
@@ -369,7 +369,7 @@ impl IssueReporter for ConsoleUi {
             .iter_with_shortest_path()
             .map(|(issue, path)| async move {
                 let plain_issue = issue.into_plain(path);
-                let id = plain_issue.internal_hash().await?;
+                let id = plain_issue.internal_hash(false).await?;
                 Ok((plain_issue.await?, *id))
             })
             .try_join()
77 changes: 51 additions & 26 deletions crates/turbopack-core/src/issue/mod.rs
@@ -503,42 +503,67 @@ pub struct PlainIssue {
     pub processing_path: PlainIssueProcessingPathReadRef,
 }
 
+fn hash_plain_issue(issue: &PlainIssue, hasher: &mut Xxh3Hash64Hasher, full: bool) {
+    hasher.write_ref(&issue.severity);
+    hasher.write_ref(&issue.context);
+    hasher.write_ref(&issue.category);
+    hasher.write_ref(&issue.title);
+    hasher.write_ref(
+        // Normalize syspaths from Windows. These appear in stack traces.
+        &issue.description.replace('\\', "/"),
+    );
+    hasher.write_ref(&issue.detail);
+    hasher.write_ref(&issue.documentation_link);
+
+    if let Some(source) = &issue.source {
+        hasher.write_value(1_u8);
+        // I'm assuming we don't need to hash the contents. Not 100% correct, but
+        // probably 99%.
+        hasher.write_ref(&source.start);
+        hasher.write_ref(&source.end);
+    } else {
+        hasher.write_value(0_u8);
+    }
+
+    if full {
+        hasher.write_value(issue.sub_issues.len());
+        for i in &issue.sub_issues {
+            hash_plain_issue(i, hasher, full);
+        }
+
+        hasher.write_ref(&issue.processing_path);
+    }
+}
+
 impl PlainIssue {
     /// We need deduplicate issues that can come from unique paths, but
     /// represent the same underlying problem. Eg, a parse error for a file
     /// that is compiled in both client and server contexts.
-    pub fn internal_hash(&self) -> u64 {
+    ///
+    /// Passing [full] will also hash any sub-issues and processing paths. While
+    /// useful for generating exact matching hashes, it's possible for the
+    /// same issue to pass from multiple processing paths, making for overly
+    /// verbose logging.
+    pub fn internal_hash(&self, full: bool) -> u64 {
         let mut hasher = Xxh3Hash64Hasher::new();
-        hasher.write_ref(&self.severity);
-        hasher.write_ref(&self.context);
-        hasher.write_ref(&self.category);
-        hasher.write_ref(&self.title);
-        hasher.write_ref(
-            // Normalize syspaths from Windows. These appear in stack traces.
-            &self.description.replace('\\', "/"),
-        );
-        hasher.write_ref(&self.detail);
-        hasher.write_ref(&self.documentation_link);
-
-        if let Some(source) = &self.source {
-            hasher.write_value(1_u8);
-            // I'm assuming we don't need to hash the contents. Not 100% correct, but
-            // probably 99%.
-            hasher.write_ref(&source.start);
-            hasher.write_ref(&source.end);
-        } else {
-            hasher.write_value(0_u8);
-        }
-
+        hash_plain_issue(self, &mut hasher, full);
         hasher.finish()
     }
 }
 
 #[turbo_tasks::value_impl]
 impl PlainIssueVc {
+    /// We need deduplicate issues that can come from unique paths, but
+    /// represent the same underlying problem. Eg, a parse error for a file
+    /// that is compiled in both client and server contexts.
+    ///
+    /// Passing [full] will also hash any sub-issues and processing paths. While
+    /// useful for generating exact matching hashes, it's possible for the
+    /// same issue to pass from multiple processing paths, making for overly
+    /// verbose logging.
     #[turbo_tasks::function]
-    pub async fn internal_hash(self) -> Result<U64Vc> {
-        Ok(U64Vc::cell(self.await?.internal_hash()))
+    pub async fn internal_hash(self, full: bool) -> Result<U64Vc> {
+        Ok(U64Vc::cell(self.await?.internal_hash(full)))
     }
 }
 
@@ -631,11 +656,11 @@ impl PlainAssetVc {
 }
 
 #[turbo_tasks::value(transparent, serialization = "none")]
-#[derive(Clone, Debug)]
+#[derive(Clone, Debug, DeterministicHash)]
 pub struct PlainIssueProcessingPath(Option<Vec<PlainIssueProcessingPathItemReadRef>>);
 
 #[turbo_tasks::value(serialization = "none")]
-#[derive(Clone, Debug)]
+#[derive(Clone, Debug, DeterministicHash)]
 pub struct PlainIssueProcessingPathItem {
     pub context: Option<StringReadRef>,
     pub description: StringReadRef,
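
A standalone illustration of the `full` flag's effect, using `std`'s `DefaultHasher` in place of `Xxh3Hash64Hasher` and a stand-in `Issue` struct (both are assumptions for the example, not the real types): two issues that differ only in how they were reached hash identically without `full`, and differently with it.

```rust
use std::collections::hash_map::DefaultHasher;
use std::hash::{Hash, Hasher};

struct Issue {
    title: String,
    processing_path: Vec<String>,
}

fn internal_hash(issue: &Issue, full: bool) -> u64 {
    let mut hasher = DefaultHasher::new();
    issue.title.hash(&mut hasher);
    if full {
        // Only the "full" hash distinguishes the processing path.
        issue.processing_path.hash(&mut hasher);
    }
    hasher.finish()
}

fn main() {
    let client = Issue {
        title: "parse error".into(),
        processing_path: vec!["client".into()],
    };
    let server = Issue {
        title: "parse error".into(),
        processing_path: vec!["server".into()],
    };
    // Deduplicated for console logging...
    assert_eq!(internal_hash(&client, false), internal_hash(&server, false));
    // ...but kept distinct when exact matching (e.g. snapshot names) is needed.
    assert_ne!(internal_hash(&client, true), internal_hash(&server, true));
}
```
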
71 changes: 35 additions & 36 deletions crates/turbopack-env/src/try_env.rs
@@ -1,62 +1,61 @@
-use std::future::IntoFuture;
-
 use anyhow::Result;
-use turbo_tasks::primitives::{OptionStringVc, StringVc};
+use turbo_tasks::primitives::StringVc;
 use turbo_tasks_env::{DotenvProcessEnvVc, EnvMapVc, ProcessEnv, ProcessEnvVc};
 use turbo_tasks_fs::FileSystemPathVc;
 
 use crate::ProcessEnvIssue;
 
 #[turbo_tasks::value]
 pub struct TryDotenvProcessEnv {
+    dotenv: DotenvProcessEnvVc,
     prior: ProcessEnvVc,
     path: FileSystemPathVc,
 }
 
-impl TryDotenvProcessEnv {
-    async fn with_issue<T, V: Copy + IntoFuture<Output = Result<T>>>(
-        &self,
-        op: impl Fn(ProcessEnvVc) -> V,
-    ) -> Result<V> {
-        let r = op(DotenvProcessEnvVc::new(Some(self.prior), self.path).as_process_env());
-        match r.await {
-            Ok(_) => Ok(r),
-            Err(e) => {
-                let r = op(self.prior);
-                // If the prior process env also reports an error we don't want to report our
-                // issue
-                r.await?;
-
-                ProcessEnvIssue {
-                    path: self.path,
-                    description: StringVc::cell(e.to_string()),
-                }
-                .cell()
-                .as_issue()
-                .emit();
-                Ok(r)
-            }
-        }
-    }
-}
-
 #[turbo_tasks::value_impl]
 impl TryDotenvProcessEnvVc {
     #[turbo_tasks::function]
     pub fn new(prior: ProcessEnvVc, path: FileSystemPathVc) -> Self {
-        TryDotenvProcessEnv { prior, path }.cell()
+        let dotenv = DotenvProcessEnvVc::new(Some(prior), path);
+        TryDotenvProcessEnv {
+            dotenv,
+            prior,
+            path,
+        }
+        .cell()
     }
 }
 
 #[turbo_tasks::value_impl]
 impl ProcessEnv for TryDotenvProcessEnv {
     #[turbo_tasks::function]
     async fn read_all(&self) -> Result<EnvMapVc> {
-        self.with_issue(|e| e.read_all()).await
-    }
+        let dotenv = self.dotenv;
+        let prior = dotenv.read_prior();
 
-    #[turbo_tasks::function]
-    async fn read(&self, name: &str) -> Result<OptionStringVc> {
-        self.with_issue(|e| e.read(name)).await
+        // Ensure prior succeeds. If it doesn't, then we don't want to attempt to read
+        // the dotenv file (and potentially emit an Issue), just trust that the prior
+        // will have emitted its own.
+        prior.await?;
+
+        let vars = dotenv.read_all_with_prior(prior);
+        match vars.await {
+            Ok(_) => Ok(vars),
+            Err(e) => {
+                // If parsing the dotenv file fails (but getting the prior value didn't), then
+                // we want to emit an Issue and fall back to the prior's read.
+                ProcessEnvIssue {
+                    path: self.path,
+                    // read_all_with_prior will wrap a current error with a context containing the
+                    // failing file, which we don't really care about (we report the filepath as the
+                    // Issue context, not the description). So extract the real error.
+                    description: StringVc::cell(e.root_cause().to_string()),
+                }
+                .cell()
+                .as_issue()
+                .emit();
+                Ok(prior)
+            }
+        }
     }
 }
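
For context on why the Issue description switches to `e.root_cause()`: `read_all_with_prior` wraps parse failures with an "unable to read ... for env vars" context, and `root_cause()` recovers the underlying error. A small standalone `anyhow` illustration (the error strings are made up for the example):

```rust
use anyhow::{anyhow, Context, Result};

fn main() {
    let res: Result<()> =
        Err(anyhow!("invalid key at line 3")).context("unable to read .env for env vars");
    let err = res.unwrap_err();

    // Display shows only the outermost context that was added...
    assert_eq!(err.to_string(), "unable to read .env for env vars");
    // ...while root_cause() digs back to the original parse error, which is
    // what the ProcessEnvIssue description should contain.
    assert_eq!(err.root_cause().to_string(), "invalid key at line 3");
}
```
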
6 changes: 4 additions & 2 deletions crates/turbopack-test-utils/src/snapshot.rs
@@ -31,7 +31,7 @@ pub async fn snapshot_issues<
     let expected_issues = expected(issues_path).await?;
     let mut seen = HashSet::new();
     for (plain_issue, debug_string) in captured_issues.into_iter() {
-        let hash = encode_hex(plain_issue.internal_hash());
+        let hash = encode_hex(plain_issue.internal_hash(true));
 
         let path = issues_path.join(&format!(
             "{}-{}.txt",
@@ -44,7 +44,9 @@ pub async fn snapshot_issues<
                 .replace('?', "__q__"),
             &hash[0..6]
         ));
-        seen.insert(path);
+        if !seen.insert(path) {
+            continue;
+        }
 
         // Annoyingly, the PlainIssue.source -> PlainIssueSource.asset ->
         // PlainAsset.path -> FileSystemPath.fs -> DiskFileSystem.root changes