More fine-grained logging
0xmichalis committed Jan 29, 2025
1 parent 15a5368 commit 901ac01
Showing 5 changed files with 24 additions and 23 deletions.
10 changes: 5 additions & 5 deletions src/chain/evm.rs
@@ -11,7 +11,7 @@ use std::time::Duration;
use std::{future::Future, path::Path};
use tokio::fs;
use tokio::time::sleep;
-use tracing::{error, info, warn};
+use tracing::{debug, error, warn};

use crate::content::{
extensions::fetch_and_save_additional_content, fetch_and_save_content, Options,
@@ -212,7 +212,7 @@ pub async fn process_nfts(
.collect::<Vec<_>>();

for contract in contracts {
info!("Processing contract {} on {}", contract.address, chain_name);
debug!("Processing contract {} on {}", contract.address, chain_name);
let contract_addr = match contract.address.parse::<Address>() {
Ok(addr) => addr,
Err(e) => {
@@ -240,7 +240,7 @@
};

// Save metadata
info!("Fetching metadata from {}", token_uri);
debug!("Fetching metadata from {}", token_uri);
let contract_address = format!("{:#x}", contract_addr);
let token_id_str = token_id.to_string();
let metadata_content = fetch_and_save_content(
@@ -263,7 +263,7 @@
if let Some(image_url) = &metadata.image {
let (image_url, extension) =
get_uri_and_extension_from_metadata(&metadata, image_url, true, false);
info!("Downloading image from {}", image_url);
debug!("Downloading image from {}", image_url);
fetch_and_save_content(
&image_url,
chain_name,
@@ -282,7 +282,7 @@
if let Some(animation_url) = &metadata.animation_url {
let (animation_url, extension) =
get_uri_and_extension_from_metadata(&metadata, animation_url, false, true);
info!("Downloading animation from {}", animation_url);
debug!("Downloading animation from {}", animation_url);
fetch_and_save_content(
&animation_url,
chain_name,
10 changes: 5 additions & 5 deletions src/chain/tezos.rs
@@ -7,7 +7,7 @@ use tezos_michelson::michelson::data;
use tezos_rpc::client::TezosRpc;
use tezos_rpc::http::default::HttpClient;
use tokio::fs;
-use tracing::info;
+use tracing::debug;

use crate::content::{
extensions::fetch_and_save_additional_content, fetch_and_save_content, Options,
@@ -113,10 +113,10 @@ pub async fn process_nfts(
.collect::<Vec<_>>();

for contract in contracts {
info!("Processing contract {}", contract.address);
debug!("Processing contract {}", contract.address);

if let Some(uri) = get_uri(&rpc, &contract).await? {
info!("Fetching metadata from {}", uri);
debug!("Fetching metadata from {}", uri);

let metadata_content = fetch_and_save_content(
&uri,
@@ -178,11 +178,11 @@ pub async fn process_nfts(
// Only download if we haven't seen this URL before
let inserted = downloaded.insert(url.clone());
if !inserted {
info!("Skipping duplicate content from {}", url);
debug!("Skipping duplicate {} from {}", file_name, url);
continue;
}

info!("Downloading content from {}", url);
debug!("Downloading {} from {}", file_name, url);
fetch_and_save_content(
&url,
"tezos",
8 changes: 4 additions & 4 deletions src/content/extensions.rs
@@ -3,14 +3,14 @@ use flate2::read::GzDecoder;
use std::io::Read;
use std::path::Path;
use tokio::fs;
-use tracing::info;
+use tracing::{debug, info};

async fn extend_croquet_challenge_content(
output_path: &Path,
contract: &str,
token_id: &str,
) -> Result<()> {
-info!(
+debug!(
"Fetching additional content for Ethereum contract {} token {}",
contract, token_id
);
@@ -39,11 +39,11 @@ async fn extend_croquet_challenge_content(

// Skip if file already exists
if fs::try_exists(&file_path).await? {
info!("File already exists at {}", file_path.display());
debug!("File already exists at {}", file_path.display());
continue;
}

info!("Downloading {} as {}", url, target_file);
info!("Saving {} as {}", url, target_file);
let response = client.get(&url).send().await?;
let content = response.bytes().await?;

9 changes: 5 additions & 4 deletions src/content/html.rs
@@ -2,7 +2,7 @@ use anyhow::Result;
use scraper::{Html, Selector};
use std::path::Path;
use tokio::fs;
-use tracing::{info, warn};
+use tracing::{debug, info, warn};
use url::Url;

pub async fn download_html_resources(
@@ -50,21 +50,22 @@ pub async fn download_html_resources(

// Skip if file already exists
if fs::try_exists(&resource_path).await? {
info!("Resource already exists at {}", resource_path.display());
debug!("Resource already exists at {}", resource_path.display());
continue;
}

// Download and save the resource
info!("Downloading resource: {}", absolute_url);
debug!("Downloading HTML resource: {}", absolute_url);
let client = reqwest::Client::new();
// TODO: Handle resources with invalid URL schemes
match client.get(&absolute_url).send().await {
Ok(response) => {
info!("Saving HTML resource at {}", resource_path.display());
let content = response.bytes().await?;
fs::write(resource_path, content).await?;
}
Err(e) => {
warn!("Failed to download resource {}: {}", absolute_url, e);
warn!("Failed to download HTML resource {}: {}", absolute_url, e);
}
}
}
10 changes: 5 additions & 5 deletions src/content/mod.rs
@@ -4,7 +4,7 @@ use crate::url::{
use anyhow::Result;
use std::path::{Path, PathBuf};
use tokio::fs;
-use tracing::{info, warn};
+use tracing::{debug, info};

pub mod extensions;
pub mod html;
@@ -110,7 +110,7 @@ pub async fn fetch_and_save_content(

// Check if file exists before downloading
if fs::try_exists(&file_path).await? {
info!("File already exists at {}", file_path.display());
debug!("File already exists at {}", file_path.display());
// TODO: Instead of returning we should check whether we can
// download additional files, in case this is an HTML file
return Ok(file_path);
@@ -132,7 +132,7 @@
if !file_path.to_string_lossy().ends_with(".html") {
file_path = file_path.with_extension("html");
}
warn!("Downloading HTML content from {}. The saved files may be incomplete as they may have more dependencies.", url);
debug!("Downloading HTML content from {}. The saved files may be incomplete as they may have more dependencies.", url);
let content_str = String::from_utf8_lossy(&content);
html::download_html_resources(&content_str, url, file_path.parent().unwrap()).await?;
} else if content_type.contains("application/json") {
@@ -149,12 +149,12 @@
if file_path.extension().is_none() {
if let Some(ext) = detect_media_extension(&content) {
file_path = file_path.with_extension(ext);
info!("Detected media extension: {}", ext);
debug!("Detected media extension: {}", ext);
}
}

// TODO: Check whether the file already exists before overwriting
-// if the file is HTML.
+// if the file is HTML or its extension was automatically detected.
info!("Saving {}", file_path.display());
fs::write(&file_path, &content).await?;

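Taken together, the commit demotes the per-contract and per-download messages from info to debug, so a subscriber filtering at the default info level now reports only files actually being saved plus warnings and errors. Below is a minimal sketch of how a binary built on tracing-subscriber could opt back into the demoted messages; the crate name nft_archiver, the filter defaults, and the subscriber setup are assumptions for illustration, not taken from this repository (tracing-subscriber needs its env-filter feature for with_env_filter).

use tracing::{debug, info};
use tracing_subscriber::EnvFilter;

fn main() {
    // Respect RUST_LOG when it is set; otherwise keep the global level at
    // `info` but raise the (hypothetical) `nft_archiver` crate to `debug`
    // so the per-contract and per-download messages stay visible.
    let filter = EnvFilter::try_from_default_env()
        .unwrap_or_else(|_| EnvFilter::new("info,nft_archiver=debug"));

    tracing_subscriber::fmt().with_env_filter(filter).init();

    // With the filter above both lines are emitted; under a plain `info`
    // filter only the second one would be.
    debug!("Processing contract 0x1234 on ethereum");
    info!("Saving output/metadata.json");
}

Running the binary with RUST_LOG=debug would have the same effect without code changes, assuming the subscriber reads the environment as above.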
