change verbosity of logs to clean up clutter #436

Merged
merged 1 commit on Feb 13, 2024
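This PR demotes routine, repetitive status messages (package installs, SSH command echoes, config reads and writes, Caddy setup steps) from INFO to DEBUG so the default output stays focused on user-facing events. As a minimal sketch of how to surface the demoted messages again, assuming the library's module loggers roll up to a top-level "runhouse" logger from Python's standard logging module (an assumption based on the logger calls in this diff, not something the diff states):

    import logging

    # Attach a handler and lower the (assumed) top-level "runhouse" logger to DEBUG
    # so messages this PR demotes, e.g. "Env already installed, skipping" or
    # "Running command on <cluster>: ...", become visible again.
    handler = logging.StreamHandler()
    handler.setFormatter(logging.Formatter("%(levelname)s | %(name)s | %(message)s"))

    rh_logger = logging.getLogger("runhouse")
    rh_logger.addHandler(handler)
    rh_logger.setLevel(logging.DEBUG)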
4 changes: 2 additions & 2 deletions runhouse/resources/envs/conda_env.py
@@ -116,7 +116,7 @@ def install(self, force=False):
         install_hash = hash(str(env_config))
         # Check the existing hash
         if local_env_exists and install_hash in obj_store.installed_envs and not force:
-            logger.info("Env already installed, skipping")
+            logger.debug("Env already installed, skipping")
             return
         obj_store.installed_envs[install_hash] = self.name

@@ -131,7 +131,7 @@ def install(self, force=False):
             else:
                 raise ValueError(f"package {package} not recognized")

-            logger.info(f"Installing package: {str(pkg)}")
+            logger.debug(f"Installing package: {str(pkg)}")
             pkg._install(self)

         return (
4 changes: 2 additions & 2 deletions runhouse/resources/envs/env.py
@@ -145,7 +145,7 @@ def install(self, force=False):
         install_hash = hash(str(env_config))
         # Check the existing hash
         if install_hash in obj_store.installed_envs and not force:
-            logger.info("Env already installed, skipping")
+            logger.debug("Env already installed, skipping")
             return
         obj_store.installed_envs[install_hash] = self.name

@@ -157,7 +157,7 @@ def install(self, force=False):
             else:
                 raise ValueError(f"package {package} not recognized")

-            logger.info(f"Installing package: {str(pkg)}")
+            logger.debug(f"Installing package: {str(pkg)}")
             pkg._install(self)
         return self.run(self.setup_cmds) if self.setup_cmds else None
14 changes: 7 additions & 7 deletions runhouse/resources/hardware/cluster.py
@@ -508,7 +508,7 @@ def check_server(self, restart_server=True):
         if not self.client:
             try:
                 self.connect_server_client()
-                logger.info(f"Checking server {self.name}")
+                logger.debug(f"Checking server {self.name}")
                 self.client.check_server()
                 logger.info(f"Server {self.name} is up.")
             except (
@@ -623,7 +623,7 @@ def restart_server(

         if resync_rh:
             self._sync_runhouse_to_cluster(_install_url=_rh_install_url)
-            logger.info("Finished syncing Runhouse to cluster.")
+            logger.debug("Finished syncing Runhouse to cluster.")

         https_flag = self._use_https
         caddy_flag = self._use_caddy
@@ -692,7 +692,7 @@ def restart_server(
             raise ValueError("Cluster must have a name in order to enable HTTPS.")

         if not self.client:
-            logger.info("Reconnecting server client. Server restarted with HTTPS.")
+            logger.debug("Reconnecting server client. Server restarted with HTTPS.")
             self.connect_server_client()

         # Refresh the client params to use HTTPS
@@ -984,7 +984,7 @@ def _copy_certs_to_cluster(self):
             ]
         )

-        logger.info(f"Copied local certs onto the cluster in path: {dest}")
+        logger.debug(f"Copied local certs onto the cluster in path: {dest}")

     def run(
         self,
@@ -1056,7 +1056,7 @@ def run(

         # Register the completed Run
         r._register_cmd_run_completion(return_codes)
-        logger.info(f"Saved Run to path: {r.folder.path}")
+        logger.debug(f"Saved Run to path: {r.folder.path}")
         return return_codes

     def _run_commands_with_ssh(
@@ -1086,7 +1086,7 @@ def _run_commands_with_ssh(
         if not pwd:
             for command in commands:
                 command = f"{cmd_prefix} {command}" if cmd_prefix else command
-                logger.info(f"Running command on {self.name}: {command}")
+                logger.debug(f"Running command on {self.name}: {command}")
                 ret_code = runner.run(
                     command,
                     require_outputs=require_outputs,
@@ -1099,7 +1099,7 @@ def _run_commands_with_ssh(

             for command in commands:
                 command = f"{cmd_prefix} {command}" if cmd_prefix else command
-                logger.info(f"Running command on {self.name}: {command}")
+                logger.debug(f"Running command on {self.name}: {command}")
                 # We need to quiet the SSH output here or it will print
                 # "Shared connection to ____ closed." at the end, which messes with the output.
                 ssh_command = runner.run(
2 changes: 1 addition & 1 deletion runhouse/resources/hardware/cluster_factory.py
@@ -223,7 +223,7 @@ def kubernetes_cluster(
             stderr=subprocess.PIPE,
             text=True,
         )
-        logger.info(process.stdout)
+        logger.debug(process.stdout)
         logger.info(f"Kubernetes namespace set to {namespace}")

     except subprocess.CalledProcessError as e:
2 changes: 1 addition & 1 deletion runhouse/resources/hardware/on_demand_cluster.py
@@ -412,7 +412,7 @@ def _start_ray_workers(self, ray_port):
             # NOTE: Using external worker address here because we're running from local
             worker_ips.append(external)

-        logger.info(f"Internal head IP: {internal_head_ip}")
+        logger.debug(f"Internal head IP: {internal_head_ip}")

         for host in worker_ips:
             logger.info(
4 changes: 2 additions & 2 deletions runhouse/resources/hardware/sagemaker/sagemaker_cluster.py
@@ -671,7 +671,7 @@ def _run_commands_with_ssh(
                 ssh_control_name=f"{self.name}:{self.ssh_port}",
             )
             command = f"{cmd_prefix} {command}" if cmd_prefix else command
-            logger.info(f"Running command on {self.name}: {command}")
+            logger.debug(f"Running command on {self.name}: {command}")
             return_code, stdout, stderr = runner.run(
                 command,
                 require_outputs=require_outputs,
@@ -832,7 +832,7 @@ def _create_ssm_session_with_cluster(self, num_ports_to_try: int = 5):
                 f"Make sure SSH keys exist in local path: {self._abs_ssh_key_path}"
             )

-        logger.info(f"Running command: {command}")
+        logger.debug(f"Running command: {command}")

         # Define an event to signal completion of the SSH tunnel setup
         tunnel_setup_complete = threading.Event()
8 changes: 4 additions & 4 deletions runhouse/resources/hardware/sky_ssh_runner.py
@@ -210,7 +210,7 @@ def run(
             executable = "/bin/bash"

         # RH MODIFIED: Return command instead of running it
-        logging.info(f"Running command: {' '.join(command)}")
+        logging.debug(f"Running command: {' '.join(command)}")
         if return_cmd:
             return " ".join(command)

@@ -230,7 +230,7 @@ def tunnel(self, local_port, remote_port):
             ssh_mode=SshMode.NON_INTERACTIVE, port_forward=[(local_port, remote_port)]
         )
         command = " ".join(base_cmd + ["tail"])
-        logger.info(f"Running command: {command}")
+        logger.debug(f"Running command: {command}")
         proc = subprocess.Popen(shlex.split(command), stdout=subprocess.PIPE)

         time.sleep(3)
@@ -431,7 +431,7 @@ def ssh_tunnel(
         runner.tunnel(local_port, remote_port)
         ssh_tunnel = runner  # Just to keep the object in memory
     else:
-        logger.info(
+        logger.debug(
             f"Attempting to bind "
             f"{LOCALHOST}:{remote_port} via ssh port {ssh_port} "
             f"on remote server {address} "
@@ -454,7 +454,7 @@ def ssh_tunnel(
         )
         ssh_tunnel.start()
         connected = True
-        logger.info(
+        logger.debug(
             f"Successfully bound "
             f"{LOCALHOST}:{remote_port} via ssh port {ssh_port} "
             f"on remote server {address} "
6 changes: 3 additions & 3 deletions runhouse/resources/module.py
@@ -884,9 +884,9 @@ def _extract_pointers(raw_cls_or_fn: Union[Type, Callable], reqs: List[str]):
     ]

     if len(base_dirs) != 1:
-        logger.info(f"Module files: {module_path}")
-        logger.info(f"Package paths: {package_paths}")
-        logger.info(f"Base dirs: {base_dirs}")
+        logger.debug(f"Module files: {module_path}")
+        logger.debug(f"Package paths: {package_paths}")
+        logger.debug(f"Base dirs: {base_dirs}")
         raise Exception("Wasn't able to find the package directory!")
     root_path = os.path.dirname(base_dirs[0])
     module_name = py_module.__spec__.name
18 changes: 9 additions & 9 deletions runhouse/resources/provenance.py
@@ -250,7 +250,7 @@ def save(
         if not config_for_rns["name"] or name:
             config_for_rns["name"] = resolve_rns_path(name or self.name)
             self._write_config(config=config_for_rns)
-            logger.info(f"Updated Run config name in path: {config_path}")
+            logger.debug(f"Updated Run config name in path: {config_path}")

         return super().save(name, overwrite)

@@ -326,23 +326,23 @@ def result(self):
             )
             return self._load_blob_from_path(path=results_path).fetch()
         elif run_status == RunStatus.ERROR:
-            logger.info("Run failed, returning stderr")
+            logger.debug("Run failed, returning stderr")
             return self.stderr()
         else:
-            logger.info(f"Run status: {self.status}, returning stdout")
+            logger.debug(f"Run status: {self.status}, returning stdout")
             return self.stdout()

     def stdout(self) -> str:
         """Read the stdout saved on the system for the Run."""
         stdout_path = self._stdout_path
-        logger.info(f"Reading stdout from path: {stdout_path}")
+        logger.debug(f"Reading stdout from path: {stdout_path}")

         return self._load_blob_from_path(path=stdout_path).fetch().decode().strip()

     def stderr(self) -> str:
         """Read the stderr saved on the system for the Run."""
         stderr_path = self._stderr_path
-        logger.info(f"Reading stderr from path: {stderr_path}")
+        logger.debug(f"Reading stderr from path: {stderr_path}")

         return self._load_blob_from_path(stderr_path).fetch().decode().strip()

@@ -364,23 +364,23 @@ def _register_new_run(self):
         self.status = RunStatus.RUNNING

         # Write config data for the Run to its config file on the system
-        logger.info(f"Registering new Run on system in path: {self.folder.path}")
+        logger.debug(f"Registering new Run on system in path: {self.folder.path}")
         self._write_config()

     def _register_fn_run_completion(self, run_status: RunStatus):
         """Update a function based Run's config after its finished running on the system."""
         self.end_time = self._current_timestamp()
         self.status = run_status

-        logger.info(f"Registering a completed fn Run with status: {run_status}")
+        logger.debug(f"Registering a completed fn Run with status: {run_status}")
         self._write_config()

     def _register_cmd_run_completion(self, return_codes: list):
         """Update a cmd based Run's config and register its stderr and stdout after running on the system."""
         run_status = RunStatus.ERROR if return_codes[0][0] != 0 else RunStatus.COMPLETED
         self.status = run_status

-        logger.info(f"Registering a completed cmd Run with status: {run_status}")
+        logger.debug(f"Registering a completed cmd Run with status: {run_status}")
         self._write_config()

         # Write the stdout and stderr of the commands Run to the Run's folder
@@ -395,7 +395,7 @@ def _write_config(self, config: dict = None, overwrite: bool = True):
             overwrite (Optional[bool]): Overwrite the config if one is already saved down. Defaults to ``True``.
         """
         config_to_write = config or self.config_for_rns
-        logger.info(f"Config to save on system: {config_to_write}")
+        logger.debug(f"Config to save on system: {config_to_write}")
         self.folder.put(
             {self.RUN_CONFIG_FILE: config_to_write},
             overwrite=overwrite,
2 changes: 1 addition & 1 deletion runhouse/resources/resource.py
@@ -443,7 +443,7 @@ def share(
         # Update the resource in Den with this global visibility value
         self.visibility = visibility

-        logger.info(f"Updating resource with visibility: {self.visibility}")
+        logger.debug(f"Updating resource with visibility: {self.visibility}")

         self.save()
12 changes: 6 additions & 6 deletions runhouse/rns/rns_client.py
@@ -324,13 +324,13 @@ def load_config(
         )

         resource_uri = self.resource_uri(name)
-        logger.info(f"Attempting to load config for {rns_address} from RNS.")
+        logger.debug(f"Attempting to load config for {rns_address} from RNS.")
         resp = self.session.get(
             f"{self.api_server_url}/resource/{resource_uri}",
             headers=request_headers,
         )
         if resp.status_code != 200:
-            logger.info(f"No config found in RNS: {load_resp_content(resp)}")
+            logger.debug(f"No config found in RNS: {load_resp_content(resp)}")
             # No config found, so return empty config
             return {}

@@ -352,7 +352,7 @@ def _load_config_from_local(self, rns_address=None, path=None) -> Optional[dict]
         if not config_path.exists():
             return None

-        logger.info(f"Loading config from local file {config_path}")
+        logger.debug(f"Loading config from local file {config_path}")
         with open(config_path, "r") as f:
             try:
                 config = json.load(f)
@@ -418,11 +418,11 @@ def _save_config_in_rns(self, config, resource_name):
             f"{self.api_server_url}/{uri}", data=json.dumps(payload), headers=headers
         )
         if resp.status_code == 200:
-            logger.info(f"Config updated in Den for resource: {uri}")
+            logger.debug(f"Config updated in Den for resource: {uri}")
         elif resp.status_code == 422:  # No changes made to existing Resource
-            logger.info(f"Config for {uri} has not changed, nothing to update")
+            logger.debug(f"Config for {uri} has not changed, nothing to update")
         elif resp.status_code == 404:  # Resource not found
-            logger.info(f"Saving new resource in Den for resource: {uri}")
+            logger.debug(f"Saving new resource in Den for resource: {uri}")
             # Resource does not yet exist, in which case we need to create from scratch
             resp = self.session.post(
                 f"{self.api_server_url}/resource",
14 changes: 7 additions & 7 deletions runhouse/servers/caddy/config.py
@@ -89,13 +89,13 @@ def caddyfile(self):
     def configure(self):
         """Configure Caddy to proxy requests to the Fast API HTTP server"""
         if not self._is_configured() or self.force_reinstall:
-            logger.info(f"Configuring Caddy for address: {self.address}")
+            logger.debug(f"Configuring Caddy for address: {self.address}")
             self._install()
             self._build_template()
             self._start_caddy()

         # Reload Caddy with the updated config
-        logger.info("Reloading Caddy")
+        logger.debug("Reloading Caddy")
         self.reload()

         if not self._is_configured():
@@ -138,11 +138,11 @@ def _install(self):
             text=True,
         )
         if result.returncode == 0:
-            logger.info("Caddy is already installed, skipping install.")
+            logger.debug("Caddy is already installed, skipping install.")
         else:
             # Install caddy as a service (or background process if we can't use systemctl)
             # https://caddyserver.com/docs/running#using-the-service
-            logger.info("Installing Caddy.")
+            logger.info("Installing Caddy...")

             commands = [
                 "sudo apt update && sudo apt install -y debian-keyring debian-archive-keyring apt-transport-https",
@@ -251,7 +251,7 @@ def _build_template(self):
         logger.info("Successfully built and formatted Caddy template.")

     def _is_configured(self) -> bool:
-        logger.info("Checking Caddy configuration.")
+        logger.debug("Checking Caddy configuration.")
         result = subprocess.run(
             ["sudo", "systemctl", "status", "caddy"],
             capture_output=True,
@@ -288,7 +288,7 @@ def _start_caddy(self):
         # Will receive an error that looks like:
         # caddy : user NOT in sudoers ; TTY=unknown ; PWD=/ ; USER=root
         # https://github.com/caddyserver/caddy/issues/4248
-        logger.info("Adding Caddy as trusted app.")
+        logger.debug("Adding Caddy as trusted app.")
         try:
             subprocess.run(
                 "sudo mkdir -p /var/lib/caddy/.local && "
@@ -301,7 +301,7 @@ def _start_caddy(self):
         except subprocess.CalledProcessError as e:
             raise e

-        logger.info("Starting Caddy.")
+        logger.debug("Starting Caddy.")
         run_cmd = ["sudo", "systemctl", "start", "caddy"]
         result = subprocess.run(
             run_cmd,
2 changes: 1 addition & 1 deletion runhouse/servers/http/http_server.py
@@ -1135,7 +1135,7 @@ def _log_cluster_data(data: dict, labels: dict):
     # Note: running the FastAPI app on a higher, non-privileged port (8000) and using Caddy as a reverse
     # proxy to forward requests from port 80 (HTTP) or 443 (HTTPS) to the app's port.
     if use_caddy:
-        logger.info("Using Caddy as a reverse proxy")
+        logger.debug("Using Caddy as a reverse proxy")
         if certs_address is None and domain is None:
             raise ValueError(
                 "Must provide the server address or domain to configure Caddy. No address or domain found in the "
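Note that user-facing milestones are left at INFO in this diff (for example "Server {self.name} is up.", "Kubernetes namespace set to {namespace}", and the Caddy install messages), while routine progress lines drop to DEBUG. If the module loggers are created with logging.getLogger(__name__) (an assumption, not shown in the diff), verbosity can also be raised for a single noisy area instead of the whole library:

    import logging

    # Hypothetical targeted override: surface DEBUG output only for cluster SSH
    # command execution, leaving the rest of runhouse at its default level.
    logging.getLogger("runhouse.resources.hardware.cluster").setLevel(logging.DEBUG)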