Commit

Revert public names change
Avasam committed May 22, 2024
1 parent 20ae2a8 commit c82ba1a
Showing 4 changed files with 14 additions and 16 deletions.
1 change: 0 additions & 1 deletion newsfragments/4366.feature.rst

This file was deleted.

4 changes: 2 additions & 2 deletions pkg_resources/__init__.py
@@ -1222,7 +1222,7 @@ class ResourceManager:
     extraction_path: Optional[str] = None
 
     def __init__(self):
-        self.cached_files = set()
+        self.cached_files = {}
 
     def resource_exists(self, package_or_requirement: _PkgReqType, resource_name: str):
         """Does the named resource exist?"""
@@ -1310,7 +1310,7 @@ def get_cache_path(self, archive_name: str, names: Iterable[str] = ()):
 
         self._warn_unsafe_extraction_path(extract_path)
 
-        self.cached_files.add(target_path)
+        self.cached_files[target_path] = True
         return target_path
 
     @staticmethod
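
For context, this restores the dict-as-set pattern for ResourceManager.cached_files: paths are stored as keys and every value is simply True, so membership tests work exactly as they did with a set. A minimal, self-contained sketch of that pattern (the remember helper and the example path below are hypothetical, for illustration only):

# Illustrative only: the dict-as-set pattern restored for cached_files.
# Only the keys matter; every value is just True.
cached_files = {}

def remember(target_path):
    # same effect as cached_files.add(target_path) on a set
    cached_files[target_path] = True

remember("/tmp/example-egg-cache/data.txt")
print("/tmp/example-egg-cache/data.txt" in cached_files)  # True, same membership test as a set

Membership (in), iteration, and len() behave the same as with a set; the cost is a dummy True value per entry.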
21 changes: 10 additions & 11 deletions setuptools/package_index.py
@@ -13,7 +13,6 @@
 import configparser
 import html
 import http.client
-from typing import Dict, List, Optional, Set
 import urllib.parse
 import urllib.request
 import urllib.error
@@ -308,14 +307,14 @@ def __init__(
         verify_ssl=True,
         *args,
         **kw,
-    ) -> None:
+    ):
         super().__init__(*args, **kw)
         self.index_url = index_url + "/"[: not index_url.endswith('/')]
-        self.scanned_urls: Set[str] = set()
-        self.fetched_urls: Set[str] = set()
-        self.package_pages: Dict[str, Set[str]] = {}
+        self.scanned_urls = {}
+        self.fetched_urls = {}
+        self.package_pages = {}
         self.allows = re.compile('|'.join(map(translate, hosts))).match
-        self.to_scan: Optional[List[str]] = []
+        self.to_scan = []
         self.opener = urllib.request.urlopen
 
     def add(self, dist):
@@ -331,7 +330,7 @@ def process_url(self, url, retrieve=False):  # noqa: C901
         """Evaluate a URL as a possible download, and maybe retrieve it"""
         if url in self.scanned_urls and not retrieve:
             return
-        self.scanned_urls.add(url)
+        self.scanned_urls[url] = True
         if not URL_SCHEME(url):
             self.process_filename(url)
             return
@@ -347,18 +346,18 @@ def process_url(self, url, retrieve=False):  # noqa: C901
             return  # don't need the actual page
 
         if not self.url_ok(url):
-            self.fetched_urls.add(url)
+            self.fetched_urls[url] = True
             return
 
         self.info("Reading %s", url)
-        self.fetched_urls.add(url)  # prevent multiple fetch attempts
+        self.fetched_urls[url] = True  # prevent multiple fetch attempts
         tmpl = "Download error on %s: %%s -- Some packages may not be found!"
         f = self.open_url(url, tmpl % url)
         if f is None:
             return
         if isinstance(f, urllib.error.HTTPError) and f.code == 401:
             self.info("Authentication error: %s" % f.msg)
-            self.fetched_urls.add(f.url)
+            self.fetched_urls[f.url] = True
         if 'html' not in f.headers.get('content-type', '').lower():
             f.close()  # not html, we can't process it
             return
@@ -451,7 +450,7 @@ def _scan(self, link):
         # it's a package page, sanitize and index it
         pkg = safe_name(parts[0])
         ver = safe_version(parts[1])
-        self.package_pages.setdefault(pkg.lower(), set()).add(link)
+        self.package_pages.setdefault(pkg.lower(), {})[link] = True
         return to_filename(pkg), to_filename(ver)
 
     def process_index(self, url, page):
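
PackageIndex gets the same treatment: scanned_urls, fetched_urls, and each entry of package_pages go back to being dicts whose values are always True. A rough standalone sketch of how the indexing line in _scan behaves under that representation (the index_link helper and the sample package name and URLs are made up for illustration, not setuptools code):

# Hypothetical illustration of the package_pages pattern.
package_pages = {}

def index_link(pkg, link):
    # dict-as-set equivalent of package_pages.setdefault(pkg.lower(), set()).add(link)
    package_pages.setdefault(pkg.lower(), {})[link] = True

index_link("Sample.Package", "https://example.com/Sample.Package-1.0.tar.gz")
index_link("Sample.Package", "https://example.com/Sample.Package-1.1.tar.gz")
print(list(package_pages["sample.package"]))  # both links, deduplicated, in insertion order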
4 changes: 2 additions & 2 deletions setuptools/tests/test_easy_install.py
@@ -90,7 +90,7 @@ def test_no_find_links(self):
         cmd.install_dir = os.path.join(tempfile.mkdtemp(), 'ok')
         cmd.args = ['ok']
         cmd.ensure_finalized()
-        assert cmd.package_index.scanned_urls == set()
+        assert cmd.package_index.scanned_urls == {}
 
         # let's try without it (default behavior)
         cmd = ei.easy_install(dist)
@@ -99,7 +99,7 @@ def test_no_find_links(self):
         cmd.install_dir = os.path.join(tempfile.mkdtemp(), 'ok')
         cmd.args = ['ok']
         cmd.ensure_finalized()
-        keys = sorted(cmd.package_index.scanned_urls)
+        keys = sorted(cmd.package_index.scanned_urls.keys())
         assert keys == ['link1', 'link2']
 
     def test_write_exception(self):
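
The test changes follow directly from the dict representation: a PackageIndex that has scanned nothing now compares equal to {} rather than set(), and sorting a dict's keys gives the same result whether you call .keys() or iterate the dict itself. A quick plain-dict illustration of those two assertions (no real easy_install command involved):

# Plain-dict illustration of the assertions in test_no_find_links.
scanned_urls = {}
assert scanned_urls == {}  # nothing scanned yet

scanned_urls['link2'] = True
scanned_urls['link1'] = True
assert sorted(scanned_urls.keys()) == ['link1', 'link2']  # same as sorted(scanned_urls)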
