bugfix: keep blank parameters in query string, close #411 (#412)
lexiforest authored Oct 21, 2024
1 parent 8cdf5a8 commit dc206f5
Showing 5 changed files with 39 additions and 15 deletions.
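
Background: when curl_cffi normalizes a URL or merges extra params, it re-parses the query string with urllib.parse.parse_qsl and re-encodes it with urlencode. By default parse_qsl silently drops parameters that have an empty value, which is why a query such as "?a=" lost the parameter entirely (#411). A minimal illustration of the stdlib behaviour the fix relies on:

    from urllib.parse import parse_qsl

    # Default: blank values are dropped entirely.
    parse_qsl("a=&foo=1")                          # [('foo', '1')]

    # keep_blank_values=True preserves them as empty strings.
    parse_qsl("a=&foo=1", keep_blank_values=True)  # [('a', ''), ('foo', '1')]
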
20 changes: 12 additions & 8 deletions .github/workflows/build-and-test.yaml
@@ -32,20 +32,24 @@ jobs:
runs-on: ubuntu-latest
steps:
- uses: actions/checkout@v4
- run: |
make preprocess
pipx run build --sdist
- uses: actions/upload-artifact@v4 # https://github.com/actions/upload-artifact/issues/478
with:
path: ./dist/*.tar.gz
overwrite: true

- uses: actions/setup-python@v5
with:
python-version: '3.10'
- run: |

- name: build sdist
run: |
make preprocess
pip install build
python -m build --sdist
pip install ./dist/*.tar.gz
- name: upload artifacts
uses: actions/upload-artifact@v4 # https://github.com/actions/upload-artifact/issues/478
with:
path: ./dist/*.tar.gz
overwrite: true

bdist:
name: Build bdist wheels and test
runs-on: ${{ matrix.os }}
4 changes: 2 additions & 2 deletions curl_cffi/requests/session.py
@@ -109,7 +109,7 @@ def _is_absolute_url(url: str) -> bool:
def _quote_path_and_params(url: str, quote_str: str = ""):
    safe = "".join(SAFE_CHARS - set(quote_str))
    parsed_url = urlparse(url)
    parsed_get_args = parse_qsl(parsed_url.query)
    parsed_get_args = parse_qsl(parsed_url.query, keep_blank_values=True)
    encoded_get_args = urlencode(parsed_get_args, doseq=True, safe=safe)
    return ParseResult(
        parsed_url.scheme,
@@ -141,7 +141,7 @@ def _update_url_params(url: str, params: Union[Dict, List, Tuple]) -> str:
    parsed_url = urlparse(url)

    # Extracting URL arguments from parsed URL, NOTE the result is a list, not dict
    parsed_get_args = parse_qsl(parsed_url.query)
    parsed_get_args = parse_qsl(parsed_url.query, keep_blank_values=True)

    # Merging URL arguments dict with new params
    old_args_counter = Counter(x[0] for x in parsed_get_args)
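
Both helpers above now pass keep_blank_values=True before re-encoding. A rough sketch of the round trip, simplified from _quote_path_and_params (the real helper also applies a safe-character set built from SAFE_CHARS, omitted here):

    from urllib.parse import parse_qsl, urlencode, urlparse, urlunparse

    def requote_query(url: str) -> str:
        # Hypothetical simplified version: re-encode only the query component.
        parsed = urlparse(url)
        pairs = parse_qsl(parsed.query, keep_blank_values=True)
        return urlunparse(parsed._replace(query=urlencode(pairs, doseq=True)))

    # The blank "param2=" now survives the rebuild instead of being dropped.
    requote_query("http://example.com/api?param1=value1&param2=")
    # -> 'http://example.com/api?param1=value1&param2='
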
16 changes: 12 additions & 4 deletions scripts/build.py
@@ -12,8 +12,6 @@
# this is the upstream libcurl-impersonate version
__version__ = "0.8.0"

tmpdir = None


def detect_arch():
with open(Path(__file__).parent.parent / "libs.json") as f:
@@ -38,8 +36,13 @@ def detect_arch():
arch["libdir"] = os.path.expanduser(arch["libdir"])
else:
global tmpdir
tmpdir = tempfile.TemporaryDirectory()
arch["libdir"] = tmpdir.name
if "CI" in os.environ:
tmpdir = "./tmplibdir"
os.makedirs(tmpdir, exist_ok=True)
arch["libdir"] = tmpdir
else:
tmpdir = tempfile.TemporaryDirectory()
arch["libdir"] = tmpdir.name
return arch
raise Exception(f"Unsupported arch: {uname}")

@@ -69,8 +72,13 @@ def download_libcurl():
    os.makedirs(arch["libdir"], exist_ok=True)
    shutil.unpack_archive(file, arch["libdir"])

    print("Files after unpacking")
    print(os.listdir(arch["libdir"]))


def get_curl_archives():
    print("Files for linking")
    print(os.listdir(arch["libdir"]))
    if arch["system"] == "Linux" and arch.get("link_type") == "static":
        # note that the order of libraries matters
        # https://stackoverflow.com/a/36581865
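
The detect_arch() change above picks the library directory based on the environment: on CI (GitHub Actions sets the CI environment variable) it switches to a fixed ./tmplibdir, presumably so the unpacked libcurl files land in a predictable place that the added print calls and later steps can inspect, while local builds keep using a throwaway tempfile.TemporaryDirectory(). A standalone sketch of that selection logic, with a hypothetical helper name:

    import os
    import tempfile

    # Module-level reference so the TemporaryDirectory is not cleaned up
    # while the build still needs it (build.py keeps it in a global as well).
    _tmpdir = None

    def choose_libdir() -> str:
        global _tmpdir
        if "CI" in os.environ:
            # Fixed directory on CI: easy to list and inspect in the logs.
            libdir = "./tmplibdir"
            os.makedirs(libdir, exist_ok=True)
            return libdir
        # Locally: a temporary directory that is removed automatically.
        _tmpdir = tempfile.TemporaryDirectory()
        return _tmpdir.name
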
2 changes: 1 addition & 1 deletion tests/unittest/conftest.py
@@ -262,7 +262,7 @@ async def echo_path(scope, receive, send):


async def echo_params(scope, receive, send):
    body = {"params": parse_qs(scope["query_string"].decode())}
    body = {"params": parse_qs(scope["query_string"].decode(), keep_blank_values=True)}
    await send(
        {
            "type": "http.response.start",
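
The conftest change mirrors the library fix on the test-server side: the echo_params handler parses the query string with keep_blank_values=True, so blank parameters come back as empty strings in the JSON echo. A self-contained sketch of such a handler, assuming a standard ASGI response (the real fixture's status and headers may differ):

    import json
    from urllib.parse import parse_qs

    async def echo_params(scope, receive, send):
        # parse_qs returns a dict of value lists; "a=" becomes {"a": [""]}.
        body = {"params": parse_qs(scope["query_string"].decode(), keep_blank_values=True)}
        payload = json.dumps(body).encode()
        await send(
            {
                "type": "http.response.start",
                "status": 200,
                "headers": [[b"content-type", b"application/json"]],
            }
        )
        await send({"type": "http.response.body", "body": payload})
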
12 changes: 12 additions & 0 deletions tests/unittest/test_requests.py
@@ -149,6 +149,13 @@ def test_update_params(server):
    )
    assert r.content == b'{"params": {"a": ["1", "2"], "foo": ["z", "1", "2"]}}'

    # empty values should be kept
    r = requests.get(
        str(server.url.copy_with(path="/echo_params?a=")),
        params=[("foo", "1"), ("foo", "2")],
    )
    assert r.content == b'{"params": {"a": [""], "foo": ["1", "2"]}}'


def test_url_encode(server):
# https://github.com/lexiforest/curl_cffi/issues/394
@@ -209,6 +216,11 @@ def test_url_encode(server):
    r = requests.get(url, quote=False)
    assert r.url == url

    # empty values should be kept
    url = "http://127.0.0.1:8000/api?param1=value1&param2=&param3=value3"
    r = requests.get(url)
    assert r.url == url


def test_headers(server):
r = requests.get(str(server.url.copy_with(path="/echo_headers")), headers={"foo": "bar"})
