Skip to content

Commit

Permalink
[pre-commit.ci] auto fixes from pre-commit.com hooks
Browse files Browse the repository at this point in the history
for more information, see https://pre-commit.ci
  • Loading branch information
pre-commit-ci[bot] committed Dec 15, 2024
1 parent 582bdbb commit e01566a
Show file tree
Hide file tree
Showing 2 changed files with 43 additions and 68 deletions.
84 changes: 33 additions & 51 deletions integration_tests/base_routes.py
Original file line number Diff line number Diff line change
Expand Up @@ -1084,132 +1084,114 @@ def create_item(request, body: CreateItemBody, query: CreateItemQueryParamsParam

# --- Streaming responses ---


@app.get("/stream/sync")
def sync_stream():
    """Stream five plain-text chunks from a synchronous generator.

    Returns a 200 Response whose description is a generator; the framework
    sends each yielded bytes object as one chunk.
    """

    def number_generator():
        # Yield pre-encoded bytes so no implicit encoding happens downstream.
        for i in range(5):
            yield f"Chunk {i}\n".encode()

    # NOTE: diff residue duplicated this return; only one is kept.
    return Response(status_code=200, headers={"Content-Type": "text/plain"}, description=number_generator())


@app.get("/stream/async")
async def async_stream():
    """Stream five plain-text chunks from an async generator with delays.

    Each chunk is produced after a 1s asyncio.sleep to exercise the
    framework's async streaming path.
    """

    async def async_generator():
        import asyncio

        for i in range(5):
            await asyncio.sleep(1)  # Simulate async work
            yield f"Async Chunk {i}\n".encode()

    # NOTE: diff residue duplicated this return; only one is kept.
    return Response(status_code=200, headers={"Content-Type": "text/plain"}, description=async_generator())


@app.get("/stream/mixed")
async def mixed_stream():
    """Stream chunks derived from mixed source types (bytes, str, int, JSON).

    Every chunk is encoded to bytes before yielding, so the transport
    always receives bytes regardless of the source value's type.
    """

    async def mixed_generator():
        import asyncio

        # Binary data
        yield b"Binary chunk\n"
        await asyncio.sleep(0.5)

        # String data
        yield "String chunk\n".encode()
        await asyncio.sleep(0.5)

        # Integer data
        yield str(42).encode() + b"\n"
        await asyncio.sleep(0.5)

        # JSON data
        import json

        data = {"message": "JSON chunk", "number": 123}
        yield json.dumps(data).encode() + b"\n"

    # NOTE: diff residue duplicated this return; only one is kept.
    return Response(status_code=200, headers={"Content-Type": "text/plain"}, description=mixed_generator())


@app.get("/stream/events")
async def server_sent_events():
    """Stream Server-Sent Events (SSE): named events, an id field, JSON data.

    Uses the standard SSE wire format (``event:``/``id:``/``data:`` lines
    terminated by a blank line) and the conventional SSE response headers.
    """

    async def event_generator():
        import asyncio
        import json
        import time

        # Regular event
        yield f"event: message\ndata: {json.dumps({'time': time.time(), 'type': 'start'})}\n\n".encode()
        await asyncio.sleep(1)

        # Event with ID
        yield f"id: 1\nevent: update\ndata: {json.dumps({'progress': 50})}\n\n".encode()
        await asyncio.sleep(1)

        # Multiple data lines (indent=2 spreads the JSON over several lines)
        data = json.dumps({"status": "complete", "results": [1, 2, 3]}, indent=2)
        yield f"event: complete\ndata: {data}\n\n".encode()

    # NOTE: the scraped diff merged the old and new keyword lists into one
    # broken call; this is the post-commit (valid) form.
    return Response(
        status_code=200,
        headers={"Content-Type": "text/event-stream", "Cache-Control": "no-cache", "Connection": "keep-alive"},
        description=event_generator(),
    )


@app.get("/stream/large-file")
async def stream_large_file():
    """Stream a simulated 10KB binary file download in 1KB chunks.

    Sends application/octet-stream with a Content-Disposition attachment
    header so clients treat the stream as a file download.
    """

    async def file_generator():
        # Simulate streaming a large file in chunks
        chunk_size = 1024  # 1KB chunks
        total_size = 10 * chunk_size  # 10KB total

        for offset in range(0, total_size, chunk_size):
            # Simulate reading file chunk; min() guards the final partial chunk
            chunk = b"X" * min(chunk_size, total_size - offset)
            yield chunk

    # NOTE: the scraped diff duplicated headers=/description= inside this
    # call; this is the post-commit (valid) form.
    return Response(
        status_code=200,
        headers={"Content-Type": "application/octet-stream", "Content-Disposition": "attachment; filename=large-file.bin"},
        description=file_generator(),
    )


@app.get("/stream/csv")
async def stream_csv():
    """Stream a CSV download: a header row followed by five generated rows.

    Each data row is produced after a short delay to simulate incremental
    data processing; values are random ints in [1, 100].
    """

    async def csv_generator():
        # CSV header
        yield "id,name,value\n".encode()

        import asyncio
        import random

        # Generate rows
        for i in range(5):
            await asyncio.sleep(0.5)  # Simulate data processing
            row = f"{i},item-{i},{random.randint(1, 100)}\n"
            yield row.encode()

    # NOTE: diff residue duplicated this return; only one is kept.
    return Response(status_code=200, headers={"Content-Type": "text/csv", "Content-Disposition": "attachment; filename=data.csv"}, description=csv_generator())


def main():
app.set_response_header("server", "robyn")
Expand Down
27 changes: 10 additions & 17 deletions integration_tests/test_streaming_responses.py
Original file line number Diff line number Diff line change
Expand Up @@ -14,15 +14,13 @@

import json
import pytest
from robyn import Robyn
from robyn.robyn import Request
from integration_tests.base_routes import app


@pytest.mark.asyncio
async def test_sync_stream():
"""Test basic synchronous streaming response.
Verifies that:
1. Response has correct content type
2. Chunks are received in correct order
Expand All @@ -45,7 +43,7 @@ async def test_sync_stream():
@pytest.mark.asyncio
async def test_async_stream():
"""Test asynchronous streaming response.
Verifies that:
1. Response has correct content type
2. Chunks are received in correct order with delays
Expand All @@ -68,7 +66,7 @@ async def test_async_stream():
@pytest.mark.asyncio
async def test_mixed_stream():
"""Test streaming of mixed content types.
Verifies that:
1. Response handles different content types:
- Binary data
Expand All @@ -82,12 +80,7 @@ async def test_mixed_stream():
assert response.status_code == 200
assert response.headers["Content-Type"] == "text/plain"

expected = [
b"Binary chunk\n",
b"String chunk\n",
b"42\n",
json.dumps({"message": "JSON chunk", "number": 123}).encode() + b"\n"
]
expected = [b"Binary chunk\n", b"String chunk\n", b"42\n", json.dumps({"message": "JSON chunk", "number": 123}).encode() + b"\n"]

chunks = []
async for chunk in response.content:
Expand All @@ -101,7 +94,7 @@ async def test_mixed_stream():
@pytest.mark.asyncio
async def test_server_sent_events():
"""Test Server-Sent Events (SSE) streaming.
Verifies that:
1. Response has correct SSE headers
2. Events are properly formatted with:
Expand Down Expand Up @@ -143,7 +136,7 @@ async def test_server_sent_events():
@pytest.mark.asyncio
async def test_large_file_stream():
"""Test streaming of large files in chunks.
Verifies that:
1. Response has correct headers for file download
2. Content is streamed in correct chunk sizes
Expand All @@ -166,7 +159,7 @@ async def test_large_file_stream():
@pytest.mark.asyncio
async def test_csv_stream():
"""Test streaming of CSV data.
Verifies that:
1. Response has correct CSV headers
2. CSV content is properly formatted
Expand All @@ -184,11 +177,11 @@ async def test_csv_stream():

# Verify header
assert lines[0] == "id,name,value"

# Verify data rows
assert len(lines) == 6 # Header + 5 data rows
for i, line in enumerate(lines[1:], 0):
id_, name, value = line.split(',')
id_, name, value = line.split(",")
assert int(id_) == i
assert name == f"item-{i}"
assert 1 <= int(value) <= 100
assert 1 <= int(value) <= 100

0 comments on commit e01566a

Please sign in to comment.