summaryrefslogtreecommitdiff
path: root/ex/starlette_web.py
blob: 62aca91d0f7cd023a7464edef1395225a4d48c66 (plain)
1
2
3
4
5
6
7
8
9
10
11
12
13
14
15
16
17
18
19
20
21
22
23
24
25
26
27
28
29
30
31
32
33
34
35
36
37
38
39
40
41
42
43
44
45
46
47
48
49
50
51
52
53
54
55
56
57
58
59
60
61
62
63
# TCP port the benchmark server listens on. Kept as a string and converted
# with int() at startup; previously this was a triple-quoted literal with a
# trailing newline ("4567\n"), which only worked because int() strips
# whitespace — a plain string is equivalent and far less surprising.
PORT = "4567"
import sys
import os
sys.path.append(os.path.expanduser('~/git/starlette'))
from starlette.applications import Starlette
from starlette.responses import JSONResponse, PlainTextResponse
from starlette.routing import Route
import uvicorn

# Global handle to the running uvicorn server. It is assigned in the
# __main__ block below and read by the /kill endpoint to request shutdown;
# it stays None when the app is imported and served by some other runner.
uvicorn_server: uvicorn.Server | None = None

async def homepage(request):
    """Return a minimal JSON greeting payload."""
    payload = {'hello': 'world'}
    return JSONResponse(payload)

async def large_json(request):
    """
    Exercise starlette's JSON response class with a large payload.

    Builds 5000 items, each carrying a 2000-character string, and hands
    the whole structure to JSONResponse for serialization.
    """
    filler = "A" * 2000
    items = []
    for i in range(5000):
        items.append({"x": i, "text": filler})
    return JSONResponse({"items": items})

async def upload(request):
    """
    Exercise starlette's parsing of very large request bodies.

    Reads the entire request body and replies with its length in bytes.
    """
    body = await request.body()
    size = len(body)
    return PlainTextResponse(str(size))

async def crash(request):
    """
    Always fail, to measure the overhead imposed by crash handlers.
    """
    message = "You've killed me!"
    raise RuntimeError(message)

async def kill(request):
    """
    Shut down the server by flagging the uvicorn instance to exit.

    Guards against `uvicorn_server` being None, which happens whenever the
    app is served by something other than this file's __main__ block
    (e.g. `uvicorn starlette_web:app`); in that case the request becomes a
    no-op instead of raising AttributeError and returning a 500.
    """
    global uvicorn_server
    if uvicorn_server is not None:
        uvicorn_server.should_exit = True
    return PlainTextResponse("Shutting down...\n")

# (path, handler, HTTP method) table, expanded into starlette Route objects.
_route_specs = [
    ("/", homepage, "GET"),
    ("/json", large_json, "GET"),
    ("/upload", upload, "POST"),
    ("/crash", crash, "GET"),
    ("/kill", kill, "GET"),
]
routes = [
    Route(path, endpoint=handler, methods=[method])
    for path, handler, method in _route_specs
]

# NOTE(review): debug=False presumably so the /crash endpoint exercises
# production-style error handling rather than debug tracebacks — confirm.
app = Starlette(debug=False, routes=routes)

if __name__ == "__main__":
    # Run the app under uvicorn on localhost, using the custom event loop
    # factory supplied by the nemesis package.
    server_config = uvicorn.Config(
        app,
        host="127.0.0.1",
        port=int(PORT),
        loop="nemesis.causal_event_loop:causal_loop_factory",
    )
    uvicorn_server = uvicorn.Server(server_config)
    uvicorn_server.run()