summaryrefslogtreecommitdiff
path: root/ex/starlette_web.py
diff options
context:
space:
mode:
Diffstat (limited to 'ex/starlette_web.py')
-rw-r--r--ex/starlette_web.py63
1 file changed, 63 insertions, 0 deletions
diff --git a/ex/starlette_web.py b/ex/starlette_web.py
new file mode 100644
index 0000000..62aca91
--- /dev/null
+++ b/ex/starlette_web.py
@@ -0,0 +1,63 @@
# Port the demo server listens on.  Kept as a triple-quoted string
# (with a trailing newline) rather than an int — presumably so the value
# can be rewritten by simple text substitution; int() tolerates the
# surrounding whitespace when the port is parsed in __main__ below.
PORT="""4567
"""
import sys
import os
# Prefer a local starlette checkout over any installed copy.
sys.path.append(os.path.expanduser('~/git/starlette'))
from starlette.applications import Starlette
from starlette.responses import JSONResponse, PlainTextResponse
from starlette.routing import Route
import uvicorn

# Assigned in __main__; the /kill endpoint uses it to signal shutdown.
uvicorn_server: uvicorn.Server | None = None
+
async def homepage(request):
    """Return a minimal JSON payload for the root route."""
    payload = {'hello': 'world'}
    return JSONResponse(payload)
+
async def large_json(request):
    """
    Exercise starlette's JSON response class with a large payload
    (5000 items, each carrying a 2000-character string).
    """
    items = []
    for index in range(5000):
        items.append({"x": index, "text": "A" * 2000})
    return JSONResponse({"items": items})
+
async def upload(request):
    """
    Exercise starlette's ability to parse very large requests.

    Responds with the byte length of the request body as plain text.
    """
    body = await request.body()
    size = len(body)
    return PlainTextResponse(str(size))
+
async def crash(request):
    """
    Deliberately raise, to measure the overhead imposed by crash handlers.
    """
    message = "You've killed me!"
    raise RuntimeError(message)
+
async def kill(request):
    """
    Shut down the server by setting uvicorn's should_exit flag.

    Guards against uvicorn_server still being None — e.g. if the app is
    served by something other than the __main__ block below — so the
    endpoint degrades gracefully instead of raising AttributeError.
    """
    global uvicorn_server
    if uvicorn_server is not None:
        uvicorn_server.should_exit = True
    return PlainTextResponse("Shutting down...\n")
+
# URL table wiring each benchmark endpoint to its handler above.
routes = [
    Route("/", endpoint=homepage, methods=["GET"]),
    Route("/json", endpoint=large_json, methods=["GET"]),
    Route("/upload", endpoint=upload, methods=["POST"]),
    Route("/crash", endpoint=crash, methods=["GET"]),
    Route("/kill", endpoint=kill, methods=["GET"]),
]

# debug=False: /crash exercises the non-debug error-handling path.
app = Starlette(debug=False, routes=routes)
+
if __name__ == "__main__":
    # Serve on localhost at the PORT defined at the top of the file;
    # int() strips the triple-quoted string's trailing newline.
    # The loop option points uvicorn at a custom event-loop factory from
    # the "nemesis" project — presumably instrumentation; not stdlib.
    # TODO(review): confirm nemesis is importable in this environment.
    conf = uvicorn.Config(
        app,
        host="127.0.0.1",
        port=int(PORT),
        loop="nemesis.causal_event_loop:causal_loop_factory"
    )
    # Keep a module-global handle so the /kill endpoint can flip
    # should_exit and stop this run() call.
    uvicorn_server = uvicorn.Server(conf)
    uvicorn_server.run()