diff --git a/bench/falcon_plaintext.py b/bench/falcon_plaintext.py
index cf6d08b..c163756 100644
--- a/bench/falcon_plaintext.py
+++ b/bench/falcon_plaintext.py
@@ -2,6 +2,7 @@ from wsgiref.simple_server import make_server
import falcon
+
class Home:
def on_get(self, req, resp):
resp.status = falcon.HTTP_200 # This is the default status
@@ -12,18 +13,18 @@ class Home:
app = falcon.App()
home = Home()
-app.add_route('/', home)
+app.add_route("/", home)
-if __name__ == '__main__':
- with make_server('', 8000, app) as httpd:
- print('Serving on port 8000...')
+if __name__ == "__main__":
+ with make_server("", 8000, app) as httpd:
+ print("Serving on port 8000...")
# Serve until process is killed
httpd.serve_forever()
-#pypy3 -m gunicorn falcon_plaintext:app -w 4 --worker-class=gevent #recomended for pypy3
-#python3 -m gunicorn falcon_plaintext:app -w 4 #without Cython
-#pypy3 -m gunicorn falcon_plaintext:app -w 4 #without gevent
-#python3 -m gunicorn falcon_plaintext:app -w 4 --worker-class="egg:meinheld#gunicorn_worker" #with Cython
-#meinheld is buggy -> greenlet.c:566:10: error: no member named 'use_tracing' in 'struct _ts'
-#so using pip3 install git+https://github.com/idot/meinheld.git@2bfe452d6608c92688d92337c87b1dd6448f4ccb
+# pypy3 -m gunicorn falcon_plaintext:app -w 4 --worker-class=gevent #recommended for pypy3
+# python3 -m gunicorn falcon_plaintext:app -w 4 #without Cython
+# pypy3 -m gunicorn falcon_plaintext:app -w 4 #without gevent
+# python3 -m gunicorn falcon_plaintext:app -w 4 --worker-class="egg:meinheld#gunicorn_worker" #with Cython
+# meinheld is buggy -> greenlet.c:566:10: error: no member named 'use_tracing' in 'struct _ts'
+# so using pip3 install git+https://github.com/idot/meinheld.git@2bfe452d6608c92688d92337c87b1dd6448f4ccb
diff --git a/bench/robyn_plaintext.py b/bench/robyn_plaintext.py
index 0a21a64..34600c6 100644
--- a/bench/robyn_plaintext.py
+++ b/bench/robyn_plaintext.py
@@ -2,11 +2,13 @@ from robyn import Robyn
app = Robyn(__file__)
+
@app.get("/")
async def h(request):
return "Hello, world!"
+
app.start(port=8000)
# python3 ./robyn_plaintext.py --processes 4 --log-level CRITICAL
-# pypy3 did not compile
\ No newline at end of file
+# pypy3 did not compile
diff --git a/bench/socketify_plaintext.py b/bench/socketify_plaintext.py
index 06baf19..bb7e55a 100644
--- a/bench/socketify_plaintext.py
+++ b/bench/socketify_plaintext.py
@@ -3,21 +3,28 @@ import os
import multiprocessing
-
def run_app():
app = App()
app.get("/", lambda res, req: res.end("Hello, World!"))
- app.listen(8000, lambda config: print("PID %d Listening on port http://localhost:%d now\n" % (os.getpid(), config.port)))
+ app.listen(
+ 8000,
+ lambda config: print(
+ "PID %d Listening on port http://localhost:%d now\n"
+ % (os.getpid(), config.port)
+ ),
+ )
app.run()
+
def create_fork():
n = os.fork()
# n greater than 0 means parent process
if not n > 0:
run_app()
-# fork limiting the cpu count - 1
-# for i in range(1, multiprocessing.cpu_count()):
-# create_fork()
-run_app() # run app on the main process too :)
\ No newline at end of file
+# fork limiting the cpu count - 1
+for i in range(1, multiprocessing.cpu_count()):
+ create_fork()
+
+run_app() # run app on the main process too :)
diff --git a/bench/uvicorn_plaintext.py b/bench/uvicorn_plaintext.py
index 4421118..57a165c 100644
--- a/bench/uvicorn_plaintext.py
+++ b/bench/uvicorn_plaintext.py
@@ -1,17 +1,22 @@
async def app(scope, receive, send):
- assert scope['type'] == 'http'
+ assert scope["type"] == "http"
- await send({
- 'type': 'http.response.start',
- 'status': 200,
- 'headers': [
- [b'content-type', b'text/plain'],
- ],
- })
- await send({
- 'type': 'http.response.body',
- 'body': b'Hello, world!',
- })
+ await send(
+ {
+ "type": "http.response.start",
+ "status": 200,
+ "headers": [
+ [b"content-type", b"text/plain"],
+ ],
+ }
+ )
+ await send(
+ {
+ "type": "http.response.body",
+ "body": b"Hello, world!",
+ }
+ )
-#python3 -m gunicorn uvicorn_guvicorn_plaintext:app -w 1 -k uvicorn.workers.UvicornWorker
-#pypy3 -m gunicorn uvicorn_guvicorn_plaintext:app -w 1 -k uvicorn.workers.UvicornWorker
+
+# python3 -m gunicorn uvicorn_guvicorn_plaintext:app -w 1 -k uvicorn.workers.UvicornWorker
+# pypy3 -m gunicorn uvicorn_guvicorn_plaintext:app -w 1 -k uvicorn.workers.UvicornWorker
diff --git a/bench/websockets/falcon_server.py b/bench/websockets/falcon_server.py
index a45d447..de551ff 100644
--- a/bench/websockets/falcon_server.py
+++ b/bench/websockets/falcon_server.py
@@ -5,16 +5,16 @@ import asyncio
clients = set([])
remaining_clients = 16
+
async def broadcast(message):
# some clients got disconnected if we tried to to all async :/
# tasks = [ws.send_text(message) for ws in client]
# return await asyncio.wait(tasks, return_when=ALL_COMPLETED)
for ws in clients:
await ws.send_text(message)
-
+
class SomeResource:
-
async def on_get(self, req):
pass
@@ -26,7 +26,7 @@ class SomeResource:
remaining_clients = remaining_clients - 1
if remaining_clients == 0:
await broadcast("ready")
-
+
while True:
payload = await ws.receive_text()
await broadcast(payload)
@@ -35,11 +35,8 @@ class SomeResource:
clients.remove(ws)
remaining_clients = remaining_clients + 1
-
-
-
app = falcon.asgi.App()
-app.add_route('/', SomeResource())
+app.add_route("/", SomeResource())
# python3 -m gunicorn falcon_server:app -b 127.0.0.1:4001 -w 1 -k uvicorn.workers.UvicornWorker
-# pypy3 -m gunicorn falcon_server:app -b 127.0.0.1:4001 -w 1 -k uvicorn.workers.UvicornH11Worker
\ No newline at end of file
+# pypy3 -m gunicorn falcon_server:app -b 127.0.0.1:4001 -w 1 -k uvicorn.workers.UvicornH11Worker
diff --git a/bench/websockets/socketify_server.py b/bench/websockets/socketify_server.py
index c8e63d7..70d0374 100644
--- a/bench/websockets/socketify_server.py
+++ b/bench/websockets/socketify_server.py
@@ -2,37 +2,46 @@ from socketify import App, AppOptions, OpCode, CompressOptions
remaining_clients = 16
+
def ws_open(ws):
ws.subscribe("room")
global remaining_clients
remaining_clients = remaining_clients - 1
if remaining_clients == 0:
- print("All clients connected")
- print('Starting benchmark by sending "ready" message')
-
- ws.publish("room", "ready", OpCode.TEXT)
- #publish will send to everyone except it self so send to it self too
- ws.send("ready", OpCode.TEXT)
-
+ print("All clients connected")
+ print('Starting benchmark by sending "ready" message')
+
+ ws.publish("room", "ready", OpCode.TEXT)
+        # publish will send to everyone except itself so send to itself too
+ ws.send("ready", OpCode.TEXT)
+
def ws_message(ws, message, opcode):
- #publish will send to everyone except it self so send to it self too
+    # publish will send to everyone except itself so send to itself too
ws.publish("room", message, opcode)
ws.send(message, opcode)
-
+
+
def ws_close(ws, close, message):
global remaining_clients
remaining_clients = remaining_clients + 1
-app = App()
-app.ws("/*", {
- 'compression': CompressOptions.DISABLED,
- 'max_payload_length': 16 * 1024 * 1024,
- 'idle_timeout': 60,
- 'open': ws_open,
- 'message': ws_message,
- 'close': ws_close
-})
-app.any("/", lambda res,req: res.end("Nothing to see here!'"))
-app.listen(4001, lambda config: print("Listening on port http://localhost:%d now\n" % (config.port)))
-app.run()
\ No newline at end of file
+
+app = App()
+app.ws(
+ "/*",
+ {
+ "compression": CompressOptions.DISABLED,
+ "max_payload_length": 16 * 1024 * 1024,
+ "idle_timeout": 60,
+ "open": ws_open,
+ "message": ws_message,
+ "close": ws_close,
+ },
+)
+app.any("/", lambda res, req: res.end("Nothing to see here!'"))
+app.listen(
+ 4001,
+ lambda config: print("Listening on port http://localhost:%d now\n" % (config.port)),
+)
+app.run()
diff --git a/examples/async.py b/examples/async.py
index 65f5b91..e366415 100644
--- a/examples/async.py
+++ b/examples/async.py
@@ -3,35 +3,45 @@ import asyncio
app = App()
+
async def delayed_hello(delay, res):
- await asyncio.sleep(delay) #do something async
+ await asyncio.sleep(delay) # do something async
res.cork_end("Hello with delay!")
+
def home(res, req):
- #request object only lives during the life time of this call
- #get parameters, query, headers anything you need here
+ # request object only lives during the life time of this call
+ # get parameters, query, headers anything you need here
delay = req.get_query("delay")
delay = 0 if delay == None else float(delay)
- #tell response to run this in the event loop
- #abort handler is grabed here, so responses only will be send if res.aborted == False
+ # tell response to run this in the event loop
+    # abort handler is grabbed here, so responses only will be sent if res.aborted == False
res.run_async(delayed_hello(delay, res))
+
async def json(res, req):
- #request object only lives during the life time of this call
- #get parameters, query, headers anything you need here before first await :)
+ # request object only lives during the life time of this call
+ # get parameters, query, headers anything you need here before first await :)
user_agent = req.get_header("user-agent")
- #req maybe will not be available in direct attached async functions after await
- await asyncio.sleep(2) #do something async
-
- res.cork_end({ "message": "I'm delayed!", "user-agent": user_agent})
+ # req maybe will not be available in direct attached async functions after await
+ await asyncio.sleep(2) # do something async
+
+ res.cork_end({"message": "I'm delayed!", "user-agent": user_agent})
+
def not_found(res, req):
res.write_status(404).end("Not Found")
+
app.get("/", home)
app.get("/json", json)
app.any("/*", not_found)
-app.listen(3000, lambda config: print("Listening on port http://localhost:%s now\n" % str(config.port)))
+app.listen(
+ 3000,
+ lambda config: print(
+ "Listening on port http://localhost:%s now\n" % str(config.port)
+ ),
+)
-app.run()
\ No newline at end of file
+app.run()
diff --git a/examples/automatic_port_selection.py b/examples/automatic_port_selection.py
index 4145920..a90d737 100644
--- a/examples/automatic_port_selection.py
+++ b/examples/automatic_port_selection.py
@@ -2,5 +2,7 @@ from socketify import App
app = App()
app.get("/", lambda res, req: res.end("Hello World socketify from Python!"))
-app.listen(0, lambda config: print("Listening on port http://localhost:%d now\n" % config.port))
-app.run()
\ No newline at end of file
+app.listen(
+ 0, lambda config: print("Listening on port http://localhost:%d now\n" % config.port)
+)
+app.run()
diff --git a/examples/backpressure.py b/examples/backpressure.py
index 609b46d..c5a18d0 100644
--- a/examples/backpressure.py
+++ b/examples/backpressure.py
@@ -1,39 +1,48 @@
from socketify import App, AppOptions, OpCode, CompressOptions
-#Number between ok and not ok
+# Number between ok and not ok
backpressure = 1024
# Used for statistics
messages = 0
message_number = 0
+
def ws_open(ws):
- print('A WebSocket got connected!')
- # We begin our example by sending until we have backpressure
+ print("A WebSocket got connected!")
+ # We begin our example by sending until we have backpressure
global message_number
global messages
- while (ws.get_buffered_amount() < backpressure):
+ while ws.get_buffered_amount() < backpressure:
ws.send("This is a message, let's call it %i" % message_number)
message_number = message_number + 1
messages = messages + 1
+
def ws_drain(ws):
- # Continue sending when we have drained (some)
+ # Continue sending when we have drained (some)
global message_number
global messages
- while (ws.get_buffered_amount() < backpressure):
+ while ws.get_buffered_amount() < backpressure:
ws.send("This is a message, let's call it %i" % message_number)
message_number = message_number + 1
messages = messages + 1
-app = App()
-app.ws("/*", {
- 'compression': CompressOptions.DISABLED,
- 'max_payload_length': 16 * 1024 * 1024,
- 'idle_timeout': 60,
- 'open': ws_open,
- 'drain': ws_drain
-})
-app.any("/", lambda res,req: res.end("Nothing to see here!"))
-app.listen(3000, lambda config: print("Listening on port http://localhost:%d now\n" % (config.port)))
-app.run()
\ No newline at end of file
+
+app = App()
+app.ws(
+ "/*",
+ {
+ "compression": CompressOptions.DISABLED,
+ "max_payload_length": 16 * 1024 * 1024,
+ "idle_timeout": 60,
+ "open": ws_open,
+ "drain": ws_drain,
+ },
+)
+app.any("/", lambda res, req: res.end("Nothing to see here!"))
+app.listen(
+ 3000,
+ lambda config: print("Listening on port http://localhost:%d now\n" % (config.port)),
+)
+app.run()
diff --git a/examples/broadcast.py b/examples/broadcast.py
index 123ddc1..d04e4fc 100644
--- a/examples/broadcast.py
+++ b/examples/broadcast.py
@@ -1,26 +1,35 @@
from socketify import App, AppOptions, OpCode, CompressOptions
+
def ws_open(ws):
- print('A WebSocket got connected!')
- #Let this client listen to topic "broadcast"
- ws.subscribe('broadcast')
+ print("A WebSocket got connected!")
+ # Let this client listen to topic "broadcast"
+ ws.subscribe("broadcast")
+
def ws_message(ws, message, opcode):
- #Ok is false if backpressure was built up, wait for drain
+ # Ok is false if backpressure was built up, wait for drain
ok = ws.send(message, opcode)
- #Broadcast this message
- ws.publish('broadcast', message, opcode)
-
-app = App()
-app.ws("/*", {
- 'compression': CompressOptions.SHARED_COMPRESSOR,
- 'max_payload_length': 16 * 1024 * 1024,
- 'idle_timeout': 12,
- 'open': ws_open,
- 'message': ws_message,
- # The library guarantees proper unsubscription at close
- 'close': lambda ws, code, message: print('WebSocket closed')
-})
-app.any("/", lambda res,req: res.end("Nothing to see here!"))
-app.listen(3000, lambda config: print("Listening on port http://localhost:%d now\n" % (config.port)))
-app.run()
\ No newline at end of file
+ # Broadcast this message
+ ws.publish("broadcast", message, opcode)
+
+
+app = App()
+app.ws(
+ "/*",
+ {
+ "compression": CompressOptions.SHARED_COMPRESSOR,
+ "max_payload_length": 16 * 1024 * 1024,
+ "idle_timeout": 12,
+ "open": ws_open,
+ "message": ws_message,
+ # The library guarantees proper unsubscription at close
+ "close": lambda ws, code, message: print("WebSocket closed"),
+ },
+)
+app.any("/", lambda res, req: res.end("Nothing to see here!"))
+app.listen(
+ 3000,
+ lambda config: print("Listening on port http://localhost:%d now\n" % (config.port)),
+)
+app.run()
diff --git a/examples/docker/pypy3/main.py b/examples/docker/pypy3/main.py
index 80d037c..234d649 100644
--- a/examples/docker/pypy3/main.py
+++ b/examples/docker/pypy3/main.py
@@ -2,5 +2,8 @@ from socketify import App
app = App()
app.get("/", lambda res, req: res.end("Hello World socketify from Python!"))
-app.listen(3000, lambda config: print("Listening on port http://localhost:%d now\n" % config.port))
-app.run()
\ No newline at end of file
+app.listen(
+ 3000,
+ lambda config: print("Listening on port http://localhost:%d now\n" % config.port),
+)
+app.run()
diff --git a/examples/docker/python3-alpine/main.py b/examples/docker/python3-alpine/main.py
index 80d037c..234d649 100644
--- a/examples/docker/python3-alpine/main.py
+++ b/examples/docker/python3-alpine/main.py
@@ -2,5 +2,8 @@ from socketify import App
app = App()
app.get("/", lambda res, req: res.end("Hello World socketify from Python!"))
-app.listen(3000, lambda config: print("Listening on port http://localhost:%d now\n" % config.port))
-app.run()
\ No newline at end of file
+app.listen(
+ 3000,
+ lambda config: print("Listening on port http://localhost:%d now\n" % config.port),
+)
+app.run()
diff --git a/examples/docker/python3-arch/main.py b/examples/docker/python3-arch/main.py
index 80d037c..234d649 100644
--- a/examples/docker/python3-arch/main.py
+++ b/examples/docker/python3-arch/main.py
@@ -2,5 +2,8 @@ from socketify import App
app = App()
app.get("/", lambda res, req: res.end("Hello World socketify from Python!"))
-app.listen(3000, lambda config: print("Listening on port http://localhost:%d now\n" % config.port))
-app.run()
\ No newline at end of file
+app.listen(
+ 3000,
+ lambda config: print("Listening on port http://localhost:%d now\n" % config.port),
+)
+app.run()
diff --git a/examples/docker/python3/main.py b/examples/docker/python3/main.py
index 80d037c..234d649 100644
--- a/examples/docker/python3/main.py
+++ b/examples/docker/python3/main.py
@@ -2,5 +2,8 @@ from socketify import App
app = App()
app.get("/", lambda res, req: res.end("Hello World socketify from Python!"))
-app.listen(3000, lambda config: print("Listening on port http://localhost:%d now\n" % config.port))
-app.run()
\ No newline at end of file
+app.listen(
+ 3000,
+ lambda config: print("Listening on port http://localhost:%d now\n" % config.port),
+)
+app.run()
diff --git a/examples/error_handler.py b/examples/error_handler.py
index 57843d6..71b7936 100644
--- a/examples/error_handler.py
+++ b/examples/error_handler.py
@@ -3,27 +3,36 @@ import asyncio
app = App()
+
def xablau(res, req):
raise RuntimeError("Xablau!")
+
async def async_xablau(res, req):
await asyncio.sleep(1)
raise RuntimeError("Async Xablau!")
-#this can be async no problems
-def on_error(error, res, req):
- #here you can log properly the error and do a pretty response to your clients
+
+# this can be async no problems
+def on_error(error, res, req):
+ # here you can log properly the error and do a pretty response to your clients
print("Somethind goes %s" % str(error))
- #response and request can be None if the error is in an async function
+ # response and request can be None if the error is in an async function
if res != None:
- #if response exists try to send something
+ # if response exists try to send something
res.write_status(500)
res.end("Sorry we did something wrong")
+
app.get("/", xablau)
app.get("/async", async_xablau)
app.set_error_handler(on_error)
-app.listen(3000, lambda config: print("Listening on port http://localhost:%s now\n" % str(config.port)))
-app.run()
\ No newline at end of file
+app.listen(
+ 3000,
+ lambda config: print(
+ "Listening on port http://localhost:%s now\n" % str(config.port)
+ ),
+)
+app.run()
diff --git a/examples/file_stream.py b/examples/file_stream.py
index 25218fc..36d6f06 100644
--- a/examples/file_stream.py
+++ b/examples/file_stream.py
@@ -7,51 +7,54 @@ from os import path
mimetypes.init()
+
async def home(res, req):
- #this is just an implementation example see static_files.py example for use of sendfile and app.static usage
- #there is an static_aiofile.py helper and static.aiofiles helper using async implementation of this
- #asyncio with IO is really slow so, we will implement "aiofile" using libuv inside socketify in future
+    # this is just an implementation example; see static_files.py for use of sendfile and app.static usage
+    # there is a static_aiofile.py helper and a static.aiofiles helper using an async implementation of this
+    # asyncio with IO is really slow, so we will implement "aiofile" using libuv inside socketify in the future
filename = "./public/media/flower.webm"
- #read headers before the first await
- if_modified_since = req.get_header('if-modified-since')
- range_header = req.get_header('range')
+ # read headers before the first await
+ if_modified_since = req.get_header("if-modified-since")
+ range_header = req.get_header("range")
bytes_range = None
start = 0
end = -1
- #parse range header
+ # parse range header
if range_header:
- bytes_range = range_header.replace("bytes=", '').split('-')
+ bytes_range = range_header.replace("bytes=", "").split("-")
start = int(bytes_range[0])
if bytes_range[1]:
end = int(bytes_range[1])
try:
exists = path.exists(filename)
- #not found
+ # not found
if not exists:
- return res.write_status(404).end(b'Not Found')
+ return res.write_status(404).end(b"Not Found")
- #get size and last modified date
+ # get size and last modified date
stats = os.stat(filename)
total_size = stats.st_size
size = total_size
- last_modified = time.strftime('%a, %d %b %Y %H:%M:%S GMT', time.gmtime(stats.st_mtime))
-
- #check if modified since is provided
+ last_modified = time.strftime(
+ "%a, %d %b %Y %H:%M:%S GMT", time.gmtime(stats.st_mtime)
+ )
+
+ # check if modified since is provided
if if_modified_since == last_modified:
return res.write_status(304).end_without_body()
- #tells the broswer the last modified date
- res.write_header(b'Last-Modified', last_modified)
+    # tells the browser the last modified date
+ res.write_header(b"Last-Modified", last_modified)
- #add content type
+ # add content type
(content_type, encoding) = mimetypes.guess_type(filename, strict=True)
if content_type and encoding:
- res.write_header(b'Content-Type', '%s; %s' % (content_type, encoding))
+ res.write_header(b"Content-Type", "%s; %s" % (content_type, encoding))
elif content_type:
- res.write_header(b'Content-Type', content_type)
-
+ res.write_header(b"Content-Type", content_type)
+
with open(filename, "rb") as fd:
- #check range and support it
+ # check range and support it
if start > 0 or not end == -1:
if end < 0 or end >= size:
end = size - 1
@@ -63,26 +66,32 @@ async def home(res, req):
else:
end = size - 1
res.write_status(200)
-
- #tells the browser that we support range
- res.write_header(b'Accept-Ranges', b'bytes')
- res.write_header(b'Content-Range', 'bytes %d-%d/%d' % (start, end, total_size))
+
+ # tells the browser that we support range
+ res.write_header(b"Accept-Ranges", b"bytes")
+ res.write_header(
+ b"Content-Range", "bytes %d-%d/%d" % (start, end, total_size)
+ )
pending_size = size
- #keep sending until abort or done
+ # keep sending until abort or done
while not res.aborted:
- chunk_size = 16384 #16kb chunks
+ chunk_size = 16384 # 16kb chunks
if chunk_size > pending_size:
chunk_size = pending_size
buffer = fd.read(chunk_size)
pending_size = pending_size - chunk_size
(ok, done) = await res.send_chunk(buffer, size)
- if not ok or done: #if cannot send probably aborted
+ if not ok or done: # if cannot send probably aborted
break
except Exception as error:
- res.write_status(500).end("Internal Error")
+ res.write_status(500).end("Internal Error")
+
app = App()
app.get("/", home)
-app.listen(3000, lambda config: print("Listening on port http://localhost:%d now\n" % config.port))
-app.run()
\ No newline at end of file
+app.listen(
+ 3000,
+ lambda config: print("Listening on port http://localhost:%d now\n" % config.port),
+)
+app.run()
diff --git a/examples/forks.py b/examples/forks.py
index 3007a3e..4d26d99 100644
--- a/examples/forks.py
+++ b/examples/forks.py
@@ -2,20 +2,29 @@ from socketify import App
import os
import multiprocessing
+
def run_app():
app = App()
app.get("/", lambda res, req: res.end("Hello, World!"))
- app.listen(3000, lambda config: print("PID %d Listening on port http://localhost:%d now\n" % (os.getpid(), config.port)))
+ app.listen(
+ 3000,
+ lambda config: print(
+ "PID %d Listening on port http://localhost:%d now\n"
+ % (os.getpid(), config.port)
+ ),
+ )
app.run()
+
def create_fork():
n = os.fork()
# n greater than 0 means parent process
if not n > 0:
run_app()
+
# fork limiting the cpu count - 1
for i in range(1, multiprocessing.cpu_count()):
create_fork()
-run_app() # run app on the main process too :)
\ No newline at end of file
+run_app() # run app on the main process too :)
diff --git a/examples/graceful_shutdown.py b/examples/graceful_shutdown.py
index bdd529d..c271a30 100644
--- a/examples/graceful_shutdown.py
+++ b/examples/graceful_shutdown.py
@@ -2,14 +2,21 @@ from socketify import App, AppOptions, AppListenOptions
app = App()
+
def shutdown(res, req):
res.end("Good bye!")
app.close()
-
+
+
app.get("/", lambda res, req: res.end("Hello!"))
app.get("/shutdown", shutdown)
-app.listen(3000, lambda config: print("Listening on port http://localhost:%s now\n" % str(config.port)))
+app.listen(
+ 3000,
+ lambda config: print(
+ "Listening on port http://localhost:%s now\n" % str(config.port)
+ ),
+)
app.run()
-print("App Closed!")
\ No newline at end of file
+print("App Closed!")
diff --git a/examples/graphiql.py b/examples/graphiql.py
index 2ce64d0..1295a8d 100644
--- a/examples/graphiql.py
+++ b/examples/graphiql.py
@@ -1,4 +1,3 @@
-
import dataclasses
import strawberry
import strawberry.utils.graphiql
@@ -7,10 +6,12 @@ from socketify import App
from typing import List, Optional
from helpers.graphiql import graphiql_from
+
@strawberry.type
class User:
name: str
+
@strawberry.type
class Query:
@strawberry.field
@@ -24,5 +25,8 @@ app.get("/", lambda res, req: res.end(strawberry.utils.graphiql.get_graphiql_htm
app.post("/", graphiql_from(Query))
# you can also pass an Mutation as second parameter
# app.post("/", graphiql_from(Query, Mutation))
-app.listen(3000, lambda config: print("Listening on port http://localhost:%d now\n" % config.port))
-app.run()
\ No newline at end of file
+app.listen(
+ 3000,
+ lambda config: print("Listening on port http://localhost:%d now\n" % config.port),
+)
+app.run()
diff --git a/examples/graphiql_raw.py b/examples/graphiql_raw.py
index cbb630d..768bd8e 100644
--- a/examples/graphiql_raw.py
+++ b/examples/graphiql_raw.py
@@ -1,4 +1,3 @@
-
import dataclasses
import strawberry
import strawberry.utils.graphiql
@@ -6,10 +5,12 @@ import strawberry.utils.graphiql
from socketify import App
from typing import List, Optional
+
@strawberry.type
class User:
name: str
+
@strawberry.type
class Query:
@strawberry.field
@@ -23,10 +24,10 @@ schema = strawberry.Schema(Query)
async def graphiql_post(res, req):
# we can pass whatever we want to context, query, headers or params, cookies etc
context_value = req.preserve()
-
+
# get all incomming data and parses as json
body = await res.get_json()
-
+
query = body["query"]
variables = body.get("variables", None)
root_value = body.get("root_value", None)
@@ -40,14 +41,20 @@ async def graphiql_post(res, req):
operation_name,
)
- res.cork_end({
- "data": ( data.data ),
- **({"errors": data.errors} if data.errors else {}),
- **({"extensions": data.extensions} if data.extensions else {})
- })
+ res.cork_end(
+ {
+ "data": (data.data),
+ **({"errors": data.errors} if data.errors else {}),
+ **({"extensions": data.extensions} if data.extensions else {}),
+ }
+ )
+
app = App()
app.get("/", lambda res, req: res.end(strawberry.utils.graphiql.get_graphiql_html()))
app.post("/", graphiql_post)
-app.listen(3000, lambda config: print("Listening on port http://localhost:%d now\n" % config.port))
-app.run()
\ No newline at end of file
+app.listen(
+ 3000,
+ lambda config: print("Listening on port http://localhost:%d now\n" % config.port),
+)
+app.run()
diff --git a/examples/hello_world.py b/examples/hello_world.py
index 693bb01..58dee24 100644
--- a/examples/hello_world.py
+++ b/examples/hello_world.py
@@ -2,5 +2,8 @@ from socketify import App
app = App()
app.get("/", lambda res, req: res.end("Hello World!"))
-app.listen(3000, lambda config: print("Listening on port http://localhost:%d now\n" % config.port))
-app.run()
\ No newline at end of file
+app.listen(
+ 3000,
+ lambda config: print("Listening on port http://localhost:%d now\n" % config.port),
+)
+app.run()
diff --git a/examples/helpers/graphiql.py b/examples/helpers/graphiql.py
index bce52eb..74aabcd 100644
--- a/examples/helpers/graphiql.py
+++ b/examples/helpers/graphiql.py
@@ -1,6 +1,7 @@
import strawberry
import strawberry.utils.graphiql
+
def graphiql_from(Query, Mutation=None):
if Mutation:
schema = strawberry.Schema(query=Query, mutation=Mutation)
@@ -9,10 +10,10 @@ def graphiql_from(Query, Mutation=None):
async def post(res, req):
# we can pass whatever we want to context, query, headers or params, cookies etc
- context_value = {
- "query": req.get_queries(),
- "headers": req.get_headers(),
- "params": req.get_parameters()
+ context_value = {
+ "query": req.get_queries(),
+ "headers": req.get_headers(),
+ "params": req.get_parameters(),
}
# get all incomming data and parses as json
@@ -31,9 +32,12 @@ def graphiql_from(Query, Mutation=None):
operation_name,
)
- res.cork_end({
- "data": ( data.data ),
- **({"errors": data.errors} if data.errors else {}),
- **({"extensions": data.extensions} if data.extensions else {})
- })
- return post
\ No newline at end of file
+ res.cork_end(
+ {
+ "data": (data.data),
+ **({"errors": data.errors} if data.errors else {}),
+ **({"extensions": data.extensions} if data.extensions else {}),
+ }
+ )
+
+ return post
diff --git a/examples/helpers/memory_cache.py b/examples/helpers/memory_cache.py
index dac6dc1..474d8b4 100644
--- a/examples/helpers/memory_cache.py
+++ b/examples/helpers/memory_cache.py
@@ -1,19 +1,22 @@
import datetime
+
class MemoryCacheItem:
def __init__(self, expires, value):
self.expires = datetime.datetime.utcnow().timestamp() + expires
self.value = value
+
def is_expired(self):
return datetime.datetime.utcnow().timestamp() > self.expires
+
class MemoryCache:
def __init__(self):
self.cache = {}
def setex(self, key, expires, value):
self.cache[key] = MemoryCacheItem(expires, value)
-
+
def get(self, key):
try:
cache = self.cache[key]
@@ -21,4 +24,4 @@ class MemoryCache:
return None
return cache.value
except KeyError:
- return None
\ No newline at end of file
+ return None
diff --git a/examples/helpers/static_aiofile.py b/examples/helpers/static_aiofile.py
index 9d4195d..4038696 100644
--- a/examples/helpers/static_aiofile.py
+++ b/examples/helpers/static_aiofile.py
@@ -7,45 +7,47 @@ from os import path
mimetypes.init()
# In production we highly recomend to use CDN like CloudFlare or/and NGINX or similar for static files
async def sendfile(res, req, filename):
- #read headers before the first await
- if_modified_since = req.get_header('if-modified-since')
- range_header = req.get_header('range')
+ # read headers before the first await
+ if_modified_since = req.get_header("if-modified-since")
+ range_header = req.get_header("range")
bytes_range = None
start = 0
end = -1
- #parse range header
+ # parse range header
if range_header:
- bytes_range = range_header.replace("bytes=", '').split('-')
+ bytes_range = range_header.replace("bytes=", "").split("-")
start = int(bytes_range[0])
if bytes_range[1]:
end = int(bytes_range[1])
try:
exists = path.exists(filename)
- #not found
+ # not found
if not exists:
- return res.write_status(404).end(b'Not Found')
+ return res.write_status(404).end(b"Not Found")
- #get size and last modified date
+ # get size and last modified date
stats = os.stat(filename)
total_size = stats.st_size
size = total_size
- last_modified = time.strftime('%a, %d %b %Y %H:%M:%S GMT', time.gmtime(stats.st_mtime))
-
- #check if modified since is provided
+ last_modified = time.strftime(
+ "%a, %d %b %Y %H:%M:%S GMT", time.gmtime(stats.st_mtime)
+ )
+
+ # check if modified since is provided
if if_modified_since == last_modified:
return res.write_status(304).end_without_body()
- #tells the broswer the last modified date
- res.write_header(b'Last-Modified', last_modified)
+    # tells the browser the last modified date
+ res.write_header(b"Last-Modified", last_modified)
- #add content type
+ # add content type
(content_type, encoding) = mimetypes.guess_type(filename, strict=True)
if content_type and encoding:
- res.write_header(b'Content-Type', '%s; %s' % (content_type, encoding))
+ res.write_header(b"Content-Type", "%s; %s" % (content_type, encoding))
elif content_type:
- res.write_header(b'Content-Type', content_type)
-
+ res.write_header(b"Content-Type", content_type)
+
async with async_open(filename, "rb") as fd:
- #check range and support it
+ # check range and support it
if start > 0 or not end == -1:
if end < 0 or end >= size:
end = size - 1
@@ -57,33 +59,34 @@ async def sendfile(res, req, filename):
else:
end = size - 1
res.write_status(200)
-
- #tells the browser that we support range
- #TODO: FIX BYTE RANGE IN ASYNC
- # res.write_header(b'Accept-Ranges', b'bytes')
+
+ # tells the browser that we support range
+ # TODO: FIX BYTE RANGE IN ASYNC
+ # res.write_header(b'Accept-Ranges', b'bytes')
# res.write_header(b'Content-Range', 'bytes %d-%d/%d' % (start, end, total_size))
-
+
pending_size = size
- #keep sending until abort or done
+ # keep sending until abort or done
while not res.aborted:
- chunk_size = 16384 #16kb chunks
+ chunk_size = 16384 # 16kb chunks
if chunk_size > pending_size:
chunk_size = pending_size
buffer = await fd.read(chunk_size)
pending_size = pending_size - chunk_size
(ok, done) = await res.send_chunk(buffer, size)
- if not ok or done: #if cannot send probably aborted
+ if not ok or done: # if cannot send probably aborted
break
except Exception as error:
- res.write_status(500).end("Internal Error")
+ res.write_status(500).end("Internal Error")
+
def in_directory(file, directory):
- #make both absolute
- directory = path.join(path.realpath(directory), '')
+ # make both absolute
+ directory = path.join(path.realpath(directory), "")
file = path.realpath(file)
- #return true, if the common prefix of both is equal to directory
- #e.g. /a/b/c/d.rst and directory is /a/b, the common prefix is /a/b
+ # return true, if the common prefix of both is equal to directory
+ # e.g. /a/b/c/d.rst and directory is /a/b, the common prefix is /a/b
return path.commonprefix([file, directory]) == directory
@@ -91,7 +94,7 @@ def static_route(app, route, directory):
def route_handler(res, req):
url = req.get_url()
res.grab_aborted_handler()
- url = url[len(route)::]
+ url = url[len(route) : :]
if url.startswith("/"):
url = url[1::]
filename = path.join(path.realpath(directory), url)
@@ -100,6 +103,7 @@ def static_route(app, route, directory):
res.write_status(404).end_without_body()
return
res.run_async(sendfile(res, req, filename))
+
if route.startswith("/"):
route = route[1::]
- app.get("%s/*" % route, route_handler)
\ No newline at end of file
+ app.get("%s/*" % route, route_handler)
diff --git a/examples/helpers/static_aiofiles.py b/examples/helpers/static_aiofiles.py
index 884f8f6..ea75d15 100644
--- a/examples/helpers/static_aiofiles.py
+++ b/examples/helpers/static_aiofiles.py
@@ -7,45 +7,47 @@ from os import path
mimetypes.init()
# In production we highly recomend to use CDN like CloudFlare or/and NGINX or similar for static files
async def sendfile(res, req, filename):
- #read headers before the first await
- if_modified_since = req.get_header('if-modified-since')
- range_header = req.get_header('range')
+ # read headers before the first await
+ if_modified_since = req.get_header("if-modified-since")
+ range_header = req.get_header("range")
bytes_range = None
start = 0
end = -1
- #parse range header
+ # parse range header
if range_header:
- bytes_range = range_header.replace("bytes=", '').split('-')
+ bytes_range = range_header.replace("bytes=", "").split("-")
start = int(bytes_range[0])
if bytes_range[1]:
end = int(bytes_range[1])
try:
exists = await os.path.exists(filename)
- #not found
+ # not found
if not exists:
- return res.write_status(404).end(b'Not Found')
+ return res.write_status(404).end(b"Not Found")
- #get size and last modified date
+ # get size and last modified date
stats = await os.stat(filename)
total_size = stats.st_size
size = total_size
- last_modified = time.strftime('%a, %d %b %Y %H:%M:%S GMT', time.gmtime(stats.st_mtime))
-
- #check if modified since is provided
+ last_modified = time.strftime(
+ "%a, %d %b %Y %H:%M:%S GMT", time.gmtime(stats.st_mtime)
+ )
+
+ # check if modified since is provided
if if_modified_since == last_modified:
return res.write_status(304).end_without_body()
- #tells the broswer the last modified date
- res.write_header(b'Last-Modified', last_modified)
+ # tells the browser the last modified date
+ res.write_header(b"Last-Modified", last_modified)
- #add content type
+ # add content type
(content_type, encoding) = mimetypes.guess_type(filename, strict=True)
if content_type and encoding:
- res.write_header(b'Content-Type', '%s; %s' % (content_type, encoding))
+ res.write_header(b"Content-Type", "%s; %s" % (content_type, encoding))
elif content_type:
- res.write_header(b'Content-Type', content_type)
-
+ res.write_header(b"Content-Type", content_type)
+
async with aiofiles.open(filename, "rb") as fd:
- #check range and support it
+ # check range and support it
if start > 0 or not end == -1:
if end < 0 or end >= size:
end = size - 1
@@ -57,33 +59,34 @@ async def sendfile(res, req, filename):
else:
end = size - 1
res.write_status(200)
-
- #tells the browser that we support range
- #TODO: FIX BYTE RANGE IN ASYNC
- # res.write_header(b'Accept-Ranges', b'bytes')
+
+ # tells the browser that we support range
+ # TODO: FIX BYTE RANGE IN ASYNC
+ # res.write_header(b'Accept-Ranges', b'bytes')
# res.write_header(b'Content-Range', 'bytes %d-%d/%d' % (start, end, total_size))
-
+
pending_size = size
- #keep sending until abort or done
+ # keep sending until abort or done
while not res.aborted:
- chunk_size = 16384 #16kb chunks
+ chunk_size = 16384 # 16kb chunks
if chunk_size > pending_size:
chunk_size = pending_size
buffer = await fd.read(chunk_size)
pending_size = pending_size - chunk_size
(ok, done) = await res.send_chunk(buffer, size)
- if not ok or done: #if cannot send probably aborted
+ if not ok or done: # if cannot send probably aborted
break
except Exception as error:
- res.write_status(500).end("Internal Error")
+ res.write_status(500).end("Internal Error")
+
def in_directory(file, directory):
- #make both absolute
- directory = path.join(path.realpath(directory), '')
+ # make both absolute
+ directory = path.join(path.realpath(directory), "")
file = path.realpath(file)
- #return true, if the common prefix of both is equal to directory
- #e.g. /a/b/c/d.rst and directory is /a/b, the common prefix is /a/b
+ # return true, if the common prefix of both is equal to directory
+ # e.g. /a/b/c/d.rst and directory is /a/b, the common prefix is /a/b
return path.commonprefix([file, directory]) == directory
@@ -91,7 +94,7 @@ def static_route(app, route, directory):
def route_handler(res, req):
url = req.get_url()
res.grab_aborted_handler()
- url = url[len(route)::]
+ url = url[len(route) : :]
if url.startswith("/"):
url = url[1::]
filename = path.join(path.realpath(directory), url)
@@ -100,6 +103,7 @@ def static_route(app, route, directory):
res.write_status(404).end_without_body()
return
res.run_async(sendfile(res, req, filename))
+
if route.startswith("/"):
route = route[1::]
- app.get("%s/*" % route, route_handler)
\ No newline at end of file
+ app.get("%s/*" % route, route_handler)
diff --git a/examples/helpers/templates.py b/examples/helpers/templates.py
index 698069d..71d195f 100644
--- a/examples/helpers/templates.py
+++ b/examples/helpers/templates.py
@@ -1,4 +1,4 @@
-#Simple example of mako and jinja2 template plugin for socketify.py
+# Simple example of mako and jinja2 template plugin for socketify.py
from mako.template import Template
from mako.lookup import TemplateLookup
from mako import exceptions
@@ -6,11 +6,14 @@ from mako import exceptions
from jinja2 import Environment, FileSystemLoader
-class Jinja2Template:
- def __init__(self, searchpath, encoding='utf-8', followlinks=False):
- self.env = Environment(loader=FileSystemLoader(searchpath, encoding, followlinks))
- #You can also add caching and logging strategy here if you want ;)
+class Jinja2Template:
+ def __init__(self, searchpath, encoding="utf-8", followlinks=False):
+ self.env = Environment(
+ loader=FileSystemLoader(searchpath, encoding, followlinks)
+ )
+
+ # You can also add caching and logging strategy here if you want ;)
def render(self, templatename, **kwargs):
try:
template = self.env.get_template(templatename)
@@ -18,14 +21,15 @@ class Jinja2Template:
except Exception as err:
return str(err)
+
class MakoTemplate:
def __init__(self, **options):
self.lookup = TemplateLookup(**options)
- #You can also add caching and logging strategy here if you want ;)
+ # You can also add caching and logging strategy here if you want ;)
def render(self, templatename, **kwargs):
try:
template = self.lookup.get_template(templatename)
return template.render(**kwargs)
except Exception as err:
- return exceptions.html_error_template().render()
\ No newline at end of file
+ return exceptions.html_error_template().render()
diff --git a/examples/helpers/twolevel_cache.py b/examples/helpers/twolevel_cache.py
index 43aa279..da89007 100644
--- a/examples/helpers/twolevel_cache.py
+++ b/examples/helpers/twolevel_cache.py
@@ -3,16 +3,18 @@ from .memory_cache import MemoryCache
# 2 LEVEL CACHE (Redis to share amoung worker, Memory to be much faster)
class TwoLevelCache:
- def __init__(self, redis_conection, memory_expiration_time=3, redis_expiration_time=10):
+ def __init__(
+ self, redis_conection, memory_expiration_time=3, redis_expiration_time=10
+ ):
self.memory_cache = MemoryCache()
self.redis_conection = redis_conection
self.memory_expiration_time = memory_expiration_time
self.redis_expiration_time = redis_expiration_time
- #set cache to redis and memory
+ # set cache to redis and memory
def set(self, key, data):
try:
- #never cache invalid data
+ # never cache invalid data
if data == None:
return False
self.redis_conection.setex(key, self.redis_expiration_time, data)
@@ -21,32 +23,32 @@ class TwoLevelCache:
except Exception as err:
print(err)
return False
-
+
def get(self, key):
try:
value = self.memory_cache.get(key)
if value != None:
return value
- #no memory cache so, got to redis
+ # no memory cache so, got to redis
value = self.redis_conection.get(key)
if value != None:
- #refresh memory cache to speed up
+ # refresh memory cache to speed up
self.memory_cache.setex(key, self.memory_expiration_time, data)
return value
except Exception as err:
return None
- #if more than 1 worker/request try to do this request, only one will call the Model and the others will get from cache
+ # if more than 1 worker/request try to do this request, only one will call the Model and the others will get from cache
async def run_once(self, key, timeout, executor, *args):
result = None
try:
lock = self.redis_conection.lock(f"lock-{key}", blocking_timeout=timeout)
- #wait lock (some request is yeat not finish)
+ # wait for the lock (some request has not finished yet)
while lock.locked():
await asyncio.sleep(0)
try:
lock.acquire(blocking=False)
- #always check cache first
+ # always check cache first
cached = self.get(key)
if cached != None:
return cached
@@ -59,12 +61,12 @@ class TwoLevelCache:
finally:
lock.release()
except Exception as err:
- #cannot even create or release the lock
+ # cannot even create or release the lock
pass
finally:
- #if result is None, try cache one last time
+ # if result is None, try cache one last time
if result == None:
cache = self.get(key)
if cache != None:
return cache
- return result
\ No newline at end of file
+ return result
diff --git a/examples/http_request_cache.py b/examples/http_request_cache.py
index 91cf6ca..050a1dc 100644
--- a/examples/http_request_cache.py
+++ b/examples/http_request_cache.py
@@ -4,30 +4,36 @@ import aiohttp
import asyncio
from helpers.twolevel_cache import TwoLevelCache
-#create redis poll + connections
-redis_pool = redis.ConnectionPool(host='localhost', port=6379, db=0)
+# create redis poll + connections
+redis_pool = redis.ConnectionPool(host="localhost", port=6379, db=0)
redis_conection = redis.Redis(connection_pool=redis_pool)
# 2 LEVEL CACHE (Redis to share amoung workers, Memory to be much faster)
-# cache in memory is 30s, cache in redis is 60s duration
+# cache in memory is 30s, cache in redis is 60s duration
cache = TwoLevelCache(redis_conection, 30, 60)
###
# Model
###
+
async def get_pokemon(number):
async with aiohttp.ClientSession() as session:
- async with session.get(f'https://pokeapi.co/api/v2/pokemon/{number}') as response:
+ async with session.get(
+ f"https://pokeapi.co/api/v2/pokemon/{number}"
+ ) as response:
pokemon = await response.text()
- #cache only works with strings/bytes
- #we will not change nothing here so no needs to parse json
+ # cache only works with strings/bytes
+ # we will not change nothing here so no needs to parse json
return pokemon.encode("utf-8")
+
async def get_original_pokemons():
async with aiohttp.ClientSession() as session:
- async with session.get(f'https://pokeapi.co/api/v2/pokemon?limit=151') as response:
- #cache only works with strings/bytes
- #we will not change nothing here so no needs to parse json
+ async with session.get(
+ f"https://pokeapi.co/api/v2/pokemon?limit=151"
+ ) as response:
+ # cache only works with strings/bytes
+ # we will not change nothing here so no needs to parse json
pokemons = await response.text()
return pokemons.encode("utf-8")
@@ -36,13 +42,13 @@ async def get_original_pokemons():
# Routes
###
def list_original_pokemons(res, req):
-
- #check cache for faster response
+
+ # check cache for faster response
value = cache.get("original_pokemons")
- if value != None:
+ if value != None:
return res.end(value)
-
- #get asynchronous from Model
+
+ # get asynchronous from Model
async def get_originals():
value = await cache.run_once("original_pokemons", 5, get_original_pokemons)
res.cork_end(value)
@@ -52,28 +58,29 @@ def list_original_pokemons(res, req):
def list_pokemon(res, req):
- #get needed parameters
+ # get needed parameters
try:
number = int(req.get_parameter(0))
except:
- #invalid number
- return req.set_yield(1)
+ # invalid number
+ return req.set_yield(1)
- #check cache for faster response
+ # check cache for faster response
cache_key = f"pokemon-{number}"
value = cache.get(cache_key)
- if value != None:
+ if value != None:
return res.end(value)
- #get asynchronous from Model
+ # get asynchronous from Model
async def find_pokemon(number, res):
- #sync with redis lock to run only once
- #if more than 1 worker/request try to do this request, only one will call the Model and the others will get from cache
+ # sync with redis lock to run only once
+ # if more than 1 worker/request try to do this request, only one will call the Model and the others will get from cache
value = await cache.run_once(cache_key, 5, get_pokemon, number)
res.cork_end(value)
res.run_async(find_pokemon(number, res))
+
###
# Here i decided to use an sync first and async only if needs, but you can use async directly see ./async.py
###
@@ -81,5 +88,8 @@ app = App()
app.get("/", list_original_pokemons)
app.get("/:number", list_pokemon)
app.any("/*", lambda res, _: res.write_status(404).end("Not Found"))
-app.listen(3000, lambda config: print("Listening on port http://localhost:%d now\n" % config.port))
+app.listen(
+ 3000,
+ lambda config: print("Listening on port http://localhost:%d now\n" % config.port),
+)
app.run()
diff --git a/examples/https.py b/examples/https.py
index 265aa01..4127ffb 100644
--- a/examples/https.py
+++ b/examples/https.py
@@ -1,8 +1,17 @@
from socketify import App, AppOptions
-app = App(AppOptions(key_file_name="./misc/key.pem", cert_file_name="./misc/cert.pem", passphrase="1234"))
+app = App(
+ AppOptions(
+ key_file_name="./misc/key.pem",
+ cert_file_name="./misc/cert.pem",
+ passphrase="1234",
+ )
+)
app.get("/", lambda res, req: res.end("Hello World socketify from Python!"))
-app.listen(3000, lambda config: print("Listening on port https://localhost:%d now\n" % config.port))
+app.listen(
+ 3000,
+ lambda config: print("Listening on port https://localhost:%d now\n" % config.port),
+)
app.run()
-#openssl req -newkey rsa:2048 -new -nodes -x509 -days 3650 -passout pass:1234 -keyout ./misc/key.pem -out ./misc/cert.pem
+# openssl req -newkey rsa:2048 -new -nodes -x509 -days 3650 -passout pass:1234 -keyout ./misc/key.pem -out ./misc/cert.pem
diff --git a/examples/listen_options.py b/examples/listen_options.py
index b85be5b..12bc2bf 100644
--- a/examples/listen_options.py
+++ b/examples/listen_options.py
@@ -2,5 +2,10 @@ from socketify import App, AppListenOptions
app = App()
app.get("/", lambda res, req: res.end("Hello World socketify from Python!"))
-app.listen(AppListenOptions(port=3000, host="0.0.0.0"), lambda config: print("Listening on port http://%s:%d now\n" % (config.host, config.port)))
-app.run()
\ No newline at end of file
+app.listen(
+ AppListenOptions(port=3000, host="0.0.0.0"),
+ lambda config: print(
+ "Listening on port http://%s:%d now\n" % (config.host, config.port)
+ ),
+)
+app.run()
diff --git a/examples/middleware.py b/examples/middleware.py
index 3dacddd..f935b64 100644
--- a/examples/middleware.py
+++ b/examples/middleware.py
@@ -1,34 +1,42 @@
from socketify import App, middleware
-
+
+
async def get_user(authorization):
if authorization:
- #you can do something async here
- return { 'greeting': 'Hello, World' }
+ # you can do something async here
+ return {"greeting": "Hello, World"}
return None
+
async def auth(res, req, data=None):
- user = await get_user(req.get_header('authorization'))
- if not user:
+ user = await get_user(req.get_header("authorization"))
+ if not user:
res.write_status(403).end("not authorized")
- #returning Falsy in middlewares just stop the execution of the next middleware
+ # returning Falsy in middlewares just stops the execution of the next middleware
return False
- #returns extra data
+ # returns extra data
return user
+
def another_middie(res, req, data=None):
- #now we can mix sync and async and change the data here
+ # now we can mix sync and async and change the data here
if isinstance(data, dict):
- gretting = data.get('greeting', '')
- data['greeting'] = f"{gretting} from another middie ;)"
+ gretting = data.get("greeting", "")
+ data["greeting"] = f"{gretting} from another middie ;)"
return data
+
def home(res, req, user=None):
- res.cork_end(user.get('greeting', None))
+ res.cork_end(user.get("greeting", None))
+
app = App()
app.get("/", middleware(auth, another_middie, home))
-app.listen(3000, lambda config: print("Listening on port http://localhost:%d now\n" % config.port))
+app.listen(
+ 3000,
+ lambda config: print("Listening on port http://localhost:%d now\n" % config.port),
+)
app.run()
-#You can also take a loop on MiddlewareRouter in middleware_router.py ;)
\ No newline at end of file
+# You can also take a loop on MiddlewareRouter in middleware_router.py ;)
diff --git a/examples/middleware_async.py b/examples/middleware_async.py
index 9164775..18eeced 100644
--- a/examples/middleware_async.py
+++ b/examples/middleware_async.py
@@ -1,43 +1,49 @@
from socketify import App
-#this is just an example of implementation you can just import using from socketify import middleware for an more complete version
+# this is just an example of implementation; you can import a more complete version using: from socketify import middleware
+
async def get_user(authorization):
if authorization:
- #do actually something async here
- return { 'greeting': 'Hello, World' }
+ # do actually something async here
+ return {"greeting": "Hello, World"}
return None
+
def auth(route):
- #in async query string, arguments and headers are only valid until the first await
+ # in async query string, arguments and headers are only valid until the first await
async def auth_middleware(res, req):
- #get_headers will preserve headers (and cookies) inside req, after await
- headers = req.get_headers()
- #get_parameters will preserve all params inside req after await
+ # get_headers will preserve headers (and cookies) inside req, after await
+ headers = req.get_headers()
+ # get_parameters will preserve all params inside req after await
params = req.get_parameters()
- #get queries will preserve all queries inside req after await
+ # get queries will preserve all queries inside req after await
queries = req.get_queries()
- user = await get_user(headers.get('authorization', None))
+ user = await get_user(headers.get("authorization", None))
if user:
- return route(res, req, user)
-
+ return route(res, req, user)
+
return res.write_status(403).cork_end("not authorized")
-
+
return auth_middleware
def home(res, req, user=None):
theme = req.get_query("theme_color")
theme = theme if theme else "light"
- greeting = user.get('greeting', None)
+ greeting = user.get("greeting", None)
user_id = req.get_parameter(0)
res.cork_end(f"{greeting}
theme: {theme}
id: {user_id}")
+
app = App()
app.get("/user/:id", auth(home))
-app.listen(3000, lambda config: print("Listening on port http://localhost:%d now\n" % config.port))
+app.listen(
+ 3000,
+ lambda config: print("Listening on port http://localhost:%d now\n" % config.port),
+)
app.run()
-#curl --location --request GET 'http://localhost:3000/user/10?theme_color=dark' --header 'Authorization: Bearer 23456789'
\ No newline at end of file
+# curl --location --request GET 'http://localhost:3000/user/10?theme_color=dark' --header 'Authorization: Bearer 23456789'
diff --git a/examples/middleware_router.py b/examples/middleware_router.py
index 01696c7..49d3a76 100644
--- a/examples/middleware_router.py
+++ b/examples/middleware_router.py
@@ -1,46 +1,51 @@
from socketify import App, MiddlewareRouter, middleware
-
+
async def get_user(authorization):
if authorization:
- #you can do something async here
- return { 'greeting': 'Hello, World' }
+ # you can do something async here
+ return {"greeting": "Hello, World"}
return None
+
async def auth(res, req, data=None):
- user = await get_user(req.get_header('authorization'))
- if not user:
+ user = await get_user(req.get_header("authorization"))
+ if not user:
res.write_status(403).end("not authorized")
- #returning Falsy in middlewares just stop the execution of the next middleware
+ # returning Falsy in middlewares just stops the execution of the next middleware
return False
- #returns extra data
+ # returns extra data
return user
+
def another_middie(res, req, data=None):
- #now we can mix sync and async and change the data here
+ # now we can mix sync and async and change the data here
if isinstance(data, dict):
- gretting = data.get('greeting', '')
- data['greeting'] = f"{gretting} from another middie ;)"
+ gretting = data.get("greeting", "")
+ data["greeting"] = f"{gretting} from another middie ;)"
return data
-def home(res, req, user=None):
- res.cork_end(user.get('greeting', None))
+def home(res, req, user=None):
+ res.cork_end(user.get("greeting", None))
app = App()
-#you can use an Middleware router to add middlewares to every route you set
+# you can use a Middleware router to add middlewares to every route you set
auth_router = MiddlewareRouter(app, auth)
auth_router.get("/", home)
-#you can also mix middleware() with MiddlewareRouter
+# you can also mix middleware() with MiddlewareRouter
auth_router.get("/another", middleware(another_middie, home))
-#you can also pass multiple middlewares on the MiddlewareRouter
+# you can also pass multiple middlewares on the MiddlewareRouter
other_router = MiddlewareRouter(app, auth, another_middie)
other_router.get("/another_way", home)
-app.listen(3000, lambda config: print("Listening on port http://localhost:%d now\n" % config.port))
-app.run()
\ No newline at end of file
+app.listen(
+ 3000,
+ lambda config: print("Listening on port http://localhost:%d now\n" % config.port),
+)
+app.run()
diff --git a/examples/middleware_sync.py b/examples/middleware_sync.py
index 0059e06..ead6a91 100644
--- a/examples/middleware_sync.py
+++ b/examples/middleware_sync.py
@@ -1,38 +1,46 @@
from socketify import App
-#this is just an example of implementation you can just import using from socketify import middleware for an more complete version
+# this is just an example of implementation; you can import a more complete version using: from socketify import middleware
+
def middleware(*functions):
def middleware_route(res, req):
data = None
- #cicle to all middlewares
+ # cycle through all middlewares
for function in functions:
- #call middlewares
+ # call middlewares
data = function(res, req, data)
- #stops if returns Falsy
+ # stops if returns Falsy
if not data:
break
return middleware_route
-
+
+
def get_user(authorization_header):
if authorization_header:
- return { 'greeting': 'Hello, World' }
+ return {"greeting": "Hello, World"}
return None
+
def auth(res, req, data=None):
- user = get_user(req.get_header('authorization'))
- if not user:
+ user = get_user(req.get_header("authorization"))
+ if not user:
res.write_status(403).end("not authorized")
return False
- #returns extra data
+ # returns extra data
return user
+
def home(res, req, user=None):
- res.end(user.get('greeting', None))
+ res.end(user.get("greeting", None))
+
app = App()
app.get("/", middleware(auth, home))
-app.listen(3000, lambda config: print("Listening on port http://localhost:%d now\n" % config.port))
-app.run()
\ No newline at end of file
+app.listen(
+ 3000,
+ lambda config: print("Listening on port http://localhost:%d now\n" % config.port),
+)
+app.run()
diff --git a/examples/not_found.py b/examples/not_found.py
index 967987c..288a81b 100644
--- a/examples/not_found.py
+++ b/examples/not_found.py
@@ -2,23 +2,32 @@ from socketify import App, AppOptions, AppListenOptions
app = App()
+
async def home(res, req):
res.end("Hello, World!")
+
def user(res, req):
try:
if int(req.get_parameter(0)) == 1:
return res.end("Hello user 1!")
finally:
- #invalid user tells to go, to the next route valid route (not found)
- req.set_yield(1)
-
+ # invalid user tells to go, to the next route valid route (not found)
+ req.set_yield(1)
+
+
def not_found(res, req):
res.write_status(404).end("Not Found")
+
app.get("/", home)
app.get("/user/:user_id", user)
app.any("/*", not_found)
-app.listen(3000, lambda config: print("Listening on port http://localhost:%s now\n" % str(config.port)))
-app.run()
\ No newline at end of file
+app.listen(
+ 3000,
+ lambda config: print(
+ "Listening on port http://localhost:%s now\n" % str(config.port)
+ ),
+)
+app.run()
diff --git a/examples/proxy.py b/examples/proxy.py
index 36ea0c4..6470767 100644
--- a/examples/proxy.py
+++ b/examples/proxy.py
@@ -1,13 +1,18 @@
from socketify import App
+
+
def home(res, req):
- res.write('
')
- res.write('Your proxied IP is: %s' % res.get_proxied_remote_address())
- res.write('
')
- res.write('Your IP as seen by the origin server is: %s' % res.get_remote_address())
- res.end('
')
+ res.write("")
+ res.write("Your proxied IP is: %s" % res.get_proxied_remote_address())
+ res.write("
")
+ res.write("Your IP as seen by the origin server is: %s" % res.get_remote_address())
+ res.end("
")
app = App()
app.get("/*", home)
-app.listen(3000, lambda config: print("Listening on port http://localhost:%d now\n" % config.port))
-app.run()
\ No newline at end of file
+app.listen(
+ 3000,
+ lambda config: print("Listening on port http://localhost:%d now\n" % config.port),
+)
+app.run()
diff --git a/examples/requeriments.txt b/examples/requeriments.txt
index a5df884..2f8c046 100644
--- a/examples/requeriments.txt
+++ b/examples/requeriments.txt
@@ -4,4 +4,4 @@ aiofile
redis
strawberry-graphql
mako
-git+https://github.com/cirospaciari/socketify.py.git@main#socketify --global-option="build_ext"
\ No newline at end of file
+git+https://github.com/cirospaciari/socketify.py.git@main#socketify
\ No newline at end of file
diff --git a/examples/router_and_basics.py b/examples/router_and_basics.py
index 495dcfc..678d36a 100644
--- a/examples/router_and_basics.py
+++ b/examples/router_and_basics.py
@@ -1,39 +1,48 @@
-
from socketify import App, AppOptions, AppListenOptions
import asyncio
from datetime import datetime
from datetime import timedelta
+
app = App()
+
def home(res, req):
res.end("Hello :)")
+
def anything(res, req):
- res.end("Any route with method: %s" % req.get_method())
+ res.end("Any route with method: %s" % req.get_method())
+
def cookies(res, req):
- #cookies are writen after end
- res.set_cookie("spaciari", "1234567890",{
- # expires
- # path
- # comment
- # domain
- # max-age
- # secure
- # version
- # httponly
- # samesite
- "path": "/",
- # "domain": "*.test.com",
- "httponly": True,
- "samesite": "None",
- "secure": True,
- "expires": datetime.utcnow() + timedelta(minutes=30)
- })
- res.end("Your session_id cookie is: %s" % req.get_cookie('session_id'));
+ # cookies are written after end
+ res.set_cookie(
+ "spaciari",
+ "1234567890",
+ {
+ # expires
+ # path
+ # comment
+ # domain
+ # max-age
+ # secure
+ # version
+ # httponly
+ # samesite
+ "path": "/",
+ # "domain": "*.test.com",
+ "httponly": True,
+ "samesite": "None",
+ "secure": True,
+ "expires": datetime.utcnow() + timedelta(minutes=30),
+ },
+ )
+ res.end("Your session_id cookie is: %s" % req.get_cookie("session_id"))
+
+
+def useragent(res, req):
+ res.end("Your user agent is: %s" % req.get_header("user-agent"))
-def useragent(res,req):
- res.end("Your user agent is: %s" % req.get_header('user-agent'));
def user(res, req):
try:
@@ -44,67 +53,76 @@ def user(res, req):
finally:
# invalid user tells to go, to the next route valid route (not found)
- req.set_yield(1)
+ req.set_yield(1)
+
async def delayed_hello(delay, res):
- await asyncio.sleep(delay) #do something async
+ await asyncio.sleep(delay) # do something async
res.cork_end("Hello sorry for the delay!")
# cork_end is a less verbose way of writing
# res.cork(lambda res: res.end("Hello sorry for the delay!"))
def delayed(res, req):
- #request object only lives during the life time of this call
- #get parameters, query, headers anything you need here
+ # request object only lives during the life time of this call
+ # get parameters, query, headers anything you need here
delay = req.get_query("delay")
delay = 1 if delay == None else float(delay)
- #get queries returns an dict with all query string
+ # get queries returns an dict with all query string
# queries = req.get_queries()
- #tell response to run this in the event loop
- #abort handler is grabed here, so responses only will be send if res.aborted == False
+ # tell response to run this in the event loop
+ # abort handler is grabbed here, so responses will only be sent if res.aborted == False
res.run_async(delayed_hello(delay, res))
+
def json(res, req):
- #if you pass an object will auto write an header with application/json
- res.end({ "message": "I'm an application/json!"})
+ # if you pass an object will auto write an header with application/json
+ res.end({"message": "I'm an application/json!"})
+
async def sleepy_json(res, req):
- #get parameters, query, headers anything you need here before first await :)
+ # get parameters, query, headers anything you need here before first await :)
user_agent = req.get_header("user-agent")
- #print all headers
- req.for_each_header(lambda key,value: print("Header %s: %s" % (key, value)))
- #or if you want get all headers in an dict
+ # print all headers
+ req.for_each_header(lambda key, value: print("Header %s: %s" % (key, value)))
+ # or if you want get all headers in an dict
print("All headers", req.get_headers())
-
- #req maybe will not be available in direct attached async functions after await
- #but if you dont care about req info you can do it
- await asyncio.sleep(2) #do something async
- res.cork_end({ "message": "I'm delayed!", "user-agent": user_agent})
+
+ # req maybe will not be available in direct attached async functions after await
+ # but if you dont care about req info you can do it
+ await asyncio.sleep(2) # do something async
+ res.cork_end({"message": "I'm delayed!", "user-agent": user_agent})
+
def custom_header(res, req):
res.write_header("Content-Type", "application/octet-stream")
- res.write_header("Content-Disposition", "attachment; filename=\"message.txt\"")
+ res.write_header("Content-Disposition", 'attachment; filename="message.txt"')
res.end("Downloaded this ;)")
+
def send_in_parts(res, req):
- #write and end accepts bytes and str or its try to dumps to an json
+ # write and end accepts bytes and str or its try to dumps to an json
res.write("I can")
res.write(" send ")
res.write("messages")
res.end(" in parts!")
+
def redirect(res, req):
- #status code is optional default is 302
+ # status code is optional default is 302
res.redirect("/redirected", 302)
+
def redirected(res, req):
res.end("You got redirected to here :D")
+
def not_found(res, req):
res.write_status(404).end("Not Found")
+
# app.any, app.get, app.put, app.post, app.head, app.options, app.delete, app.patch, app.connect and app.trace are available
app.get("/", home)
app.any("/anything", anything)
@@ -122,5 +140,10 @@ app.get("/redirected", redirected)
# Wildcard at last always :)
app.any("/*", not_found)
-app.listen(3000, lambda config: print("Listening on port http://localhost:%s now\n" % str(config.port)))
-app.run()
\ No newline at end of file
+app.listen(
+ 3000,
+ lambda config: print(
+ "Listening on port http://localhost:%s now\n" % str(config.port)
+ ),
+)
+app.run()
diff --git a/examples/static_files.py b/examples/static_files.py
index 854ace4..11ad95a 100644
--- a/examples/static_files.py
+++ b/examples/static_files.py
@@ -17,9 +17,9 @@
# pypy3 - socketify static_aiofile - 639.70 req/s
# pypy3 - socketify static_aiofiles - 637.55 req/s
# pypy3 - fastapi static gunicorn - 253.31 req/s
-# pypy3 - scarlette static uvicorn - 279.45 req/s
+# pypy3 - starlette static uvicorn - 279.45 req/s
-# Conclusions:
+# Conclusions:
# With PyPy3 only static is really usable gunicorn/uvicorn, aiofiles and aiofile are realy slow on PyPy3 maybe this changes with HPy
# Python3 with any option will be faster than gunicorn/uvicorn but with PyPy3 with static we got 2x (or almost this in case of fastify) performance of node.js
# But even PyPy3 + socketify static is 7x+ slower than NGINX
@@ -35,16 +35,19 @@ from socketify import App, sendfile
app = App()
-#send home page index.html
+# send home page index.html
async def home(res, req):
- #sends the whole file with 304 and bytes range support
+ # sends the whole file with 304 and bytes range support
await sendfile(res, req, "./public/index.html")
-
+
+
app.get("/", home)
-#serve all files in public folder under /* route (you can use any route like /assets)
+# serve all files in public folder under /* route (you can use any route like /assets)
app.static("/", "./public")
-app.listen(3000, lambda config: print("Listening on port http://localhost:%d now\n" % config.port))
+app.listen(
+ 3000,
+ lambda config: print("Listening on port http://localhost:%d now\n" % config.port),
+)
app.run()
-
diff --git a/examples/template_jinja2.py b/examples/template_jinja2.py
index 244c572..e36610f 100644
--- a/examples/template_jinja2.py
+++ b/examples/template_jinja2.py
@@ -1,14 +1,22 @@
from socketify import App
-#see helper/templates.py for plugin implementation
+
+# see helper/templates.py for plugin implementation
from helpers.templates import Jinja2Template
app = App()
-app.template(Jinja2Template("./templates", encoding='utf-8', followlinks=False))
+app.template(Jinja2Template("./templates", encoding="utf-8", followlinks=False))
+
def home(res, req):
res.render("jinja2_home.html", title="Hello", message="Hello, World")
+
app.get("/", home)
-app.listen(3000, lambda config: print("Listening on port http://localhost:%s now\n" % str(config.port)))
-app.run()
\ No newline at end of file
+app.listen(
+ 3000,
+ lambda config: print(
+ "Listening on port http://localhost:%s now\n" % str(config.port)
+ ),
+)
+app.run()
diff --git a/examples/template_mako.py b/examples/template_mako.py
index b097e9d..706a269 100644
--- a/examples/template_mako.py
+++ b/examples/template_mako.py
@@ -1,14 +1,26 @@
from socketify import App
-#see helper/templates.py for plugin implementation
+
+# see helper/templates.py for plugin implementation
from helpers.templates import MakoTemplate
app = App()
-app.template(MakoTemplate(directories=['./templates'], output_encoding='utf-8', encoding_errors='replace'))
+app.template(
+ MakoTemplate(
+ directories=["./templates"], output_encoding="utf-8", encoding_errors="replace"
+ )
+)
+
def home(res, req):
res.render("mako_home.html", message="Hello, World")
+
app.get("/", home)
-app.listen(3000, lambda config: print("Listening on port http://localhost:%s now\n" % str(config.port)))
-app.run()
\ No newline at end of file
+app.listen(
+ 3000,
+ lambda config: print(
+ "Listening on port http://localhost:%s now\n" % str(config.port)
+ ),
+)
+app.run()
diff --git a/examples/upgrade.py b/examples/upgrade.py
index b57d461..61a3fc8 100644
--- a/examples/upgrade.py
+++ b/examples/upgrade.py
@@ -1,31 +1,43 @@
from socketify import App, AppOptions, OpCode, CompressOptions
+
def ws_open(ws):
- print('A WebSocket got connected!')
+ print("A WebSocket got connected!")
ws.send("Hello World!", OpCode.TEXT)
+
def ws_message(ws, message, opcode):
print(message, opcode)
- #Ok is false if backpressure was built up, wait for drain
+ # Ok is false if backpressure was built up, wait for drain
ok = ws.send(message, opcode)
+
def ws_upgrade(res, req, socket_context):
key = req.get_header("sec-websocket-key")
protocol = req.get_header("sec-websocket-protocol")
extensions = req.get_header("sec-websocket-extensions")
res.upgrade(key, protocol, extensions, socket_context)
-
-app = App()
-app.ws("/*", {
- 'compression': CompressOptions.SHARED_COMPRESSOR,
- 'max_payload_length': 16 * 1024 * 1024,
- 'idle_timeout': 12,
- 'open': ws_open,
- 'message': ws_message,
- 'upgrade': ws_upgrade,
- 'drain': lambda ws: print('WebSocket backpressure: %s', ws.get_buffered_amount()),
- 'close': lambda ws, code, message: print('WebSocket closed')
-})
-app.any("/", lambda res,req: res.end("Nothing to see here!"))
-app.listen(3000, lambda config: print("Listening on port http://localhost:%d now\n" % (config.port)))
-app.run()
\ No newline at end of file
+
+
+app = App()
+app.ws(
+ "/*",
+ {
+ "compression": CompressOptions.SHARED_COMPRESSOR,
+ "max_payload_length": 16 * 1024 * 1024,
+ "idle_timeout": 12,
+ "open": ws_open,
+ "message": ws_message,
+ "upgrade": ws_upgrade,
+ "drain": lambda ws: print(
+ "WebSocket backpressure: %s", ws.get_buffered_amount()
+ ),
+ "close": lambda ws, code, message: print("WebSocket closed"),
+ },
+)
+app.any("/", lambda res, req: res.end("Nothing to see here!"))
+app.listen(
+ 3000,
+ lambda config: print("Listening on port http://localhost:%d now\n" % (config.port)),
+)
+app.run()
diff --git a/examples/upgrade_async.py b/examples/upgrade_async.py
index b2069e4..e21d2db 100644
--- a/examples/upgrade_async.py
+++ b/examples/upgrade_async.py
@@ -1,33 +1,45 @@
from socketify import App, AppOptions, OpCode, CompressOptions
import asyncio
+
def ws_open(ws):
- print('A WebSocket got connected!')
+ print("A WebSocket got connected!")
ws.send("Hello World!", OpCode.TEXT)
+
def ws_message(ws, message, opcode):
print(message, opcode)
- #Ok is false if backpressure was built up, wait for drain
+ # Ok is false if backpressure was built up, wait for drain
ok = ws.send(message, opcode)
+
async def ws_upgrade(res, req, socket_context):
key = req.get_header("sec-websocket-key")
protocol = req.get_header("sec-websocket-protocol")
extensions = req.get_header("sec-websocket-extensions")
await asyncio.sleep(2)
res.upgrade(key, protocol, extensions, socket_context)
-
-app = App()
-app.ws("/*", {
- 'compression': CompressOptions.SHARED_COMPRESSOR,
- 'max_payload_length': 16 * 1024 * 1024,
- 'idle_timeout': 12,
- 'open': ws_open,
- 'message': ws_message,
- 'upgrade': ws_upgrade,
- 'drain': lambda ws: print('WebSocket backpressure: %s', ws.get_buffered_amount()),
- 'close': lambda ws, code, message: print('WebSocket closed')
-})
-app.any("/", lambda res,req: res.end("Nothing to see here!"))
-app.listen(3000, lambda config: print("Listening on port http://localhost:%d now\n" % (config.port)))
-app.run()
\ No newline at end of file
+
+
+app = App()
+app.ws(
+ "/*",
+ {
+ "compression": CompressOptions.SHARED_COMPRESSOR,
+ "max_payload_length": 16 * 1024 * 1024,
+ "idle_timeout": 12,
+ "open": ws_open,
+ "message": ws_message,
+ "upgrade": ws_upgrade,
+ "drain": lambda ws: print(
+ "WebSocket backpressure: %s", ws.get_buffered_amount()
+ ),
+ "close": lambda ws, code, message: print("WebSocket closed"),
+ },
+)
+app.any("/", lambda res, req: res.end("Nothing to see here!"))
+app.listen(
+ 3000,
+ lambda config: print("Listening on port http://localhost:%d now\n" % (config.port)),
+)
+app.run()
diff --git a/examples/upload_or_post.py b/examples/upload_or_post.py
index 5177424..12be60a 100644
--- a/examples/upload_or_post.py
+++ b/examples/upload_or_post.py
@@ -1,77 +1,84 @@
from socketify import App
###
-# We always recomend check res.aborted in async operations
+# We always recommend checking res.aborted in async operations
###
+
def upload(res, req):
print(f"Posted to {req.get_url()}")
def on_data(res, chunk, is_end):
print(f"Got chunk of data with length {len(chunk)}, is_end: {is_end}")
- if (is_end):
+ if is_end:
res.cork_end("Thanks for the data!")
res.on_data(on_data)
+
async def upload_chunks(res, req):
print(f"Posted to {req.get_url()}")
- #await all the data, returns received chunks if fail (most likely fail is aborted requests)
+    # await all the data; returns the received chunks on failure (most likely an aborted request)
data = await res.get_data()
-
+
print(f"Got {len(data)} chunks of data!")
for chunk in data:
print(f"Got chunk of data with length {len(chunk)}")
-
- #We respond when we are done
+
+ # We respond when we are done
res.cork_end("Thanks for the data!")
+
async def upload_json(res, req):
print(f"Posted to {req.get_url()}")
- #await all the data and parses as json, returns None if fail
+    # await all the data and parse it as JSON; returns None on failure
people = await res.get_json()
if isinstance(people, list) and isinstance(people[0], dict):
print(f"First person is named: {people[0]['name']}")
-
- #We respond when we are done
+
+ # We respond when we are done
res.cork_end("Thanks for the data!")
+
async def upload_text(res, req):
print(f"Posted to {req.get_url()}")
- #await all the data and decode as text, returns None if fail
- text = await res.get_text() #first parameter is the encoding (default utf-8)
-
+    # await all the data and decode it as text; returns None on failure
+ text = await res.get_text() # first parameter is the encoding (default utf-8)
+
print(f"Your text is ${text}")
-
- #We respond when we are done
+
+ # We respond when we are done
res.cork_end("Thanks for the data!")
+
async def upload_urlencoded(res, req):
print(f"Posted to {req.get_url()}")
- #await all the data and decode as application/x-www-form-urlencoded, returns None if fails
- form = await res.get_form_urlencoded() #first parameter is the encoding (default utf-8)
-
+    # await all the data and decode as application/x-www-form-urlencoded; returns None on failure
+ form = (
+ await res.get_form_urlencoded()
+ ) # first parameter is the encoding (default utf-8)
+
print(f"Your form is ${form}")
-
- #We respond when we are done
+
+ # We respond when we are done
res.cork_end("Thanks for the data!")
-
+
async def upload_multiple(res, req):
print(f"Posted to {req.get_url()}")
content_type = req.get_header("content-type")
- #we can check the Content-Type to accept multiple formats
+ # we can check the Content-Type to accept multiple formats
if content_type == "application/json":
data = await res.get_json()
elif content_type == "application/x-www-form-urlencoded":
data = await res.get_form_urlencoded()
else:
data = await res.get_text()
-
+
print(f"Your data is ${data}")
-
- #We respond when we are done
+
+ # We respond when we are done
res.cork_end("Thanks for the data!")
@@ -83,6 +90,9 @@ app.post("/text", upload_text)
app.post("/urlencoded", upload_urlencoded)
app.post("/multiple", upload_multiple)
-app.any("/*", lambda res,_: res.write_status(404).end("Not Found"))
-app.listen(3000, lambda config: print("Listening on port http://localhost:%d now\n" % config.port))
-app.run()
\ No newline at end of file
+app.any("/*", lambda res, _: res.write_status(404).end("Not Found"))
+app.listen(
+ 3000,
+ lambda config: print("Listening on port http://localhost:%d now\n" % config.port),
+)
+app.run()
diff --git a/examples/websockets.py b/examples/websockets.py
index 37271a0..c6ab004 100644
--- a/examples/websockets.py
+++ b/examples/websockets.py
@@ -1,24 +1,35 @@
from socketify import App, AppOptions, OpCode, CompressOptions
+
def ws_open(ws):
- print('A WebSocket got connected!')
+ print("A WebSocket got connected!")
ws.send("Hello World!", OpCode.TEXT)
+
def ws_message(ws, message, opcode):
print(message, opcode)
- #Ok is false if backpressure was built up, wait for drain
+ # Ok is false if backpressure was built up, wait for drain
ok = ws.send(message, opcode)
-
-app = App()
-app.ws("/*", {
- 'compression': CompressOptions.SHARED_COMPRESSOR,
- 'max_payload_length': 16 * 1024 * 1024,
- 'idle_timeout': 12,
- 'open': ws_open,
- 'message': ws_message,
- 'drain': lambda ws: print('WebSocket backpressure: %s', ws.get_buffered_amount()),
- 'close': lambda ws, code, message: print('WebSocket closed')
-})
-app.any("/", lambda res,req: res.end("Nothing to see here!'"))
-app.listen(3000, lambda config: print("Listening on port http://localhost:%d now\n" % (config.port)))
-app.run()
\ No newline at end of file
+
+
+app = App()
+app.ws(
+ "/*",
+ {
+ "compression": CompressOptions.SHARED_COMPRESSOR,
+ "max_payload_length": 16 * 1024 * 1024,
+ "idle_timeout": 12,
+ "open": ws_open,
+ "message": ws_message,
+ "drain": lambda ws: print(
+ "WebSocket backpressure: %s", ws.get_buffered_amount()
+ ),
+ "close": lambda ws, code, message: print("WebSocket closed"),
+ },
+)
+app.any("/", lambda res, req: res.end("Nothing to see here!'"))
+app.listen(
+ 3000,
+ lambda config: print("Listening on port http://localhost:%d now\n" % (config.port)),
+)
+app.run()
diff --git a/setup.py b/setup.py
index 80cf500..61f4be8 100644
--- a/setup.py
+++ b/setup.py
@@ -2,12 +2,13 @@ import sys
vi = sys.version_info
if vi < (3, 7):
- raise RuntimeError('socketify requires Python 3.7 or greater')
+ raise RuntimeError("socketify requires Python 3.7 or greater")
# if sys.platform in ('win32', 'cygwin', 'cli'):
# raise RuntimeError('socketify does not support Windows at the moment')
import setuptools
+
# from setuptools.command.sdist import sdist
# from setuptools.command.build_ext import build_ext
@@ -21,15 +22,13 @@ import setuptools
# UWS_DIR = str(_ROOT / "src" / "socketify" /"uWebSockets")
# UWS_BUILD_DIR = str(_ROOT / "build" /"uWebSockets")
-# NATIVE_CAPI_DIR = str(_ROOT / "build" / "native")
+# NATIVE_CAPI_DIR = str(_ROOT / "build" / "native")
# NATIVE_LIB_PATH = str(_ROOT / "build" / "libsocketify.so")
# NATIVE_DIR = str(_ROOT / "src" / "socketify" /"native")
# NATIVE_BUILD_DIR = str(_ROOT / "build" /"native")
# NATIVE_LIB_OUTPUT = str(_ROOT / "src" / "socketify" / "libsocketify.so")
-
-
# class Prepare(sdist):
# def run(self):
# super().run()
@@ -38,7 +37,7 @@ import setuptools
# class Makefile(build_ext):
# def run(self):
# env = os.environ.copy()
-
+
# if os.path.exists(UWS_BUILD_DIR):
# shutil.rmtree(UWS_BUILD_DIR)
# shutil.copytree(UWS_DIR, UWS_BUILD_DIR)
@@ -49,7 +48,7 @@ import setuptools
# subprocess.run(["make", "shared"], cwd=NATIVE_CAPI_DIR, env=env, check=True)
# shutil.move(NATIVE_LIB_PATH, NATIVE_LIB_OUTPUT)
-
+
# super().run()
@@ -60,7 +59,7 @@ with open("README.md", "r", encoding="utf-8") as fh:
setuptools.setup(
name="socketify",
version="0.0.1",
- platforms=['macOS', 'POSIX'],
+ platforms=["macOS", "POSIX"],
author="Ciro Spaciari",
author_email="ciro.spaciari@gmail.com",
description="Fast WebSocket and Http/Https server",
@@ -77,11 +76,20 @@ setuptools.setup(
],
packages=["socketify"],
package_dir={"": "src"},
- package_data={"": ['./*.so', './uWebSockets/*','./uWebSockets/*/*','./uWebSockets/*/*/*', './native/*','./native/*/*','./native/*/*/*']},
+ package_data={
+ "": [
+ "./*.so",
+ "./uWebSockets/*",
+ "./uWebSockets/*/*",
+ "./uWebSockets/*/*/*",
+ "./native/*",
+ "./native/*/*",
+ "./native/*/*/*",
+ ]
+ },
python_requires=">=3.7",
install_requires=["cffi>=1.0.0", "setuptools>=58.1.0"],
has_ext_modules=lambda: True,
- cmdclass={}, #cmdclass={'sdist': Prepare, 'build_ext': Makefile},
-
- include_package_data=True
-)
\ No newline at end of file
+ cmdclass={}, # cmdclass={'sdist': Prepare, 'build_ext': Makefile},
+ include_package_data=True,
+)
diff --git a/src/socketify/__init__.py b/src/socketify/__init__.py
index 4267860..5d1f321 100644
--- a/src/socketify/__init__.py
+++ b/src/socketify/__init__.py
@@ -1,2 +1,9 @@
-from .socketify import App, AppOptions, AppListenOptions, OpCode, SendStatus, CompressOptions
-from .helpers import sendfile, middleware, MiddlewareRouter
\ No newline at end of file
+from .socketify import (
+ App,
+ AppOptions,
+ AppListenOptions,
+ OpCode,
+ SendStatus,
+ CompressOptions,
+)
+from .helpers import sendfile, middleware, MiddlewareRouter
diff --git a/src/socketify/helpers.py b/src/socketify/helpers.py
index 157c966..0837a23 100644
--- a/src/socketify/helpers.py
+++ b/src/socketify/helpers.py
@@ -10,45 +10,47 @@ mimetypes.init()
# This is an sync version without any dependencies is normally much faster in CPython and PyPy3
# In production we highly recomend to use CDN like CloudFlare or/and NGINX or similar for static files
async def sendfile(res, req, filename):
- #read headers before the first await
- if_modified_since = req.get_header('if-modified-since')
- range_header = req.get_header('range')
+ # read headers before the first await
+ if_modified_since = req.get_header("if-modified-since")
+ range_header = req.get_header("range")
bytes_range = None
start = 0
end = -1
- #parse range header
+ # parse range header
if range_header:
- bytes_range = range_header.replace("bytes=", '').split('-')
+ bytes_range = range_header.replace("bytes=", "").split("-")
start = int(bytes_range[0])
if bytes_range[1]:
end = int(bytes_range[1])
try:
exists = path.exists(filename)
- #not found
+ # not found
if not exists:
- return res.write_status(404).end(b'Not Found')
+ return res.write_status(404).end(b"Not Found")
- #get size and last modified date
+ # get size and last modified date
stats = os.stat(filename)
total_size = stats.st_size
size = total_size
- last_modified = time.strftime('%a, %d %b %Y %H:%M:%S GMT', time.gmtime(stats.st_mtime))
-
- #check if modified since is provided
+ last_modified = time.strftime(
+ "%a, %d %b %Y %H:%M:%S GMT", time.gmtime(stats.st_mtime)
+ )
+
+ # check if modified since is provided
if if_modified_since == last_modified:
return res.write_status(304).end_without_body()
- #tells the broswer the last modified date
- res.write_header(b'Last-Modified', last_modified)
+    # tells the browser the last modified date
+ res.write_header(b"Last-Modified", last_modified)
- #add content type
+ # add content type
(content_type, encoding) = mimetypes.guess_type(filename, strict=True)
if content_type and encoding:
- res.write_header(b'Content-Type', '%s; %s' % (content_type, encoding))
+ res.write_header(b"Content-Type", "%s; %s" % (content_type, encoding))
elif content_type:
- res.write_header(b'Content-Type', content_type)
-
+ res.write_header(b"Content-Type", content_type)
+
with open(filename, "rb") as fd:
- #check range and support it
+ # check range and support it
if start > 0 or not end == -1:
if end < 0 or end >= size:
end = size - 1
@@ -60,32 +62,34 @@ async def sendfile(res, req, filename):
else:
end = size - 1
res.write_status(200)
-
- #tells the browser that we support range
- res.write_header(b'Accept-Ranges', b'bytes')
- res.write_header(b'Content-Range', 'bytes %d-%d/%d' % (start, end, total_size))
+
+ # tells the browser that we support range
+ res.write_header(b"Accept-Ranges", b"bytes")
+ res.write_header(
+ b"Content-Range", "bytes %d-%d/%d" % (start, end, total_size)
+ )
pending_size = size
- #keep sending until abort or done
+ # keep sending until abort or done
while not res.aborted:
- chunk_size = 16384 #16kb chunks
+ chunk_size = 16384 # 16kb chunks
if chunk_size > pending_size:
chunk_size = pending_size
buffer = fd.read(chunk_size)
pending_size = pending_size - chunk_size
(ok, done) = await res.send_chunk(buffer, size)
- if not ok or done: #if cannot send probably aborted
+ if not ok or done: # if cannot send probably aborted
break
except Exception as error:
- res.write_status(500).end("Internal Error")
+ res.write_status(500).end("Internal Error")
def in_directory(file, directory):
- #make both absolute
- directory = path.join(path.realpath(directory), '')
+ # make both absolute
+ directory = path.join(path.realpath(directory), "")
file = path.realpath(file)
- #return true, if the common prefix of both is equal to directory
- #e.g. /a/b/c/d.rst and directory is /a/b, the common prefix is /a/b
+ # return true, if the common prefix of both is equal to directory
+ # e.g. /a/b/c/d.rst and directory is /a/b, the common prefix is /a/b
return path.commonprefix([file, directory]) == directory
@@ -93,39 +97,39 @@ def static_route(app, route, directory):
def route_handler(res, req):
url = req.get_url()
res.grab_aborted_handler()
- url = url[len(route)::]
+ url = url[len(route) : :]
if url.startswith("/"):
url = url[1::]
filename = path.join(path.realpath(directory), url)
-
+
if not in_directory(filename, directory):
res.write_status(404).end_without_body()
return
res.run_async(sendfile(res, req, filename))
+
if route.startswith("/"):
route = route[1::]
app.get("%s/*" % route, route_handler)
-
def middleware(*functions):
- #we use Optional data=None at the end so you can use and middleware inside a middleware
+    # we use Optional data=None at the end so you can use a middleware inside a middleware
async def middleware_route(res, req, data=None):
some_async_as_run = False
- #cicle to all middlewares
+        # cycle through all middlewares
for function in functions:
- #detect if is coroutine or not
+ # detect if is coroutine or not
if inspect.iscoroutinefunction(function):
- #in async query string, arguments and headers are only valid until the first await
+ # in async query string, arguments and headers are only valid until the first await
if not some_async_as_run:
- #preserve queries, headers, parameters, url, full_url and method
+ # preserve queries, headers, parameters, url, full_url and method
req.preserve()
- some_async_as_run = True
+ some_async_as_run = True
data = await function(res, req, data)
else:
- #call middlewares
+ # call middlewares
data = function(res, req, data)
- #stops if returns Falsy
+ # stops if returns Falsy
if not data:
break
return data
@@ -133,7 +137,7 @@ def middleware(*functions):
return middleware_route
-class MiddlewareRouter():
+class MiddlewareRouter:
def __init__(self, app, *middlewares):
self.app = app
self.middlewares = middlewares
@@ -149,43 +153,51 @@ class MiddlewareRouter():
middies.append(handler)
self.app.post(path, middleware(*middies))
return self
+
def options(self, path, handler):
middies = list(self.middlewares)
middies.append(handler)
self.app.options(path, middleware(*middies))
return self
+
def delete(self, path, handler):
middies = list(self.middlewares)
middies.append(handler)
self.app.delete(path, middleware(*middies))
return self
+
def patch(self, path, handler):
middies = list(self.middlewares)
middies.append(handler)
self.app.patch(path, middleware(*middies))
return self
+
def put(self, path, handler):
middies = list(self.middlewares)
middies.append(handler)
self.app.put(path, middleware(*middies))
return self
+
def head(self, path, handler):
middies = list(self.middlewares)
middies.append(handler)
self.app.head(path, middleware(*middies))
return self
+
def connect(self, path, handler):
middies = list(self.middlewares)
middies.append(handler)
self.app.connect(path, middleware(*middies))
return self
+
def trace(self, path, handler):
middies = list(self.middlewares)
middies.append(handler)
self.app.trace(path, middleware(*middies))
return self
+
def any(self, path, handler):
middies = list(self.middlewares)
middies.append(handler)
self.app.any(path, middleware(*middies))
- return self
\ No newline at end of file
+ return self
diff --git a/src/socketify/loop.py b/src/socketify/loop.py
index 929dabf..66efd2e 100644
--- a/src/socketify/loop.py
+++ b/src/socketify/loop.py
@@ -1,4 +1,3 @@
-
import asyncio
import threading
import time
@@ -13,11 +12,11 @@ def future_handler(future, loop, exception_handler, response):
future.result()
return None
except Exception as error:
- if hasattr(exception_handler, '__call__'):
+ if hasattr(exception_handler, "__call__"):
exception_handler(loop, error, response)
else:
try:
- #just log in console the error to call attention
+ # just log in console the error to call attention
print("Uncaught Exception: %s" % str(error))
if response != None:
response.write_status(500).end("Internal Error")
@@ -25,14 +24,17 @@ def future_handler(future, loop, exception_handler, response):
return None
return None
+
class Loop:
def __init__(self, exception_handler=None):
self.loop = asyncio.new_event_loop()
self.uv_loop = UVLoop()
- if hasattr(exception_handler, '__call__'):
+ if hasattr(exception_handler, "__call__"):
self.exception_handler = exception_handler
- self.loop.set_exception_handler(lambda loop, context: exception_handler(loop, context, None))
+ self.loop.set_exception_handler(
+ lambda loop, context: exception_handler(loop, context, None)
+ )
else:
self.exception_handler = None
@@ -43,18 +45,17 @@ class Loop:
def set_timeout(self, timeout, callback, user_data):
return self.uv_loop.create_timer(timeout, 0, callback, user_data)
-
def create_future(self):
return self.loop.create_future()
def start(self):
self.started = True
- #run asyncio once per tick
+ # run asyncio once per tick
def tick(loop):
- #run once asyncio
+ # run once asyncio
loop.run_once_asyncio()
-
- #use check for calling asyncio once per tick
+
+ # use check for calling asyncio once per tick
self.timer = self.uv_loop.create_timer(0, 1, tick, self)
# self.timer = self.uv_loop.create_check(tick, self)
@@ -66,18 +67,17 @@ class Loop:
def run_once_asyncio(self):
# with suppress(asyncio.CancelledError):
- #run only one step
+ # run only one step
self.loop.call_soon(self.loop.stop)
self.loop.run_forever()
-
def stop(self):
- if(self.started):
+ if self.started:
self.timer.stop()
self.started = False
- #unbind run_once
- #if is still running stops
- if self.loop.is_running():
+ # unbind run_once
+ # if is still running stops
+ if self.loop.is_running():
self.loop.stop()
self.last_defer = None
@@ -85,26 +85,27 @@ class Loop:
pending = asyncio.all_tasks(self.loop)
# Run loop until tasks done
self.loop.run_until_complete(asyncio.gather(*pending))
-
- #Exposes native loop for uWS
+
+ # Exposes native loop for uWS
def get_native_loop(self):
return self.uv_loop.get_native_loop()
def run_async(self, task, response=None):
- #with run_once
+ # with run_once
future = asyncio.ensure_future(task, loop=self.loop)
- #with threads
- future.add_done_callback(lambda f: future_handler(f, self.loop, self.exception_handler, response))
- #force asyncio run once to enable req in async functions before first await
+ # with threads
+ future.add_done_callback(
+ lambda f: future_handler(f, self.loop, self.exception_handler, response)
+ )
+ # force asyncio run once to enable req in async functions before first await
self.run_once_asyncio()
- #if response != None: #set auto cork
- # response.needs_cork = True
+ # if response != None: #set auto cork
+ # response.needs_cork = True
return future
-
# if sys.version_info >= (3, 11)
# with asyncio.Runner(loop_factory=uvloop.new_event_loop) as runner:
# runner.run(main())
@@ -113,7 +114,7 @@ class Loop:
# asyncio.run(main())
-#see ./native/uv_selector.txt
+# see ./native/uv_selector.txt
# will only work on linux and macos
# class UVSelector(asyncio.SelectorEventLoop):
# def register(self, fileobj, events, data=None):
diff --git a/src/socketify/socketify.py b/src/socketify/socketify.py
index b53134c..5d92894 100644
--- a/src/socketify/socketify.py
+++ b/src/socketify/socketify.py
@@ -7,7 +7,7 @@ import json
import mimetypes
import os
from os import path
-import platform
+import platform
import signal
from threading import Thread, local, Lock
import time
@@ -19,11 +19,12 @@ from .helpers import static_route
mimetypes.init()
-
-is_python = platform.python_implementation() == 'CPython'
+
+is_python = platform.python_implementation() == "CPython"
ffi = cffi.FFI()
-ffi.cdef("""
+ffi.cdef(
+ """
struct us_socket_context_options_t {
const char *key_file_name;
@@ -260,26 +261,33 @@ bool uws_ws_publish_with_options(int ssl, uws_websocket_t *ws, const char *topic
int uws_ws_get_buffered_amount(int ssl, uws_websocket_t *ws);
size_t uws_ws_get_remote_address(int ssl, uws_websocket_t *ws, const char **dest);
size_t uws_ws_get_remote_address_as_text(int ssl, uws_websocket_t *ws, const char **dest);
-""")
+"""
+)
library_extension = "dll" if platform.system().lower() == "windows" else "so"
-library_path = os.path.join(os.path.dirname(__file__), "libsocketify_%s_%s.%s" % (platform.system().lower(), "arm64" if "arm" in platform.processor().lower() else "amd64", library_extension))
-
+library_path = os.path.join(
+ os.path.dirname(__file__),
+ "libsocketify_%s_%s.%s"
+ % (
+ platform.system().lower(),
+ "arm64" if "arm" in platform.processor().lower() else "amd64",
+ library_extension,
+ ),
+)
lib = ffi.dlopen(library_path)
-
@ffi.callback("void(const char*, size_t, void*)")
def uws_missing_server_name(hostname, hostname_length, user_data):
if not user_data == ffi.NULL:
try:
app = ffi.from_handle(user_data)
- if hostname == ffi.NULL:
+ if hostname == ffi.NULL:
data = None
else:
- data = ffi.unpack(hostname, hostname_length).decode('utf-8')
+ data = ffi.unpack(hostname, hostname_length).decode("utf-8")
handler = app._missing_server_handler
if inspect.iscoroutinefunction(handler):
@@ -287,7 +295,10 @@ def uws_missing_server_name(hostname, hostname_length, user_data):
else:
handler(data)
except Exception as err:
- print("Uncaught Exception: %s" % str(err)) #just log in console the error to call attention
+ print(
+ "Uncaught Exception: %s" % str(err)
+ ) # just log in console the error to call attention
+
@ffi.callback("void(uws_websocket_t*, void*)")
def uws_websocket_drain_handler(ws, user_data):
@@ -301,12 +312,15 @@ def uws_websocket_drain_handler(ws, user_data):
else:
handler(ws)
except Exception as err:
- print("Uncaught Exception: %s" % str(err)) #just log in console the error to call attention
+ print(
+ "Uncaught Exception: %s" % str(err)
+ ) # just log in console the error to call attention
+
@ffi.callback("void(uws_websocket_t*, void*)")
def uws_websocket_open_handler(ws, user_data):
-
- if not user_data == ffi.NULL:
+
+ if not user_data == ffi.NULL:
try:
(handlers, app) = ffi.from_handle(user_data)
ws = WebSocket(ws, app.SSL, app.loop)
@@ -316,7 +330,10 @@ def uws_websocket_open_handler(ws, user_data):
else:
handler(ws)
except Exception as err:
- print("Uncaught Exception: %s" % str(err)) #just log in console the error to call attention
+ print(
+ "Uncaught Exception: %s" % str(err)
+ ) # just log in console the error to call attention
+
@ffi.callback("void(uws_websocket_t*, const char*, size_t, uws_opcode_t, void*)")
def uws_websocket_message_handler(ws, message, length, opcode, user_data):
@@ -324,14 +341,14 @@ def uws_websocket_message_handler(ws, message, length, opcode, user_data):
try:
(handlers, app) = ffi.from_handle(user_data)
ws = WebSocket(ws, app.SSL, app.loop)
-
- if message == ffi.NULL:
+
+ if message == ffi.NULL:
data = None
else:
data = ffi.unpack(message, length)
opcode = OpCode(opcode)
if opcode == OpCode.TEXT:
- data = data.decode('utf-8')
+ data = data.decode("utf-8")
handler = handlers.message
if inspect.iscoroutinefunction(handler):
@@ -340,7 +357,10 @@ def uws_websocket_message_handler(ws, message, length, opcode, user_data):
handler(ws, data, opcode)
except Exception as err:
- print("Uncaught Exception: %s" % str(err)) #just log in console the error to call attention
+ print(
+ "Uncaught Exception: %s" % str(err)
+ ) # just log in console the error to call attention
+
@ffi.callback("void(uws_websocket_t*, const char*, size_t, void*)")
def uws_websocket_pong_handler(ws, message, length, user_data):
@@ -348,11 +368,11 @@ def uws_websocket_pong_handler(ws, message, length, user_data):
try:
(handlers, app) = ffi.from_handle(user_data)
ws = WebSocket(ws, app.SSL, app.loop)
- if message == ffi.NULL:
+ if message == ffi.NULL:
data = None
else:
data = ffi.unpack(message, length)
-
+
handler = handlers.pong
if inspect.iscoroutinefunction(handler):
app.run_async(handler(ws, data))
@@ -360,20 +380,23 @@ def uws_websocket_pong_handler(ws, message, length, user_data):
handler(ws, data)
except Exception as err:
- print("Uncaught Exception: %s" % str(err)) #just log in console the error to call attention
+ print(
+ "Uncaught Exception: %s" % str(err)
+ ) # just log in console the error to call attention
+
@ffi.callback("void(uws_websocket_t*, const char*, size_t, void*)")
-def uws_websocket_ping_handler(ws, message,length, user_data):
+def uws_websocket_ping_handler(ws, message, length, user_data):
if not user_data == ffi.NULL:
try:
(handlers, app) = ffi.from_handle(user_data)
ws = WebSocket(ws, app.SSL, app.loop)
-
- if message == ffi.NULL:
+
+ if message == ffi.NULL:
data = None
else:
data = ffi.unpack(message, length)
-
+
handler = handlers.ping
if inspect.iscoroutinefunction(handler):
app.run_async(handler(ws, data))
@@ -381,7 +404,9 @@ def uws_websocket_ping_handler(ws, message,length, user_data):
handler(ws, data)
except Exception as err:
- print("Uncaught Exception: %s" % str(err)) #just log in console the error to call attention
+ print(
+ "Uncaught Exception: %s" % str(err)
+ ) # just log in console the error to call attention
@ffi.callback("void(uws_websocket_t*, int, const char*, size_t, void*)")
@@ -389,26 +414,27 @@ def uws_websocket_close_handler(ws, code, message, length, user_data):
if not user_data == ffi.NULL:
try:
(handlers, app) = ffi.from_handle(user_data)
- #pass to free data on WebSocket if needed
+ # pass to free data on WebSocket if needed
ws = WebSocket(ws, app.SSL, app.loop)
- if message == ffi.NULL:
+ if message == ffi.NULL:
data = None
else:
data = ffi.unpack(message, length)
-
+
handler = handlers.close
-
+
if handler is None:
return
-
if inspect.iscoroutinefunction(handler):
future = app.run_async(handler(ws, int(code), data))
+
def when_finished(_):
key = ws.get_user_data_uuid()
if not key is None:
SocketRefs.pop(key, None)
+
future.add_done_callback(when_finished)
else:
handler(ws, int(code), data)
@@ -416,26 +442,29 @@ def uws_websocket_close_handler(ws, code, message, length, user_data):
if not key is None:
SocketRefs.pop(key, None)
-
except Exception as err:
- print("Uncaught Exception: %s" % str(err)) #just log in console the error to call attention
+ print(
+ "Uncaught Exception: %s" % str(err)
+ ) # just log in console the error to call attention
@ffi.callback("void(uws_res_t*, uws_req_t*, uws_socket_context_t*, void*)")
def uws_websocket_upgrade_handler(res, req, context, user_data):
if not user_data == ffi.NULL:
- try:
+ try:
(handlers, app) = ffi.from_handle(user_data)
response = AppResponse(res, app.loop, app.SSL, app._template)
- request = AppRequest(req)
+ request = AppRequest(req)
handler = handlers.upgrade
if inspect.iscoroutinefunction(handler):
- app.run_async(handler(response, request, context))
+ response.run_async(handler(response, request, context))
else:
handler(response, request, context)
except Exception as err:
- print("Uncaught Exception: %s" % str(err)) #just log in console the error to call attention
+ print(
+ "Uncaught Exception: %s" % str(err)
+ ) # just log in console the error to call attention
@ffi.callback("void(const char*, size_t, void*)")
@@ -445,19 +474,22 @@ def uws_req_for_each_topic_handler(topic, topic_size, user_data):
ws = ffi.from_handle(user_data)
header_name = ffi.unpack(topic, topic_size).decode("utf-8")
ws.trigger_for_each_topic_handler(header_name, header_value)
- except Exception: #invalid utf-8
+ except Exception: # invalid utf-8
return
+
@ffi.callback("void(const char*, size_t, const char*, size_t, void*)")
-def uws_req_for_each_header_handler(header_name, header_name_size, header_value, header_value_size, user_data):
+def uws_req_for_each_header_handler(
+ header_name, header_name_size, header_value, header_value_size, user_data
+):
if not user_data == ffi.NULL:
try:
- req = ffi.from_handle(user_data)
+ req = ffi.from_handle(user_data)
header_name = ffi.unpack(header_name, header_name_size).decode("utf-8")
header_value = ffi.unpack(header_value, header_value_size).decode("utf-8")
-
+
req.trigger_for_each_header_handler(header_name, header_value)
- except Exception: #invalid utf-8
+ except Exception: # invalid utf-8
return
@@ -486,9 +518,20 @@ def uws_generic_listen_handler(listen_socket, config, user_data):
if not user_data == ffi.NULL:
app = ffi.from_handle(user_data)
config.port = lib.us_socket_local_port(app.SSL, listen_socket)
- if hasattr(app, "_listen_handler") and hasattr(app._listen_handler, '__call__'):
+ if hasattr(app, "_listen_handler") and hasattr(app._listen_handler, "__call__"):
app.socket = listen_socket
- app._listen_handler(None if config == ffi.NULL else AppListenOptions(port=int(config.port),host=None if config.host == ffi.NULL else ffi.string(config.host).decode("utf-8"), options=int(config.options)))
+ app._listen_handler(
+ None
+ if config == ffi.NULL
+ else AppListenOptions(
+ port=int(config.port),
+ host=None
+ if config.host == ffi.NULL
+ else ffi.string(config.host).decode("utf-8"),
+ options=int(config.options),
+ )
+ )
+
@ffi.callback("void(uws_res_t*, void*)")
def uws_generic_aborted_handler(response, user_data):
@@ -498,21 +541,24 @@ def uws_generic_aborted_handler(response, user_data):
res.trigger_aborted()
except:
pass
+
+
@ffi.callback("void(uws_res_t*, const char*, size_t, bool, void*)")
def uws_generic_on_data_handler(res, chunk, chunk_length, is_end, user_data):
if not user_data == ffi.NULL:
res = ffi.from_handle(user_data)
- if chunk == ffi.NULL:
+ if chunk == ffi.NULL:
data = None
else:
data = ffi.unpack(chunk, chunk_length)
-
+
res.trigger_data_handler(data, bool(is_end))
+
@ffi.callback("bool(uws_res_t*, uintmax_t, void*)")
def uws_generic_on_writable_handler(res, offset, user_data):
if not user_data == ffi.NULL:
- res = ffi.from_handle(user_data)
+ res = ffi.from_handle(user_data)
result = res.trigger_writable_handler(offset)
return result
return False
@@ -521,58 +567,58 @@ def uws_generic_on_writable_handler(res, offset, user_data):
@ffi.callback("void(uws_res_t*, void*)")
def uws_generic_cork_handler(res, user_data):
if not user_data == ffi.NULL:
- response = ffi.from_handle(user_data)
+ response = ffi.from_handle(user_data)
try:
if inspect.iscoroutinefunction(response._cork_handler):
raise RuntimeError("Calls inside cork must be sync")
response._cork_handler(response)
except Exception as err:
- print("Error on cork handler %s" % str(err))
-
+ print("Error on cork handler %s" % str(err))
+
@ffi.callback("void(void*)")
def uws_ws_cork_handler(user_data):
if not user_data == ffi.NULL:
- ws = ffi.from_handle(user_data)
+ ws = ffi.from_handle(user_data)
try:
if inspect.iscoroutinefunction(ws._cork_handler):
raise RuntimeError("Calls inside cork must be sync")
ws._cork_handler(ws)
except Exception as err:
- print("Error on cork handler %s" % str(err))
-
+ print("Error on cork handler %s" % str(err))
+
# Compressor mode is 8 lowest bits where HIGH4(windowBits), LOW4(memLevel).
# Decompressor mode is 8 highest bits LOW4(windowBits).
# If compressor or decompressor bits are 1, then they are shared.
# If everything is just simply 0, then everything is disabled.
class CompressOptions(IntEnum):
- #Disabled, shared, shared are "special" values
- DISABLED = lib.DISABLED
- SHARED_COMPRESSOR = lib.SHARED_COMPRESSOR
- SHARED_DECOMPRESSOR = lib.SHARED_DECOMPRESSOR
- #Highest 4 bits describe decompressor
- DEDICATED_DECOMPRESSOR_32KB = lib.DEDICATED_DECOMPRESSOR_32KB
- DEDICATED_DECOMPRESSOR_16KB = lib.DEDICATED_DECOMPRESSOR_16KB
- DEDICATED_DECOMPRESSOR_8KB = lib.DEDICATED_DECOMPRESSOR_8KB
- DEDICATED_DECOMPRESSOR_4KB = lib.DEDICATED_DECOMPRESSOR_4KB
- DEDICATED_DECOMPRESSOR_2KB = lib.DEDICATED_DECOMPRESSOR_2KB
- DEDICATED_DECOMPRESSOR_1KB = lib.DEDICATED_DECOMPRESSOR_1KB
- DEDICATED_DECOMPRESSOR_512B = lib.DEDICATED_DECOMPRESSOR_512B
- #Same as 32kb
- DEDICATED_DECOMPRESSOR = lib.DEDICATED_DECOMPRESSOR,
+ # Disabled, shared, shared are "special" values
+ DISABLED = lib.DISABLED
+ SHARED_COMPRESSOR = lib.SHARED_COMPRESSOR
+ SHARED_DECOMPRESSOR = lib.SHARED_DECOMPRESSOR
+ # Highest 4 bits describe decompressor
+ DEDICATED_DECOMPRESSOR_32KB = lib.DEDICATED_DECOMPRESSOR_32KB
+ DEDICATED_DECOMPRESSOR_16KB = lib.DEDICATED_DECOMPRESSOR_16KB
+ DEDICATED_DECOMPRESSOR_8KB = lib.DEDICATED_DECOMPRESSOR_8KB
+ DEDICATED_DECOMPRESSOR_4KB = lib.DEDICATED_DECOMPRESSOR_4KB
+ DEDICATED_DECOMPRESSOR_2KB = lib.DEDICATED_DECOMPRESSOR_2KB
+ DEDICATED_DECOMPRESSOR_1KB = lib.DEDICATED_DECOMPRESSOR_1KB
+ DEDICATED_DECOMPRESSOR_512B = lib.DEDICATED_DECOMPRESSOR_512B
+ # Same as 32kb
+ DEDICATED_DECOMPRESSOR = lib.DEDICATED_DECOMPRESSOR
- #Lowest 8 bit describe compressor
- DEDICATED_COMPRESSOR_3KB = lib.DEDICATED_COMPRESSOR_3KB
- DEDICATED_COMPRESSOR_4KB = lib.DEDICATED_COMPRESSOR_4KB
- DEDICATED_COMPRESSOR_8KB = lib.DEDICATED_COMPRESSOR_8KB
- DEDICATED_COMPRESSOR_16KB = lib.DEDICATED_COMPRESSOR_16KB
- DEDICATED_COMPRESSOR_32KB = lib.DEDICATED_COMPRESSOR_32KB
- DEDICATED_COMPRESSOR_64KB = lib.DEDICATED_COMPRESSOR_64KB
- DEDICATED_COMPRESSOR_128KB = lib.DEDICATED_COMPRESSOR_128KB
- DEDICATED_COMPRESSOR_256KB = lib.DEDICATED_COMPRESSOR_256KB
- #Same as 256kb
- DEDICATED_COMPRESSOR = lib.DEDICATED_COMPRESSOR
+ # Lowest 8 bit describe compressor
+ DEDICATED_COMPRESSOR_3KB = lib.DEDICATED_COMPRESSOR_3KB
+ DEDICATED_COMPRESSOR_4KB = lib.DEDICATED_COMPRESSOR_4KB
+ DEDICATED_COMPRESSOR_8KB = lib.DEDICATED_COMPRESSOR_8KB
+ DEDICATED_COMPRESSOR_16KB = lib.DEDICATED_COMPRESSOR_16KB
+ DEDICATED_COMPRESSOR_32KB = lib.DEDICATED_COMPRESSOR_32KB
+ DEDICATED_COMPRESSOR_64KB = lib.DEDICATED_COMPRESSOR_64KB
+ DEDICATED_COMPRESSOR_128KB = lib.DEDICATED_COMPRESSOR_128KB
+ DEDICATED_COMPRESSOR_256KB = lib.DEDICATED_COMPRESSOR_256KB
+ # Same as 256kb
+ DEDICATED_COMPRESSOR = lib.DEDICATED_COMPRESSOR
class OpCode(IntEnum):
@@ -583,18 +629,21 @@ class OpCode(IntEnum):
PING = 9
PONG = 10
+
class SendStatus(IntEnum):
BACKPRESSURE = 0
SUCCESS = 1
DROPPED = 2
-
-#dict to keep socket data alive until closed if needed
+
+
+# dict to keep socket data alive until closed if needed
SocketRefs = {}
+
class WebSocket:
def __init__(self, websocket, ssl, loop):
self.ws = websocket
- self.SSL = ssl
+ self.SSL = ssl
self._ptr = ffi.new_handle(self)
self.loop = loop
self._cork_handler = None
@@ -603,17 +652,20 @@ class WebSocket:
self.socket_data = None
self.got_socket_data = False
-
def trigger_for_each_topic_handler(self, topic):
- if hasattr(self, "_for_each_topic_handler") and hasattr(self._for_each_topic_handler, '__call__'):
+ if hasattr(self, "_for_each_topic_handler") and hasattr(
+ self._for_each_topic_handler, "__call__"
+ ):
try:
if inspect.iscoroutinefunction(self._for_each_topic_handler):
- raise RuntimeError("WebSocket.for_each_topic_handler must be synchronous")
+ raise RuntimeError(
+ "WebSocket.for_each_topic_handler must be synchronous"
+ )
self._for_each_topic_handler(topic)
except Exception as err:
- print("Error on for each topic handler %s" % str(err))
+ print("Error on for each topic handler %s" % str(err))
- #uuid for socket data, used to free data after socket closes
+ # uuid for socket data, used to free data after socket closes
def get_user_data_uuid(self):
try:
if self.got_socket_data:
@@ -643,7 +695,7 @@ class WebSocket:
return data
except:
return None
-
+
def get_buffered_amount(self):
return int(lib.uws_ws_get_buffered_amount(self.SSL, self.ws))
@@ -700,16 +752,28 @@ class WebSocket:
elif isinstance(message, bytes):
data = message
elif message == None:
- data = b''
+ data = b""
else:
data = json.dumps(message).encode("utf-8")
- return bool(lib.uws_ws_publish_with_options(self.SSL, self.ws, topic_data, len(topic_data),data, len(data), int(opcode), bool(compress)))
+ return bool(
+ lib.uws_ws_publish_with_options(
+ self.SSL,
+ self.ws,
+ topic_data,
+ len(topic_data),
+ data,
+ len(data),
+ int(opcode),
+ bool(compress),
+ )
+ )
except:
return False
-
+
def get_topics(self):
topics = []
+
def copy_topics(topic):
topics.append(value)
@@ -718,28 +782,30 @@ class WebSocket:
def for_each_topic(self, handler):
self._for_each_topic_handler = handler
- lib.uws_ws_iterate_topics(self.SSL, self.ws, uws_req_for_each_topic_handler, self._ptr)
+ lib.uws_ws_iterate_topics(
+ self.SSL, self.ws, uws_req_for_each_topic_handler, self._ptr
+ )
def get_remote_address_bytes(self):
buffer = ffi.new("char**")
- length = lib.uws_ws_get_remote_address(self.SSL, self.ws, buffer)
+ length = lib.uws_ws_get_remote_address(self.SSL, self.ws, buffer)
buffer_address = ffi.addressof(buffer, 0)[0]
- if buffer_address == ffi.NULL:
+ if buffer_address == ffi.NULL:
return None
try:
return ffi.unpack(buffer_address, length)
- except Exception: #invalid
+ except Exception: # invalid
return None
def get_remote_address(self):
buffer = ffi.new("char**")
- length = lib.uws_ws_get_remote_address_as_text(self.SSL, self.ws, buffer)
+ length = lib.uws_ws_get_remote_address_as_text(self.SSL, self.ws, buffer)
buffer_address = ffi.addressof(buffer, 0)[0]
- if buffer_address == ffi.NULL:
+ if buffer_address == ffi.NULL:
return None
try:
return ffi.unpack(buffer_address, length).decode("utf-8")
- except Exception: #invalid utf-8
+ except Exception: # invalid utf-8
return None
def send_fragment(self, message, compress=False):
@@ -749,12 +815,14 @@ class WebSocket:
elif isinstance(message, bytes):
data = message
elif message == None:
- lib.uws_ws_send_fragment(self.SSL, self.ws, b'', 0, compress)
+ lib.uws_ws_send_fragment(self.SSL, self.ws, b"", 0, compress)
return self
else:
data = json.dumps(message).encode("utf-8")
- return SendStatus(lib.uws_ws_send_fragment(self.SSL, self.ws, data, len(data),compress))
+ return SendStatus(
+ lib.uws_ws_send_fragment(self.SSL, self.ws, data, len(data), compress)
+ )
except:
return None
@@ -765,15 +833,19 @@ class WebSocket:
elif isinstance(message, bytes):
data = message
elif message == None:
- lib.uws_ws_send_last_fragment(self.SSL, self.ws, b'', 0, compress)
+ lib.uws_ws_send_last_fragment(self.SSL, self.ws, b"", 0, compress)
return self
else:
data = json.dumps(message).encode("utf-8")
- return SendStatus(lib.uws_ws_send_last_fragment(self.SSL, self.ws, data, len(data),compress))
+ return SendStatus(
+ lib.uws_ws_send_last_fragment(
+ self.SSL, self.ws, data, len(data), compress
+ )
+ )
except:
return None
-
+
def send_first_fragment(self, message, opcode=OpCode.BINARY, compress=False):
try:
if isinstance(message, str):
@@ -781,12 +853,18 @@ class WebSocket:
elif isinstance(message, bytes):
data = message
elif message == None:
- lib.uws_ws_send_first_fragment_with_opcode(self.SSL, self.ws, b'', 0, int(opcode), compress)
+ lib.uws_ws_send_first_fragment_with_opcode(
+ self.SSL, self.ws, b"", 0, int(opcode), compress
+ )
return self
else:
data = json.dumps(message).encode("utf-8")
- return SendStatus(lib.uws_ws_send_first_fragment_with_opcode(self.SSL, self.ws, data, len(data), int(opcode), compress))
+ return SendStatus(
+ lib.uws_ws_send_first_fragment_with_opcode(
+ self.SSL, self.ws, data, len(data), int(opcode), compress
+ )
+ )
except:
return None
@@ -801,29 +879,35 @@ class WebSocket:
elif isinstance(message, bytes):
data = message
elif message == None:
- lib.uws_ws_send_with_options(self.SSL, self.ws, b'', 0, int(opcode), compress, fin)
+ lib.uws_ws_send_with_options(
+ self.SSL, self.ws, b"", 0, int(opcode), compress, fin
+ )
return self
else:
data = json.dumps(message).encode("utf-8")
- return SendStatus(lib.uws_ws_send_with_options(self.SSL, self.ws, data, len(data), int(opcode), compress, fin))
+ return SendStatus(
+ lib.uws_ws_send_with_options(
+ self.SSL, self.ws, data, len(data), int(opcode), compress, fin
+ )
+ )
except:
return None
def cork_end(self, code=0, message=None):
self.cork(lambda ws: ws.end(message, code, message))
return self
-
+
def end(self, code=0, message=None):
try:
if not isinstance(code, int):
- raise RuntimeError("code must be an int")
+ raise RuntimeError("code must be an int")
if isinstance(message, str):
data = message.encode("utf-8")
elif isinstance(message, bytes):
data = message
elif message == None:
- lib.uws_ws_end(self.SSL, self.ws, b'', 0)
+ lib.uws_ws_end(self.SSL, self.ws, b"", 0)
return self
else:
data = json.dumps(message).encode("utf-8")
@@ -831,6 +915,7 @@ class WebSocket:
lib.uws_ws_end(self.SSL, self.ws, code, data, len(data))
finally:
return self
+
def close(self):
lib.uws_ws_close(self.SSL, self._ptr)
return self
@@ -843,6 +928,7 @@ class WebSocket:
self.ws = ffi.NULL
self._ptr = ffi.NULL
+
class WSBehaviorHandlers:
def __init__(self):
self.upgrade = None
@@ -853,6 +939,7 @@ class WSBehaviorHandlers:
self.pong = None
self.close = None
+
class AppRequest:
def __init__(self, request):
self.req = request
@@ -867,12 +954,11 @@ class AppRequest:
self._full_url = None
self._method = None
-
def get_cookie(self, name):
if self.read_jar == None:
if self.jar_parsed:
return None
-
+
if self._headers:
raw_cookies = self._headers.get("cookie", None)
else:
@@ -893,58 +979,61 @@ class AppRequest:
if self._url:
return self._url
buffer = ffi.new("char**")
- length = lib.uws_req_get_url(self.req, buffer)
+ length = lib.uws_req_get_url(self.req, buffer)
buffer_address = ffi.addressof(buffer, 0)[0]
- if buffer_address == ffi.NULL:
+ if buffer_address == ffi.NULL:
return None
try:
self._url = ffi.unpack(buffer_address, length).decode("utf-8")
return self._url
- except Exception: #invalid utf-8
+ except Exception: # invalid utf-8
return None
def get_full_url(self):
if self._full_url:
return self._full_url
buffer = ffi.new("char**")
- length = lib.uws_req_get_full_url(self.req, buffer)
+ length = lib.uws_req_get_full_url(self.req, buffer)
buffer_address = ffi.addressof(buffer, 0)[0]
- if buffer_address == ffi.NULL:
+ if buffer_address == ffi.NULL:
return None
try:
self._full_url = ffi.unpack(buffer_address, length).decode("utf-8")
return self._full_url
- except Exception: #invalid utf-8
+ except Exception: # invalid utf-8
return None
-
def get_method(self):
if self._method:
return self._method
buffer = ffi.new("char**")
- #will use uws_req_get_case_sensitive_method until version v21 and switch back to uws_req_get_method for 0 impacts on behavior
- length = lib.uws_req_get_case_sensitive_method(self.req, buffer)
+ # will use uws_req_get_case_sensitive_method until version v21 and switch back to uws_req_get_method for 0 impacts on behavior
+ length = lib.uws_req_get_case_sensitive_method(self.req, buffer)
buffer_address = ffi.addressof(buffer, 0)[0]
- if buffer_address == ffi.NULL:
+ if buffer_address == ffi.NULL:
return None
-
+
try:
self._method = ffi.unpack(buffer_address, length).decode("utf-8")
return self._method
- except Exception: #invalid utf-8
+ except Exception: # invalid utf-8
return None
def for_each_header(self, handler):
self._for_each_header_handler = handler
- lib.uws_req_for_each_header(self.req, uws_req_for_each_header_handler, self._ptr)
-
+ lib.uws_req_for_each_header(
+ self.req, uws_req_for_each_header_handler, self._ptr
+ )
+
def get_headers(self):
if not self._headers is None:
return self._headers
-
+
self._headers = {}
+
def copy_headers(key, value):
self._headers[key] = value
+
self.for_each_header(copy_headers)
return self._headers
@@ -957,22 +1046,22 @@ class AppRequest:
data = json.dumps(lower_case_header).encode("utf-8")
buffer = ffi.new("char**")
- length = lib.uws_req_get_header(self.req, data, len(data), buffer)
+ length = lib.uws_req_get_header(self.req, data, len(data), buffer)
buffer_address = ffi.addressof(buffer, 0)[0]
- if buffer_address == ffi.NULL:
+ if buffer_address == ffi.NULL:
return None
try:
return ffi.unpack(buffer_address, length).decode("utf-8")
- except Exception: #invalid utf-8
+ except Exception: # invalid utf-8
return None
-
+
def get_queries(self):
try:
if self._query:
return self._query
url = self.get_url()
- query = self.get_full_url()[len(url):]
+ query = self.get_full_url()[len(url) :]
if full_url.startswith("?"):
query = query[1:]
self._query = parse_qs(query, encoding="utf-8")
@@ -985,7 +1074,7 @@ class AppRequest:
if self._query:
return self._query.get(key, None)
buffer = ffi.new("char**")
-
+
if isinstance(key, str):
key_data = key.encode("utf-8")
elif isinstance(key, bytes):
@@ -993,13 +1082,13 @@ class AppRequest:
else:
key_data = json.dumps(key).encode("utf-8")
- length = lib.uws_req_get_query(self.req, key_data, len(key_data), buffer)
+ length = lib.uws_req_get_query(self.req, key_data, len(key_data), buffer)
buffer_address = ffi.addressof(buffer, 0)[0]
- if buffer_address == ffi.NULL:
+ if buffer_address == ffi.NULL:
return None
try:
return ffi.unpack(buffer_address, length).decode("utf-8")
- except Exception: #invalid utf-8
+ except Exception: # invalid utf-8
return None
def get_parameters(self):
@@ -1024,17 +1113,20 @@ class AppRequest:
return None
buffer = ffi.new("char**")
- length = lib.uws_req_get_parameter(self.req, ffi.cast("unsigned short", index), buffer)
+ length = lib.uws_req_get_parameter(
+ self.req, ffi.cast("unsigned short", index), buffer
+ )
buffer_address = ffi.addressof(buffer, 0)[0]
- if buffer_address == ffi.NULL:
+ if buffer_address == ffi.NULL:
return None
try:
return ffi.unpack(buffer_address, length).decode("utf-8")
- except Exception: #invalid utf-8
+ except Exception: # invalid utf-8
return None
+
def preserve(self):
- #preserve queries, headers, parameters, method, url and full url
- self.get_queries() #queries calls url and full_url so its preserved
+ # preserve queries, headers, parameters, method, url and full url
+ self.get_queries() # queries calls url and full_url so its preserved
self.get_headers()
self.get_parameters()
self.get_method()
@@ -1042,24 +1134,33 @@ class AppRequest:
def set_yield(self, has_yield):
lib.uws_req_set_field(self.req, 1 if has_yield else 0)
+
def get_yield(self):
return bool(lib.uws_req_get_yield(self.req))
+
def is_ancient(self):
return bool(lib.uws_req_is_ancient(self.req))
+
def trigger_for_each_header_handler(self, key, value):
- if hasattr(self, "_for_each_header_handler") and hasattr(self._for_each_header_handler, '__call__'):
+ if hasattr(self, "_for_each_header_handler") and hasattr(
+ self._for_each_header_handler, "__call__"
+ ):
try:
if inspect.iscoroutinefunction(self._for_each_header_handler):
- raise RuntimeError("AppResponse.for_each_header_handler must be synchronous")
+ raise RuntimeError(
+ "AppResponse.for_each_header_handler must be synchronous"
+ )
self._for_each_header_handler(key, value)
except Exception as err:
- print("Error on data handler %s" % str(err))
+ print("Error on data handler %s" % str(err))
return self
+
def __del__(self):
self.req = ffi.NULL
self._ptr = ffi.NULL
+
class AppResponse:
def __init__(self, response, loop, ssl, render=None):
self.res = response
@@ -1084,8 +1185,7 @@ class AppResponse:
self.grab_aborted_handler()
self._cork_handler = callback
lib.uws_res_cork(self.SSL, self.res, uws_generic_cork_handler, self._ptr)
-
-
+
def set_cookie(self, name, value, options={}):
if self._write_jar == None:
self._write_jar = cookies.SimpleCookie()
@@ -1093,7 +1193,9 @@ class AppResponse:
if isinstance(options, dict):
for key in options:
if key == "expires" and isinstance(options[key], datetime):
- self._write_jar[name][key] = options[key].strftime("%a, %d %b %Y %H:%M:%S GMT")
+ self._write_jar[name][key] = options[key].strftime(
+ "%a, %d %b %Y %H:%M:%S GMT"
+ )
else:
self._write_jar[name][key] = options[key]
@@ -1101,112 +1203,123 @@ class AppResponse:
self.aborted = True
self._ptr = ffi.NULL
self.res = ffi.NULL
- if hasattr(self, "_aborted_handler") and hasattr(self._aborted_handler, '__call__'):
- try:
+ if hasattr(self, "_aborted_handler") and hasattr(
+ self._aborted_handler, "__call__"
+ ):
+ try:
if inspect.iscoroutinefunction(self._aborted_handler):
self.run_async(self._aborted_handler(self))
else:
self._aborted_handler(self)
except Exception as err:
- print("Error on abort handler %s" % str(err))
+ print("Error on abort handler %s" % str(err))
return self
-
+
def trigger_data_handler(self, data, is_end):
if self.aborted:
return self
- if hasattr(self, "_data_handler") and hasattr(self._data_handler, '__call__'):
+ if hasattr(self, "_data_handler") and hasattr(self._data_handler, "__call__"):
try:
if inspect.iscoroutinefunction(self._data_handler):
self.run_async(self._data_handler(self, data, is_end))
else:
self._data_handler(self, data, is_end)
except Exception as err:
- print("Error on data handler %s" % str(err))
+ print("Error on data handler %s" % str(err))
return self
-
+
def trigger_writable_handler(self, offset):
if self.aborted:
return False
- if hasattr(self, "_writable_handler") and hasattr(self._writable_handler, '__call__'):
+ if hasattr(self, "_writable_handler") and hasattr(
+ self._writable_handler, "__call__"
+ ):
try:
if inspect.iscoroutinefunction(self._writable_handler):
raise RuntimeError("AppResponse.on_writable must be synchronous")
return self._writable_handler(self, offset)
except Exception as err:
- print("Error on writable handler %s" % str(err))
+ print("Error on writable handler %s" % str(err))
return False
return False
-
+
def run_async(self, task):
self.grab_aborted_handler()
return self.loop.run_async(task, self)
- async def get_form_urlencoded(self, encoding='utf-8'):
+ async def get_form_urlencoded(self, encoding="utf-8"):
data = await self.get_data()
- try:
- #decode and unquote all
+ try:
+ # decode and unquote all
result = {}
- parsed = parse_qs(b''.join(data), encoding=encoding)
+ parsed = parse_qs(b"".join(data), encoding=encoding)
has_value = False
for key in parsed:
has_value = True
try:
value = parsed[key]
new_key = key.decode(encoding)
- last_value = value[len(value)-1]
+ last_value = value[len(value) - 1]
result[new_key] = unquote_plus(last_value.decode(encoding))
except Exception as error:
pass
return result if has_value else None
except Exception as error:
- return None #invalid encoding
+ return None # invalid encoding
- async def get_text(self, encoding='utf-8'):
+ async def get_text(self, encoding="utf-8"):
data = await self.get_data()
- try:
- return b''.join(data).decode(encoding)
+ try:
+ return b"".join(data).decode(encoding)
except Exception:
- return None #invalid encoding
+ return None # invalid encoding
async def get_json(self):
data = await self.get_data()
- try:
- return json.loads(b''.join(data).decode('utf-8'))
+ try:
+ return json.loads(b"".join(data).decode("utf-8"))
except Exception:
- return None #invalid json
-
+ return None # invalid json
def send_chunk(self, buffer, total_size):
self._chunkFuture = self.loop.create_future()
self._lastChunkOffset = 0
-
+
def is_aborted(self):
self.aborted = True
try:
if not self._chunkFuture.done():
- self._chunkFuture.set_result((False, True)) #if aborted set to done True and ok False
+ self._chunkFuture.set_result(
+ (False, True)
+ ) # if aborted set to done True and ok False
except:
pass
+
def on_writeble(self, offset):
# Here the timeout is off, we can spend as much time before calling try_end we want to
- (ok, done) = self.try_end(buffer[offset - self._lastChunkOffset::], total_size)
+ (ok, done) = self.try_end(
+ buffer[offset - self._lastChunkOffset : :], total_size
+ )
if ok:
- self._chunkFuture.set_result((ok, done))
+ self._chunkFuture.set_result((ok, done))
return ok
+
self.on_writable(on_writeble)
self.on_aborted(is_aborted)
if self.aborted:
- self._chunkFuture.set_result((False, True)) #if aborted set to done True and ok False
+ self._chunkFuture.set_result(
+ (False, True)
+ ) # if aborted set to done True and ok False
return self._chunkFuture
-
+
(ok, done) = self.try_end(buffer, total_size)
if ok:
self._chunkFuture.set_result((ok, done))
return self._chunkFuture
- #failed to send chunk
+ # failed to send chunk
self._lastChunkOffset = self.get_write_offset()
return self._chunkFuture
@@ -1214,29 +1327,32 @@ class AppResponse:
def get_data(self):
self._dataFuture = self.loop.create_future()
self._data = []
+
def is_aborted(self):
- self.aborted = True
+ self.aborted = True
try:
if not self._dataFuture.done():
self._dataFuture.set_result(self._data)
except:
pass
+
def get_chunks(self, chunk, is_end):
self._data.append(chunk)
if is_end:
self._dataFuture.set_result(self._data)
self._data = None
-
+
self.on_aborted(is_aborted)
self.on_data(get_chunks)
return self._dataFuture
-
def grab_aborted_handler(self):
- #only needed if is async
+ # only needed if is async
if not self.aborted and not self._grabed_abort_handler_once:
self._grabed_abort_handler_once = True
- lib.uws_res_on_aborted(self.SSL, self.res, uws_generic_aborted_handler, self._ptr)
+ lib.uws_res_on_aborted(
+ self.SSL, self.res, uws_generic_aborted_handler, self._ptr
+ )
return self
def redirect(self, location, status_code=302):
@@ -1244,29 +1360,38 @@ class AppResponse:
self.write_header("Location", location)
self.end_without_body(False)
return self
-
+
def write_offset(self, offset):
- lib.uws_res_override_write_offset(self.SSL, self.res, ffi.cast("uintmax_t", offset))
+ lib.uws_res_override_write_offset(
+ self.SSL, self.res, ffi.cast("uintmax_t", offset)
+ )
return self
def try_end(self, message, total_size, end_connection=False):
- try:
- if self.aborted:
- return (False, True)
- if self._write_jar != None:
- self.write_header("Set-Cookie", self._write_jar.output(header=""))
- self._write_jar = None
- if isinstance(message, str):
- data = message.encode("utf-8")
- elif isinstance(message, bytes):
- data = message
- else:
- return (False, True)
- result = lib.uws_res_try_end(self.SSL, self.res, data, len(data),ffi.cast("uintmax_t", total_size), 1 if end_connection else 0)
- return (bool(result.ok), bool(result.has_responded))
- except:
+ try:
+ if self.aborted:
return (False, True)
-
+ if self._write_jar != None:
+ self.write_header("Set-Cookie", self._write_jar.output(header=""))
+ self._write_jar = None
+ if isinstance(message, str):
+ data = message.encode("utf-8")
+ elif isinstance(message, bytes):
+ data = message
+ else:
+ return (False, True)
+ result = lib.uws_res_try_end(
+ self.SSL,
+ self.res,
+ data,
+ len(data),
+ ffi.cast("uintmax_t", total_size),
+ 1 if end_connection else 0,
+ )
+ return (bool(result.ok), bool(result.has_responded))
+ except Exception:
+ return (False, True)
+
def cork_end(self, message, end_connection=False):
self.cork(lambda res: res.end(message, end_connection))
return self
@@ -1279,67 +1404,72 @@ class AppResponse:
def get_remote_address_bytes(self):
buffer = ffi.new("char**")
- length = lib.uws_res_get_remote_address(self.SSL, self.res, buffer)
+ length = lib.uws_res_get_remote_address(self.SSL, self.res, buffer)
buffer_address = ffi.addressof(buffer, 0)[0]
- if buffer_address == ffi.NULL:
+ if buffer_address == ffi.NULL:
return None
try:
return ffi.unpack(buffer_address, length)
- except Exception: #invalid
+ except Exception: # invalid
return None
def get_remote_address(self):
buffer = ffi.new("char**")
- length = lib.uws_res_get_remote_address_as_text(self.SSL, self.res, buffer)
+ length = lib.uws_res_get_remote_address_as_text(self.SSL, self.res, buffer)
buffer_address = ffi.addressof(buffer, 0)[0]
- if buffer_address == ffi.NULL:
+ if buffer_address == ffi.NULL:
return None
try:
return ffi.unpack(buffer_address, length).decode("utf-8")
- except Exception: #invalid utf-8
+ except Exception: # invalid utf-8
return None
def get_proxied_remote_address_bytes(self):
buffer = ffi.new("char**")
- length = lib.uws_res_get_proxied_remote_address(self.SSL, self.res, buffer)
+ length = lib.uws_res_get_proxied_remote_address(self.SSL, self.res, buffer)
buffer_address = ffi.addressof(buffer, 0)[0]
- if buffer_address == ffi.NULL:
+ if buffer_address == ffi.NULL:
return None
try:
return ffi.unpack(buffer_address, length)
- except Exception: #invalid
+ except Exception: # invalid
return None
def get_proxied_remote_address(self):
buffer = ffi.new("char**")
- length = lib.uws_res_get_proxied_remote_address_as_text(self.SSL, self.res, buffer)
+ length = lib.uws_res_get_proxied_remote_address_as_text(
+ self.SSL, self.res, buffer
+ )
buffer_address = ffi.addressof(buffer, 0)[0]
- if buffer_address == ffi.NULL:
+ if buffer_address == ffi.NULL:
return None
try:
return ffi.unpack(buffer_address, length).decode("utf-8")
- except Exception: #invalid utf-8
+ except Exception: # invalid utf-8
return None
+
def end(self, message, end_connection=False):
- try:
- if self.aborted:
- return self
- if self._write_jar != None:
- self.write_header("Set-Cookie", self._write_jar.output(header=""))
- self._write_jar = None
- if isinstance(message, str):
- data = message.encode("utf-8")
- elif isinstance(message, bytes):
- data = message
- elif message == None:
- self.end_without_body(end_connection)
- return self
- else:
- self.write_header(b'Content-Type', b'application/json')
- data = json.dumps(message).encode("utf-8")
- lib.uws_res_end(self.SSL, self.res, data, len(data), 1 if end_connection else 0)
- finally:
+ try:
+ if self.aborted:
return self
+ if self._write_jar != None:
+ self.write_header("Set-Cookie", self._write_jar.output(header=""))
+ self._write_jar = None
+ if isinstance(message, str):
+ data = message.encode("utf-8")
+ elif isinstance(message, bytes):
+ data = message
+ elif message == None:
+ self.end_without_body(end_connection)
+ return self
+ else:
+ self.write_header(b"Content-Type", b"application/json")
+ data = json.dumps(message).encode("utf-8")
+ lib.uws_res_end(
+ self.SSL, self.res, data, len(data), 1 if end_connection else 0
+ )
+ finally:
+ return self
def pause(self):
if not self.aborted:
@@ -1356,14 +1486,15 @@ class AppResponse:
lib.uws_res_write_continue(self.SSL, self.res)
return self
-
def write_status(self, status_or_status_text):
if not self.aborted:
if isinstance(status_or_status_text, int):
try:
data = status_codes[status_or_status_text]
- except: #invalid status
- raise RuntimeError("\"%d\" Is not an valid Status Code" % status_or_status_text)
+ except: # invalid status
+ raise RuntimeError(
+ '"%d" Is not an valid Status Code' % status_or_status_text
+ )
elif isinstance(status_text, str):
data = status_text.encode("utf-8")
elif isinstance(status_text, bytes):
@@ -1384,14 +1515,22 @@ class AppResponse:
key_data = json.dumps(key).encode("utf-8")
if isinstance(value, int):
- lib.uws_res_write_header_int(self.SSL, self.res, key_data, len(key_data), ffi.cast("uint64_t", value))
+ lib.uws_res_write_header_int(
+ self.SSL,
+ self.res,
+ key_data,
+ len(key_data),
+ ffi.cast("uint64_t", value),
+ )
elif isinstance(value, str):
value_data = value.encode("utf-8")
elif isinstance(value, bytes):
value_data = value
else:
value_data = json.dumps(value).encode("utf-8")
- lib.uws_res_write_header(self.SSL, self.res, key_data, len(key_data), value_data, len(value_data))
+ lib.uws_res_write_header(
+ self.SSL, self.res, key_data, len(key_data), value_data, len(value_data)
+ )
return self
def end_without_body(self, end_connection=False):
@@ -1423,69 +1562,92 @@ class AppResponse:
return bool(lib.uws_res_has_responded(self.SSL, self.res))
def on_aborted(self, handler):
- if hasattr(handler, '__call__'):
- self._aborted_handler = handler
- self.grab_aborted_handler()
+ if hasattr(handler, "__call__"):
+ self._aborted_handler = handler
+ self.grab_aborted_handler()
return self
def on_data(self, handler):
if not self.aborted:
- if hasattr(handler, '__call__'):
+ if hasattr(handler, "__call__"):
self._data_handler = handler
self.grab_aborted_handler()
- lib.uws_res_on_data(self.SSL, self.res, uws_generic_on_data_handler, self._ptr)
+ lib.uws_res_on_data(
+ self.SSL, self.res, uws_generic_on_data_handler, self._ptr
+ )
return self
- def upgrade(self, sec_web_socket_key, sec_web_socket_protocol, sec_web_socket_extensions, socket_context, user_data=None):
+ def upgrade(
+ self,
+ sec_web_socket_key,
+ sec_web_socket_protocol,
+ sec_web_socket_extensions,
+ socket_context,
+ user_data=None,
+ ):
if self.aborted:
return False
if isinstance(sec_web_socket_key, str):
- sec_web_socket_key_data = sec_web_socket_key.encode('utf-8')
+ sec_web_socket_key_data = sec_web_socket_key.encode("utf-8")
elif isinstance(sec_web_socket_key, bytes):
sec_web_socket_key_data = sec_web_socket_key
else:
- sec_web_socket_key_data = b''
+ sec_web_socket_key_data = b""
if isinstance(sec_web_socket_protocol, str):
- sec_web_socket_protocol_data = sec_web_socket_protocol.encode('utf-8')
+ sec_web_socket_protocol_data = sec_web_socket_protocol.encode("utf-8")
elif isinstance(sec_web_socket_protocol, bytes):
sec_web_socket_protocol_data = sec_web_socket_protocol
else:
- sec_web_socket_protocol_data = b''
-
+ sec_web_socket_protocol_data = b""
+
if isinstance(sec_web_socket_extensions, str):
- sec_web_socket_extensions_data = sec_web_socket_extensions.encode('utf-8')
+ sec_web_socket_extensions_data = sec_web_socket_extensions.encode("utf-8")
elif isinstance(sec_web_socket_extensions, bytes):
sec_web_socket_extensions_data = sec_web_socket_extensions
else:
- sec_web_socket_extensions_data = b''
-
+ sec_web_socket_extensions_data = b""
+
user_data_ptr = ffi.NULL
if not user_data is None:
_id = uuid.uuid4()
user_data_ptr = (ffi.new_handle(user_data), _id)
- #keep alive data
+ # keep alive data
SocketRefs[_id] = user_data_ptr
-
- lib.uws_res_upgrade(self.SSL, self.res, user_data_ptr, sec_web_socket_key_data, len(sec_web_socket_key_data),sec_web_socket_protocol_data, len(sec_web_socket_protocol_data),sec_web_socket_extensions_data, len(sec_web_socket_extensions_data), socket_context)
+
+ lib.uws_res_upgrade(
+ self.SSL,
+ self.res,
+ user_data_ptr,
+ sec_web_socket_key_data,
+ len(sec_web_socket_key_data),
+ sec_web_socket_protocol_data,
+ len(sec_web_socket_protocol_data),
+ sec_web_socket_extensions_data,
+ len(sec_web_socket_extensions_data),
+ socket_context,
+ )
return True
def on_writable(self, handler):
if not self.aborted:
- if hasattr(handler, '__call__'):
- self._writable_handler = handler
- self.grab_aborted_handler()
- lib.uws_res_on_writable(self.SSL, self.res, uws_generic_on_writable_handler, self._ptr)
+ if hasattr(handler, "__call__"):
+ self._writable_handler = handler
+ self.grab_aborted_handler()
+ lib.uws_res_on_writable(
+ self.SSL, self.res, uws_generic_on_writable_handler, self._ptr
+ )
return self
- def get_native_handle(self):
+ def get_native_handle(self):
return lib.uws_res_get_native_handle(self.SSL, self.res)
-
+
def __del__(self):
self.res = ffi.NULL
self._ptr = ffi.NULL
+
class App:
def __init__(self, options=None):
socket_options_ptr = ffi.new("struct us_socket_context_options_t *")
@@ -1495,34 +1657,64 @@ class App:
if options != None:
self.is_ssl = True
self.SSL = ffi.cast("int", 1)
- socket_options.key_file_name = ffi.NULL if options.key_file_name == None else ffi.new("char[]", options.key_file_name.encode("utf-8"))
- socket_options.key_file_name = ffi.NULL if options.key_file_name == None else ffi.new("char[]", options.key_file_name.encode("utf-8"))
- socket_options.cert_file_name = ffi.NULL if options.cert_file_name == None else ffi.new("char[]", options.cert_file_name.encode("utf-8"))
- socket_options.passphrase = ffi.NULL if options.passphrase == None else ffi.new("char[]", options.passphrase.encode("utf-8"))
- socket_options.dh_params_file_name = ffi.NULL if options.dh_params_file_name == None else ffi.new("char[]", options.dh_params_file_name.encode("utf-8"))
- socket_options.ca_file_name = ffi.NULL if options.ca_file_name == None else ffi.new("char[]", options.ca_file_name.encode("utf-8"))
- socket_options.ssl_ciphers = ffi.NULL if options.ssl_ciphers == None else ffi.new("char[]", options.ssl_ciphers.encode("utf-8"))
- socket_options.ssl_prefer_low_memory_usage = ffi.cast("int", options.ssl_prefer_low_memory_usage)
+ socket_options.key_file_name = (
+ ffi.NULL
+ if options.key_file_name == None
+ else ffi.new("char[]", options.key_file_name.encode("utf-8"))
+ )
+ socket_options.key_file_name = (
+ ffi.NULL
+ if options.key_file_name == None
+ else ffi.new("char[]", options.key_file_name.encode("utf-8"))
+ )
+ socket_options.cert_file_name = (
+ ffi.NULL
+ if options.cert_file_name == None
+ else ffi.new("char[]", options.cert_file_name.encode("utf-8"))
+ )
+ socket_options.passphrase = (
+ ffi.NULL
+ if options.passphrase == None
+ else ffi.new("char[]", options.passphrase.encode("utf-8"))
+ )
+ socket_options.dh_params_file_name = (
+ ffi.NULL
+ if options.dh_params_file_name == None
+ else ffi.new("char[]", options.dh_params_file_name.encode("utf-8"))
+ )
+ socket_options.ca_file_name = (
+ ffi.NULL
+ if options.ca_file_name == None
+ else ffi.new("char[]", options.ca_file_name.encode("utf-8"))
+ )
+ socket_options.ssl_ciphers = (
+ ffi.NULL
+ if options.ssl_ciphers == None
+ else ffi.new("char[]", options.ssl_ciphers.encode("utf-8"))
+ )
+ socket_options.ssl_prefer_low_memory_usage = ffi.cast(
+ "int", options.ssl_prefer_low_memory_usage
+ )
else:
self.is_ssl = False
self.SSL = ffi.cast("int", 0)
- self.loop = Loop(lambda loop, context, response: self.trigger_error(context, response, None))
+ self.loop = Loop(
+ lambda loop, context, response: self.trigger_error(context, response, None)
+ )
- #set async loop to be the last created (is thread_local), App must be one per thread otherwise will use only the lasted loop
- #needs to be called before uws_create_app or otherwise will create another loop and will not receive the right one
+ # set async loop to be the last created (is thread_local), App must be one per thread otherwise it will use only the last created loop
+ # needs to be called before uws_create_app or otherwise will create another loop and will not receive the right one
lib.uws_get_loop_with_native(self.loop.get_native_loop())
self.app = lib.uws_create_app(self.SSL, socket_options)
self._ptr = ffi.new_handle(self)
if bool(lib.uws_constructor_failed(self.SSL, self.app)):
- raise RuntimeError("Failed to create connection")
-
+ raise RuntimeError("Failed to create connection")
self.handlers = []
self.error_handler = None
self._missing_server_handler = None
-
def template(self, template_engine):
self._template = template_engine
@@ -1532,53 +1724,122 @@ class App:
def get(self, path, handler):
user_data = ffi.new_handle((handler, self))
- self.handlers.append(user_data) #Keep alive handler
- lib.uws_app_get(self.SSL, self.app, path.encode("utf-8"), uws_generic_method_handler, user_data)
+ self.handlers.append(user_data) # Keep alive handler
+ lib.uws_app_get(
+ self.SSL,
+ self.app,
+ path.encode("utf-8"),
+ uws_generic_method_handler,
+ user_data,
+ )
return self
+
def post(self, path, handler):
user_data = ffi.new_handle((handler, self))
- self.handlers.append(user_data) #Keep alive handler
- lib.uws_app_post(self.SSL, self.app, path.encode("utf-8"), uws_generic_method_handler, user_data)
+ self.handlers.append(user_data) # Keep alive handler
+ lib.uws_app_post(
+ self.SSL,
+ self.app,
+ path.encode("utf-8"),
+ uws_generic_method_handler,
+ user_data,
+ )
return self
+
def options(self, path, handler):
user_data = ffi.new_handle((handler, self))
- self.handlers.append(user_data) #Keep alive handler
- lib.uws_app_options(self.SSL, self.app, path.encode("utf-8"), uws_generic_method_handler, user_data)
+ self.handlers.append(user_data) # Keep alive handler
+ lib.uws_app_options(
+ self.SSL,
+ self.app,
+ path.encode("utf-8"),
+ uws_generic_method_handler,
+ user_data,
+ )
return self
+
def delete(self, path, handler):
user_data = ffi.new_handle((handler, self))
- self.handlers.append(user_data) #Keep alive handler
- lib.uws_app_delete(self.SSL, self.app, path.encode("utf-8"), uws_generic_method_handler, user_data)
+ self.handlers.append(user_data) # Keep alive handler
+ lib.uws_app_delete(
+ self.SSL,
+ self.app,
+ path.encode("utf-8"),
+ uws_generic_method_handler,
+ user_data,
+ )
return self
+
def patch(self, path, handler):
user_data = ffi.new_handle((handler, self))
- self.handlers.append(user_data) #Keep alive handler
- lib.uws_app_patch(self.SSL, self.app, path.encode("utf-8"), uws_generic_method_handler, user_data)
+ self.handlers.append(user_data) # Keep alive handler
+ lib.uws_app_patch(
+ self.SSL,
+ self.app,
+ path.encode("utf-8"),
+ uws_generic_method_handler,
+ user_data,
+ )
return self
+
def put(self, path, handler):
user_data = ffi.new_handle((handler, self))
- self.handlers.append(user_data) #Keep alive handler
- lib.uws_app_put(self.SSL, self.app, path.encode("utf-8"), uws_generic_method_handler, user_data)
+ self.handlers.append(user_data) # Keep alive handler
+ lib.uws_app_put(
+ self.SSL,
+ self.app,
+ path.encode("utf-8"),
+ uws_generic_method_handler,
+ user_data,
+ )
return self
+
def head(self, path, handler):
user_data = ffi.new_handle((handler, self))
- self.handlers.append(user_data) #Keep alive handler
- lib.uws_app_head(self.SSL, self.app, path.encode("utf-8"), uws_generic_method_handler, user_data)
+ self.handlers.append(user_data) # Keep alive handler
+ lib.uws_app_head(
+ self.SSL,
+ self.app,
+ path.encode("utf-8"),
+ uws_generic_method_handler,
+ user_data,
+ )
return self
+
def connect(self, path, handler):
user_data = ffi.new_handle((handler, self))
- self.handlers.append(user_data) #Keep alive handler
- lib.uws_app_connect(self.SSL, self.app, path.encode("utf-8"), uws_generic_method_handler, user_data)
+ self.handlers.append(user_data) # Keep alive handler
+ lib.uws_app_connect(
+ self.SSL,
+ self.app,
+ path.encode("utf-8"),
+ uws_generic_method_handler,
+ user_data,
+ )
return self
+
def trace(self, path, handler):
user_data = ffi.new_handle((handler, self))
- self.handlers.append(user_data) #Keep alive handler
- lib.uws_app_trace(self.SSL, self.app, path.encode("utf-8"), uws_generic_method_handler, user_data)
+ self.handlers.append(user_data) # Keep alive handler
+ lib.uws_app_trace(
+ self.SSL,
+ self.app,
+ path.encode("utf-8"),
+ uws_generic_method_handler,
+ user_data,
+ )
return self
+
def any(self, path, handler):
user_data = ffi.new_handle((handler, self))
- self.handlers.append(user_data) #Keep alive handler
- lib.uws_app_any(self.SSL, self.app, path.encode("utf-8"), uws_generic_method_handler, user_data)
+ self.handlers.append(user_data) # Keep alive handler
+ lib.uws_app_any(
+ self.SSL,
+ self.app,
+ path.encode("utf-8"),
+ uws_generic_method_handler,
+ user_data,
+ )
return self
def get_native_handle(self):
@@ -1586,71 +1847,122 @@ class App:
def num_subscribers(self, topic):
if isinstance(topic, str):
- topic_data = topic.encode('utf-8')
+ topic_data = topic.encode("utf-8")
elif isinstance(topic, bytes):
topic_data = topic
else:
- raise RuntimeError("topic need to be an String or Bytes")
- return int(lib.uws_num_subscribers(self.SSL, self.app, topic_data, len(topic_data)))
+ raise RuntimeError("topic need to be an String or Bytes")
+ return int(
+ lib.uws_num_subscribers(self.SSL, self.app, topic_data, len(topic_data))
+ )
def publish(self, topic, message, opcode=OpCode.BINARY, compress=False):
if isinstance(topic, str):
- topic_data = topic.encode('utf-8')
+ topic_data = topic.encode("utf-8")
elif isinstance(topic, bytes):
topic_data = topic
else:
- raise RuntimeError("topic need to be an String or Bytes")
-
+ raise RuntimeError("topic need to be an String or Bytes")
+
if isinstance(message, str):
- message_data = message.encode('utf-8')
+ message_data = message.encode("utf-8")
elif isinstance(message, bytes):
message_data = message
elif message == None:
- data = b''
+ data = b""
else:
data = json.dumps(message).encode("utf-8")
-
- return bool(lib.uws_publish(self.SSL, self.app, topic_data, len(topic_data), message_data, len(message_data), int(opcode), bool(compress)))
+
+ return bool(
+ lib.uws_publish(
+ self.SSL,
+ self.app,
+ topic_data,
+ len(topic_data),
+ message_data,
+ len(message_data),
+ int(opcode),
+ bool(compress),
+ )
+ )
def remove_server_name(self, hostname):
if isinstance(hostname, str):
- hostname_data = hostname.encode('utf-8')
+ hostname_data = hostname.encode("utf-8")
elif isinstance(hostname, bytes):
hostname_data = hostname
else:
- raise RuntimeError("hostname need to be an String or Bytes")
+ raise RuntimeError("hostname need to be an String or Bytes")
- lib.uws_remove_server_name(self.SSL, self.app, hostname_data, len(hostname_data))
+ lib.uws_remove_server_name(
+ self.SSL, self.app, hostname_data, len(hostname_data)
+ )
return self
def add_server_name(self, hostname, options=None):
if isinstance(hostname, str):
- hostname_data = hostname.encode('utf-8')
+ hostname_data = hostname.encode("utf-8")
elif isinstance(hostname, bytes):
hostname_data = hostname
else:
- raise RuntimeError("hostname need to be an String or Bytes")
+ raise RuntimeError("hostname need to be an String or Bytes")
if options is None:
- lib.uws_add_server_name(self.SSL, self.app, hostname_data, len(hostname_data))
+ lib.uws_add_server_name(
+ self.SSL, self.app, hostname_data, len(hostname_data)
+ )
else:
socket_options_ptr = ffi.new("struct us_socket_context_options_t *")
socket_options = socket_options_ptr[0]
- socket_options.key_file_name = ffi.NULL if options.key_file_name == None else ffi.new("char[]", options.key_file_name.encode("utf-8"))
- socket_options.key_file_name = ffi.NULL if options.key_file_name == None else ffi.new("char[]", options.key_file_name.encode("utf-8"))
- socket_options.cert_file_name = ffi.NULL if options.cert_file_name == None else ffi.new("char[]", options.cert_file_name.encode("utf-8"))
- socket_options.passphrase = ffi.NULL if options.passphrase == None else ffi.new("char[]", options.passphrase.encode("utf-8"))
- socket_options.dh_params_file_name = ffi.NULL if options.dh_params_file_name == None else ffi.new("char[]", options.dh_params_file_name.encode("utf-8"))
- socket_options.ca_file_name = ffi.NULL if options.ca_file_name == None else ffi.new("char[]", options.ca_file_name.encode("utf-8"))
- socket_options.ssl_ciphers = ffi.NULL if options.ssl_ciphers == None else ffi.new("char[]", options.ssl_ciphers.encode("utf-8"))
- socket_options.ssl_prefer_low_memory_usage = ffi.cast("int", options.ssl_prefer_low_memory_usage)
- lib.uws_add_server_name_with_options(self.SSL, self.app, hostname_data, len(hostname_data), socket_options)
+ socket_options.key_file_name = (
+ ffi.NULL
+ if options.key_file_name == None
+ else ffi.new("char[]", options.key_file_name.encode("utf-8"))
+ )
+ socket_options.key_file_name = (
+ ffi.NULL
+ if options.key_file_name == None
+ else ffi.new("char[]", options.key_file_name.encode("utf-8"))
+ )
+ socket_options.cert_file_name = (
+ ffi.NULL
+ if options.cert_file_name == None
+ else ffi.new("char[]", options.cert_file_name.encode("utf-8"))
+ )
+ socket_options.passphrase = (
+ ffi.NULL
+ if options.passphrase == None
+ else ffi.new("char[]", options.passphrase.encode("utf-8"))
+ )
+ socket_options.dh_params_file_name = (
+ ffi.NULL
+ if options.dh_params_file_name == None
+ else ffi.new("char[]", options.dh_params_file_name.encode("utf-8"))
+ )
+ socket_options.ca_file_name = (
+ ffi.NULL
+ if options.ca_file_name == None
+ else ffi.new("char[]", options.ca_file_name.encode("utf-8"))
+ )
+ socket_options.ssl_ciphers = (
+ ffi.NULL
+ if options.ssl_ciphers == None
+ else ffi.new("char[]", options.ssl_ciphers.encode("utf-8"))
+ )
+ socket_options.ssl_prefer_low_memory_usage = ffi.cast(
+ "int", options.ssl_prefer_low_memory_usage
+ )
+ lib.uws_add_server_name_with_options(
+ self.SSL, self.app, hostname_data, len(hostname_data), socket_options
+ )
return self
def missing_server_name(self, handler):
self._missing_server_handler = handler
- lib.uws_missing_server_name(self.SSL, self.app, uws_missing_server_name, self._ptr)
+ lib.uws_missing_server_name(
+ self.SSL, self.app, uws_missing_server_name, self._ptr
+ )
def ws(self, path, behavior):
native_options = ffi.new("uws_socket_behavior_t *")
@@ -1673,111 +1985,164 @@ class App:
close_handler = None
if behavior is None:
- raise RuntimeError("behavior must be an dict or WSBehavior")
+ raise RuntimeError("behavior must be an dict or WSBehavior")
elif isinstance(behavior, dict):
max_payload_length = behavior.get("max_payload_length", 16 * 1024)
idle_timeout = behavior.get("idle_timeout", 60 * 2)
max_backpressure = behavior.get("max_backpressure", 64 * 1024)
- close_on_backpressure_limit = behavior.get("close_on_backpressure_limit", False)
- reset_idle_timeout_on_send = behavior.get("reset_idle_timeout_on_send", False)
+ close_on_backpressure_limit = behavior.get(
+ "close_on_backpressure_limit", False
+ )
+ reset_idle_timeout_on_send = behavior.get(
+ "reset_idle_timeout_on_send", False
+ )
send_pings_automatically = behavior.get("send_pings_automatically", False)
max_lifetime = behavior.get("max_lifetime", 0)
- compression = behavior.get("compression", 0)
- upgrade_handler = behavior.get("upgrade", None)
- open_handler = behavior.get("open", None)
- message_handler = behavior.get("message", None)
- drain_handler = behavior.get("drain", None)
- ping_handler = behavior.get("ping", None)
- pong_handler = behavior.get("pong", None)
- close_handler = behavior.get("close", None)
+ compression = behavior.get("compression", 0)
+ upgrade_handler = behavior.get("upgrade", None)
+ open_handler = behavior.get("open", None)
+ message_handler = behavior.get("message", None)
+ drain_handler = behavior.get("drain", None)
+ ping_handler = behavior.get("ping", None)
+ pong_handler = behavior.get("pong", None)
+ close_handler = behavior.get("close", None)
-
-
- native_behavior.maxPayloadLength = ffi.cast("unsigned int", max_payload_length if isinstance(max_payload_length, int) else 16 * 1024)
- native_behavior.idleTimeout = ffi.cast("unsigned short", idle_timeout if isinstance(idle_timeout, int) else 16 * 1024)
- native_behavior.maxBackpressure = ffi.cast("unsigned int", max_backpressure if isinstance(max_backpressure, int) else 64 * 1024)
- native_behavior.compression = ffi.cast("uws_compress_options_t", compression if isinstance(compression, int) else 0)
- native_behavior.maxLifetime = ffi.cast("unsigned short", max_lifetime if isinstance(max_lifetime, int) else 0)
- native_behavior.closeOnBackpressureLimit = ffi.cast("int", 1 if close_on_backpressure_limit else 0)
- native_behavior.resetIdleTimeoutOnSend = ffi.cast("int", 1 if reset_idle_timeout_on_send else 0)
- native_behavior.sendPingsAutomatically = ffi.cast("int", 1 if send_pings_automatically else 0)
+ native_behavior.maxPayloadLength = ffi.cast(
+ "unsigned int",
+ max_payload_length if isinstance(max_payload_length, int) else 16 * 1024,
+ )
+ native_behavior.idleTimeout = ffi.cast(
+ "unsigned short",
+ idle_timeout if isinstance(idle_timeout, int) else 16 * 1024,
+ )
+ native_behavior.maxBackpressure = ffi.cast(
+ "unsigned int",
+ max_backpressure if isinstance(max_backpressure, int) else 64 * 1024,
+ )
+ native_behavior.compression = ffi.cast(
+ "uws_compress_options_t", compression if isinstance(compression, int) else 0
+ )
+ native_behavior.maxLifetime = ffi.cast(
+ "unsigned short", max_lifetime if isinstance(max_lifetime, int) else 0
+ )
+ native_behavior.closeOnBackpressureLimit = ffi.cast(
+ "int", 1 if close_on_backpressure_limit else 0
+ )
+ native_behavior.resetIdleTimeoutOnSend = ffi.cast(
+ "int", 1 if reset_idle_timeout_on_send else 0
+ )
+ native_behavior.sendPingsAutomatically = ffi.cast(
+ "int", 1 if send_pings_automatically else 0
+ )
handlers = WSBehaviorHandlers()
if upgrade_handler:
- handlers.upgrade = upgrade_handler
- native_behavior.upgrade = uws_websocket_upgrade_handler
+ handlers.upgrade = upgrade_handler
+ native_behavior.upgrade = uws_websocket_upgrade_handler
else:
- native_behavior.upgrade = ffi.NULL
+ native_behavior.upgrade = ffi.NULL
if open_handler:
- handlers.open = open_handler
- native_behavior.open = uws_websocket_open_handler
+ handlers.open = open_handler
+ native_behavior.open = uws_websocket_open_handler
else:
- native_behavior.open = ffi.NULL
+ native_behavior.open = ffi.NULL
if message_handler:
- handlers.message = message_handler
- native_behavior.message = uws_websocket_message_handler
+ handlers.message = message_handler
+ native_behavior.message = uws_websocket_message_handler
else:
- native_behavior.message = ffi.NULL
+ native_behavior.message = ffi.NULL
if drain_handler:
- handlers.drain = drain_handler
- native_behavior.drain = uws_websocket_drain_handler
+ handlers.drain = drain_handler
+ native_behavior.drain = uws_websocket_drain_handler
else:
- native_behavior.drain = ffi.NULL
+ native_behavior.drain = ffi.NULL
if ping_handler:
- handlers.ping = ping_handler
- native_behavior.ping = uws_websocket_ping_handler
+ handlers.ping = ping_handler
+ native_behavior.ping = uws_websocket_ping_handler
else:
- native_behavior.ping = ffi.NULL
+ native_behavior.ping = ffi.NULL
if pong_handler:
- handlers.pong = pong_handler
- native_behavior.pong = uws_websocket_pong_handler
+ handlers.pong = pong_handler
+ native_behavior.pong = uws_websocket_pong_handler
else:
- native_behavior.pong = ffi.NULL
+ native_behavior.pong = ffi.NULL
if close_handler:
- handlers.close = close_handler
- native_behavior.close = uws_websocket_close_handler
- else: #always keep an close
- native_behavior.close = uws_websocket_close_handler
-
+ handlers.close = close_handler
+ native_behavior.close = uws_websocket_close_handler
+ else: # always keep a close handler registered
+ native_behavior.close = uws_websocket_close_handler
+
user_data = ffi.new_handle((handlers, self))
- self.handlers.append(user_data) #Keep alive handlers
+ self.handlers.append(user_data) # Keep alive handlers
lib.uws_ws(self.SSL, self.app, path.encode("utf-8"), native_behavior, user_data)
return self
def listen(self, port_or_options=None, handler=None):
self._listen_handler = handler
if port_or_options is None:
- lib.uws_app_listen(self.SSL, self.app, ffi.cast("int", 0), uws_generic_listen_handler, self._ptr)
- elif isinstance(port_or_options, int):
- lib.uws_app_listen(self.SSL, self.app, ffi.cast("int", port_or_options), uws_generic_listen_handler, self._ptr)
+ lib.uws_app_listen(
+ self.SSL,
+ self.app,
+ ffi.cast("int", 0),
+ uws_generic_listen_handler,
+ self._ptr,
+ )
+ elif isinstance(port_or_options, int):
+ lib.uws_app_listen(
+ self.SSL,
+ self.app,
+ ffi.cast("int", port_or_options),
+ uws_generic_listen_handler,
+ self._ptr,
+ )
elif isinstance(port_or_options, dict):
native_options = ffi.new("uws_app_listen_config_t *")
options = native_options[0]
port = port_or_options.get("port", 0)
options = port_or_options.get("options", 0)
host = port_or_options.get("host", "0.0.0.0")
- options.port = ffi.cast("int", port, 0) if isinstance(port, int) else ffi.cast("int", 0)
- options.host = ffi.new("char[]", host.encode("utf-8")) if isinstance(host, str) else ffi.NULL
- options.options = ffi.cast("int", port) if isinstance(options, int) else ffi.cast("int", 0)
- self.native_options_listen = native_options #Keep alive native_options
- lib.uws_app_listen_with_config(self.SSL, self.app, options, uws_generic_listen_handler, self._ptr)
+ options.port = (
+ ffi.cast("int", port, 0)
+ if isinstance(port, int)
+ else ffi.cast("int", 0)
+ )
+ options.host = (
+ ffi.new("char[]", host.encode("utf-8"))
+ if isinstance(host, str)
+ else ffi.NULL
+ )
+ options.options = (
+ ffi.cast("int", port)
+ if isinstance(options, int)
+ else ffi.cast("int", 0)
+ )
+ self.native_options_listen = native_options # Keep alive native_options
+ lib.uws_app_listen_with_config(
+ self.SSL, self.app, options, uws_generic_listen_handler, self._ptr
+ )
else:
native_options = ffi.new("uws_app_listen_config_t *")
options = native_options[0]
options.port = ffi.cast("int", port_or_options.port)
- options.host = ffi.NULL if port_or_options.host == None else ffi.new("char[]", port_or_options.host.encode("utf-8"))
+ options.host = (
+ ffi.NULL
+ if port_or_options.host == None
+ else ffi.new("char[]", port_or_options.host.encode("utf-8"))
+ )
options.options = ffi.cast("int", port_or_options.options)
- self.native_options_listen = native_options #Keep alive native_options
- lib.uws_app_listen_with_config(self.SSL, self.app, options, uws_generic_listen_handler, self._ptr)
+ self.native_options_listen = native_options # Keep alive native_options
+ lib.uws_app_listen_with_config(
+ self.SSL, self.app, options, uws_generic_listen_handler, self._ptr
+ )
return self
-
+
def run_async(self, task, response=None):
return self.loop.run_async(task, response)
@@ -1786,7 +2151,7 @@ class App:
self.loop.start()
self.loop.run()
return self
-
+
def close(self):
if hasattr(self, "socket"):
if not self.socket == ffi.NULL:
@@ -1795,55 +2160,80 @@ class App:
return self
def set_error_handler(self, handler):
- if hasattr(handler, '__call__'):
- self.error_handler = handler
+ if hasattr(handler, "__call__"):
+ self.error_handler = handler
else:
self.error_handler = None
def trigger_error(self, error, response, request):
if self.error_handler == None:
try:
- print("Uncaught Exception: %s" % str(error)) #just log in console the error to call attention
+ print(
+ "Uncaught Exception: %s" % str(error)
+ ) # just log the error to the console to draw attention
response.write_status(500).end("Internal Error")
finally:
return
else:
try:
- if inspect.iscoroutinefunction(self.error_handler ):
- self.run_async(self.error_handler(error, response, request), response)
+ if inspect.iscoroutinefunction(self.error_handler):
+ self.run_async(
+ self.error_handler(error, response, request), response
+ )
else:
self.error_handler(error, response, request)
except Exception as error:
try:
- #Error handler got an error :D
- print("Uncaught Exception: %s" % str(error)) #just log in console the error to call attention
+ # Error handler got an error :D
+ print(
+ "Uncaught Exception: %s" % str(error)
+ ) # just log the error to the console to draw attention
response.write_status(500).end("Internal Error")
finally:
- pass
+ pass
-
def __del__(self):
lib.uws_app_destroy(self.SSL, self.app)
class AppListenOptions:
def __init__(self, port=0, host=None, options=0):
- if not isinstance(port, int): raise RuntimeError("port must be an int")
- if host != None and not isinstance(host, str): raise RuntimeError("host must be an String or None")
- if not isinstance(options, int): raise RuntimeError("options must be an int")
+ if not isinstance(port, int):
+ raise RuntimeError("port must be an int")
+ if host != None and not isinstance(host, str):
+ raise RuntimeError("host must be an String or None")
+ if not isinstance(options, int):
+ raise RuntimeError("options must be an int")
self.port = port
self.host = host
self.options = options
-
+
+
class AppOptions:
- def __init__(self, key_file_name=None, cert_file_name=None, passphrase=None, dh_params_file_name=None, ca_file_name=None, ssl_ciphers=None, ssl_prefer_low_memory_usage=0):
- if key_file_name != None and not isinstance(key_file_name, str): raise RuntimeError("key_file_name must be an String or None")
- if cert_file_name != None and not isinstance(cert_file_name, str): raise RuntimeError("cert_file_name must be an String or None")
- if passphrase != None and not isinstance(passphrase, str): raise RuntimeError("passphrase must be an String or None")
- if dh_params_file_name != None and not isinstance(dh_params_file_name, str): raise RuntimeError("dh_params_file_name must be an String or None")
- if ca_file_name != None and not isinstance(ca_file_name, str): raise RuntimeError("ca_file_name must be an String or None")
- if ssl_ciphers != None and not isinstance(ssl_ciphers, str): raise RuntimeError("ssl_ciphers must be an String or None")
- if not isinstance(ssl_prefer_low_memory_usage, int): raise RuntimeError("ssl_prefer_low_memory_usage must be an int")
+ def __init__(
+ self,
+ key_file_name=None,
+ cert_file_name=None,
+ passphrase=None,
+ dh_params_file_name=None,
+ ca_file_name=None,
+ ssl_ciphers=None,
+ ssl_prefer_low_memory_usage=0,
+ ):
+ if key_file_name != None and not isinstance(key_file_name, str):
+ raise RuntimeError("key_file_name must be an String or None")
+ if cert_file_name != None and not isinstance(cert_file_name, str):
+ raise RuntimeError("cert_file_name must be an String or None")
+ if passphrase != None and not isinstance(passphrase, str):
+ raise RuntimeError("passphrase must be an String or None")
+ if dh_params_file_name != None and not isinstance(dh_params_file_name, str):
+ raise RuntimeError("dh_params_file_name must be an String or None")
+ if ca_file_name != None and not isinstance(ca_file_name, str):
+ raise RuntimeError("ca_file_name must be an String or None")
+ if ssl_ciphers != None and not isinstance(ssl_ciphers, str):
+ raise RuntimeError("ssl_ciphers must be an String or None")
+ if not isinstance(ssl_prefer_low_memory_usage, int):
+ raise RuntimeError("ssl_prefer_low_memory_usage must be an int")
self.key_file_name = key_file_name
self.cert_file_name = cert_file_name
diff --git a/src/socketify/status_codes.py b/src/socketify/status_codes.py
index 711da04..87cd722 100644
--- a/src/socketify/status_codes.py
+++ b/src/socketify/status_codes.py
@@ -1,65 +1,65 @@
status_codes = {
- 100 : b'100 Continue',
- 101 : b'101 Switching Protocols',
- 102 : b'102 Processing',
- 103 : b'103 Early Hints',
- 200 : b'200 OK',
- 201 : b'201 Created',
- 202 : b'202 Accepted',
- 203 : b'203 Non-Authoritative Information',
- 204 : b'204 No Content',
- 205 : b'205 Reset Content',
- 206 : b'206 Partial Content',
- 207 : b'207 Multi-Status',
- 208 : b'208 Already Reported',
- 226 : b'226 IM Used (HTTP Delta encoding)',
- 300 : b'300 Multiple Choices',
- 301 : b'301 Moved Permanently',
- 302 : b'302 Found',
- 303 : b'303 See Other',
- 304 : b'304 Not Modified',
- 305 : b'305 Use Proxy Deprecated',
- 306 : b'306 unused',
- 307 : b'307 Temporary Redirect',
- 308 : b'308 Permanent Redirect',
- 400 : b'400 Bad Request',
- 401 : b'401 Unauthorized',
- 402 : b'402 Payment Required Experimental',
- 403 : b'403 Forbidden',
- 404 : b'404 Not Found',
- 405 : b'405 Method Not Allowed',
- 406 : b'406 Not Acceptable',
- 407 : b'407 Proxy Authentication Required',
- 408 : b'408 Request Timeout',
- 409 : b'409 Conflict',
- 410 : b'410 Gone',
- 411 : b'411 Length Required',
- 412 : b'412 Precondition Failed',
- 413 : b'413 Payload Too Large',
- 414 : b'414 URI Too Long',
- 415 : b'415 Unsupported Media Type',
- 416 : b'416 Range Not Satisfiable',
- 417 : b'417 Expectation Failed',
- 418 : b'418 I\'m a teapot',
- 421 : b'421 Misdirected Request',
- 422 : b'422 Unprocessable Entity',
- 423 : b'423 Locked',
- 424 : b'424 Failed Dependency',
- 425 : b'425 Too Early Experimental',
- 426 : b'426 Upgrade Required',
- 428 : b'428 Precondition Required',
- 429 : b'429 Too Many Requests',
- 431 : b'431 Request Header Fields Too Large',
- 451 : b'451 Unavailable For Legal Reasons',
- 500 : b'500 Internal Server Error',
- 501 : b'501 Not Implemented',
- 502 : b'502 Bad Gateway',
- 503 : b'503 Service Unavailable',
- 504 : b'504 Gateway Timeout',
- 505 : b'505 HTTP Version Not Supported',
- 506 : b'506 Variant Also Negotiates',
- 507 : b'507 Insufficient Storage',
- 508 : b'508 Loop Detected',
- 510 : b'510 Not Extended',
- 511 : b'511 Network Authentication Required'
-}
\ No newline at end of file
+ 100: b"100 Continue",
+ 101: b"101 Switching Protocols",
+ 102: b"102 Processing",
+ 103: b"103 Early Hints",
+ 200: b"200 OK",
+ 201: b"201 Created",
+ 202: b"202 Accepted",
+ 203: b"203 Non-Authoritative Information",
+ 204: b"204 No Content",
+ 205: b"205 Reset Content",
+ 206: b"206 Partial Content",
+ 207: b"207 Multi-Status",
+ 208: b"208 Already Reported",
+ 226: b"226 IM Used (HTTP Delta encoding)",
+ 300: b"300 Multiple Choices",
+ 301: b"301 Moved Permanently",
+ 302: b"302 Found",
+ 303: b"303 See Other",
+ 304: b"304 Not Modified",
+ 305: b"305 Use Proxy Deprecated",
+ 306: b"306 unused",
+ 307: b"307 Temporary Redirect",
+ 308: b"308 Permanent Redirect",
+ 400: b"400 Bad Request",
+ 401: b"401 Unauthorized",
+ 402: b"402 Payment Required Experimental",
+ 403: b"403 Forbidden",
+ 404: b"404 Not Found",
+ 405: b"405 Method Not Allowed",
+ 406: b"406 Not Acceptable",
+ 407: b"407 Proxy Authentication Required",
+ 408: b"408 Request Timeout",
+ 409: b"409 Conflict",
+ 410: b"410 Gone",
+ 411: b"411 Length Required",
+ 412: b"412 Precondition Failed",
+ 413: b"413 Payload Too Large",
+ 414: b"414 URI Too Long",
+ 415: b"415 Unsupported Media Type",
+ 416: b"416 Range Not Satisfiable",
+ 417: b"417 Expectation Failed",
+ 418: b"418 I'm a teapot",
+ 421: b"421 Misdirected Request",
+ 422: b"422 Unprocessable Entity",
+ 423: b"423 Locked",
+ 424: b"424 Failed Dependency",
+ 425: b"425 Too Early Experimental",
+ 426: b"426 Upgrade Required",
+ 428: b"428 Precondition Required",
+ 429: b"429 Too Many Requests",
+ 431: b"431 Request Header Fields Too Large",
+ 451: b"451 Unavailable For Legal Reasons",
+ 500: b"500 Internal Server Error",
+ 501: b"501 Not Implemented",
+ 502: b"502 Bad Gateway",
+ 503: b"503 Service Unavailable",
+ 504: b"504 Gateway Timeout",
+ 505: b"505 HTTP Version Not Supported",
+ 506: b"506 Variant Also Negotiates",
+ 507: b"507 Insufficient Storage",
+ 508: b"508 Loop Detected",
+ 510: b"510 Not Extended",
+ 511: b"511 Network Authentication Required",
+}
diff --git a/src/socketify/uv.py b/src/socketify/uv.py
index f359025..90ff6c7 100644
--- a/src/socketify/uv.py
+++ b/src/socketify/uv.py
@@ -1,10 +1,10 @@
-
import cffi
import os
import platform
ffi = cffi.FFI()
-ffi.cdef("""
+ffi.cdef(
+ """
typedef void (*socketify_prepare_handler)(void* user_data);
@@ -54,23 +54,36 @@ void socketify_timer_set_repeat(socketify_timer* timer, uint64_t repeat);
socketify_timer* socketify_create_check(socketify_loop* loop, socketify_timer_handler handler, void* user_data);
void socketify_check_destroy(socketify_timer* timer);
-""")
+"""
+)
library_extension = "dll" if platform.system().lower() == "windows" else "so"
-library_path = os.path.join(os.path.dirname(__file__), "libsocketify_%s_%s.%s" % (platform.system().lower(), "arm64" if "arm" in platform.processor().lower() else "amd64", library_extension))
+library_path = os.path.join(
+ os.path.dirname(__file__),
+ "libsocketify_%s_%s.%s"
+ % (
+ platform.system().lower(),
+ "arm64" if "arm" in platform.processor().lower() else "amd64",
+ library_extension,
+ ),
+)
lib = ffi.dlopen(library_path)
+
@ffi.callback("void(void *)")
def socketify_generic_handler(data):
if not data == ffi.NULL:
(handler, user_data) = ffi.from_handle(data)
handler(user_data)
-
+
class UVCheck:
def __init__(self, loop, handler, user_data):
self._handler_data = ffi.new_handle((handler, user_data))
- self._ptr = lib.socketify_create_check(loop, socketify_generic_handler, self._handler_data)
+ self._ptr = lib.socketify_create_check(
+ loop, socketify_generic_handler, self._handler_data
+ )
+
def stop(self):
lib.socketify_check_destroy(self._ptr)
self._handler_data = None
@@ -81,10 +94,18 @@ class UVCheck:
lib.socketify_check_destroy(self._ptr)
self._handler_data = None
+
class UVTimer:
def __init__(self, loop, timeout, repeat, handler, user_data):
self._handler_data = ffi.new_handle((handler, user_data))
- self._ptr = lib.socketify_create_timer(loop, ffi.cast("uint64_t", timeout), ffi.cast("uint64_t", repeat), socketify_generic_handler, self._handler_data)
+ self._ptr = lib.socketify_create_timer(
+ loop,
+ ffi.cast("uint64_t", timeout),
+ ffi.cast("uint64_t", repeat),
+ socketify_generic_handler,
+ self._handler_data,
+ )
+
def stop(self):
lib.socketify_timer_destroy(self._ptr)
self._handler_data = None
@@ -92,7 +113,7 @@ class UVTimer:
def set_repeat(self, repeat):
lib.socketify_timer_set_repeat(self._ptr, ffi.cast("uint64_t", repeat))
-
+
def __del__(self):
if self._ptr != ffi.NULL:
lib.socketify_timer_destroy(self._ptr)
@@ -107,14 +128,16 @@ class UVLoop:
def on_prepare(self, handler, user_data):
self._handler_data = ffi.new_handle((handler, user_data))
- lib.socketify_on_prepare(self._loop, socketify_generic_handler, self._handler_data)
+ lib.socketify_on_prepare(
+ self._loop, socketify_generic_handler, self._handler_data
+ )
def create_timer(self, timeout, repeat, handler, user_data):
return UVTimer(self._loop, timeout, repeat, handler, user_data)
def create_check(self, handler, user_data):
return UVCheck(self._loop, handler, user_data)
-
+
def prepare_unbind(self):
lib.socketify_prepare_unbind(self._loop)
@@ -124,7 +147,7 @@ class UVLoop:
def __del__(self):
lib.socketify_destroy_loop(self._loop)
self._handler_data = None
-
+
def run(self):
return lib.socketify_loop_run(self._loop, lib.SOCKETIFY_RUN_DEFAULT)
@@ -132,4 +155,4 @@ class UVLoop:
return lib.socketify_loop_run(self._loop, lib.SOCKETIFY_RUN_ONCE)
def stop(self):
- lib.socketify_loop_stop(self._loop)
\ No newline at end of file
+ lib.socketify_loop_stop(self._loop)
diff --git a/src/tests.py b/src/tests.py
index 8b1bf43..eb3d25b 100644
--- a/src/tests.py
+++ b/src/tests.py
@@ -3,7 +3,7 @@
# import os.path
-# DLL_EXPORT typedef void (*uws_listen_domain_handler)(struct us_listen_socket_t *listen_socket, const char* domain, size_t domain_length, int options, void *user_data);
+# DLL_EXPORT typedef void (*uws_listen_domain_handler)(struct us_listen_socket_t *listen_socket, const char* domain, size_t domain_length, int options, void *user_data);
# DLL_EXPORT typedef void (*uws_filter_handler)(uws_res_t *response, int, void *user_data);
# DLL_EXPORT void uws_app_listen_domain(int ssl, uws_app_t *app, const char *domain,size_t server_name_length,_listen_domain_handler handler, void *user_data);
@@ -15,31 +15,41 @@
from socketify import App, AppOptions, OpCode, CompressOptions
import asyncio
+
def ws_open(ws):
- print('A WebSocket got connected!')
+ print("A WebSocket got connected!")
ws.send("Hello World!", OpCode.TEXT)
+
def ws_message(ws, message, opcode):
print(message, opcode)
- #Ok is false if backpressure was built up, wait for drain
+ # Ok is false if backpressure was built up, wait for drain
ok = ws.send(message, opcode)
+
async def ws_upgrade(res, req, socket_context):
key = req.get_header("sec-websocket-key")
protocol = req.get_header("sec-websocket-protocol")
extensions = req.get_header("sec-websocket-extensions")
await asyncio.sleep(2)
res.upgrade(key, protocol, extensions, socket_context)
-
-app = App()
-app.ws("/*", {
- 'compression': CompressOptions.SHARED_COMPRESSOR,
- 'max_payload_length': 16 * 1024 * 1024,
- 'idle_timeout': 12,
- 'open': ws_open,
- 'message': ws_message,
- 'upgrade': ws_upgrade
-})
-app.any("/", lambda res,req: res.end("Nothing to see here!"))
-app.listen(3000, lambda config: print("Listening on port http://localhost:%d now\n" % (config.port)))
-app.run()
\ No newline at end of file
+
+
+app = App()
+app.ws(
+ "/*",
+ {
+ "compression": CompressOptions.SHARED_COMPRESSOR,
+ "max_payload_length": 16 * 1024 * 1024,
+ "idle_timeout": 12,
+ "open": ws_open,
+ "message": ws_message,
+ "upgrade": ws_upgrade,
+ },
+)
+app.any("/", lambda res, req: res.end("Nothing to see here!"))
+app.listen(
+ 3000,
+ lambda config: print("Listening on port http://localhost:%d now\n" % (config.port)),
+)
+app.run()