use black as code formatter

pull/39/head
Ciro 2022-11-16 16:28:46 -03:00
parent aa11d8bd58
commit 021dda7d2a
53 changed files with 1824 additions and 1085 deletions
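Black normalizes the files below to double quotes, a single space after the comment hash, and long call expressions wrapped across lines. The exact invocation used for this commit is not recorded in the diff, so the following is only a sketch of the usual way to apply it from the repository root, assuming black is installed from PyPI:

pip install black
black .          # reformat every .py file under the current directory in place
black --check .  # exit non-zero if any file would be reformatted (useful in CI)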

View file

@ -2,6 +2,7 @@ from wsgiref.simple_server import make_server
import falcon
class Home:
def on_get(self, req, resp):
resp.status = falcon.HTTP_200 # This is the default status
@ -12,18 +13,18 @@ class Home:
app = falcon.App()
home = Home()
app.add_route('/', home)
app.add_route("/", home)
if __name__ == '__main__':
with make_server('', 8000, app) as httpd:
print('Serving on port 8000...')
if __name__ == "__main__":
with make_server("", 8000, app) as httpd:
print("Serving on port 8000...")
# Serve until process is killed
httpd.serve_forever()
#pypy3 -m gunicorn falcon_plaintext:app -w 4 --worker-class=gevent #recommended for pypy3
#python3 -m gunicorn falcon_plaintext:app -w 4 #without Cython
#pypy3 -m gunicorn falcon_plaintext:app -w 4 #without gevent
#python3 -m gunicorn falcon_plaintext:app -w 4 --worker-class="egg:meinheld#gunicorn_worker" #with Cython
#meinheld is buggy -> greenlet.c:566:10: error: no member named 'use_tracing' in 'struct _ts'
#so using pip3 install git+https://github.com/idot/meinheld.git@2bfe452d6608c92688d92337c87b1dd6448f4ccb
# pypy3 -m gunicorn falcon_plaintext:app -w 4 --worker-class=gevent #recommended for pypy3
# python3 -m gunicorn falcon_plaintext:app -w 4 #without Cython
# pypy3 -m gunicorn falcon_plaintext:app -w 4 #without gevent
# python3 -m gunicorn falcon_plaintext:app -w 4 --worker-class="egg:meinheld#gunicorn_worker" #with Cython
# meinheld is buggy -> greenlet.c:566:10: error: no member named 'use_tracing' in 'struct _ts'
# so using pip3 install git+https://github.com/idot/meinheld.git@2bfe452d6608c92688d92337c87b1dd6448f4ccb

View file

@ -2,11 +2,13 @@ from robyn import Robyn
app = Robyn(__file__)
@app.get("/")
async def h(request):
return "Hello, world!"
app.start(port=8000)
# python3 ./robyn_plaintext.py --processes 4 --log-level CRITICAL
# pypy3 did not compile
# pypy3 did not compile

View file

@ -3,21 +3,28 @@ import os
import multiprocessing
def run_app():
app = App()
app.get("/", lambda res, req: res.end("Hello, World!"))
app.listen(8000, lambda config: print("PID %d Listening on port http://localhost:%d now\n" % (os.getpid(), config.port)))
app.listen(
8000,
lambda config: print(
"PID %d Listening on port http://localhost:%d now\n"
% (os.getpid(), config.port)
),
)
app.run()
def create_fork():
n = os.fork()
# n greater than 0 means parent process
if not n > 0:
run_app()
# fork limiting the cpu count - 1
# for i in range(1, multiprocessing.cpu_count()):
# create_fork()
run_app() # run app on the main process too :)
# fork limiting the cpu count - 1
for i in range(1, multiprocessing.cpu_count()):
create_fork()
run_app() # run app on the main process too :)

View file

@ -1,17 +1,22 @@
async def app(scope, receive, send):
assert scope['type'] == 'http'
assert scope["type"] == "http"
await send({
'type': 'http.response.start',
'status': 200,
'headers': [
[b'content-type', b'text/plain'],
],
})
await send({
'type': 'http.response.body',
'body': b'Hello, world!',
})
await send(
{
"type": "http.response.start",
"status": 200,
"headers": [
[b"content-type", b"text/plain"],
],
}
)
await send(
{
"type": "http.response.body",
"body": b"Hello, world!",
}
)
#python3 -m gunicorn uvicorn_guvicorn_plaintext:app -w 1 -k uvicorn.workers.UvicornWorker
#pypy3 -m gunicorn uvicorn_guvicorn_plaintext:app -w 1 -k uvicorn.workers.UvicornWorker
# python3 -m gunicorn uvicorn_guvicorn_plaintext:app -w 1 -k uvicorn.workers.UvicornWorker
# pypy3 -m gunicorn uvicorn_guvicorn_plaintext:app -w 1 -k uvicorn.workers.UvicornWorker

View file

@ -5,16 +5,16 @@ import asyncio
clients = set([])
remaining_clients = 16
async def broadcast(message):
# some clients got disconnected if we tried to do it all async :/
# tasks = [ws.send_text(message) for ws in client]
# return await asyncio.wait(tasks, return_when=ALL_COMPLETED)
for ws in clients:
await ws.send_text(message)
class SomeResource:
async def on_get(self, req):
pass
@ -26,7 +26,7 @@ class SomeResource:
remaining_clients = remaining_clients - 1
if remaining_clients == 0:
await broadcast("ready")
while True:
payload = await ws.receive_text()
await broadcast(payload)
@ -35,11 +35,8 @@ class SomeResource:
clients.remove(ws)
remaining_clients = remaining_clients + 1
app = falcon.asgi.App()
app.add_route('/', SomeResource())
app.add_route("/", SomeResource())
# python3 -m gunicorn falcon_server:app -b 127.0.0.1:4001 -w 1 -k uvicorn.workers.UvicornWorker
# pypy3 -m gunicorn falcon_server:app -b 127.0.0.1:4001 -w 1 -k uvicorn.workers.UvicornH11Worker
# pypy3 -m gunicorn falcon_server:app -b 127.0.0.1:4001 -w 1 -k uvicorn.workers.UvicornH11Worker

View file

@ -2,37 +2,46 @@ from socketify import App, AppOptions, OpCode, CompressOptions
remaining_clients = 16
def ws_open(ws):
ws.subscribe("room")
global remaining_clients
remaining_clients = remaining_clients - 1
if remaining_clients == 0:
print("All clients connected")
print('Starting benchmark by sending "ready" message')
ws.publish("room", "ready", OpCode.TEXT)
#publish will send to everyone except itself, so send to itself too
ws.send("ready", OpCode.TEXT)
print("All clients connected")
print('Starting benchmark by sending "ready" message')
ws.publish("room", "ready", OpCode.TEXT)
# publish will send to everyone except itself, so send to itself too
ws.send("ready", OpCode.TEXT)
def ws_message(ws, message, opcode):
#publish will send to everyone except itself, so send to itself too
# publish will send to everyone except itself, so send to itself too
ws.publish("room", message, opcode)
ws.send(message, opcode)
def ws_close(ws, close, message):
global remaining_clients
remaining_clients = remaining_clients + 1
app = App()
app.ws("/*", {
'compression': CompressOptions.DISABLED,
'max_payload_length': 16 * 1024 * 1024,
'idle_timeout': 60,
'open': ws_open,
'message': ws_message,
'close': ws_close
})
app.any("/", lambda res,req: res.end("Nothing to see here!'"))
app.listen(4001, lambda config: print("Listening on port http://localhost:%d now\n" % (config.port)))
app.run()
app = App()
app.ws(
"/*",
{
"compression": CompressOptions.DISABLED,
"max_payload_length": 16 * 1024 * 1024,
"idle_timeout": 60,
"open": ws_open,
"message": ws_message,
"close": ws_close,
},
)
app.any("/", lambda res, req: res.end("Nothing to see here!'"))
app.listen(
4001,
lambda config: print("Listening on port http://localhost:%d now\n" % (config.port)),
)
app.run()

View file

@ -3,35 +3,45 @@ import asyncio
app = App()
async def delayed_hello(delay, res):
await asyncio.sleep(delay) #do something async
await asyncio.sleep(delay) # do something async
res.cork_end("Hello with delay!")
def home(res, req):
#request object only lives during the life time of this call
#get parameters, query, headers anything you need here
# request object only lives during the life time of this call
# get parameters, query, headers anything you need here
delay = req.get_query("delay")
delay = 0 if delay == None else float(delay)
#tell response to run this in the event loop
#abort handler is grabbed here, so responses will only be sent if res.aborted == False
# tell response to run this in the event loop
# abort handler is grabbed here, so responses will only be sent if res.aborted == False
res.run_async(delayed_hello(delay, res))
async def json(res, req):
#request object only lives during the life time of this call
#get parameters, query, headers anything you need here before first await :)
# request object only lives during the life time of this call
# get parameters, query, headers anything you need here before first await :)
user_agent = req.get_header("user-agent")
#req may not be available in directly attached async functions after await
await asyncio.sleep(2) #do something async
res.cork_end({ "message": "I'm delayed!", "user-agent": user_agent})
# req may not be available in directly attached async functions after await
await asyncio.sleep(2) # do something async
res.cork_end({"message": "I'm delayed!", "user-agent": user_agent})
def not_found(res, req):
res.write_status(404).end("Not Found")
app.get("/", home)
app.get("/json", json)
app.any("/*", not_found)
app.listen(3000, lambda config: print("Listening on port http://localhost:%s now\n" % str(config.port)))
app.listen(
3000,
lambda config: print(
"Listening on port http://localhost:%s now\n" % str(config.port)
),
)
app.run()
app.run()

View file

@ -2,5 +2,7 @@ from socketify import App
app = App()
app.get("/", lambda res, req: res.end("Hello World socketify from Python!"))
app.listen(0, lambda config: print("Listening on port http://localhost:%d now\n" % config.port))
app.run()
app.listen(
0, lambda config: print("Listening on port http://localhost:%d now\n" % config.port)
)
app.run()

View file

@ -1,39 +1,48 @@
from socketify import App, AppOptions, OpCode, CompressOptions
#Number between ok and not ok
# Number between ok and not ok
backpressure = 1024
# Used for statistics
messages = 0
message_number = 0
def ws_open(ws):
print('A WebSocket got connected!')
# We begin our example by sending until we have backpressure
print("A WebSocket got connected!")
# We begin our example by sending until we have backpressure
global message_number
global messages
while (ws.get_buffered_amount() < backpressure):
while ws.get_buffered_amount() < backpressure:
ws.send("This is a message, let's call it %i" % message_number)
message_number = message_number + 1
messages = messages + 1
def ws_drain(ws):
# Continue sending when we have drained (some)
# Continue sending when we have drained (some)
global message_number
global messages
while (ws.get_buffered_amount() < backpressure):
while ws.get_buffered_amount() < backpressure:
ws.send("This is a message, let's call it %i" % message_number)
message_number = message_number + 1
messages = messages + 1
app = App()
app.ws("/*", {
'compression': CompressOptions.DISABLED,
'max_payload_length': 16 * 1024 * 1024,
'idle_timeout': 60,
'open': ws_open,
'drain': ws_drain
})
app.any("/", lambda res,req: res.end("Nothing to see here!"))
app.listen(3000, lambda config: print("Listening on port http://localhost:%d now\n" % (config.port)))
app.run()
app = App()
app.ws(
"/*",
{
"compression": CompressOptions.DISABLED,
"max_payload_length": 16 * 1024 * 1024,
"idle_timeout": 60,
"open": ws_open,
"drain": ws_drain,
},
)
app.any("/", lambda res, req: res.end("Nothing to see here!"))
app.listen(
3000,
lambda config: print("Listening on port http://localhost:%d now\n" % (config.port)),
)
app.run()

View file

@ -1,26 +1,35 @@
from socketify import App, AppOptions, OpCode, CompressOptions
def ws_open(ws):
print('A WebSocket got connected!')
#Let this client listen to topic "broadcast"
ws.subscribe('broadcast')
print("A WebSocket got connected!")
# Let this client listen to topic "broadcast"
ws.subscribe("broadcast")
def ws_message(ws, message, opcode):
#Ok is false if backpressure was built up, wait for drain
# Ok is false if backpressure was built up, wait for drain
ok = ws.send(message, opcode)
#Broadcast this message
ws.publish('broadcast', message, opcode)
app = App()
app.ws("/*", {
'compression': CompressOptions.SHARED_COMPRESSOR,
'max_payload_length': 16 * 1024 * 1024,
'idle_timeout': 12,
'open': ws_open,
'message': ws_message,
# The library guarantees proper unsubscription at close
'close': lambda ws, code, message: print('WebSocket closed')
})
app.any("/", lambda res,req: res.end("Nothing to see here!"))
app.listen(3000, lambda config: print("Listening on port http://localhost:%d now\n" % (config.port)))
app.run()
# Broadcast this message
ws.publish("broadcast", message, opcode)
app = App()
app.ws(
"/*",
{
"compression": CompressOptions.SHARED_COMPRESSOR,
"max_payload_length": 16 * 1024 * 1024,
"idle_timeout": 12,
"open": ws_open,
"message": ws_message,
# The library guarantees proper unsubscription at close
"close": lambda ws, code, message: print("WebSocket closed"),
},
)
app.any("/", lambda res, req: res.end("Nothing to see here!"))
app.listen(
3000,
lambda config: print("Listening on port http://localhost:%d now\n" % (config.port)),
)
app.run()

View file

@ -2,5 +2,8 @@ from socketify import App
app = App()
app.get("/", lambda res, req: res.end("Hello World socketify from Python!"))
app.listen(3000, lambda config: print("Listening on port http://localhost:%d now\n" % config.port))
app.run()
app.listen(
3000,
lambda config: print("Listening on port http://localhost:%d now\n" % config.port),
)
app.run()

View file

@ -2,5 +2,8 @@ from socketify import App
app = App()
app.get("/", lambda res, req: res.end("Hello World socketify from Python!"))
app.listen(3000, lambda config: print("Listening on port http://localhost:%d now\n" % config.port))
app.run()
app.listen(
3000,
lambda config: print("Listening on port http://localhost:%d now\n" % config.port),
)
app.run()

View file

@ -2,5 +2,8 @@ from socketify import App
app = App()
app.get("/", lambda res, req: res.end("Hello World socketify from Python!"))
app.listen(3000, lambda config: print("Listening on port http://localhost:%d now\n" % config.port))
app.run()
app.listen(
3000,
lambda config: print("Listening on port http://localhost:%d now\n" % config.port),
)
app.run()

View file

@ -2,5 +2,8 @@ from socketify import App
app = App()
app.get("/", lambda res, req: res.end("Hello World socketify from Python!"))
app.listen(3000, lambda config: print("Listening on port http://localhost:%d now\n" % config.port))
app.run()
app.listen(
3000,
lambda config: print("Listening on port http://localhost:%d now\n" % config.port),
)
app.run()

View file

@ -3,27 +3,36 @@ import asyncio
app = App()
def xablau(res, req):
raise RuntimeError("Xablau!")
async def async_xablau(res, req):
await asyncio.sleep(1)
raise RuntimeError("Async Xablau!")
#this can be async no problems
def on_error(error, res, req):
#here you can log properly the error and do a pretty response to your clients
# this can be async no problems
def on_error(error, res, req):
# here you can log properly the error and do a pretty response to your clients
print("Somethind goes %s" % str(error))
#response and request can be None if the error is in an async function
# response and request can be None if the error is in an async function
if res != None:
#if response exists try to send something
# if response exists try to send something
res.write_status(500)
res.end("Sorry we did something wrong")
app.get("/", xablau)
app.get("/async", async_xablau)
app.set_error_handler(on_error)
app.listen(3000, lambda config: print("Listening on port http://localhost:%s now\n" % str(config.port)))
app.run()
app.listen(
3000,
lambda config: print(
"Listening on port http://localhost:%s now\n" % str(config.port)
),
)
app.run()

View file

@ -7,51 +7,54 @@ from os import path
mimetypes.init()
async def home(res, req):
#this is just an implementation example see static_files.py example for use of sendfile and app.static usage
#there is an static_aiofile.py helper and static.aiofiles helper using async implementation of this
#asyncio with IO is really slow so, we will implement "aiofile" using libuv inside socketify in future
# this is just an implementation example see static_files.py example for use of sendfile and app.static usage
# there is an static_aiofile.py helper and static.aiofiles helper using async implementation of this
# asyncio with IO is really slow so, we will implement "aiofile" using libuv inside socketify in future
filename = "./public/media/flower.webm"
#read headers before the first await
if_modified_since = req.get_header('if-modified-since')
range_header = req.get_header('range')
# read headers before the first await
if_modified_since = req.get_header("if-modified-since")
range_header = req.get_header("range")
bytes_range = None
start = 0
end = -1
#parse range header
# parse range header
if range_header:
bytes_range = range_header.replace("bytes=", '').split('-')
bytes_range = range_header.replace("bytes=", "").split("-")
start = int(bytes_range[0])
if bytes_range[1]:
end = int(bytes_range[1])
try:
exists = path.exists(filename)
#not found
# not found
if not exists:
return res.write_status(404).end(b'Not Found')
return res.write_status(404).end(b"Not Found")
#get size and last modified date
# get size and last modified date
stats = os.stat(filename)
total_size = stats.st_size
size = total_size
last_modified = time.strftime('%a, %d %b %Y %H:%M:%S GMT', time.gmtime(stats.st_mtime))
#check if modified since is provided
last_modified = time.strftime(
"%a, %d %b %Y %H:%M:%S GMT", time.gmtime(stats.st_mtime)
)
# check if modified since is provided
if if_modified_since == last_modified:
return res.write_status(304).end_without_body()
#tells the browser the last modified date
res.write_header(b'Last-Modified', last_modified)
# tells the browser the last modified date
res.write_header(b"Last-Modified", last_modified)
#add content type
# add content type
(content_type, encoding) = mimetypes.guess_type(filename, strict=True)
if content_type and encoding:
res.write_header(b'Content-Type', '%s; %s' % (content_type, encoding))
res.write_header(b"Content-Type", "%s; %s" % (content_type, encoding))
elif content_type:
res.write_header(b'Content-Type', content_type)
res.write_header(b"Content-Type", content_type)
with open(filename, "rb") as fd:
#check range and support it
# check range and support it
if start > 0 or not end == -1:
if end < 0 or end >= size:
end = size - 1
@ -63,26 +66,32 @@ async def home(res, req):
else:
end = size - 1
res.write_status(200)
#tells the browser that we support range
res.write_header(b'Accept-Ranges', b'bytes')
res.write_header(b'Content-Range', 'bytes %d-%d/%d' % (start, end, total_size))
# tells the browser that we support range
res.write_header(b"Accept-Ranges", b"bytes")
res.write_header(
b"Content-Range", "bytes %d-%d/%d" % (start, end, total_size)
)
pending_size = size
#keep sending until abort or done
# keep sending until abort or done
while not res.aborted:
chunk_size = 16384 #16kb chunks
chunk_size = 16384 # 16kb chunks
if chunk_size > pending_size:
chunk_size = pending_size
buffer = fd.read(chunk_size)
pending_size = pending_size - chunk_size
(ok, done) = await res.send_chunk(buffer, size)
if not ok or done: #if cannot send probably aborted
if not ok or done: # if cannot send probably aborted
break
except Exception as error:
res.write_status(500).end("Internal Error")
res.write_status(500).end("Internal Error")
app = App()
app.get("/", home)
app.listen(3000, lambda config: print("Listening on port http://localhost:%d now\n" % config.port))
app.run()
app.listen(
3000,
lambda config: print("Listening on port http://localhost:%d now\n" % config.port),
)
app.run()

View file

@ -2,20 +2,29 @@ from socketify import App
import os
import multiprocessing
def run_app():
app = App()
app.get("/", lambda res, req: res.end("Hello, World!"))
app.listen(3000, lambda config: print("PID %d Listening on port http://localhost:%d now\n" % (os.getpid(), config.port)))
app.listen(
3000,
lambda config: print(
"PID %d Listening on port http://localhost:%d now\n"
% (os.getpid(), config.port)
),
)
app.run()
def create_fork():
n = os.fork()
# n greater than 0 means parent process
if not n > 0:
run_app()
# fork limiting the cpu count - 1
for i in range(1, multiprocessing.cpu_count()):
create_fork()
run_app() # run app on the main process too :)
run_app() # run app on the main process too :)

View file

@ -2,14 +2,21 @@ from socketify import App, AppOptions, AppListenOptions
app = App()
def shutdown(res, req):
res.end("Good bye!")
app.close()
app.get("/", lambda res, req: res.end("Hello!"))
app.get("/shutdown", shutdown)
app.listen(3000, lambda config: print("Listening on port http://localhost:%s now\n" % str(config.port)))
app.listen(
3000,
lambda config: print(
"Listening on port http://localhost:%s now\n" % str(config.port)
),
)
app.run()
print("App Closed!")
print("App Closed!")

View file

@ -1,4 +1,3 @@
import dataclasses
import strawberry
import strawberry.utils.graphiql
@ -7,10 +6,12 @@ from socketify import App
from typing import List, Optional
from helpers.graphiql import graphiql_from
@strawberry.type
class User:
name: str
@strawberry.type
class Query:
@strawberry.field
@ -24,5 +25,8 @@ app.get("/", lambda res, req: res.end(strawberry.utils.graphiql.get_graphiql_htm
app.post("/", graphiql_from(Query))
# you can also pass an Mutation as second parameter
# app.post("/", graphiql_from(Query, Mutation))
app.listen(3000, lambda config: print("Listening on port http://localhost:%d now\n" % config.port))
app.run()
app.listen(
3000,
lambda config: print("Listening on port http://localhost:%d now\n" % config.port),
)
app.run()

View file

@ -1,4 +1,3 @@
import dataclasses
import strawberry
import strawberry.utils.graphiql
@ -6,10 +5,12 @@ import strawberry.utils.graphiql
from socketify import App
from typing import List, Optional
@strawberry.type
class User:
name: str
@strawberry.type
class Query:
@strawberry.field
@ -23,10 +24,10 @@ schema = strawberry.Schema(Query)
async def graphiql_post(res, req):
# we can pass whatever we want to context, query, headers or params, cookies etc
context_value = req.preserve()
# get all incoming data and parse it as json
body = await res.get_json()
query = body["query"]
variables = body.get("variables", None)
root_value = body.get("root_value", None)
@ -40,14 +41,20 @@ async def graphiql_post(res, req):
operation_name,
)
res.cork_end({
"data": ( data.data ),
**({"errors": data.errors} if data.errors else {}),
**({"extensions": data.extensions} if data.extensions else {})
})
res.cork_end(
{
"data": (data.data),
**({"errors": data.errors} if data.errors else {}),
**({"extensions": data.extensions} if data.extensions else {}),
}
)
app = App()
app.get("/", lambda res, req: res.end(strawberry.utils.graphiql.get_graphiql_html()))
app.post("/", graphiql_post)
app.listen(3000, lambda config: print("Listening on port http://localhost:%d now\n" % config.port))
app.run()
app.listen(
3000,
lambda config: print("Listening on port http://localhost:%d now\n" % config.port),
)
app.run()

View file

@ -2,5 +2,8 @@ from socketify import App
app = App()
app.get("/", lambda res, req: res.end("Hello World!"))
app.listen(3000, lambda config: print("Listening on port http://localhost:%d now\n" % config.port))
app.run()
app.listen(
3000,
lambda config: print("Listening on port http://localhost:%d now\n" % config.port),
)
app.run()

View file

@ -1,6 +1,7 @@
import strawberry
import strawberry.utils.graphiql
def graphiql_from(Query, Mutation=None):
if Mutation:
schema = strawberry.Schema(query=Query, mutation=Mutation)
@ -9,10 +10,10 @@ def graphiql_from(Query, Mutation=None):
async def post(res, req):
# we can pass whatever we want to context, query, headers or params, cookies etc
context_value = {
"query": req.get_queries(),
"headers": req.get_headers(),
"params": req.get_parameters()
context_value = {
"query": req.get_queries(),
"headers": req.get_headers(),
"params": req.get_parameters(),
}
# get all incoming data and parse it as json
@ -31,9 +32,12 @@ def graphiql_from(Query, Mutation=None):
operation_name,
)
res.cork_end({
"data": ( data.data ),
**({"errors": data.errors} if data.errors else {}),
**({"extensions": data.extensions} if data.extensions else {})
})
return post
res.cork_end(
{
"data": (data.data),
**({"errors": data.errors} if data.errors else {}),
**({"extensions": data.extensions} if data.extensions else {}),
}
)
return post

View file

@ -1,19 +1,22 @@
import datetime
class MemoryCacheItem:
def __init__(self, expires, value):
self.expires = datetime.datetime.utcnow().timestamp() + expires
self.value = value
def is_expired(self):
return datetime.datetime.utcnow().timestamp() > self.expires
class MemoryCache:
def __init__(self):
self.cache = {}
def setex(self, key, expires, value):
self.cache[key] = MemoryCacheItem(expires, value)
def get(self, key):
try:
cache = self.cache[key]
@ -21,4 +24,4 @@ class MemoryCache:
return None
return cache.value
except KeyError:
return None
return None

View file

@ -7,45 +7,47 @@ from os import path
mimetypes.init()
# In production we highly recommend using a CDN like CloudFlare and/or NGINX or similar for static files
async def sendfile(res, req, filename):
#read headers before the first await
if_modified_since = req.get_header('if-modified-since')
range_header = req.get_header('range')
# read headers before the first await
if_modified_since = req.get_header("if-modified-since")
range_header = req.get_header("range")
bytes_range = None
start = 0
end = -1
#parse range header
# parse range header
if range_header:
bytes_range = range_header.replace("bytes=", '').split('-')
bytes_range = range_header.replace("bytes=", "").split("-")
start = int(bytes_range[0])
if bytes_range[1]:
end = int(bytes_range[1])
try:
exists = path.exists(filename)
#not found
# not found
if not exists:
return res.write_status(404).end(b'Not Found')
return res.write_status(404).end(b"Not Found")
#get size and last modified date
# get size and last modified date
stats = os.stat(filename)
total_size = stats.st_size
size = total_size
last_modified = time.strftime('%a, %d %b %Y %H:%M:%S GMT', time.gmtime(stats.st_mtime))
#check if modified since is provided
last_modified = time.strftime(
"%a, %d %b %Y %H:%M:%S GMT", time.gmtime(stats.st_mtime)
)
# check if modified since is provided
if if_modified_since == last_modified:
return res.write_status(304).end_without_body()
#tells the browser the last modified date
res.write_header(b'Last-Modified', last_modified)
# tells the browser the last modified date
res.write_header(b"Last-Modified", last_modified)
#add content type
# add content type
(content_type, encoding) = mimetypes.guess_type(filename, strict=True)
if content_type and encoding:
res.write_header(b'Content-Type', '%s; %s' % (content_type, encoding))
res.write_header(b"Content-Type", "%s; %s" % (content_type, encoding))
elif content_type:
res.write_header(b'Content-Type', content_type)
res.write_header(b"Content-Type", content_type)
async with async_open(filename, "rb") as fd:
#check range and support it
# check range and support it
if start > 0 or not end == -1:
if end < 0 or end >= size:
end = size - 1
@ -57,33 +59,34 @@ async def sendfile(res, req, filename):
else:
end = size - 1
res.write_status(200)
#tells the browser that we support range
#TODO: FIX BYTE RANGE IN ASYNC
# res.write_header(b'Accept-Ranges', b'bytes')
# tells the browser that we support range
# TODO: FIX BYTE RANGE IN ASYNC
# res.write_header(b'Accept-Ranges', b'bytes')
# res.write_header(b'Content-Range', 'bytes %d-%d/%d' % (start, end, total_size))
pending_size = size
#keep sending until abort or done
# keep sending until abort or done
while not res.aborted:
chunk_size = 16384 #16kb chunks
chunk_size = 16384 # 16kb chunks
if chunk_size > pending_size:
chunk_size = pending_size
buffer = await fd.read(chunk_size)
pending_size = pending_size - chunk_size
(ok, done) = await res.send_chunk(buffer, size)
if not ok or done: #if cannot send probably aborted
if not ok or done: # if cannot send probably aborted
break
except Exception as error:
res.write_status(500).end("Internal Error")
res.write_status(500).end("Internal Error")
def in_directory(file, directory):
#make both absolute
directory = path.join(path.realpath(directory), '')
# make both absolute
directory = path.join(path.realpath(directory), "")
file = path.realpath(file)
#return true, if the common prefix of both is equal to directory
#e.g. /a/b/c/d.rst and directory is /a/b, the common prefix is /a/b
# return true, if the common prefix of both is equal to directory
# e.g. /a/b/c/d.rst and directory is /a/b, the common prefix is /a/b
return path.commonprefix([file, directory]) == directory
@ -91,7 +94,7 @@ def static_route(app, route, directory):
def route_handler(res, req):
url = req.get_url()
res.grab_aborted_handler()
url = url[len(route)::]
url = url[len(route) : :]
if url.startswith("/"):
url = url[1::]
filename = path.join(path.realpath(directory), url)
@ -100,6 +103,7 @@ def static_route(app, route, directory):
res.write_status(404).end_without_body()
return
res.run_async(sendfile(res, req, filename))
if route.startswith("/"):
route = route[1::]
app.get("%s/*" % route, route_handler)
app.get("%s/*" % route, route_handler)

View file

@ -7,45 +7,47 @@ from os import path
mimetypes.init()
# In production we highly recommend using a CDN like CloudFlare and/or NGINX or similar for static files
async def sendfile(res, req, filename):
#read headers before the first await
if_modified_since = req.get_header('if-modified-since')
range_header = req.get_header('range')
# read headers before the first await
if_modified_since = req.get_header("if-modified-since")
range_header = req.get_header("range")
bytes_range = None
start = 0
end = -1
#parse range header
# parse range header
if range_header:
bytes_range = range_header.replace("bytes=", '').split('-')
bytes_range = range_header.replace("bytes=", "").split("-")
start = int(bytes_range[0])
if bytes_range[1]:
end = int(bytes_range[1])
try:
exists = await os.path.exists(filename)
#not found
# not found
if not exists:
return res.write_status(404).end(b'Not Found')
return res.write_status(404).end(b"Not Found")
#get size and last modified date
# get size and last modified date
stats = await os.stat(filename)
total_size = stats.st_size
size = total_size
last_modified = time.strftime('%a, %d %b %Y %H:%M:%S GMT', time.gmtime(stats.st_mtime))
#check if modified since is provided
last_modified = time.strftime(
"%a, %d %b %Y %H:%M:%S GMT", time.gmtime(stats.st_mtime)
)
# check if modified since is provided
if if_modified_since == last_modified:
return res.write_status(304).end_without_body()
#tells the browser the last modified date
res.write_header(b'Last-Modified', last_modified)
# tells the browser the last modified date
res.write_header(b"Last-Modified", last_modified)
#add content type
# add content type
(content_type, encoding) = mimetypes.guess_type(filename, strict=True)
if content_type and encoding:
res.write_header(b'Content-Type', '%s; %s' % (content_type, encoding))
res.write_header(b"Content-Type", "%s; %s" % (content_type, encoding))
elif content_type:
res.write_header(b'Content-Type', content_type)
res.write_header(b"Content-Type", content_type)
async with aiofiles.open(filename, "rb") as fd:
#check range and support it
# check range and support it
if start > 0 or not end == -1:
if end < 0 or end >= size:
end = size - 1
@ -57,33 +59,34 @@ async def sendfile(res, req, filename):
else:
end = size - 1
res.write_status(200)
#tells the browser that we support range
#TODO: FIX BYTE RANGE IN ASYNC
# res.write_header(b'Accept-Ranges', b'bytes')
# tells the browser that we support range
# TODO: FIX BYTE RANGE IN ASYNC
# res.write_header(b'Accept-Ranges', b'bytes')
# res.write_header(b'Content-Range', 'bytes %d-%d/%d' % (start, end, total_size))
pending_size = size
#keep sending until abort or done
# keep sending until abort or done
while not res.aborted:
chunk_size = 16384 #16kb chunks
chunk_size = 16384 # 16kb chunks
if chunk_size > pending_size:
chunk_size = pending_size
buffer = await fd.read(chunk_size)
pending_size = pending_size - chunk_size
(ok, done) = await res.send_chunk(buffer, size)
if not ok or done: #if cannot send probably aborted
if not ok or done: # if cannot send probably aborted
break
except Exception as error:
res.write_status(500).end("Internal Error")
res.write_status(500).end("Internal Error")
def in_directory(file, directory):
#make both absolute
directory = path.join(path.realpath(directory), '')
# make both absolute
directory = path.join(path.realpath(directory), "")
file = path.realpath(file)
#return true, if the common prefix of both is equal to directory
#e.g. /a/b/c/d.rst and directory is /a/b, the common prefix is /a/b
# return true, if the common prefix of both is equal to directory
# e.g. /a/b/c/d.rst and directory is /a/b, the common prefix is /a/b
return path.commonprefix([file, directory]) == directory
@ -91,7 +94,7 @@ def static_route(app, route, directory):
def route_handler(res, req):
url = req.get_url()
res.grab_aborted_handler()
url = url[len(route)::]
url = url[len(route) : :]
if url.startswith("/"):
url = url[1::]
filename = path.join(path.realpath(directory), url)
@ -100,6 +103,7 @@ def static_route(app, route, directory):
res.write_status(404).end_without_body()
return
res.run_async(sendfile(res, req, filename))
if route.startswith("/"):
route = route[1::]
app.get("%s/*" % route, route_handler)
app.get("%s/*" % route, route_handler)

View file

@ -1,4 +1,4 @@
#Simple example of mako and jinja2 template plugin for socketify.py
# Simple example of mako and jinja2 template plugin for socketify.py
from mako.template import Template
from mako.lookup import TemplateLookup
from mako import exceptions
@ -6,11 +6,14 @@ from mako import exceptions
from jinja2 import Environment, FileSystemLoader
class Jinja2Template:
def __init__(self, searchpath, encoding='utf-8', followlinks=False):
self.env = Environment(loader=FileSystemLoader(searchpath, encoding, followlinks))
#You can also add caching and logging strategy here if you want ;)
class Jinja2Template:
def __init__(self, searchpath, encoding="utf-8", followlinks=False):
self.env = Environment(
loader=FileSystemLoader(searchpath, encoding, followlinks)
)
# You can also add caching and logging strategy here if you want ;)
def render(self, templatename, **kwargs):
try:
template = self.env.get_template(templatename)
@ -18,14 +21,15 @@ class Jinja2Template:
except Exception as err:
return str(err)
class MakoTemplate:
def __init__(self, **options):
self.lookup = TemplateLookup(**options)
#You can also add caching and logging strategy here if you want ;)
# You can also add caching and logging strategy here if you want ;)
def render(self, templatename, **kwargs):
try:
template = self.lookup.get_template(templatename)
return template.render(**kwargs)
except Exception as err:
return exceptions.html_error_template().render()
return exceptions.html_error_template().render()

View file

@ -3,16 +3,18 @@ from .memory_cache import MemoryCache
# 2 LEVEL CACHE (Redis to share among workers, Memory to be much faster)
class TwoLevelCache:
def __init__(self, redis_conection, memory_expiration_time=3, redis_expiration_time=10):
def __init__(
self, redis_conection, memory_expiration_time=3, redis_expiration_time=10
):
self.memory_cache = MemoryCache()
self.redis_conection = redis_conection
self.memory_expiration_time = memory_expiration_time
self.redis_expiration_time = redis_expiration_time
#set cache to redis and memory
# set cache to redis and memory
def set(self, key, data):
try:
#never cache invalid data
# never cache invalid data
if data == None:
return False
self.redis_conection.setex(key, self.redis_expiration_time, data)
@ -21,32 +23,32 @@ class TwoLevelCache:
except Exception as err:
print(err)
return False
def get(self, key):
try:
value = self.memory_cache.get(key)
if value != None:
return value
#no memory cache so, got to redis
# no memory cache so, got to redis
value = self.redis_conection.get(key)
if value != None:
#refresh memory cache to speed up
# refresh memory cache to speed up
self.memory_cache.setex(key, self.memory_expiration_time, value)
return value
except Exception as err:
return None
#if more than 1 worker/request try to do this request, only one will call the Model and the others will get from cache
# if more than 1 worker/request try to do this request, only one will call the Model and the others will get from cache
async def run_once(self, key, timeout, executor, *args):
result = None
try:
lock = self.redis_conection.lock(f"lock-{key}", blocking_timeout=timeout)
#wait for the lock (some request has not finished yet)
# wait for the lock (some request has not finished yet)
while lock.locked():
await asyncio.sleep(0)
try:
lock.acquire(blocking=False)
#always check cache first
# always check cache first
cached = self.get(key)
if cached != None:
return cached
@ -59,12 +61,12 @@ class TwoLevelCache:
finally:
lock.release()
except Exception as err:
#cannot even create or release the lock
# cannot even create or release the lock
pass
finally:
#if result is None, try cache one last time
# if result is None, try cache one last time
if result == None:
cache = self.get(key)
if cache != None:
return cache
return result
return result

View file

@ -4,30 +4,36 @@ import aiohttp
import asyncio
from helpers.twolevel_cache import TwoLevelCache
#create redis pool + connections
redis_pool = redis.ConnectionPool(host='localhost', port=6379, db=0)
# create redis pool + connections
redis_pool = redis.ConnectionPool(host="localhost", port=6379, db=0)
redis_conection = redis.Redis(connection_pool=redis_pool)
# 2 LEVEL CACHE (Redis to share among workers, Memory to be much faster)
# cache in memory is 30s, cache in redis is 60s duration
# cache in memory is 30s, cache in redis is 60s duration
cache = TwoLevelCache(redis_conection, 30, 60)
###
# Model
###
async def get_pokemon(number):
async with aiohttp.ClientSession() as session:
async with session.get(f'https://pokeapi.co/api/v2/pokemon/{number}') as response:
async with session.get(
f"https://pokeapi.co/api/v2/pokemon/{number}"
) as response:
pokemon = await response.text()
#cache only works with strings/bytes
#we will not change anything here so no need to parse json
# cache only works with strings/bytes
# we will not change anything here so no need to parse json
return pokemon.encode("utf-8")
async def get_original_pokemons():
async with aiohttp.ClientSession() as session:
async with session.get(f'https://pokeapi.co/api/v2/pokemon?limit=151') as response:
#cache only works with strings/bytes
#we will not change anything here so no need to parse json
async with session.get(
f"https://pokeapi.co/api/v2/pokemon?limit=151"
) as response:
# cache only works with strings/bytes
# we will not change anything here so no need to parse json
pokemons = await response.text()
return pokemons.encode("utf-8")
@ -36,13 +42,13 @@ async def get_original_pokemons():
# Routes
###
def list_original_pokemons(res, req):
#check cache for faster response
# check cache for faster response
value = cache.get("original_pokemons")
if value != None:
if value != None:
return res.end(value)
#get asynchronous from Model
# get asynchronous from Model
async def get_originals():
value = await cache.run_once("original_pokemons", 5, get_original_pokemons)
res.cork_end(value)
@ -52,28 +58,29 @@ def list_original_pokemons(res, req):
def list_pokemon(res, req):
#get needed parameters
# get needed parameters
try:
number = int(req.get_parameter(0))
except:
#invalid number
return req.set_yield(1)
# invalid number
return req.set_yield(1)
#check cache for faster response
# check cache for faster response
cache_key = f"pokemon-{number}"
value = cache.get(cache_key)
if value != None:
if value != None:
return res.end(value)
#get asynchronous from Model
# get asynchronous from Model
async def find_pokemon(number, res):
#sync with redis lock to run only once
#if more than 1 worker/request try to do this request, only one will call the Model and the others will get from cache
# sync with redis lock to run only once
# if more than 1 worker/request try to do this request, only one will call the Model and the others will get from cache
value = await cache.run_once(cache_key, 5, get_pokemon, number)
res.cork_end(value)
res.run_async(find_pokemon(number, res))
###
# Here I decided to use sync first and async only when needed, but you can use async directly, see ./async.py
###
@ -81,5 +88,8 @@ app = App()
app.get("/", list_original_pokemons)
app.get("/:number", list_pokemon)
app.any("/*", lambda res, _: res.write_status(404).end("Not Found"))
app.listen(3000, lambda config: print("Listening on port http://localhost:%d now\n" % config.port))
app.listen(
3000,
lambda config: print("Listening on port http://localhost:%d now\n" % config.port),
)
app.run()

View file

@ -1,8 +1,17 @@
from socketify import App, AppOptions
app = App(AppOptions(key_file_name="./misc/key.pem", cert_file_name="./misc/cert.pem", passphrase="1234"))
app = App(
AppOptions(
key_file_name="./misc/key.pem",
cert_file_name="./misc/cert.pem",
passphrase="1234",
)
)
app.get("/", lambda res, req: res.end("Hello World socketify from Python!"))
app.listen(3000, lambda config: print("Listening on port https://localhost:%d now\n" % config.port))
app.listen(
3000,
lambda config: print("Listening on port https://localhost:%d now\n" % config.port),
)
app.run()
#openssl req -newkey rsa:2048 -new -nodes -x509 -days 3650 -passout pass:1234 -keyout ./misc/key.pem -out ./misc/cert.pem
# openssl req -newkey rsa:2048 -new -nodes -x509 -days 3650 -passout pass:1234 -keyout ./misc/key.pem -out ./misc/cert.pem

View file

@ -2,5 +2,10 @@ from socketify import App, AppListenOptions
app = App()
app.get("/", lambda res, req: res.end("Hello World socketify from Python!"))
app.listen(AppListenOptions(port=3000, host="0.0.0.0"), lambda config: print("Listening on port http://%s:%d now\n" % (config.host, config.port)))
app.run()
app.listen(
AppListenOptions(port=3000, host="0.0.0.0"),
lambda config: print(
"Listening on port http://%s:%d now\n" % (config.host, config.port)
),
)
app.run()

View file

@ -1,34 +1,42 @@
from socketify import App, middleware
async def get_user(authorization):
if authorization:
#you can do something async here
return { 'greeting': 'Hello, World' }
# you can do something async here
return {"greeting": "Hello, World"}
return None
async def auth(res, req, data=None):
user = await get_user(req.get_header('authorization'))
if not user:
user = await get_user(req.get_header("authorization"))
if not user:
res.write_status(403).end("not authorized")
#returning Falsy in middlewares just stop the execution of the next middleware
# returning Falsy in middlewares just stop the execution of the next middleware
return False
#returns extra data
# returns extra data
return user
def another_middie(res, req, data=None):
#now we can mix sync and async and change the data here
# now we can mix sync and async and change the data here
if isinstance(data, dict):
gretting = data.get('greeting', '')
data['greeting'] = f"{gretting} from another middie ;)"
gretting = data.get("greeting", "")
data["greeting"] = f"{gretting} from another middie ;)"
return data
def home(res, req, user=None):
res.cork_end(user.get('greeting', None))
res.cork_end(user.get("greeting", None))
app = App()
app.get("/", middleware(auth, another_middie, home))
app.listen(3000, lambda config: print("Listening on port http://localhost:%d now\n" % config.port))
app.listen(
3000,
lambda config: print("Listening on port http://localhost:%d now\n" % config.port),
)
app.run()
#You can also take a look at MiddlewareRouter in middleware_router.py ;)
# You can also take a look at MiddlewareRouter in middleware_router.py ;)

View file

@ -1,43 +1,49 @@
from socketify import App
#this is just an example implementation, you can just use 'from socketify import middleware' for a more complete version
# this is just an example implementation, you can just use 'from socketify import middleware' for a more complete version
async def get_user(authorization):
if authorization:
#do actually something async here
return { 'greeting': 'Hello, World' }
# do actually something async here
return {"greeting": "Hello, World"}
return None
def auth(route):
#in async query string, arguments and headers are only valid until the first await
# in async query string, arguments and headers are only valid until the first await
async def auth_middleware(res, req):
#get_headers will preserve headers (and cookies) inside req, after await
headers = req.get_headers()
#get_parameters will preserve all params inside req after await
# get_headers will preserve headers (and cookies) inside req, after await
headers = req.get_headers()
# get_parameters will preserve all params inside req after await
params = req.get_parameters()
#get queries will preserve all queries inside req after await
# get queries will preserve all queries inside req after await
queries = req.get_queries()
user = await get_user(headers.get('authorization', None))
user = await get_user(headers.get("authorization", None))
if user:
return route(res, req, user)
return route(res, req, user)
return res.write_status(403).cork_end("not authorized")
return auth_middleware
def home(res, req, user=None):
theme = req.get_query("theme_color")
theme = theme if theme else "light"
greeting = user.get('greeting', None)
greeting = user.get("greeting", None)
user_id = req.get_parameter(0)
res.cork_end(f"{greeting} <br/> theme: {theme} <br/> id: {user_id}")
app = App()
app.get("/user/:id", auth(home))
app.listen(3000, lambda config: print("Listening on port http://localhost:%d now\n" % config.port))
app.listen(
3000,
lambda config: print("Listening on port http://localhost:%d now\n" % config.port),
)
app.run()
#curl --location --request GET 'http://localhost:3000/user/10?theme_color=dark' --header 'Authorization: Bearer 23456789'
# curl --location --request GET 'http://localhost:3000/user/10?theme_color=dark' --header 'Authorization: Bearer 23456789'

View file

@ -1,46 +1,51 @@
from socketify import App, MiddlewareRouter, middleware
async def get_user(authorization):
if authorization:
#you can do something async here
return { 'greeting': 'Hello, World' }
# you can do something async here
return {"greeting": "Hello, World"}
return None
async def auth(res, req, data=None):
user = await get_user(req.get_header('authorization'))
if not user:
user = await get_user(req.get_header("authorization"))
if not user:
res.write_status(403).end("not authorized")
#returning Falsy in middlewares just stop the execution of the next middleware
# returning Falsy in middlewares just stop the execution of the next middleware
return False
#returns extra data
# returns extra data
return user
def another_middie(res, req, data=None):
#now we can mix sync and async and change the data here
# now we can mix sync and async and change the data here
if isinstance(data, dict):
gretting = data.get('greeting', '')
data['greeting'] = f"{gretting} from another middie ;)"
gretting = data.get("greeting", "")
data["greeting"] = f"{gretting} from another middie ;)"
return data
def home(res, req, user=None):
res.cork_end(user.get('greeting', None))
def home(res, req, user=None):
res.cork_end(user.get("greeting", None))
app = App()
#you can use an Middleware router to add middlewares to every route you set
# you can use an Middleware router to add middlewares to every route you set
auth_router = MiddlewareRouter(app, auth)
auth_router.get("/", home)
#you can also mix middleware() with MiddlewareRouter
# you can also mix middleware() with MiddlewareRouter
auth_router.get("/another", middleware(another_middie, home))
#you can also pass multiple middlewares on the MiddlewareRouter
# you can also pass multiple middlewares on the MiddlewareRouter
other_router = MiddlewareRouter(app, auth, another_middie)
other_router.get("/another_way", home)
app.listen(3000, lambda config: print("Listening on port http://localhost:%d now\n" % config.port))
app.run()
app.listen(
3000,
lambda config: print("Listening on port http://localhost:%d now\n" % config.port),
)
app.run()

View file

@ -1,38 +1,46 @@
from socketify import App
#this is just an example implementation, you can just use 'from socketify import middleware' for a more complete version
# this is just an example implementation, you can just use 'from socketify import middleware' for a more complete version
def middleware(*functions):
def middleware_route(res, req):
data = None
#cycle through all middlewares
# cycle through all middlewares
for function in functions:
#call middlewares
# call middlewares
data = function(res, req, data)
#stops if returns Falsy
# stops if returns Falsy
if not data:
break
return middleware_route
def get_user(authorization_header):
if authorization_header:
return { 'greeting': 'Hello, World' }
return {"greeting": "Hello, World"}
return None
def auth(res, req, data=None):
user = get_user(req.get_header('authorization'))
if not user:
user = get_user(req.get_header("authorization"))
if not user:
res.write_status(403).end("not authorized")
return False
#returns extra data
# returns extra data
return user
def home(res, req, user=None):
res.end(user.get('greeting', None))
res.end(user.get("greeting", None))
app = App()
app.get("/", middleware(auth, home))
app.listen(3000, lambda config: print("Listening on port http://localhost:%d now\n" % config.port))
app.run()
app.listen(
3000,
lambda config: print("Listening on port http://localhost:%d now\n" % config.port),
)
app.run()

View file

@ -2,23 +2,32 @@ from socketify import App, AppOptions, AppListenOptions
app = App()
async def home(res, req):
res.end("Hello, World!")
def user(res, req):
try:
if int(req.get_parameter(0)) == 1:
return res.end("Hello user 1!")
finally:
#invalid user tells it to go to the next valid route (not found)
req.set_yield(1)
# invalid user tells it to go to the next valid route (not found)
req.set_yield(1)
def not_found(res, req):
res.write_status(404).end("Not Found")
app.get("/", home)
app.get("/user/:user_id", user)
app.any("/*", not_found)
app.listen(3000, lambda config: print("Listening on port http://localhost:%s now\n" % str(config.port)))
app.run()
app.listen(
3000,
lambda config: print(
"Listening on port http://localhost:%s now\n" % str(config.port)
),
)
app.run()

View file

@ -1,13 +1,18 @@
from socketify import App
def home(res, req):
res.write('<html><h1>')
res.write('Your proxied IP is: %s' % res.get_proxied_remote_address())
res.write('</h1><h1>')
res.write('Your IP as seen by the origin server is: %s' % res.get_remote_address())
res.end('</h1></html>')
res.write("<html><h1>")
res.write("Your proxied IP is: %s" % res.get_proxied_remote_address())
res.write("</h1><h1>")
res.write("Your IP as seen by the origin server is: %s" % res.get_remote_address())
res.end("</h1></html>")
app = App()
app.get("/*", home)
app.listen(3000, lambda config: print("Listening on port http://localhost:%d now\n" % config.port))
app.run()
app.listen(
3000,
lambda config: print("Listening on port http://localhost:%d now\n" % config.port),
)
app.run()

View file

@ -4,4 +4,4 @@ aiofile
redis
strawberry-graphql
mako
git+https://github.com/cirospaciari/socketify.py.git@main#socketify --global-option="build_ext"
git+https://github.com/cirospaciari/socketify.py.git@main#socketify

View file

@ -1,39 +1,48 @@
from socketify import App, AppOptions, AppListenOptions
import asyncio
from datetime import datetime
from datetime import timedelta
app = App()
def home(res, req):
res.end("Hello :)")
def anything(res, req):
res.end("Any route with method: %s" % req.get_method())
res.end("Any route with method: %s" % req.get_method())
def cookies(res, req):
#cookies are written after end
res.set_cookie("spaciari", "1234567890",{
# expires
# path
# comment
# domain
# max-age
# secure
# version
# httponly
# samesite
"path": "/",
# "domain": "*.test.com",
"httponly": True,
"samesite": "None",
"secure": True,
"expires": datetime.utcnow() + timedelta(minutes=30)
})
res.end("Your session_id cookie is: %s" % req.get_cookie('session_id'));
# cookies are written after end
res.set_cookie(
"spaciari",
"1234567890",
{
# expires
# path
# comment
# domain
# max-age
# secure
# version
# httponly
# samesite
"path": "/",
# "domain": "*.test.com",
"httponly": True,
"samesite": "None",
"secure": True,
"expires": datetime.utcnow() + timedelta(minutes=30),
},
)
res.end("Your session_id cookie is: %s" % req.get_cookie("session_id"))
def useragent(res, req):
res.end("Your user agent is: %s" % req.get_header("user-agent"))
def useragent(res,req):
res.end("Your user agent is: %s" % req.get_header('user-agent'));
def user(res, req):
try:
@ -44,67 +53,76 @@ def user(res, req):
finally:
# invalid user tells it to go to the next valid route (not found)
req.set_yield(1)
req.set_yield(1)
async def delayed_hello(delay, res):
await asyncio.sleep(delay) #do something async
await asyncio.sleep(delay) # do something async
res.cork_end("Hello sorry for the delay!")
# cork_end is a less verbose way of writing
# res.cork(lambda res: res.end("Hello sorry for the delay!"))
def delayed(res, req):
#request object only lives during the life time of this call
#get parameters, query, headers anything you need here
# request object only lives during the life time of this call
# get parameters, query, headers anything you need here
delay = req.get_query("delay")
delay = 1 if delay == None else float(delay)
#get queries returns an dict with all query string
# get queries returns an dict with all query string
# queries = req.get_queries()
#tell response to run this in the event loop
#abort handler is grabbed here, so responses will only be sent if res.aborted == False
# tell response to run this in the event loop
# abort handler is grabbed here, so responses will only be sent if res.aborted == False
res.run_async(delayed_hello(delay, res))
def json(res, req):
#if you pass an object will auto write an header with application/json
res.end({ "message": "I'm an application/json!"})
# if you pass an object will auto write an header with application/json
res.end({"message": "I'm an application/json!"})
async def sleepy_json(res, req):
#get parameters, query, headers anything you need here before first await :)
# get parameters, query, headers anything you need here before first await :)
user_agent = req.get_header("user-agent")
#print all headers
req.for_each_header(lambda key,value: print("Header %s: %s" % (key, value)))
#or if you want get all headers in an dict
# print all headers
req.for_each_header(lambda key, value: print("Header %s: %s" % (key, value)))
# or if you want get all headers in an dict
print("All headers", req.get_headers())
#req may not be available in directly attached async functions after await
#but if you don't care about req info you can do it
await asyncio.sleep(2) #do something async
res.cork_end({ "message": "I'm delayed!", "user-agent": user_agent})
# req may not be available in directly attached async functions after await
# but if you don't care about req info you can do it
await asyncio.sleep(2) # do something async
res.cork_end({"message": "I'm delayed!", "user-agent": user_agent})
def custom_header(res, req):
res.write_header("Content-Type", "application/octet-stream")
res.write_header("Content-Disposition", "attachment; filename=\"message.txt\"")
res.write_header("Content-Disposition", 'attachment; filename="message.txt"')
res.end("Downloaded this ;)")
def send_in_parts(res, req):
#write and end accept bytes and str, otherwise they try to dump to json
# write and end accept bytes and str, otherwise they try to dump to json
res.write("I can")
res.write(" send ")
res.write("messages")
res.end(" in parts!")
def redirect(res, req):
#status code is optional default is 302
# status code is optional default is 302
res.redirect("/redirected", 302)
def redirected(res, req):
res.end("You got redirected to here :D")
def not_found(res, req):
res.write_status(404).end("Not Found")
# app.any, app.get, app.put, app.post, app.head, app.options, app.delete, app.patch, app.connect and app.trace are available
app.get("/", home)
app.any("/anything", anything)
@ -122,5 +140,10 @@ app.get("/redirected", redirected)
# Wildcard at last always :)
app.any("/*", not_found)
app.listen(3000, lambda config: print("Listening on port http://localhost:%s now\n" % str(config.port)))
app.run()
app.listen(
3000,
lambda config: print(
"Listening on port http://localhost:%s now\n" % str(config.port)
),
)
app.run()

View file

@ -17,9 +17,9 @@
# pypy3 - socketify static_aiofile - 639.70 req/s
# pypy3 - socketify static_aiofiles - 637.55 req/s
# pypy3 - fastapi static gunicorn - 253.31 req/s
# pypy3 - starlette static uvicorn - 279.45 req/s
# pypy3 - starlette static uvicorn - 279.45 req/s
# Conclusions:
# Conclusions:
# With PyPy3 only static is really usable; gunicorn/uvicorn, aiofiles and aiofile are really slow on PyPy3, maybe this changes with HPy
# Python3 with any option will be faster than gunicorn/uvicorn but with PyPy3 with static we got 2x (or almost this in case of fastify) performance of node.js
# But even PyPy3 + socketify static is 7x+ slower than NGINX
@ -35,16 +35,19 @@ from socketify import App, sendfile
app = App()
#send home page index.html
# send home page index.html
async def home(res, req):
#sends the whole file with 304 and bytes range support
# sends the whole file with 304 and bytes range support
await sendfile(res, req, "./public/index.html")
app.get("/", home)
#serve all files in the public folder under the /* route (you can use any route, like /assets)
# serve all files in the public folder under the /* route (you can use any route, like /assets)
app.static("/", "./public")
app.listen(3000, lambda config: print("Listening on port http://localhost:%d now\n" % config.port))
app.listen(
3000,
lambda config: print("Listening on port http://localhost:%d now\n" % config.port),
)
app.run()

View file

@ -1,14 +1,22 @@
from socketify import App
#see helpers/templates.py for the plugin implementation
# see helpers/templates.py for the plugin implementation
from helpers.templates import Jinja2Template
app = App()
app.template(Jinja2Template("./templates", encoding='utf-8', followlinks=False))
app.template(Jinja2Template("./templates", encoding="utf-8", followlinks=False))
def home(res, req):
res.render("jinja2_home.html", title="Hello", message="Hello, World")
app.get("/", home)
app.listen(3000, lambda config: print("Listening on port http://localhost:%s now\n" % str(config.port)))
app.run()
app.listen(
3000,
lambda config: print(
"Listening on port http://localhost:%s now\n" % str(config.port)
),
)
app.run()

View file

@ -1,14 +1,26 @@
from socketify import App
#see helpers/templates.py for the plugin implementation
# see helpers/templates.py for the plugin implementation
from helpers.templates import MakoTemplate
app = App()
app.template(MakoTemplate(directories=['./templates'], output_encoding='utf-8', encoding_errors='replace'))
app.template(
MakoTemplate(
directories=["./templates"], output_encoding="utf-8", encoding_errors="replace"
)
)
def home(res, req):
res.render("mako_home.html", message="Hello, World")
app.get("/", home)
app.listen(3000, lambda config: print("Listening on port http://localhost:%s now\n" % str(config.port)))
app.run()
app.listen(
3000,
lambda config: print(
"Listening on port http://localhost:%s now\n" % str(config.port)
),
)
app.run()

View file

@ -1,31 +1,43 @@
from socketify import App, AppOptions, OpCode, CompressOptions
def ws_open(ws):
print('A WebSocket got connected!')
print("A WebSocket got connected!")
ws.send("Hello World!", OpCode.TEXT)
def ws_message(ws, message, opcode):
print(message, opcode)
#Ok is false if backpressure was built up, wait for drain
# Ok is false if backpressure was built up, wait for drain
ok = ws.send(message, opcode)
def ws_upgrade(res, req, socket_context):
key = req.get_header("sec-websocket-key")
protocol = req.get_header("sec-websocket-protocol")
extensions = req.get_header("sec-websocket-extensions")
res.upgrade(key, protocol, extensions, socket_context)
app = App()
app.ws("/*", {
'compression': CompressOptions.SHARED_COMPRESSOR,
'max_payload_length': 16 * 1024 * 1024,
'idle_timeout': 12,
'open': ws_open,
'message': ws_message,
'upgrade': ws_upgrade,
'drain': lambda ws: print('WebSocket backpressure: %s', ws.get_buffered_amount()),
'close': lambda ws, code, message: print('WebSocket closed')
})
app.any("/", lambda res,req: res.end("Nothing to see here!"))
app.listen(3000, lambda config: print("Listening on port http://localhost:%d now\n" % (config.port)))
app.run()
app = App()
app.ws(
"/*",
{
"compression": CompressOptions.SHARED_COMPRESSOR,
"max_payload_length": 16 * 1024 * 1024,
"idle_timeout": 12,
"open": ws_open,
"message": ws_message,
"upgrade": ws_upgrade,
"drain": lambda ws: print(
"WebSocket backpressure: %s", ws.get_buffered_amount()
),
"close": lambda ws, code, message: print("WebSocket closed"),
},
)
app.any("/", lambda res, req: res.end("Nothing to see here!"))
app.listen(
3000,
lambda config: print("Listening on port http://localhost:%d now\n" % (config.port)),
)
app.run()
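# Illustrative client sketch (not part of this example), assuming the third-party
# "websockets" package is installed: connect, read the greeting sent by ws_open,
# then send a message that ws_message echoes back.
# import asyncio, websockets
# async def client():
#     async with websockets.connect("ws://localhost:3000/") as ws:
#         print(await ws.recv())  # "Hello World!"
#         await ws.send("hi")
#         print(await ws.recv())  # echoed back by ws_message
# asyncio.run(client())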

View file

@ -1,33 +1,45 @@
from socketify import App, AppOptions, OpCode, CompressOptions
import asyncio
def ws_open(ws):
print('A WebSocket got connected!')
print("A WebSocket got connected!")
ws.send("Hello World!", OpCode.TEXT)
def ws_message(ws, message, opcode):
print(message, opcode)
#Ok is false if backpressure was built up, wait for drain
# Ok is false if backpressure was built up, wait for drain
ok = ws.send(message, opcode)
async def ws_upgrade(res, req, socket_context):
key = req.get_header("sec-websocket-key")
protocol = req.get_header("sec-websocket-protocol")
extensions = req.get_header("sec-websocket-extensions")
await asyncio.sleep(2)
res.upgrade(key, protocol, extensions, socket_context)
app = App()
app.ws("/*", {
'compression': CompressOptions.SHARED_COMPRESSOR,
'max_payload_length': 16 * 1024 * 1024,
'idle_timeout': 12,
'open': ws_open,
'message': ws_message,
'upgrade': ws_upgrade,
'drain': lambda ws: print('WebSocket backpressure: %s', ws.get_buffered_amount()),
'close': lambda ws, code, message: print('WebSocket closed')
})
app.any("/", lambda res,req: res.end("Nothing to see here!"))
app.listen(3000, lambda config: print("Listening on port http://localhost:%d now\n" % (config.port)))
app.run()
app = App()
app.ws(
"/*",
{
"compression": CompressOptions.SHARED_COMPRESSOR,
"max_payload_length": 16 * 1024 * 1024,
"idle_timeout": 12,
"open": ws_open,
"message": ws_message,
"upgrade": ws_upgrade,
"drain": lambda ws: print(
"WebSocket backpressure: %s", ws.get_buffered_amount()
),
"close": lambda ws, code, message: print("WebSocket closed"),
},
)
app.any("/", lambda res, req: res.end("Nothing to see here!"))
app.listen(
3000,
lambda config: print("Listening on port http://localhost:%d now\n" % (config.port)),
)
app.run()

View file

@ -1,77 +1,84 @@
from socketify import App
###
# We always recommend checking res.aborted in async operations
# We always recommend checking res.aborted in async operations
###
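# A minimal sketch of that pattern (illustrative, not part of this file): bail out
# if the client went away while we were awaiting, then cork the final write.
# async def delayed(res, req):
#     res.grab_aborted_handler()
#     await asyncio.sleep(1)
#     if res.aborted:
#         return
#     res.cork_end("still here!")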
def upload(res, req):
print(f"Posted to {req.get_url()}")
def on_data(res, chunk, is_end):
print(f"Got chunk of data with length {len(chunk)}, is_end: {is_end}")
if (is_end):
if is_end:
res.cork_end("Thanks for the data!")
res.on_data(on_data)
async def upload_chunks(res, req):
print(f"Posted to {req.get_url()}")
#await all the data; returns whatever chunks were received if it fails (the most likely failure is an aborted request)
# await all the data; returns whatever chunks were received if it fails (the most likely failure is an aborted request)
data = await res.get_data()
print(f"Got {len(data)} chunks of data!")
for chunk in data:
print(f"Got chunk of data with length {len(chunk)}")
#We respond when we are done
# We respond when we are done
res.cork_end("Thanks for the data!")
async def upload_json(res, req):
print(f"Posted to {req.get_url()}")
#await all the data and parse it as JSON; returns None on failure
# await all the data and parse it as JSON; returns None on failure
people = await res.get_json()
if isinstance(people, list) and isinstance(people[0], dict):
print(f"First person is named: {people[0]['name']}")
#We respond when we are done
# We respond when we are done
res.cork_end("Thanks for the data!")
async def upload_text(res, req):
print(f"Posted to {req.get_url()}")
#await all the data and decode it as text; returns None on failure
text = await res.get_text() #first parameter is the encoding (default utf-8)
# await all the data and decode it as text; returns None on failure
text = await res.get_text() # first parameter is the encoding (default utf-8)
print(f"Your text is ${text}")
#We respond when we are done
# We respond when we are done
res.cork_end("Thanks for the data!")
async def upload_urlencoded(res, req):
print(f"Posted to {req.get_url()}")
#await all the data and decode it as application/x-www-form-urlencoded; returns None on failure
form = await res.get_form_urlencoded() #first parameter is the encoding (default utf-8)
# await all the data and decode it as application/x-www-form-urlencoded; returns None on failure
form = (
await res.get_form_urlencoded()
) # first parameter is the encoding (default utf-8)
print(f"Your form is ${form}")
#We respond when we are done
# We respond when we are done
res.cork_end("Thanks for the data!")
async def upload_multiple(res, req):
print(f"Posted to {req.get_url()}")
content_type = req.get_header("content-type")
#we can check the Content-Type to accept multiple formats
# we can check the Content-Type to accept multiple formats
if content_type == "application/json":
data = await res.get_json()
elif content_type == "application/x-www-form-urlencoded":
data = await res.get_form_urlencoded()
else:
data = await res.get_text()
print(f"Your data is ${data}")
#We respond when we are done
# We respond when we are done
res.cork_end("Thanks for the data!")
@ -83,6 +90,9 @@ app.post("/text", upload_text)
app.post("/urlencoded", upload_urlencoded)
app.post("/multiple", upload_multiple)
app.any("/*", lambda res,_: res.write_status(404).end("Not Found"))
app.listen(3000, lambda config: print("Listening on port http://localhost:%d now\n" % config.port))
app.run()
app.any("/*", lambda res, _: res.write_status(404).end("Not Found"))
app.listen(
3000,
lambda config: print("Listening on port http://localhost:%d now\n" % config.port),
)
app.run()

View file

@ -1,24 +1,35 @@
from socketify import App, AppOptions, OpCode, CompressOptions
def ws_open(ws):
print('A WebSocket got connected!')
print("A WebSocket got connected!")
ws.send("Hello World!", OpCode.TEXT)
def ws_message(ws, message, opcode):
print(message, opcode)
#Ok is false if backpressure was built up, wait for drain
# Ok is false if backpressure was built up, wait for drain
ok = ws.send(message, opcode)
app = App()
app.ws("/*", {
'compression': CompressOptions.SHARED_COMPRESSOR,
'max_payload_length': 16 * 1024 * 1024,
'idle_timeout': 12,
'open': ws_open,
'message': ws_message,
'drain': lambda ws: print('WebSocket backpressure: %s', ws.get_buffered_amount()),
'close': lambda ws, code, message: print('WebSocket closed')
})
app.any("/", lambda res,req: res.end("Nothing to see here!'"))
app.listen(3000, lambda config: print("Listening on port http://localhost:%d now\n" % (config.port)))
app.run()
app = App()
app.ws(
"/*",
{
"compression": CompressOptions.SHARED_COMPRESSOR,
"max_payload_length": 16 * 1024 * 1024,
"idle_timeout": 12,
"open": ws_open,
"message": ws_message,
"drain": lambda ws: print(
"WebSocket backpressure: %s", ws.get_buffered_amount()
),
"close": lambda ws, code, message: print("WebSocket closed"),
},
)
app.any("/", lambda res, req: res.end("Nothing to see here!'"))
app.listen(
3000,
lambda config: print("Listening on port http://localhost:%d now\n" % (config.port)),
)
app.run()

View file

@ -2,12 +2,13 @@ import sys
vi = sys.version_info
if vi < (3, 7):
raise RuntimeError('socketify requires Python 3.7 or greater')
raise RuntimeError("socketify requires Python 3.7 or greater")
# if sys.platform in ('win32', 'cygwin', 'cli'):
# raise RuntimeError('socketify does not support Windows at the moment')
import setuptools
# from setuptools.command.sdist import sdist
# from setuptools.command.build_ext import build_ext
@ -21,15 +22,13 @@ import setuptools
# UWS_DIR = str(_ROOT / "src" / "socketify" /"uWebSockets")
# UWS_BUILD_DIR = str(_ROOT / "build" /"uWebSockets")
# NATIVE_CAPI_DIR = str(_ROOT / "build" / "native")
# NATIVE_CAPI_DIR = str(_ROOT / "build" / "native")
# NATIVE_LIB_PATH = str(_ROOT / "build" / "libsocketify.so")
# NATIVE_DIR = str(_ROOT / "src" / "socketify" /"native")
# NATIVE_BUILD_DIR = str(_ROOT / "build" /"native")
# NATIVE_LIB_OUTPUT = str(_ROOT / "src" / "socketify" / "libsocketify.so")
# class Prepare(sdist):
# def run(self):
# super().run()
@ -38,7 +37,7 @@ import setuptools
# class Makefile(build_ext):
# def run(self):
# env = os.environ.copy()
# if os.path.exists(UWS_BUILD_DIR):
# shutil.rmtree(UWS_BUILD_DIR)
# shutil.copytree(UWS_DIR, UWS_BUILD_DIR)
@ -49,7 +48,7 @@ import setuptools
# subprocess.run(["make", "shared"], cwd=NATIVE_CAPI_DIR, env=env, check=True)
# shutil.move(NATIVE_LIB_PATH, NATIVE_LIB_OUTPUT)
# super().run()
@ -60,7 +59,7 @@ with open("README.md", "r", encoding="utf-8") as fh:
setuptools.setup(
name="socketify",
version="0.0.1",
platforms=['macOS', 'POSIX'],
platforms=["macOS", "POSIX"],
author="Ciro Spaciari",
author_email="ciro.spaciari@gmail.com",
description="Fast WebSocket and Http/Https server",
@ -77,11 +76,20 @@ setuptools.setup(
],
packages=["socketify"],
package_dir={"": "src"},
package_data={"": ['./*.so', './uWebSockets/*','./uWebSockets/*/*','./uWebSockets/*/*/*', './native/*','./native/*/*','./native/*/*/*']},
package_data={
"": [
"./*.so",
"./uWebSockets/*",
"./uWebSockets/*/*",
"./uWebSockets/*/*/*",
"./native/*",
"./native/*/*",
"./native/*/*/*",
]
},
python_requires=">=3.7",
install_requires=["cffi>=1.0.0", "setuptools>=58.1.0"],
has_ext_modules=lambda: True,
cmdclass={}, #cmdclass={'sdist': Prepare, 'build_ext': Makefile},
include_package_data=True
)
cmdclass={}, # cmdclass={'sdist': Prepare, 'build_ext': Makefile},
include_package_data=True,
)

View file

@ -1,2 +1,9 @@
from .socketify import App, AppOptions, AppListenOptions, OpCode, SendStatus, CompressOptions
from .helpers import sendfile, middleware, MiddlewareRouter
from .socketify import (
App,
AppOptions,
AppListenOptions,
OpCode,
SendStatus,
CompressOptions,
)
from .helpers import sendfile, middleware, MiddlewareRouter

View file

@ -10,45 +10,47 @@ mimetypes.init()
# This is a sync version without any dependencies; it is normally much faster in CPython and PyPy3
# In production we highly recommend using a CDN like CloudFlare and/or NGINX or similar for static files
async def sendfile(res, req, filename):
#read headers before the first await
if_modified_since = req.get_header('if-modified-since')
range_header = req.get_header('range')
# read headers before the first await
if_modified_since = req.get_header("if-modified-since")
range_header = req.get_header("range")
bytes_range = None
start = 0
end = -1
#parse range header
# parse range header
if range_header:
bytes_range = range_header.replace("bytes=", '').split('-')
bytes_range = range_header.replace("bytes=", "").split("-")
start = int(bytes_range[0])
if bytes_range[1]:
end = int(bytes_range[1])
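# e.g. "Range: bytes=500-999" parses to start=500, end=999,
# while "Range: bytes=500-" leaves end=-1 (read to the end of the file)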
try:
exists = path.exists(filename)
#not found
# not found
if not exists:
return res.write_status(404).end(b'Not Found')
return res.write_status(404).end(b"Not Found")
#get size and last modified date
# get size and last modified date
stats = os.stat(filename)
total_size = stats.st_size
size = total_size
last_modified = time.strftime('%a, %d %b %Y %H:%M:%S GMT', time.gmtime(stats.st_mtime))
#check if modified since is provided
last_modified = time.strftime(
"%a, %d %b %Y %H:%M:%S GMT", time.gmtime(stats.st_mtime)
)
# check if modified since is provided
if if_modified_since == last_modified:
return res.write_status(304).end_without_body()
#tells the browser the last modified date
res.write_header(b'Last-Modified', last_modified)
# tells the browser the last modified date
res.write_header(b"Last-Modified", last_modified)
#add content type
# add content type
(content_type, encoding) = mimetypes.guess_type(filename, strict=True)
if content_type and encoding:
res.write_header(b'Content-Type', '%s; %s' % (content_type, encoding))
res.write_header(b"Content-Type", "%s; %s" % (content_type, encoding))
elif content_type:
res.write_header(b'Content-Type', content_type)
res.write_header(b"Content-Type", content_type)
with open(filename, "rb") as fd:
#check range and support it
# check range and support it
if start > 0 or not end == -1:
if end < 0 or end >= size:
end = size - 1
@ -60,32 +62,34 @@ async def sendfile(res, req, filename):
else:
end = size - 1
res.write_status(200)
#tells the browser that we support range
res.write_header(b'Accept-Ranges', b'bytes')
res.write_header(b'Content-Range', 'bytes %d-%d/%d' % (start, end, total_size))
# tells the browser that we support range
res.write_header(b"Accept-Ranges", b"bytes")
res.write_header(
b"Content-Range", "bytes %d-%d/%d" % (start, end, total_size)
)
pending_size = size
#keep sending until abort or done
# keep sending until abort or done
while not res.aborted:
chunk_size = 16384 #16kb chunks
chunk_size = 16384 # 16kb chunks
if chunk_size > pending_size:
chunk_size = pending_size
buffer = fd.read(chunk_size)
pending_size = pending_size - chunk_size
(ok, done) = await res.send_chunk(buffer, size)
if not ok or done: #if we can't send, the request was probably aborted
if not ok or done: # if we can't send, the request was probably aborted
break
except Exception as error:
res.write_status(500).end("Internal Error")
res.write_status(500).end("Internal Error")
def in_directory(file, directory):
#make both absolute
directory = path.join(path.realpath(directory), '')
# make both absolute
directory = path.join(path.realpath(directory), "")
file = path.realpath(file)
#return true, if the common prefix of both is equal to directory
#e.g. /a/b/c/d.rst and directory is /a/b, the common prefix is /a/b
# return true, if the common prefix of both is equal to directory
# e.g. /a/b/c/d.rst and directory is /a/b, the common prefix is /a/b
return path.commonprefix([file, directory]) == directory
@ -93,39 +97,39 @@ def static_route(app, route, directory):
def route_handler(res, req):
url = req.get_url()
res.grab_aborted_handler()
url = url[len(route)::]
url = url[len(route) : :]
if url.startswith("/"):
url = url[1::]
filename = path.join(path.realpath(directory), url)
if not in_directory(filename, directory):
res.write_status(404).end_without_body()
return
res.run_async(sendfile(res, req, filename))
if route.startswith("/"):
route = route[1::]
app.get("%s/*" % route, route_handler)
def middleware(*functions):
#we use an optional data=None at the end so you can use a middleware inside a middleware
# we use an optional data=None at the end so you can use a middleware inside a middleware
async def middleware_route(res, req, data=None):
some_async_as_run = False
#cycle through all middlewares
# cycle through all middlewares
for function in functions:
#detect whether it is a coroutine or not
# detect whether it is a coroutine or not
if inspect.iscoroutinefunction(function):
#in async handlers the query string, arguments and headers are only valid until the first await
# in async handlers the query string, arguments and headers are only valid until the first await
if not some_async_as_run:
#preserve queries, headers, parameters, url, full_url and method
# preserve queries, headers, parameters, url, full_url and method
req.preserve()
some_async_as_run = True
some_async_as_run = True
data = await function(res, req, data)
else:
#call middlewares
# call middlewares
data = function(res, req, data)
#stops if returns Falsy
# stops if returns Falsy
if not data:
break
return data
@ -133,7 +137,7 @@ def middleware(*functions):
return middleware_route
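# Illustrative usage (not part of this diff): each function receives (res, req, data)
# and returning a falsy value stops the chain, so an auth check can short-circuit the
# final handler; auth and the route below are hypothetical.
# def auth(res, req, data=None):
#     token = req.get_header("authorization")
#     if not token:
#         res.write_status(403).end("Forbidden")
#         return False
#     return token
# app.get("/protected", middleware(auth, lambda res, req, token: res.end("ok")))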
class MiddlewareRouter():
class MiddlewareRouter:
def __init__(self, app, *middlewares):
self.app = app
self.middlewares = middlewares
@ -149,43 +153,51 @@ class MiddlewareRouter():
middies.append(handler)
self.app.post(path, middleware(*middies))
return self
def options(self, path, handler):
middies = list(self.middlewares)
middies.append(handler)
self.app.options(path, middleware(*middies))
return self
def delete(self, path, handler):
middies = list(self.middlewares)
middies.append(handler)
self.app.delete(path, middleware(*middies))
return self
def patch(self, path, handler):
middies = list(self.middlewares)
middies.append(handler)
self.app.patch(path, middleware(*middies))
return self
def put(self, path, handler):
middies = list(self.middlewares)
middies.append(handler)
self.app.put(path, middleware(*middies))
return self
def head(self, path, handler):
middies = list(self.middlewares)
middies.append(handler)
self.app.head(path, middleware(*middies))
return self
def connect(self, path, handler):
middies = list(self.middlewares)
middies.append(handler)
self.app.connect(path, middleware(*middies))
return self
def trace(self, path, handler):
middies = list(self.middlewares)
middies.append(handler)
self.app.trace(path, middleware(*middies))
return self
def any(self, path, handler):
middies = list(self.middlewares)
middies.append(handler)
self.app.any(path, middleware(*middies))
return self
return self
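# Illustrative usage (not part of this diff): a MiddlewareRouter applies the same
# middlewares to every route registered through it; auth, admin_dashboard and
# save_settings are hypothetical.
# router = MiddlewareRouter(app, auth)
# router.get("/admin", admin_dashboard)  # auth runs before admin_dashboard
# router.post("/admin/settings", save_settings)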

View file

@ -1,4 +1,3 @@
import asyncio
import threading
import time
@ -13,11 +12,11 @@ def future_handler(future, loop, exception_handler, response):
future.result()
return None
except Exception as error:
if hasattr(exception_handler, '__call__'):
if hasattr(exception_handler, "__call__"):
exception_handler(loop, error, response)
else:
try:
#just log the error to the console to call attention to it
# just log the error to the console to call attention to it
print("Uncaught Exception: %s" % str(error))
if response != None:
response.write_status(500).end("Internal Error")
@ -25,14 +24,17 @@ def future_handler(future, loop, exception_handler, response):
return None
return None
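# Illustrative sketch (not part of this diff): a handler passed to
# Loop(exception_handler=...) receives the loop, the error (or asyncio context),
# and the response when the failure happened while serving a request, or None otherwise.
# def on_error(loop, error, response):
#     print("Uncaught: %s" % str(error))
#     if response is not None:
#         response.write_status(500).end("Internal Error")
# loop = Loop(exception_handler=on_error)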
class Loop:
def __init__(self, exception_handler=None):
self.loop = asyncio.new_event_loop()
self.uv_loop = UVLoop()
if hasattr(exception_handler, '__call__'):
if hasattr(exception_handler, "__call__"):
self.exception_handler = exception_handler
self.loop.set_exception_handler(lambda loop, context: exception_handler(loop, context, None))
self.loop.set_exception_handler(
lambda loop, context: exception_handler(loop, context, None)
)
else:
self.exception_handler = None
@ -43,18 +45,17 @@ class Loop:
def set_timeout(self, timeout, callback, user_data):
return self.uv_loop.create_timer(timeout, 0, callback, user_data)
def create_future(self):
return self.loop.create_future()
def start(self):
self.started = True
#run asyncio once per tick
# run asyncio once per tick
def tick(loop):
#run asyncio once
# run asyncio once
loop.run_once_asyncio()
#use check for calling asyncio once per tick
# use check for calling asyncio once per tick
self.timer = self.uv_loop.create_timer(0, 1, tick, self)
# self.timer = self.uv_loop.create_check(tick, self)
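# i.e. a repeating libuv timer (0 ms initial delay, 1 ms repeat) drives one asyncio
# iteration per libuv tick; the commented create_check variant would run it on the
# loop's check phase instead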
@ -66,18 +67,17 @@ class Loop:
def run_once_asyncio(self):
# with suppress(asyncio.CancelledError):
#run only one step
# run only one step
self.loop.call_soon(self.loop.stop)
self.loop.run_forever()
def stop(self):
if(self.started):
if self.started:
self.timer.stop()
self.started = False
#unbind run_once
#if it is still running, stop it
if self.loop.is_running():
# unbind run_once
# if it is still running, stop it
if self.loop.is_running():
self.loop.stop()
self.last_defer = None
@ -85,26 +85,27 @@ class Loop:
pending = asyncio.all_tasks(self.loop)
# Run loop until tasks done
self.loop.run_until_complete(asyncio.gather(*pending))
#Exposes native loop for uWS
# Exposes native loop for uWS
def get_native_loop(self):
return self.uv_loop.get_native_loop()
def run_async(self, task, response=None):
#with run_once
# with run_once
future = asyncio.ensure_future(task, loop=self.loop)
#with threads
future.add_done_callback(lambda f: future_handler(f, self.loop, self.exception_handler, response))
#force asyncio run once to enable req in async functions before first await
# with threads
future.add_done_callback(
lambda f: future_handler(f, self.loop, self.exception_handler, response)
)
# force asyncio run once to enable req in async functions before first await
self.run_once_asyncio()
#if response != None: #set auto cork
# response.needs_cork = True
# if response != None: #set auto cork
# response.needs_cork = True
return future
# if sys.version_info >= (3, 11)
# with asyncio.Runner(loop_factory=uvloop.new_event_loop) as runner:
# runner.run(main())
@ -113,7 +114,7 @@ class Loop:
# asyncio.run(main())
#see ./native/uv_selector.txt
# see ./native/uv_selector.txt
# will only work on linux and macos
# class UVSelector(asyncio.SelectorEventLoop):
# def register(self, fileobj, events, data=None):

Diff is too large to display

View file

@ -1,65 +1,65 @@
status_codes = {
100 : b'100 Continue',
101 : b'101 Switching Protocols',
102 : b'102 Processing',
103 : b'103 Early Hints',
200 : b'200 OK',
201 : b'201 Created',
202 : b'202 Accepted',
203 : b'203 Non-Authoritative Information',
204 : b'204 No Content',
205 : b'205 Reset Content',
206 : b'206 Partial Content',
207 : b'207 Multi-Status',
208 : b'208 Already Reported',
226 : b'226 IM Used (HTTP Delta encoding)',
300 : b'300 Multiple Choices',
301 : b'301 Moved Permanently',
302 : b'302 Found',
303 : b'303 See Other',
304 : b'304 Not Modified',
305 : b'305 Use Proxy Deprecated',
306 : b'306 unused',
307 : b'307 Temporary Redirect',
308 : b'308 Permanent Redirect',
400 : b'400 Bad Request',
401 : b'401 Unauthorized',
402 : b'402 Payment Required Experimental',
403 : b'403 Forbidden',
404 : b'404 Not Found',
405 : b'405 Method Not Allowed',
406 : b'406 Not Acceptable',
407 : b'407 Proxy Authentication Required',
408 : b'408 Request Timeout',
409 : b'409 Conflict',
410 : b'410 Gone',
411 : b'411 Length Required',
412 : b'412 Precondition Failed',
413 : b'413 Payload Too Large',
414 : b'414 URI Too Long',
415 : b'415 Unsupported Media Type',
416 : b'416 Range Not Satisfiable',
417 : b'417 Expectation Failed',
418 : b'418 I\'m a teapot',
421 : b'421 Misdirected Request',
422 : b'422 Unprocessable Entity',
423 : b'423 Locked',
424 : b'424 Failed Dependency',
425 : b'425 Too Early Experimental',
426 : b'426 Upgrade Required',
428 : b'428 Precondition Required',
429 : b'429 Too Many Requests',
431 : b'431 Request Header Fields Too Large',
451 : b'451 Unavailable For Legal Reasons',
500 : b'500 Internal Server Error',
501 : b'501 Not Implemented',
502 : b'502 Bad Gateway',
503 : b'503 Service Unavailable',
504 : b'504 Gateway Timeout',
505 : b'505 HTTP Version Not Supported',
506 : b'506 Variant Also Negotiates',
507 : b'507 Insufficient Storage',
508 : b'508 Loop Detected',
510 : b'510 Not Extended',
511 : b'511 Network Authentication Required'
}
100: b"100 Continue",
101: b"101 Switching Protocols",
102: b"102 Processing",
103: b"103 Early Hints",
200: b"200 OK",
201: b"201 Created",
202: b"202 Accepted",
203: b"203 Non-Authoritative Information",
204: b"204 No Content",
205: b"205 Reset Content",
206: b"206 Partial Content",
207: b"207 Multi-Status",
208: b"208 Already Reported",
226: b"226 IM Used (HTTP Delta encoding)",
300: b"300 Multiple Choices",
301: b"301 Moved Permanently",
302: b"302 Found",
303: b"303 See Other",
304: b"304 Not Modified",
305: b"305 Use Proxy Deprecated",
306: b"306 unused",
307: b"307 Temporary Redirect",
308: b"308 Permanent Redirect",
400: b"400 Bad Request",
401: b"401 Unauthorized",
402: b"402 Payment Required Experimental",
403: b"403 Forbidden",
404: b"404 Not Found",
405: b"405 Method Not Allowed",
406: b"406 Not Acceptable",
407: b"407 Proxy Authentication Required",
408: b"408 Request Timeout",
409: b"409 Conflict",
410: b"410 Gone",
411: b"411 Length Required",
412: b"412 Precondition Failed",
413: b"413 Payload Too Large",
414: b"414 URI Too Long",
415: b"415 Unsupported Media Type",
416: b"416 Range Not Satisfiable",
417: b"417 Expectation Failed",
418: b"418 I'm a teapot",
421: b"421 Misdirected Request",
422: b"422 Unprocessable Entity",
423: b"423 Locked",
424: b"424 Failed Dependency",
425: b"425 Too Early Experimental",
426: b"426 Upgrade Required",
428: b"428 Precondition Required",
429: b"429 Too Many Requests",
431: b"431 Request Header Fields Too Large",
451: b"451 Unavailable For Legal Reasons",
500: b"500 Internal Server Error",
501: b"501 Not Implemented",
502: b"502 Bad Gateway",
503: b"503 Service Unavailable",
504: b"504 Gateway Timeout",
505: b"505 HTTP Version Not Supported",
506: b"506 Variant Also Negotiates",
507: b"507 Insufficient Storage",
508: b"508 Loop Detected",
510: b"510 Not Extended",
511: b"511 Network Authentication Required",
}

View file

@ -1,10 +1,10 @@
import cffi
import os
import platform
ffi = cffi.FFI()
ffi.cdef("""
ffi.cdef(
"""
typedef void (*socketify_prepare_handler)(void* user_data);
@ -54,23 +54,36 @@ void socketify_timer_set_repeat(socketify_timer* timer, uint64_t repeat);
socketify_timer* socketify_create_check(socketify_loop* loop, socketify_timer_handler handler, void* user_data);
void socketify_check_destroy(socketify_timer* timer);
""")
"""
)
library_extension = "dll" if platform.system().lower() == "windows" else "so"
library_path = os.path.join(os.path.dirname(__file__), "libsocketify_%s_%s.%s" % (platform.system().lower(), "arm64" if "arm" in platform.processor().lower() else "amd64", library_extension))
library_path = os.path.join(
os.path.dirname(__file__),
"libsocketify_%s_%s.%s"
% (
platform.system().lower(),
"arm64" if "arm" in platform.processor().lower() else "amd64",
library_extension,
),
)
lib = ffi.dlopen(library_path)
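# e.g. on x86-64 Linux this resolves to ".../libsocketify_linux_amd64.so";
# on Windows the extension becomes ".dll"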
@ffi.callback("void(void *)")
def socketify_generic_handler(data):
if not data == ffi.NULL:
(handler, user_data) = ffi.from_handle(data)
handler(user_data)
class UVCheck:
def __init__(self, loop, handler, user_data):
self._handler_data = ffi.new_handle((handler, user_data))
self._ptr = lib.socketify_create_check(loop, socketify_generic_handler, self._handler_data)
self._ptr = lib.socketify_create_check(
loop, socketify_generic_handler, self._handler_data
)
def stop(self):
lib.socketify_check_destroy(self._ptr)
self._handler_data = None
@ -81,10 +94,18 @@ class UVCheck:
lib.socketify_check_destroy(self._ptr)
self._handler_data = None
class UVTimer:
def __init__(self, loop, timeout, repeat, handler, user_data):
self._handler_data = ffi.new_handle((handler, user_data))
self._ptr = lib.socketify_create_timer(loop, ffi.cast("uint64_t", timeout), ffi.cast("uint64_t", repeat), socketify_generic_handler, self._handler_data)
self._ptr = lib.socketify_create_timer(
loop,
ffi.cast("uint64_t", timeout),
ffi.cast("uint64_t", repeat),
socketify_generic_handler,
self._handler_data,
)
def stop(self):
lib.socketify_timer_destroy(self._ptr)
self._handler_data = None
@ -92,7 +113,7 @@ class UVTimer:
def set_repeat(self, repeat):
lib.socketify_timer_set_repeat(self._ptr, ffi.cast("uint64_t", repeat))
def __del__(self):
if self._ptr != ffi.NULL:
lib.socketify_timer_destroy(self._ptr)
@ -107,14 +128,16 @@ class UVLoop:
def on_prepare(self, handler, user_data):
self._handler_data = ffi.new_handle((handler, user_data))
lib.socketify_on_prepare(self._loop, socketify_generic_handler, self._handler_data)
lib.socketify_on_prepare(
self._loop, socketify_generic_handler, self._handler_data
)
def create_timer(self, timeout, repeat, handler, user_data):
return UVTimer(self._loop, timeout, repeat, handler, user_data)
def create_check(self, handler, user_data):
return UVCheck(self._loop, handler, user_data)
def prepare_unbind(self):
lib.socketify_prepare_unbind(self._loop)
@ -124,7 +147,7 @@ class UVLoop:
def __del__(self):
lib.socketify_destroy_loop(self._loop)
self._handler_data = None
def run(self):
return lib.socketify_loop_run(self._loop, lib.SOCKETIFY_RUN_DEFAULT)
@ -132,4 +155,4 @@ class UVLoop:
return lib.socketify_loop_run(self._loop, lib.SOCKETIFY_RUN_ONCE)
def stop(self):
lib.socketify_loop_stop(self._loop)
lib.socketify_loop_stop(self._loop)

View file

@ -3,7 +3,7 @@
# import os.path
# DLL_EXPORT typedef void (*uws_listen_domain_handler)(struct us_listen_socket_t *listen_socket, const char* domain, size_t domain_length, int options, void *user_data);
# DLL_EXPORT typedef void (*uws_listen_domain_handler)(struct us_listen_socket_t *listen_socket, const char* domain, size_t domain_length, int options, void *user_data);
# DLL_EXPORT typedef void (*uws_filter_handler)(uws_res_t *response, int, void *user_data);
# DLL_EXPORT void uws_app_listen_domain(int ssl, uws_app_t *app, const char *domain,size_t server_name_length,_listen_domain_handler handler, void *user_data);
@ -15,31 +15,41 @@
from socketify import App, AppOptions, OpCode, CompressOptions
import asyncio
def ws_open(ws):
print('A WebSocket got connected!')
print("A WebSocket got connected!")
ws.send("Hello World!", OpCode.TEXT)
def ws_message(ws, message, opcode):
print(message, opcode)
#Ok is false if backpressure was built up, wait for drain
# Ok is false if backpressure was built up, wait for drain
ok = ws.send(message, opcode)
async def ws_upgrade(res, req, socket_context):
key = req.get_header("sec-websocket-key")
protocol = req.get_header("sec-websocket-protocol")
extensions = req.get_header("sec-websocket-extensions")
await asyncio.sleep(2)
res.upgrade(key, protocol, extensions, socket_context)
app = App()
app.ws("/*", {
'compression': CompressOptions.SHARED_COMPRESSOR,
'max_payload_length': 16 * 1024 * 1024,
'idle_timeout': 12,
'open': ws_open,
'message': ws_message,
'upgrade': ws_upgrade
})
app.any("/", lambda res,req: res.end("Nothing to see here!"))
app.listen(3000, lambda config: print("Listening on port http://localhost:%d now\n" % (config.port)))
app.run()
app = App()
app.ws(
"/*",
{
"compression": CompressOptions.SHARED_COMPRESSOR,
"max_payload_length": 16 * 1024 * 1024,
"idle_timeout": 12,
"open": ws_open,
"message": ws_message,
"upgrade": ws_upgrade,
},
)
app.any("/", lambda res, req: res.end("Nothing to see here!"))
app.listen(
3000,
lambda config: print("Listening on port http://localhost:%d now\n" % (config.port)),
)
app.run()