Mirror of https://github.com/micropython/micropython-lib

all: Run black over all code.

Signed-off-by: Jim Mussared <jim.mussared@gmail.com>

Branch: pull/376/head
Parent: af3e1aff9e
Commit: fa13cbbc8b
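The hunks below are purely mechanical reformatting produced by black: single quotes become double quotes, hex literals are upper-cased, commented-out code gains a space after "#", long calls are wrapped one argument per line with a trailing comma, and two blank lines separate top-level definitions. As a rough sketch of the pattern (the package name here is made up for illustration and is not taken from the commit):

    # before black (hypothetical example mirroring the hunks below)
    setup(name='micropython-example',
          description='Example module for MicroPython',
          py_modules=['example'])

    # after black
    setup(
        name="micropython-example",
        description="Example module for MicroPython",
        py_modules=["example"],
    )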
@@ -1,6 +1,7 @@
#!/usr/bin/env python3
# MicroPython will pick up glob from the current dir otherwise.
import sys

sys.path.pop(0)

import glob

@@ -68,12 +69,13 @@ This is MicroPython compatibility module, allowing applications using
MicroPython-specific features to run on CPython.
"""

MICROPYTHON_DEVELS = 'micropython-lib Developers'
MICROPYTHON_DEVELS_EMAIL = 'micro-python@googlegroups.com'
CPYTHON_DEVELS = 'CPython Developers'
CPYTHON_DEVELS_EMAIL = 'python-dev@python.org'
PYPY_DEVELS = 'PyPy Developers'
PYPY_DEVELS_EMAIL = 'pypy-dev@python.org'
MICROPYTHON_DEVELS = "micropython-lib Developers"
MICROPYTHON_DEVELS_EMAIL = "micro-python@googlegroups.com"
CPYTHON_DEVELS = "CPython Developers"
CPYTHON_DEVELS_EMAIL = "python-dev@python.org"
PYPY_DEVELS = "PyPy Developers"
PYPY_DEVELS_EMAIL = "pypy-dev@python.org"


def parse_metadata(f):
    data = {}

@@ -142,7 +144,7 @@ def main():
        data["license"] = "MIT"
    elif data["srctype"] == "cpython-backport":
        assert module.startswith("cpython-")
        module = module[len("cpython-"):]
        module = module[len("cpython-") :]
        data["author"] = MICROPYTHON_DEVELS
        data["author_email"] = MICROPYTHON_DEVELS_EMAIL
        data["maintainer"] = MICROPYTHON_DEVELS

@@ -163,7 +165,9 @@ def main():

    data["modules"] = "'" + data["name"].rsplit(".", 1)[0] + "'"
    if "extra_modules" in data:
        data["modules"] += ", " + ", ".join(["'" + x.strip() + "'" for x in data["extra_modules"].split(",")])
        data["modules"] += ", " + ", ".join(
            ["'" + x.strip() + "'" for x in data["extra_modules"].split(",")]
        )

    if "depends" in data:
        deps = ["micropython-" + x.strip() for x in data["depends"].split(",")]

@@ -1,20 +1,24 @@
import sys

# Remove current dir from sys.path, otherwise setuptools will peek up our
# module instead of system's.
sys.path.pop(0)
from setuptools import setup

sys.path.append("..")
import sdist_upip

setup(name='micropython-test.support',
      version='0.1.3',
      description='test.support module for MicroPython',
      long_description="This is a module reimplemented specifically for MicroPython standard library,\nwith efficient and lean design in mind. Note that this module is likely work\nin progress and likely supports just a subset of CPython's corresponding\nmodule. Please help with the development if you are interested in this\nmodule.",
      url='https://github.com/micropython/micropython-lib',
      author='micropython-lib Developers',
      author_email='micro-python@googlegroups.com',
      maintainer='micropython-lib Developers',
      maintainer_email='micro-python@googlegroups.com',
      license='MIT',
      cmdclass={'sdist': sdist_upip.sdist},
      packages=['test'])
setup(
    name="micropython-test.support",
    version="0.1.3",
    description="test.support module for MicroPython",
    long_description="This is a module reimplemented specifically for MicroPython standard library,\nwith efficient and lean design in mind. Note that this module is likely work\nin progress and likely supports just a subset of CPython's corresponding\nmodule. Please help with the development if you are interested in this\nmodule.",
    url="https://github.com/micropython/micropython-lib",
    author="micropython-lib Developers",
    author_email="micro-python@googlegroups.com",
    maintainer="micropython-lib Developers",
    maintainer_email="micro-python@googlegroups.com",
    license="MIT",
    cmdclass={"sdist": sdist_upip.sdist},
    packages=["test"],
)

@@ -5,7 +5,8 @@ import gc
import contextlib


TESTFN = '@test'
TESTFN = "@test"


def run_unittest(*classes):
    suite = unittest.TestSuite()

@@ -21,18 +22,22 @@ def run_unittest(*classes):
    runner = unittest.TestRunner()
    result = runner.run(suite)


def can_symlink():
    return False


def skip_unless_symlink(test):
    """Skip decorator for tests that require functional symlink"""
    ok = can_symlink()
    msg = "Requires functional symlink implementation"
    return test if ok else unittest.skip(msg)(test)


def create_empty_file(name):
    open(name, "w").close()


@contextlib.contextmanager
def disable_gc():
    have_gc = gc.isenabled()

@@ -43,11 +48,13 @@ def disable_gc():
        if have_gc:
            gc.enable()


def gc_collect():
    gc.collect()
    gc.collect()
    gc.collect()


@contextlib.contextmanager
def captured_output(stream_name):
    org = getattr(sys, stream_name)

@@ -58,8 +65,10 @@ def captured_output(stream_name):
    finally:
        setattr(sys, stream_name, org)


def captured_stderr():
    return captured_output("stderr")


def requires_IEEE_754(f):
    return f

@@ -14,13 +14,16 @@ def print_stream(resp):
            break
        print(line.rstrip())


def run(url):
    resp = yield from aiohttp.request("GET", url)
    print(resp)
    yield from print_stream(resp)


import sys
import logging

logging.basicConfig(level=logging.INFO)
url = sys.argv[1]
loop = asyncio.get_event_loop()

@@ -1,20 +1,24 @@
import sys

# Remove current dir from sys.path, otherwise setuptools will peek up our
# module instead of system's.
sys.path.pop(0)
from setuptools import setup

sys.path.append("..")
import sdist_upip

setup(name='micropython-uaiohttpclient',
      version='0.5.1',
      description='HTTP client module for MicroPython uasyncio module',
      long_description=open('README').read(),
      url='https://github.com/micropython/micropython-lib',
      author='Paul Sokolovsky',
      author_email='micro-python@googlegroups.com',
      maintainer='micropython-lib Developers',
      maintainer_email='micro-python@googlegroups.com',
      license='MIT',
      cmdclass={'sdist': sdist_upip.sdist},
      py_modules=['uaiohttpclient'])
setup(
    name="micropython-uaiohttpclient",
    version="0.5.1",
    description="HTTP client module for MicroPython uasyncio module",
    long_description=open("README").read(),
    url="https://github.com/micropython/micropython-lib",
    author="Paul Sokolovsky",
    author_email="micro-python@googlegroups.com",
    maintainer="micropython-lib Developers",
    maintainer_email="micro-python@googlegroups.com",
    license="MIT",
    cmdclass={"sdist": sdist_upip.sdist},
    py_modules=["uaiohttpclient"],
)

@@ -2,7 +2,6 @@ import uasyncio as asyncio


class ClientResponse:

    def __init__(self, reader):
        self.content = reader

@@ -14,23 +13,22 @@ class ClientResponse:


class ChunkedClientResponse(ClientResponse):

    def __init__(self, reader):
        self.content = reader
        self.chunk_size = 0

    def read(self, sz=4*1024*1024):
    def read(self, sz=4 * 1024 * 1024):
        if self.chunk_size == 0:
            l = yield from self.content.readline()
            #print("chunk line:", l)
            # print("chunk line:", l)
            l = l.split(b";", 1)[0]
            self.chunk_size = int(l, 16)
            #print("chunk size:", self.chunk_size)
            # print("chunk size:", self.chunk_size)
            if self.chunk_size == 0:
                # End of message
                sep = yield from self.content.read(2)
                assert sep == b"\r\n"
                return b''
                return b""
        data = yield from self.content.read(min(sz, self.chunk_size))
        self.chunk_size -= len(data)
        if self.chunk_size == 0:

@@ -54,9 +52,13 @@ def request_raw(method, url):
    # Use protocol 1.0, because 1.1 always allows to use chunked transfer-encoding
    # But explicitly set Connection: close, even though this should be default for 1.0,
    # because some servers misbehave w/o it.
    query = "%s /%s HTTP/1.0\r\nHost: %s\r\nConnection: close\r\nUser-Agent: compat\r\n\r\n" % (method, path, host)
    yield from writer.awrite(query.encode('latin-1'))
    # yield from writer.aclose()
    query = "%s /%s HTTP/1.0\r\nHost: %s\r\nConnection: close\r\nUser-Agent: compat\r\n\r\n" % (
        method,
        path,
        host,
    )
    yield from writer.awrite(query.encode("latin-1"))
    # yield from writer.aclose()
    return reader


@@ -1,8 +1,9 @@
import uasyncio.core as asyncio
import time
import logging

logging.basicConfig(level=logging.DEBUG)
#asyncio.set_debug(True)
# asyncio.set_debug(True)


def cb():

@@ -1,20 +1,24 @@
import sys

# Remove current dir from sys.path, otherwise setuptools will peek up our
# module instead of system's.
sys.path.pop(0)
from setuptools import setup

sys.path.append("..")
import sdist_upip

setup(name='micropython-uasyncio.core',
      version='2.0',
      description='Lightweight asyncio-like library for MicroPython, built around native Python coroutines. (Core event loop).',
      long_description='Lightweight asyncio-like library for MicroPython, built around native Python coroutines. (Core event loop).',
      url='https://github.com/micropython/micropython-lib',
      author='Paul Sokolovsky',
      author_email='micro-python@googlegroups.com',
      maintainer='micropython-lib Developers',
      maintainer_email='micro-python@googlegroups.com',
      license='MIT',
      cmdclass={'sdist': sdist_upip.sdist},
      packages=['uasyncio'])
setup(
    name="micropython-uasyncio.core",
    version="2.0",
    description="Lightweight asyncio-like library for MicroPython, built around native Python coroutines. (Core event loop).",
    long_description="Lightweight asyncio-like library for MicroPython, built around native Python coroutines. (Core event loop).",
    url="https://github.com/micropython/micropython-lib",
    author="Paul Sokolovsky",
    author_email="micro-python@googlegroups.com",
    maintainer="micropython-lib Developers",
    maintainer_email="micro-python@googlegroups.com",
    license="MIT",
    cmdclass={"sdist": sdist_upip.sdist},
    packages=["uasyncio"],
)

@@ -1,29 +1,35 @@
import time

try:
    import uasyncio.core as asyncio

    is_uasyncio = True
except ImportError:
    import asyncio

    is_uasyncio = False
import logging
#logging.basicConfig(level=logging.DEBUG)
#asyncio.set_debug(True)

# logging.basicConfig(level=logging.DEBUG)
# asyncio.set_debug(True)


output = []
cancelled = False


def print1(msg):
    print(msg)
    output.append(msg)


def looper1(iters):
    global cancelled
    try:
        for i in range(iters):
            print1("ping1")
            # sleep() isn't properly cancellable
            #yield from asyncio.sleep(1.0)
            # yield from asyncio.sleep(1.0)
            t = time.time()
            while time.time() - t < 1:
                yield from asyncio.sleep(0)

@@ -32,11 +38,12 @@ def looper1(iters):
        print1("cancelled")
        cancelled = True


def looper2(iters):
    for i in range(iters):
        print1("ping2")
        # sleep() isn't properly cancellable
        #yield from asyncio.sleep(1.0)
        # yield from asyncio.sleep(1.0)
        t = time.time()
        while time.time() - t < 1:
            yield from asyncio.sleep(0)

@@ -66,7 +73,7 @@ def run_to():
    yield from asyncio.sleep(0)

    # Once saw 3 ping3's output on CPython 3.5.2
    assert output == ['ping1', 'ping1', 'ping1', 'cancelled', 'ping2', 'ping2']
    assert output == ["ping1", "ping1", "ping1", "cancelled", "ping2", "ping2"]


loop = asyncio.get_event_loop()

@@ -22,7 +22,7 @@ async def done():
    global test_finished
    while True:
        if len(result) == COROS * ITERS:
            #print(result)
            # print(result)
            assert result == list(range(COROS)) * ITERS
            test_finished = True
            return

@@ -2,12 +2,12 @@
# in case of I/O completion before that.
import uasyncio.core as uasyncio
import logging

logging.basicConfig(level=logging.DEBUG)
#uasyncio.set_debug(True)
# uasyncio.set_debug(True)


class MockEventLoop(uasyncio.EventLoop):

    def __init__(self):
        super().__init__()
        self.t = 0

@@ -20,12 +20,14 @@ class MockEventLoop(uasyncio.EventLoop):
        self.t += delta

    def wait(self, delay):
        #print("%d: wait(%d)" % (self.t, delay))
        # print("%d: wait(%d)" % (self.t, delay))
        self.pass_time(100)

        if self.t == 100:

            def cb_1st():
                self.msgs.append("I should be run first, time: %s" % self.time())

            self.call_soon(cb_1st)

        if self.t == 1000:

@@ -34,9 +36,11 @@ class MockEventLoop(uasyncio.EventLoop):

loop = MockEventLoop()


def cb_2nd():
    loop.msgs.append("I should be run second, time: %s" % loop.time())


loop.call_later_ms(500, cb_2nd)

try:

@@ -47,4 +51,6 @@ except StopIteration:
    print(loop.msgs)
# .wait() is now called on each loop iteration, and for our mock case, it means that
# at the time of running, self.time() will be skewed by 100 virtual time units.
assert loop.msgs == ['I should be run first, time: 100', 'I should be run second, time: 500'], str(loop.msgs)
assert loop.msgs == ["I should be run first, time: 100", "I should be run second, time: 500"], str(
    loop.msgs
)

@@ -3,8 +3,9 @@ try:
except ImportError:
    import asyncio
import logging
#logging.basicConfig(level=logging.DEBUG)
#asyncio.set_debug(True)

# logging.basicConfig(level=logging.DEBUG)
# asyncio.set_debug(True)


def looper(iters):

@@ -8,11 +8,13 @@ type_gen = type((lambda: (yield))())
DEBUG = 0
log = None


def set_debug(val):
    global DEBUG, log
    DEBUG = val
    if val:
        import logging

        log = logging.getLogger("uasyncio.core")


@@ -25,7 +27,6 @@ class TimeoutError(CancelledError):


class EventLoop:

    def __init__(self, runq_len=16, waitq_len=16):
        self.runq = ucollections.deque((), runq_len, True)
        self.waitq = utimeq.utimeq(waitq_len)

@@ -130,7 +131,10 @@ class EventLoop:
                        elif isinstance(ret, StopLoop):
                            return arg
                        else:
                            assert False, "Unknown syscall yielded: %r (of type %r)" % (ret, type(ret))
                            assert False, "Unknown syscall yielded: %r (of type %r)" % (
                                ret,
                                type(ret),
                            )
                    elif isinstance(ret, type_gen):
                        self.call_soon(ret)
                    elif isinstance(ret, int):

@@ -143,7 +147,10 @@ class EventLoop:
                        # Don't reschedule
                        continue
                    else:
                        assert False, "Unsupported coroutine yield value: %r (of type %r)" % (ret, type(ret))
                        assert False, "Unsupported coroutine yield value: %r (of type %r)" % (
                            ret,
                            type(ret),
                        )
                except StopIteration as e:
                    if __debug__ and DEBUG:
                        log.debug("Coroutine finished: %s", cb)

@@ -176,6 +183,7 @@ class EventLoop:
        def _run_and_stop():
            yield from coro
            yield StopLoop(0)

        self.call_soon(_run_and_stop())
        self.run_forever()

@@ -187,72 +195,80 @@ class EventLoop:


class SysCall:

    def __init__(self, *args):
        self.args = args

    def handle(self):
        raise NotImplementedError


# Optimized syscall with 1 arg
class SysCall1(SysCall):

    def __init__(self, arg):
        self.arg = arg


class StopLoop(SysCall1):
    pass


class IORead(SysCall1):
    pass


class IOWrite(SysCall1):
    pass


class IOReadDone(SysCall1):
    pass


class IOWriteDone(SysCall1):
    pass


_event_loop = None
_event_loop_class = EventLoop


def get_event_loop(runq_len=16, waitq_len=16):
    global _event_loop
    if _event_loop is None:
        _event_loop = _event_loop_class(runq_len, waitq_len)
    return _event_loop


def sleep(secs):
    yield int(secs * 1000)


# Implementation of sleep_ms awaitable with zero heap memory usage
class SleepMs(SysCall1):

    def __init__(self):
        self.v = None
        self.arg = None

    def __call__(self, arg):
        self.v = arg
        #print("__call__")
        # print("__call__")
        return self

    def __iter__(self):
        #print("__iter__")
        # print("__iter__")
        return self

    def __next__(self):
        if self.v is not None:
            #print("__next__ syscall enter")
            # print("__next__ syscall enter")
            self.arg = self.v
            self.v = None
            return self
        #print("__next__ syscall exit")
        # print("__next__ syscall exit")
        _stop_iter.__traceback__ = None
        raise _stop_iter


_stop_iter = StopIteration()
sleep_ms = SleepMs()

@@ -269,7 +285,6 @@ class TimeoutObj:


def wait_for_ms(coro, timeout):

    def waiter(coro, timeout_obj):
        res = yield from coro
        if __debug__ and DEBUG:

@@ -282,7 +297,7 @@ def wait_for_ms(coro, timeout):
        if __debug__ and DEBUG:
            log.debug("timeout_func: cancelling %s", timeout_obj.coro)
        prev = timeout_obj.coro.pend_throw(TimeoutError())
        #print("prev pend", prev)
        # print("prev pend", prev)
        if prev is False:
            _event_loop.call_soon(timeout_obj.coro)

@@ -298,11 +313,13 @@ def wait_for(coro, timeout):
def coroutine(f):
    return f


#
# The functions below are deprecated in uasyncio, and provided only
# for compatibility with CPython asyncio
#


def ensure_future(coro, loop=_event_loop):
    _event_loop.call_soon(coro)
    # CPython asyncio incompatibility: we don't return Task object

@ -1,21 +1,25 @@
|
|||
import sys
|
||||
|
||||
# Remove current dir from sys.path, otherwise setuptools will peek up our
|
||||
# module instead of system's.
|
||||
sys.path.pop(0)
|
||||
from setuptools import setup
|
||||
|
||||
sys.path.append("..")
|
||||
import sdist_upip
|
||||
|
||||
setup(name='micropython-uasyncio.queues',
|
||||
version='0.1.2',
|
||||
description='uasyncio.queues module for MicroPython',
|
||||
long_description='Port of asyncio.queues to uasyncio.',
|
||||
url='https://github.com/micropython/micropython-lib',
|
||||
author='micropython-lib Developers',
|
||||
author_email='micro-python@googlegroups.com',
|
||||
maintainer='micropython-lib Developers',
|
||||
maintainer_email='micro-python@googlegroups.com',
|
||||
license='MIT',
|
||||
cmdclass={'sdist': sdist_upip.sdist},
|
||||
packages=['uasyncio'],
|
||||
install_requires=['micropython-uasyncio.core', 'micropython-collections.deque'])
|
||||
setup(
|
||||
name="micropython-uasyncio.queues",
|
||||
version="0.1.2",
|
||||
description="uasyncio.queues module for MicroPython",
|
||||
long_description="Port of asyncio.queues to uasyncio.",
|
||||
url="https://github.com/micropython/micropython-lib",
|
||||
author="micropython-lib Developers",
|
||||
author_email="micro-python@googlegroups.com",
|
||||
maintainer="micropython-lib Developers",
|
||||
maintainer_email="micro-python@googlegroups.com",
|
||||
license="MIT",
|
||||
cmdclass={"sdist": sdist_upip.sdist},
|
||||
packages=["uasyncio"],
|
||||
install_requires=["micropython-uasyncio.core", "micropython-collections.deque"],
|
||||
)
|
||||
|
|
|
@ -1,11 +1,11 @@
|
|||
from unittest import TestCase, run_class
|
||||
import sys
|
||||
sys.path.insert(0, '../uasyncio')
|
||||
|
||||
sys.path.insert(0, "../uasyncio")
|
||||
import queues
|
||||
|
||||
|
||||
class QueueTestCase(TestCase):
|
||||
|
||||
def _val(self, gen):
|
||||
"""Returns val from generator."""
|
||||
while True:
|
||||
|
@ -53,5 +53,5 @@ class QueueTestCase(TestCase):
|
|||
self.assertTrue(q.full())
|
||||
|
||||
|
||||
if __name__ == '__main__':
|
||||
if __name__ == "__main__":
|
||||
run_class(QueueTestCase)
|
||||
|
|
|
@ -21,6 +21,7 @@ class Queue:
|
|||
with qsize(), since your single-threaded uasyncio application won't be
|
||||
interrupted between calling qsize() and doing an operation on the Queue.
|
||||
"""
|
||||
|
||||
_attempt_delay = 0.1
|
||||
|
||||
def __init__(self, maxsize=0):
|
||||
|
|
|
@ -12,7 +12,7 @@ def task(i, lock):
|
|||
yield from lock.acquire()
|
||||
print("Acquired lock in task", i)
|
||||
yield from asyncio.sleep(0.5)
|
||||
# yield lock.release()
|
||||
# yield lock.release()
|
||||
lock.release()
|
||||
|
||||
|
||||
|
|
|
@ -1,21 +1,25 @@
|
|||
import sys
|
||||
|
||||
# Remove current dir from sys.path, otherwise setuptools will peek up our
|
||||
# module instead of system's.
|
||||
sys.path.pop(0)
|
||||
from setuptools import setup
|
||||
|
||||
sys.path.append("..")
|
||||
import sdist_upip
|
||||
|
||||
setup(name='micropython-uasyncio.synchro',
|
||||
version='0.1.1',
|
||||
description='Synchronization primitives for uasyncio.',
|
||||
long_description="This is a module reimplemented specifically for MicroPython standard library,\nwith efficient and lean design in mind. Note that this module is likely work\nin progress and likely supports just a subset of CPython's corresponding\nmodule. Please help with the development if you are interested in this\nmodule.",
|
||||
url='https://github.com/micropython/micropython-lib',
|
||||
author='micropython-lib Developers',
|
||||
author_email='micro-python@googlegroups.com',
|
||||
maintainer='micropython-lib Developers',
|
||||
maintainer_email='micro-python@googlegroups.com',
|
||||
license='MIT',
|
||||
cmdclass={'sdist': sdist_upip.sdist},
|
||||
packages=['uasyncio'],
|
||||
install_requires=['micropython-uasyncio.core'])
|
||||
setup(
|
||||
name="micropython-uasyncio.synchro",
|
||||
version="0.1.1",
|
||||
description="Synchronization primitives for uasyncio.",
|
||||
long_description="This is a module reimplemented specifically for MicroPython standard library,\nwith efficient and lean design in mind. Note that this module is likely work\nin progress and likely supports just a subset of CPython's corresponding\nmodule. Please help with the development if you are interested in this\nmodule.",
|
||||
url="https://github.com/micropython/micropython-lib",
|
||||
author="micropython-lib Developers",
|
||||
author_email="micro-python@googlegroups.com",
|
||||
maintainer="micropython-lib Developers",
|
||||
maintainer_email="micro-python@googlegroups.com",
|
||||
license="MIT",
|
||||
cmdclass={"sdist": sdist_upip.sdist},
|
||||
packages=["uasyncio"],
|
||||
install_requires=["micropython-uasyncio.core"],
|
||||
)
|
||||
|
|
|
@ -1,7 +1,7 @@
|
|||
from uasyncio import core
|
||||
|
||||
class Lock:
|
||||
|
||||
class Lock:
|
||||
def __init__(self):
|
||||
self.locked = False
|
||||
self.wlist = []
|
||||
|
@ -10,7 +10,7 @@ class Lock:
|
|||
assert self.locked
|
||||
self.locked = False
|
||||
if self.wlist:
|
||||
#print(self.wlist)
|
||||
# print(self.wlist)
|
||||
coro = self.wlist.pop(0)
|
||||
core.get_event_loop().call_soon(coro)
|
||||
|
||||
|
@ -18,11 +18,11 @@ class Lock:
|
|||
# As release() is not coro, assume we just released and going to acquire again
|
||||
# so, yield first to let someone else to acquire it first
|
||||
yield
|
||||
#print("acquire:", self.locked)
|
||||
# print("acquire:", self.locked)
|
||||
while 1:
|
||||
if not self.locked:
|
||||
self.locked = True
|
||||
return True
|
||||
#print("putting", core.get_event_loop().cur_task, "on waiting list")
|
||||
# print("putting", core.get_event_loop().cur_task, "on waiting list")
|
||||
self.wlist.append(core.get_event_loop().cur_task)
|
||||
yield False
|
||||
|
|
|
@ -6,6 +6,7 @@ import uasyncio
|
|||
import uasyncio.udp
|
||||
import usocket
|
||||
|
||||
|
||||
def udp_req(addr):
|
||||
s = uasyncio.udp.socket()
|
||||
print(s)
|
||||
|
@ -18,6 +19,7 @@ def udp_req(addr):
|
|||
|
||||
|
||||
import logging
|
||||
|
||||
logging.basicConfig(level=logging.INFO)
|
||||
|
||||
addr = usocket.getaddrinfo("127.0.0.1", 53)[0][-1]
|
||||
|
|
|
@ -1,21 +1,25 @@
|
|||
import sys
|
||||
|
||||
# Remove current dir from sys.path, otherwise setuptools will peek up our
|
||||
# module instead of system's.
|
||||
sys.path.pop(0)
|
||||
from setuptools import setup
|
||||
|
||||
sys.path.append("..")
|
||||
import sdist_upip
|
||||
|
||||
setup(name='micropython-uasyncio.udp',
|
||||
version='0.1.1',
|
||||
description="UDP support for MicroPython's uasyncio",
|
||||
long_description="This is a module reimplemented specifically for MicroPython standard library,\nwith efficient and lean design in mind. Note that this module is likely work\nin progress and likely supports just a subset of CPython's corresponding\nmodule. Please help with the development if you are interested in this\nmodule.",
|
||||
url='https://github.com/micropython/micropython-lib',
|
||||
author='Paul Sokolovsky',
|
||||
author_email='micro-python@googlegroups.com',
|
||||
maintainer='micropython-lib Developers',
|
||||
maintainer_email='micro-python@googlegroups.com',
|
||||
license='MIT',
|
||||
cmdclass={'sdist': sdist_upip.sdist},
|
||||
packages=['uasyncio'],
|
||||
install_requires=['micropython-uasyncio'])
|
||||
setup(
|
||||
name="micropython-uasyncio.udp",
|
||||
version="0.1.1",
|
||||
description="UDP support for MicroPython's uasyncio",
|
||||
long_description="This is a module reimplemented specifically for MicroPython standard library,\nwith efficient and lean design in mind. Note that this module is likely work\nin progress and likely supports just a subset of CPython's corresponding\nmodule. Please help with the development if you are interested in this\nmodule.",
|
||||
url="https://github.com/micropython/micropython-lib",
|
||||
author="Paul Sokolovsky",
|
||||
author_email="micro-python@googlegroups.com",
|
||||
maintainer="micropython-lib Developers",
|
||||
maintainer_email="micro-python@googlegroups.com",
|
||||
license="MIT",
|
||||
cmdclass={"sdist": sdist_upip.sdist},
|
||||
packages=["uasyncio"],
|
||||
install_requires=["micropython-uasyncio"],
|
||||
)
|
||||
|
|
|
@ -5,53 +5,60 @@ from uasyncio import core
|
|||
DEBUG = 0
|
||||
log = None
|
||||
|
||||
|
||||
def set_debug(val):
|
||||
global DEBUG, log
|
||||
DEBUG = val
|
||||
if val:
|
||||
import logging
|
||||
|
||||
log = logging.getLogger("uasyncio.udp")
|
||||
|
||||
|
||||
def socket(af=usocket.AF_INET):
|
||||
s = usocket.socket(af, usocket.SOCK_DGRAM)
|
||||
s.setblocking(False)
|
||||
return s
|
||||
|
||||
|
||||
def recv(s, n):
|
||||
try:
|
||||
yield core.IORead(s)
|
||||
return s.recv(n)
|
||||
except:
|
||||
#print("recv: exc, cleaning up")
|
||||
#print(uasyncio.core._event_loop.objmap, uasyncio.core._event_loop.poller)
|
||||
#uasyncio.core._event_loop.poller.dump()
|
||||
# print("recv: exc, cleaning up")
|
||||
# print(uasyncio.core._event_loop.objmap, uasyncio.core._event_loop.poller)
|
||||
# uasyncio.core._event_loop.poller.dump()
|
||||
yield core.IOReadDone(s)
|
||||
#print(uasyncio.core._event_loop.objmap)
|
||||
#uasyncio.core._event_loop.poller.dump()
|
||||
# print(uasyncio.core._event_loop.objmap)
|
||||
# uasyncio.core._event_loop.poller.dump()
|
||||
raise
|
||||
|
||||
|
||||
def recvfrom(s, n):
|
||||
try:
|
||||
yield core.IORead(s)
|
||||
return s.recvfrom(n)
|
||||
except:
|
||||
#print("recv: exc, cleaning up")
|
||||
#print(uasyncio.core._event_loop.objmap, uasyncio.core._event_loop.poller)
|
||||
#uasyncio.core._event_loop.poller.dump()
|
||||
# print("recv: exc, cleaning up")
|
||||
# print(uasyncio.core._event_loop.objmap, uasyncio.core._event_loop.poller)
|
||||
# uasyncio.core._event_loop.poller.dump()
|
||||
yield core.IOReadDone(s)
|
||||
#print(uasyncio.core._event_loop.objmap)
|
||||
#uasyncio.core._event_loop.poller.dump()
|
||||
# print(uasyncio.core._event_loop.objmap)
|
||||
# uasyncio.core._event_loop.poller.dump()
|
||||
raise
|
||||
|
||||
|
||||
def sendto(s, buf, addr=None):
|
||||
while 1:
|
||||
res = s.sendto(buf, addr)
|
||||
#print("send res:", res)
|
||||
# print("send res:", res)
|
||||
if res == len(buf):
|
||||
return
|
||||
print("sendto: IOWrite")
|
||||
yield core.IOWrite(s)
|
||||
|
||||
|
||||
def close(s):
|
||||
yield core.IOReadDone(s)
|
||||
s.close()
|
||||
|
|
|
@ -19,7 +19,8 @@ def echo(reader, writer):
|
|||
|
||||
|
||||
import logging
|
||||
#logging.basicConfig(level=logging.INFO)
|
||||
|
||||
# logging.basicConfig(level=logging.INFO)
|
||||
logging.basicConfig(level=logging.DEBUG)
|
||||
loop = uasyncio.get_event_loop()
|
||||
loop.create_task(uasyncio.start_server(echo, "127.0.0.1", 8081))
|
||||
|
|
|
@ -1,21 +1,25 @@
|
|||
import sys
|
||||
|
||||
# Remove current dir from sys.path, otherwise setuptools will peek up our
|
||||
# module instead of system's.
|
||||
sys.path.pop(0)
|
||||
from setuptools import setup
|
||||
|
||||
sys.path.append("..")
|
||||
import sdist_upip
|
||||
|
||||
setup(name='micropython-uasyncio.websocket.server',
|
||||
version='0.1',
|
||||
description='uasyncio.websocket.server module for MicroPython',
|
||||
long_description="This is a module reimplemented specifically for MicroPython standard library,\nwith efficient and lean design in mind. Note that this module is likely work\nin progress and likely supports just a subset of CPython's corresponding\nmodule. Please help with the development if you are interested in this\nmodule.",
|
||||
url='https://github.com/micropython/micropython-lib',
|
||||
author='Paul Sokolovsky',
|
||||
author_email='micro-python@googlegroups.com',
|
||||
maintainer='micropython-lib Developers',
|
||||
maintainer_email='micro-python@googlegroups.com',
|
||||
license='MIT',
|
||||
cmdclass={'sdist': sdist_upip.sdist},
|
||||
packages=['uasyncio.websocket'],
|
||||
install_requires=['micropython-uasyncio'])
|
||||
setup(
|
||||
name="micropython-uasyncio.websocket.server",
|
||||
version="0.1",
|
||||
description="uasyncio.websocket.server module for MicroPython",
|
||||
long_description="This is a module reimplemented specifically for MicroPython standard library,\nwith efficient and lean design in mind. Note that this module is likely work\nin progress and likely supports just a subset of CPython's corresponding\nmodule. Please help with the development if you are interested in this\nmodule.",
|
||||
url="https://github.com/micropython/micropython-lib",
|
||||
author="Paul Sokolovsky",
|
||||
author_email="micro-python@googlegroups.com",
|
||||
maintainer="micropython-lib Developers",
|
||||
maintainer_email="micro-python@googlegroups.com",
|
||||
license="MIT",
|
||||
cmdclass={"sdist": sdist_upip.sdist},
|
||||
packages=["uasyncio.websocket"],
|
||||
install_requires=["micropython-uasyncio"],
|
||||
)
|
||||
|
|
|
@ -7,13 +7,12 @@ def make_respkey(webkey):
|
|||
d = uhashlib.sha1(webkey)
|
||||
d.update(b"258EAFA5-E914-47DA-95CA-C5AB0DC85B11")
|
||||
respkey = d.digest()
|
||||
respkey = ubinascii.b2a_base64(respkey) #[:-1]
|
||||
respkey = ubinascii.b2a_base64(respkey) # [:-1]
|
||||
# Return with trailing "\n".
|
||||
return respkey
|
||||
|
||||
|
||||
class WSWriter:
|
||||
|
||||
def __init__(self, reader, writer):
|
||||
# Reader is passed for symmetry with WSReader() and ignored.
|
||||
self.s = writer
|
||||
|
@ -27,37 +26,39 @@ class WSWriter:
|
|||
|
||||
def WSReader(reader, writer):
|
||||
|
||||
webkey = None
|
||||
while 1:
|
||||
l = yield from reader.readline()
|
||||
print(l)
|
||||
if not l:
|
||||
raise ValueError()
|
||||
if l == b"\r\n":
|
||||
break
|
||||
if l.startswith(b'Sec-WebSocket-Key'):
|
||||
webkey = l.split(b":", 1)[1]
|
||||
webkey = webkey.strip()
|
||||
webkey = None
|
||||
while 1:
|
||||
l = yield from reader.readline()
|
||||
print(l)
|
||||
if not l:
|
||||
raise ValueError()
|
||||
if l == b"\r\n":
|
||||
break
|
||||
if l.startswith(b"Sec-WebSocket-Key"):
|
||||
webkey = l.split(b":", 1)[1]
|
||||
webkey = webkey.strip()
|
||||
|
||||
if not webkey:
|
||||
raise ValueError("Not a websocker request")
|
||||
if not webkey:
|
||||
raise ValueError("Not a websocker request")
|
||||
|
||||
respkey = make_respkey(webkey)
|
||||
respkey = make_respkey(webkey)
|
||||
|
||||
await writer.awrite(b"""\
|
||||
await writer.awrite(
|
||||
b"""\
|
||||
HTTP/1.1 101 Switching Protocols\r
|
||||
Upgrade: websocket\r
|
||||
Connection: Upgrade\r
|
||||
Sec-WebSocket-Accept: """)
|
||||
await writer.awrite(respkey)
|
||||
# This will lead to "<key>\n\r\n" being written. Not exactly
|
||||
# "\r\n\r\n", but browsers seem to eat it.
|
||||
await writer.awrite("\r\n")
|
||||
#await writer.awrite("\r\n\r\n")
|
||||
Sec-WebSocket-Accept: """
|
||||
)
|
||||
await writer.awrite(respkey)
|
||||
# This will lead to "<key>\n\r\n" being written. Not exactly
|
||||
# "\r\n\r\n", but browsers seem to eat it.
|
||||
await writer.awrite("\r\n")
|
||||
# await writer.awrite("\r\n\r\n")
|
||||
|
||||
print("Finished webrepl handshake")
|
||||
print("Finished webrepl handshake")
|
||||
|
||||
ws = websocket.websocket(reader.ios)
|
||||
rws = uasyncio.StreamReader(reader.ios, ws)
|
||||
ws = websocket.websocket(reader.ios)
|
||||
rws = uasyncio.StreamReader(reader.ios, ws)
|
||||
|
||||
return rws
|
||||
return rws
|
||||
|
|
|
@ -12,6 +12,7 @@ NUM_REQS = 1000
|
|||
seen = []
|
||||
cnt = 0
|
||||
|
||||
|
||||
def validate(resp):
|
||||
global cnt
|
||||
t = resp.text
|
||||
|
|
|
@ -5,10 +5,11 @@ import errno
|
|||
|
||||
cnt = 0
|
||||
|
||||
|
||||
@asyncio.coroutine
|
||||
def serve(reader, writer):
|
||||
global cnt
|
||||
#s = "Hello.\r\n"
|
||||
# s = "Hello.\r\n"
|
||||
s = "Hello. %07d\r\n" % cnt
|
||||
cnt += 1
|
||||
yield from reader.read()
|
||||
|
@ -30,11 +31,12 @@ def serve(reader, writer):
|
|||
|
||||
|
||||
import logging
|
||||
|
||||
logging.basicConfig(level=logging.INFO)
|
||||
#logging.basicConfig(level=logging.DEBUG)
|
||||
# logging.basicConfig(level=logging.DEBUG)
|
||||
signal.signal(signal.SIGPIPE, signal.SIG_IGN)
|
||||
loop = asyncio.get_event_loop()
|
||||
#mem_info()
|
||||
# mem_info()
|
||||
loop.call_soon(asyncio.start_server(serve, "0.0.0.0", 8081, backlog=100))
|
||||
loop.run_forever()
|
||||
loop.close()
|
||||
|
|
|
@ -3,19 +3,20 @@ import uasyncio as asyncio
|
|||
|
||||
@asyncio.coroutine
|
||||
def serve(reader, writer):
|
||||
#print(reader, writer)
|
||||
#print("================")
|
||||
# print(reader, writer)
|
||||
# print("================")
|
||||
yield from reader.read(512)
|
||||
yield from writer.awrite("HTTP/1.0 200 OK\r\n\r\nHello.\r\n")
|
||||
yield from writer.aclose()
|
||||
#print("Finished processing request")
|
||||
# print("Finished processing request")
|
||||
|
||||
|
||||
import logging
|
||||
#logging.basicConfig(level=logging.INFO)
|
||||
|
||||
# logging.basicConfig(level=logging.INFO)
|
||||
logging.basicConfig(level=logging.DEBUG)
|
||||
loop = asyncio.get_event_loop()
|
||||
#mem_info()
|
||||
# mem_info()
|
||||
loop.create_task(asyncio.start_server(serve, "127.0.0.1", 8081, backlog=100))
|
||||
loop.run_forever()
|
||||
loop.close()
|
||||
|
|
|
@ -2,21 +2,23 @@ import uasyncio as asyncio
|
|||
|
||||
resp = "HTTP/1.0 200 OK\r\n\r\n" + "Hello.\r\n" * 1500
|
||||
|
||||
|
||||
@asyncio.coroutine
|
||||
def serve(reader, writer):
|
||||
#print(reader, writer)
|
||||
#print("================")
|
||||
# print(reader, writer)
|
||||
# print("================")
|
||||
yield from reader.read(512)
|
||||
yield from writer.awrite(resp)
|
||||
yield from writer.aclose()
|
||||
#print("Finished processing request")
|
||||
# print("Finished processing request")
|
||||
|
||||
|
||||
import logging
|
||||
#logging.basicConfig(level=logging.INFO)
|
||||
|
||||
# logging.basicConfig(level=logging.INFO)
|
||||
logging.basicConfig(level=logging.DEBUG)
|
||||
loop = asyncio.get_event_loop(80)
|
||||
#mem_info()
|
||||
# mem_info()
|
||||
loop.create_task(asyncio.start_server(serve, "127.0.0.1", 8081, backlog=100))
|
||||
loop.run_forever()
|
||||
loop.close()
|
||||
|
|
|
@ -1,12 +1,13 @@
|
|||
import uasyncio as asyncio
|
||||
|
||||
|
||||
@asyncio.coroutine
|
||||
def print_http_headers(url):
|
||||
reader, writer = yield from asyncio.open_connection(url, 80)
|
||||
print(reader, writer)
|
||||
print("================")
|
||||
query = "GET / HTTP/1.0\r\n\r\n"
|
||||
yield from writer.awrite(query.encode('latin-1'))
|
||||
yield from writer.awrite(query.encode("latin-1"))
|
||||
while True:
|
||||
line = yield from reader.readline()
|
||||
if not line:
|
||||
|
@ -14,11 +15,13 @@ def print_http_headers(url):
|
|||
if line:
|
||||
print(line.rstrip())
|
||||
|
||||
|
||||
import logging
|
||||
|
||||
logging.basicConfig(level=logging.INFO)
|
||||
url = "google.com"
|
||||
loop = asyncio.get_event_loop()
|
||||
#task = asyncio.async(print_http_headers(url))
|
||||
#loop.run_until_complete(task)
|
||||
# task = asyncio.async(print_http_headers(url))
|
||||
# loop.run_until_complete(task)
|
||||
loop.run_until_complete(print_http_headers(url))
|
||||
loop.close()
|
||||
|
|
|
@ -1,5 +1,6 @@
|
|||
import uasyncio as asyncio
|
||||
|
||||
|
||||
@asyncio.coroutine
|
||||
def serve(reader, writer):
|
||||
print(reader, writer)
|
||||
|
@ -12,7 +13,8 @@ def serve(reader, writer):
|
|||
|
||||
|
||||
import logging
|
||||
#logging.basicConfig(level=logging.INFO)
|
||||
|
||||
# logging.basicConfig(level=logging.INFO)
|
||||
logging.basicConfig(level=logging.DEBUG)
|
||||
loop = asyncio.get_event_loop()
|
||||
loop.call_soon(asyncio.start_server(serve, "127.0.0.1", 8081))
|
||||
|
|
|
@ -1,21 +1,25 @@
|
|||
import sys
|
||||
|
||||
# Remove current dir from sys.path, otherwise setuptools will peek up our
|
||||
# module instead of system's.
|
||||
sys.path.pop(0)
|
||||
from setuptools import setup
|
||||
|
||||
sys.path.append("..")
|
||||
import sdist_upip
|
||||
|
||||
setup(name='micropython-uasyncio',
|
||||
version='2.0',
|
||||
description='Lightweight asyncio-like library for MicroPython, built around native Python coroutines.',
|
||||
long_description=open('README.rst').read(),
|
||||
url='https://github.com/micropython/micropython-lib',
|
||||
author='Paul Sokolovsky',
|
||||
author_email='micro-python@googlegroups.com',
|
||||
maintainer='micropython-lib Developers',
|
||||
maintainer_email='micro-python@googlegroups.com',
|
||||
license='MIT',
|
||||
cmdclass={'sdist': sdist_upip.sdist},
|
||||
packages=['uasyncio'],
|
||||
install_requires=['micropython-uasyncio.core'])
|
||||
setup(
|
||||
name="micropython-uasyncio",
|
||||
version="2.0",
|
||||
description="Lightweight asyncio-like library for MicroPython, built around native Python coroutines.",
|
||||
long_description=open("README.rst").read(),
|
||||
url="https://github.com/micropython/micropython-lib",
|
||||
author="Paul Sokolovsky",
|
||||
author_email="micro-python@googlegroups.com",
|
||||
maintainer="micropython-lib Developers",
|
||||
maintainer_email="micro-python@googlegroups.com",
|
||||
license="MIT",
|
||||
cmdclass={"sdist": sdist_upip.sdist},
|
||||
packages=["uasyncio"],
|
||||
install_requires=["micropython-uasyncio.core"],
|
||||
)
|
||||
|
|
|
@ -1,11 +1,11 @@
|
|||
from uasyncio import get_event_loop, open_connection, start_server, sleep_ms
|
||||
from unittest import main, TestCase
|
||||
|
||||
class EchoTestCase(TestCase):
|
||||
|
||||
class EchoTestCase(TestCase):
|
||||
def test_client_server(self):
|
||||
'''Simple client-server echo test'''
|
||||
sockaddr = ('127.0.0.1', 8080)
|
||||
"""Simple client-server echo test"""
|
||||
sockaddr = ("127.0.0.1", 8080)
|
||||
l = get_event_loop()
|
||||
|
||||
async def echo_server(reader, writer):
|
||||
|
@ -23,10 +23,10 @@ class EchoTestCase(TestCase):
|
|||
|
||||
result = []
|
||||
l.create_task(start_server(echo_server, *sockaddr))
|
||||
l.run_until_complete(echo_client(b'Hello\r\n', result))
|
||||
l.run_until_complete(echo_client(b"Hello\r\n", result))
|
||||
|
||||
self.assertEqual(result[0], b'Hello\r\n')
|
||||
self.assertEqual(result[0], b"Hello\r\n")
|
||||
|
||||
|
||||
if __name__ == '__main__':
|
||||
if __name__ == "__main__":
|
||||
main()
|
||||
|
|
|
@ -10,9 +10,10 @@ except:
|
|||
|
||||
done = False
|
||||
|
||||
|
||||
async def receiver():
|
||||
global done
|
||||
with open('test_io_starve.py', 'rb') as f:
|
||||
with open("test_io_starve.py", "rb") as f:
|
||||
sreader = asyncio.StreamReader(f)
|
||||
while True:
|
||||
await asyncio.sleep(0.1)
|
||||
|
@ -27,9 +28,10 @@ async def foo():
|
|||
await asyncio.sleep(0)
|
||||
loop.stop()
|
||||
|
||||
|
||||
loop = asyncio.get_event_loop()
|
||||
loop.create_task(foo())
|
||||
loop.create_task(receiver())
|
||||
loop.run_forever()
|
||||
assert done
|
||||
print('OK')
|
||||
print("OK")
|
||||
|
|
|
@ -1,7 +1,7 @@
|
|||
from uasyncio import StreamReader
|
||||
|
||||
class MockSock:
|
||||
|
||||
class MockSock:
|
||||
def __init__(self, data_list):
|
||||
self.data = data_list
|
||||
|
||||
|
@ -12,11 +12,18 @@ class MockSock:
|
|||
return b""
|
||||
|
||||
|
||||
mock = MockSock([
|
||||
b"123",
|
||||
b"234", b"5",
|
||||
b"a", b"b", b"c", b"d", b"e",
|
||||
])
|
||||
mock = MockSock(
|
||||
[
|
||||
b"123",
|
||||
b"234",
|
||||
b"5",
|
||||
b"a",
|
||||
b"b",
|
||||
b"c",
|
||||
b"d",
|
||||
b"e",
|
||||
]
|
||||
)
|
||||
|
||||
|
||||
def func():
|
||||
|
@ -27,5 +34,6 @@ def func():
|
|||
# This isn't how it should be, but the current behavior
|
||||
assert await sr.readexactly(10) == b""
|
||||
|
||||
|
||||
for i in func():
|
||||
pass
|
||||
|
|
|
@ -1,7 +1,7 @@
|
|||
from uasyncio import StreamReader
|
||||
|
||||
class MockSock:
|
||||
|
||||
class MockSock:
|
||||
def __init__(self, data_list):
|
||||
self.data = data_list
|
||||
|
||||
|
@ -12,11 +12,15 @@ class MockSock:
|
|||
return b""
|
||||
|
||||
|
||||
mock = MockSock([
|
||||
b"line1\n",
|
||||
b"parts ", b"of ", b"line2\n",
|
||||
b"unterminated",
|
||||
])
|
||||
mock = MockSock(
|
||||
[
|
||||
b"line1\n",
|
||||
b"parts ",
|
||||
b"of ",
|
||||
b"line2\n",
|
||||
b"unterminated",
|
||||
]
|
||||
)
|
||||
|
||||
|
||||
def func():
|
||||
|
@ -26,5 +30,6 @@ def func():
|
|||
assert await sr.readline() == b"unterminated"
|
||||
assert await sr.readline() == b""
|
||||
|
||||
|
||||
for i in func():
|
||||
pass
|
||||
|
|
|
@ -7,16 +7,17 @@ from uasyncio.core import *
|
|||
DEBUG = 0
|
||||
log = None
|
||||
|
||||
|
||||
def set_debug(val):
|
||||
global DEBUG, log
|
||||
DEBUG = val
|
||||
if val:
|
||||
import logging
|
||||
|
||||
log = logging.getLogger("uasyncio")
|
||||
|
||||
|
||||
class PollEventLoop(EventLoop):
|
||||
|
||||
def __init__(self, runq_len=16, waitq_len=16):
|
||||
EventLoop.__init__(self, runq_len, waitq_len)
|
||||
self.poller = select.poll()
|
||||
|
@ -67,7 +68,7 @@ class PollEventLoop(EventLoop):
|
|||
log.debug("poll.wait(%d)", delay)
|
||||
# We need one-shot behavior (second arg of 1 to .poll())
|
||||
res = self.poller.ipoll(delay, 1)
|
||||
#log.debug("poll result: %s", res)
|
||||
# log.debug("poll result: %s", res)
|
||||
# Remove "if res" workaround after
|
||||
# https://github.com/micropython/micropython/issues/2716 fixed.
|
||||
if res:
|
||||
|
@ -90,7 +91,6 @@ class PollEventLoop(EventLoop):
|
|||
|
||||
|
||||
class StreamReader:
|
||||
|
||||
def __init__(self, polls, ios=None):
|
||||
if ios is None:
|
||||
ios = polls
|
||||
|
@ -105,7 +105,7 @@ class StreamReader:
|
|||
break
|
||||
# This should not happen for real sockets, but can easily
|
||||
# happen for stream wrappers (ssl, websockets, etc.)
|
||||
#log.warn("Empty read")
|
||||
# log.warn("Empty read")
|
||||
if not res:
|
||||
yield IOReadDone(self.polls)
|
||||
return res
|
||||
|
@ -135,7 +135,7 @@ class StreamReader:
|
|||
yield IOReadDone(self.polls)
|
||||
break
|
||||
buf += res
|
||||
if buf[-1] == 0x0a:
|
||||
if buf[-1] == 0x0A:
|
||||
break
|
||||
if DEBUG and __debug__:
|
||||
log.debug("StreamReader.readline(): %s", buf)
|
||||
|
@ -150,7 +150,6 @@ class StreamReader:
|
|||
|
||||
|
||||
class StreamWriter:
|
||||
|
||||
def __init__(self, s, extra):
|
||||
self.s = s
|
||||
self.extra = extra
|
||||
|
@ -180,7 +179,7 @@ class StreamWriter:
|
|||
off += res
|
||||
sz -= res
|
||||
yield IOWrite(self.s)
|
||||
#assert s2.fileno() == self.s.fileno()
|
||||
# assert s2.fileno() == self.s.fileno()
|
||||
if DEBUG and __debug__:
|
||||
log.debug("StreamWriter.awrite(): can write more")
|
||||
|
||||
|
@ -215,13 +214,14 @@ def open_connection(host, port, ssl=False):
|
|||
if DEBUG and __debug__:
|
||||
log.debug("open_connection: After connect")
|
||||
yield IOWrite(s)
|
||||
# if __debug__:
|
||||
# assert s2.fileno() == s.fileno()
|
||||
# if __debug__:
|
||||
# assert s2.fileno() == s.fileno()
|
||||
if DEBUG and __debug__:
|
||||
log.debug("open_connection: After iowait: %s", s)
|
||||
if ssl:
|
||||
print("Warning: uasyncio SSL support is alpha")
|
||||
import ussl
|
||||
|
||||
s.setblocking(True)
|
||||
s2 = ussl.wrap_socket(s)
|
||||
s.setblocking(False)
|
||||
|
@ -255,4 +255,5 @@ def start_server(client_coro, host, port, backlog=10):
|
|||
|
||||
|
||||
import uasyncio.core
|
||||
|
||||
uasyncio.core._event_loop_class = PollEventLoop
|
||||
|
|
|
@ -1,20 +1,24 @@
|
|||
import sys
|
||||
|
||||
# Remove current dir from sys.path, otherwise setuptools will peek up our
|
||||
# module instead of system's.
|
||||
sys.path.pop(0)
|
||||
from setuptools import setup
|
||||
|
||||
sys.path.append("..")
|
||||
import sdist_upip
|
||||
|
||||
setup(name='micropython-ucontextlib',
|
||||
version='0.1.1',
|
||||
description='ucontextlib module for MicroPython',
|
||||
long_description='Minimal subset of contextlib for MicroPython low-memory ports',
|
||||
url='https://github.com/micropython/micropython-lib',
|
||||
author='micropython-lib Developers',
|
||||
author_email='micro-python@googlegroups.com',
|
||||
maintainer='micropython-lib Developers',
|
||||
maintainer_email='micro-python@googlegroups.com',
|
||||
license='Python',
|
||||
cmdclass={'sdist': sdist_upip.sdist},
|
||||
py_modules=['ucontextlib'])
|
||||
setup(
|
||||
name="micropython-ucontextlib",
|
||||
version="0.1.1",
|
||||
description="ucontextlib module for MicroPython",
|
||||
long_description="Minimal subset of contextlib for MicroPython low-memory ports",
|
||||
url="https://github.com/micropython/micropython-lib",
|
||||
author="micropython-lib Developers",
|
||||
author_email="micro-python@googlegroups.com",
|
||||
maintainer="micropython-lib Developers",
|
||||
maintainer_email="micro-python@googlegroups.com",
|
||||
license="Python",
|
||||
cmdclass={"sdist": sdist_upip.sdist},
|
||||
py_modules=["ucontextlib"],
|
||||
)
|
||||
|
|
|
@ -3,24 +3,23 @@ from ucontextlib import contextmanager
|
|||
|
||||
|
||||
class ContextManagerTestCase(unittest.TestCase):
|
||||
|
||||
def setUp(self):
|
||||
self._history = []
|
||||
|
||||
@contextmanager
|
||||
def manager(x):
|
||||
self._history.append('start')
|
||||
self._history.append("start")
|
||||
try:
|
||||
yield x
|
||||
finally:
|
||||
self._history.append('finish')
|
||||
self._history.append("finish")
|
||||
|
||||
self._manager = manager
|
||||
|
||||
def test_context_manager(self):
|
||||
with self._manager(123) as x:
|
||||
self.assertEqual(x, 123)
|
||||
self.assertEqual(self._history, ['start', 'finish'])
|
||||
self.assertEqual(self._history, ["start", "finish"])
|
||||
|
||||
def test_context_manager_on_error(self):
|
||||
exc = Exception()
|
||||
|
@ -29,8 +28,8 @@ class ContextManagerTestCase(unittest.TestCase):
|
|||
raise exc
|
||||
except Exception as e:
|
||||
self.assertEqual(exc, e)
|
||||
self.assertEqual(self._history, ['start', 'finish'])
|
||||
self.assertEqual(self._history, ["start", "finish"])
|
||||
|
||||
|
||||
if __name__ == '__main__':
|
||||
if __name__ == "__main__":
|
||||
unittest.main()
|
||||
|
|
|
@ -9,6 +9,7 @@ Not implemented:
|
|||
- supress
|
||||
"""
|
||||
|
||||
|
||||
class ContextDecorator(object):
|
||||
"A base class or mixin that enables context managers to work as decorators."
|
||||
|
||||
|
@ -28,6 +29,7 @@ class ContextDecorator(object):
|
|||
def inner(*args, **kwds):
|
||||
with self._recreate_cm():
|
||||
return func(*args, **kwds)
|
||||
|
||||
return inner
|
||||
|
||||
|
||||
|
@ -101,6 +103,8 @@ def contextmanager(func):
|
|||
<cleanup>
|
||||
|
||||
"""
|
||||
|
||||
def helper(*args, **kwds):
|
||||
return _GeneratorContextManager(func, *args, **kwds)
|
||||
|
||||
return helper
|
||||
|
|
|
@ -1,20 +1,24 @@
|
|||
import sys
|
||||
|
||||
# Remove current dir from sys.path, otherwise setuptools will peek up our
|
||||
# module instead of system's.
|
||||
sys.path.pop(0)
|
||||
from setuptools import setup
|
||||
|
||||
sys.path.append("..")
|
||||
import sdist_upip
|
||||
|
||||
setup(name='micropython-udnspkt',
|
||||
version='0.1',
|
||||
description='Make and parse DNS packets (Sans I/O approach).',
|
||||
long_description="This is a module reimplemented specifically for MicroPython standard library,\nwith efficient and lean design in mind. Note that this module is likely work\nin progress and likely supports just a subset of CPython's corresponding\nmodule. Please help with the development if you are interested in this\nmodule.",
|
||||
url='https://github.com/micropython/micropython-lib',
|
||||
author='Paul Sokolovsky',
|
||||
author_email='micro-python@googlegroups.com',
|
||||
maintainer='micropython-lib Developers',
|
||||
maintainer_email='micro-python@googlegroups.com',
|
||||
license='MIT',
|
||||
cmdclass={'sdist': sdist_upip.sdist},
|
||||
py_modules=['udnspkt'])
|
||||
setup(
|
||||
name="micropython-udnspkt",
|
||||
version="0.1",
|
||||
description="Make and parse DNS packets (Sans I/O approach).",
|
||||
long_description="This is a module reimplemented specifically for MicroPython standard library,\nwith efficient and lean design in mind. Note that this module is likely work\nin progress and likely supports just a subset of CPython's corresponding\nmodule. Please help with the development if you are interested in this\nmodule.",
|
||||
url="https://github.com/micropython/micropython-lib",
|
||||
author="Paul Sokolovsky",
|
||||
author_email="micro-python@googlegroups.com",
|
||||
maintainer="micropython-lib Developers",
|
||||
maintainer_email="micro-python@googlegroups.com",
|
||||
license="MIT",
|
||||
cmdclass={"sdist": sdist_upip.sdist},
|
||||
py_modules=["udnspkt"],
|
||||
)
|
||||
|
|
|
@ -14,7 +14,7 @@ def skip_fqdn(buf):
|
|||
sz = buf.readbin("B")
|
||||
if not sz:
|
||||
break
|
||||
if sz >= 0xc0:
|
||||
if sz >= 0xC0:
|
||||
buf.readbin("B")
|
||||
break
|
||||
buf.read(sz)
|
||||
|
@ -50,29 +50,29 @@ def parse_resp(buf, is_ipv6):
|
|||
acnt = buf.readbin(">H")
|
||||
nscnt = buf.readbin(">H")
|
||||
addcnt = buf.readbin(">H")
|
||||
#print(qcnt, acnt, nscnt, addcnt)
|
||||
# print(qcnt, acnt, nscnt, addcnt)
|
||||
|
||||
skip_fqdn(buf)
|
||||
v = buf.readbin(">H")
|
||||
#print(v)
|
||||
# print(v)
|
||||
v = buf.readbin(">H")
|
||||
#print(v)
|
||||
# print(v)
|
||||
|
||||
for i in range(acnt):
|
||||
#print("Resp #%d" % i)
|
||||
#v = read_fqdn(buf)
|
||||
#print(v)
|
||||
# print("Resp #%d" % i)
|
||||
# v = read_fqdn(buf)
|
||||
# print(v)
|
||||
skip_fqdn(buf)
|
||||
t = buf.readbin(">H")
|
||||
#print("Type", t)
|
||||
# print("Type", t)
|
||||
v = buf.readbin(">H")
|
||||
#print("Class", v)
|
||||
# print("Class", v)
|
||||
v = buf.readbin(">I")
|
||||
#print("TTL", v)
|
||||
# print("TTL", v)
|
||||
rlen = buf.readbin(">H")
|
||||
#print("rlen", rlen)
|
||||
# print("rlen", rlen)
|
||||
rval = buf.read(rlen)
|
||||
#print(rval)
|
||||
# print(rval)
|
||||
|
||||
if t == typ:
|
||||
return rval
|
||||
|
|
|
@ -1,20 +1,24 @@
|
|||
import sys
|
||||
|
||||
# Remove current dir from sys.path, otherwise setuptools will peek up our
|
||||
# module instead of system's.
|
||||
sys.path.pop(0)
|
||||
from setuptools import setup
|
||||
|
||||
sys.path.append("..")
|
||||
import sdist_upip
|
||||
|
||||
setup(name='micropython-umqtt.robust',
|
||||
version='1.0.1',
|
||||
description='Lightweight MQTT client for MicroPython ("robust" version).',
|
||||
long_description=open('README.rst').read(),
|
||||
url='https://github.com/micropython/micropython-lib',
|
||||
author='Paul Sokolovsky',
|
||||
author_email='micro-python@googlegroups.com',
|
||||
maintainer='micropython-lib Developers',
|
||||
maintainer_email='micro-python@googlegroups.com',
|
||||
license='MIT',
|
||||
cmdclass={'sdist': sdist_upip.sdist},
|
||||
packages=['umqtt'])
|
||||
setup(
|
||||
name="micropython-umqtt.robust",
|
||||
version="1.0.1",
|
||||
description='Lightweight MQTT client for MicroPython ("robust" version).',
|
||||
long_description=open("README.rst").read(),
|
||||
url="https://github.com/micropython/micropython-lib",
|
||||
author="Paul Sokolovsky",
|
||||
author_email="micro-python@googlegroups.com",
|
||||
maintainer="micropython-lib Developers",
|
||||
maintainer_email="micro-python@googlegroups.com",
|
||||
license="MIT",
|
||||
cmdclass={"sdist": sdist_upip.sdist},
|
||||
packages=["umqtt"],
|
||||
)
|
||||
|
|
|
@ -1,6 +1,7 @@
|
|||
import utime
|
||||
from . import simple
|
||||
|
||||
|
||||
class MQTTClient(simple.MQTTClient):
|
||||
|
||||
DELAY = 2
|
||||
|
|
|
@ -3,11 +3,13 @@ from umqtt.simple import MQTTClient
|
|||
# Test reception e.g. with:
|
||||
# mosquitto_sub -t foo_topic
|
||||
|
||||
|
||||
def main(server="localhost"):
|
||||
c = MQTTClient("umqtt_client", server)
|
||||
c.connect()
|
||||
c.publish(b"foo_topic", b"hello")
|
||||
c.disconnect()
|
||||
|
||||
|
||||
if __name__ == "__main__":
|
||||
main()
|
||||
|
|
|
@ -8,6 +8,7 @@ from umqtt.simple import MQTTClient
|
|||
def sub_cb(topic, msg):
|
||||
print((topic, msg))
|
||||
|
||||
|
||||
def main(server="localhost"):
|
||||
c = MQTTClient("umqtt_client", server)
|
||||
c.set_callback(sub_cb)
|
||||
|
@ -26,5 +27,6 @@ def main(server="localhost"):
|
|||
|
||||
c.disconnect()
|
||||
|
||||
|
||||
if __name__ == "__main__":
|
||||
main()
|
||||
|
|
|
@ -17,6 +17,7 @@ TOPIC = b"led"
|
|||
|
||||
state = 0
|
||||
|
||||
|
||||
def sub_cb(topic, msg):
|
||||
global state
|
||||
print((topic, msg))
|
||||
|
@ -43,7 +44,7 @@ def main(server=SERVER):
|
|||
|
||||
try:
|
||||
while 1:
|
||||
#micropython.mem_info()
|
||||
# micropython.mem_info()
|
||||
c.wait_msg()
|
||||
finally:
|
||||
c.disconnect()
|
||||
|
|
|
@@ -1,20 +1,24 @@
import sys

# Remove current dir from sys.path, otherwise setuptools will peek up our
# module instead of system's.
sys.path.pop(0)
from setuptools import setup

sys.path.append("..")
import sdist_upip

setup(name='micropython-umqtt.simple',
      version='1.3.4',
      description='Lightweight MQTT client for MicroPython.',
      long_description=open('README.rst').read(),
      url='https://github.com/micropython/micropython-lib',
      author='Paul Sokolovsky',
      author_email='micro-python@googlegroups.com',
      maintainer='micropython-lib Developers',
      maintainer_email='micro-python@googlegroups.com',
      license='MIT',
      cmdclass={'sdist': sdist_upip.sdist},
      packages=['umqtt'])
setup(
    name="micropython-umqtt.simple",
    version="1.3.4",
    description="Lightweight MQTT client for MicroPython.",
    long_description=open("README.rst").read(),
    url="https://github.com/micropython/micropython-lib",
    author="Paul Sokolovsky",
    author_email="micro-python@googlegroups.com",
    maintainer="micropython-lib Developers",
    maintainer_email="micro-python@googlegroups.com",
    license="MIT",
    cmdclass={"sdist": sdist_upip.sdist},
    packages=["umqtt"],
)

@@ -2,13 +2,23 @@ import usocket as socket
import ustruct as struct
from ubinascii import hexlify


class MQTTException(Exception):
    pass

class MQTTClient:

    def __init__(self, client_id, server, port=0, user=None, password=None, keepalive=0,
                 ssl=False, ssl_params={}):
class MQTTClient:
    def __init__(
        self,
        client_id,
        server,
        port=0,
        user=None,
        password=None,
        keepalive=0,
        ssl=False,
        ssl_params={},
    ):
        if port == 0:
            port = 8883 if ssl else 1883
        self.client_id = client_id
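The signature above is only re-wrapped by black; the keyword arguments themselves are unchanged. A hedged construction sketch (hostname and credentials are placeholders, not from this repository):

from umqtt.simple import MQTTClient

c = MQTTClient(
    "sensor-1",
    "broker.example.com",
    user="user",
    password="secret",
    keepalive=60,
    ssl=True,        # per the hunk above, port defaults to 8883 when ssl=True
    ssl_params={},
)
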
@@ -36,7 +46,7 @@ class MQTTClient:
        sh = 0
        while 1:
            b = self.sock.read(1)[0]
            n |= (b & 0x7f) << sh
            n |= (b & 0x7F) << sh
            if not b & 0x80:
                return n
            sh += 7
@@ -58,6 +68,7 @@ class MQTTClient:
        self.sock.connect(addr)
        if self.ssl:
            import ussl

            self.sock = ussl.wrap_socket(self.sock, **self.ssl_params)
        premsg = bytearray(b"\x10\0\0\0\0\0")
        msg = bytearray(b"\x04MQTT\x04\x02\0\0")
@@ -77,15 +88,15 @@ class MQTTClient:
            msg[6] |= self.lw_retain << 5

        i = 1
        while sz > 0x7f:
            premsg[i] = (sz & 0x7f) | 0x80
        while sz > 0x7F:
            premsg[i] = (sz & 0x7F) | 0x80
            sz >>= 7
            i += 1
        premsg[i] = sz

        self.sock.write(premsg, i + 2)
        self.sock.write(msg)
        #print(hex(len(msg)), hexlify(msg, ":"))
        # print(hex(len(msg)), hexlify(msg, ":"))
        self._send_str(self.client_id)
        if self.lw_topic:
            self._send_str(self.lw_topic)
@@ -114,12 +125,12 @@ class MQTTClient:
            sz += 2
        assert sz < 2097152
        i = 1
        while sz > 0x7f:
            pkt[i] = (sz & 0x7f) | 0x80
        while sz > 0x7F:
            pkt[i] = (sz & 0x7F) | 0x80
            sz >>= 7
            i += 1
        pkt[i] = sz
        #print(hex(len(pkt)), hexlify(pkt, ":"))
        # print(hex(len(pkt)), hexlify(pkt, ":"))
        self.sock.write(pkt, i + 1)
        self._send_str(topic)
        if qos > 0:
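The loops reformatted in the last two hunks implement MQTT's variable-length "remaining length" field: seven payload bits per byte, with the top bit marking a continuation byte. A standalone sketch of the same scheme (illustration only, not code from this module):

def encode_varlen(sz):
    out = bytearray()
    while sz > 0x7F:
        out.append((sz & 0x7F) | 0x80)  # low 7 bits, continuation bit set
        sz >>= 7
    out.append(sz)
    return out

def decode_varlen(data):
    n = sh = 0
    for b in data:
        n |= (b & 0x7F) << sh
        if not b & 0x80:  # continuation bit clear: last byte
            return n
        sh += 7

assert encode_varlen(321) == bytearray(b"\xc1\x02")
assert decode_varlen(b"\xc1\x02") == 321
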
@@ -146,7 +157,7 @@ class MQTTClient:
        pkt = bytearray(b"\x82\0\0\0")
        self.pid += 1
        struct.pack_into("!BH", pkt, 1, 2 + 2 + len(topic) + 1, self.pid)
        #print(hex(len(pkt)), hexlify(pkt, ":"))
        # print(hex(len(pkt)), hexlify(pkt, ":"))
        self.sock.write(pkt)
        self._send_str(topic)
        self.sock.write(qos.to_bytes(1, "little"))
@@ -154,7 +165,7 @@ class MQTTClient:
            op = self.wait_msg()
            if op == 0x90:
                resp = self.sock.read(4)
                #print(resp)
                # print(resp)
                assert resp[1] == pkt[2] and resp[2] == pkt[3]
                if resp[3] == 0x80:
                    raise MQTTException(resp[3])
@@ -176,7 +187,7 @@ class MQTTClient:
            assert sz == 0
            return None
        op = res[0]
        if op & 0xf0 != 0x30:
        if op & 0xF0 != 0x30:
            return op
        sz = self._recv_len()
        topic_len = self.sock.read(2)

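To close out umqtt.simple, a brief subscribe-side sketch exercising the methods touched above (editorial illustration; topic and broker are placeholders):

from umqtt.simple import MQTTClient

def watch(server="localhost"):
    c = MQTTClient("umqtt_client", server)
    c.set_callback(lambda topic, msg: print(topic, msg))
    c.connect()
    c.subscribe(b"foo_topic")
    try:
        while 1:
            c.wait_msg()  # blocks until a PUBLISH arrives, then fires the callback
    finally:
        c.disconnect()
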
@@ -1,20 +1,24 @@
import sys

# Remove current dir from sys.path, otherwise setuptools will peek up our
# module instead of system's.
sys.path.pop(0)
from setuptools import setup

sys.path.append("..")
import sdist_upip

setup(name='micropython-upip',
      version='1.2.4',
      description='Simple package manager for MicroPython.',
      long_description='Simple self-hosted package manager for MicroPython (requires usocket, ussl, uzlib, uctypes builtin modules). Compatible only with packages without custom setup.py code.',
      url='https://github.com/micropython/micropython-lib',
      author='Paul Sokolovsky',
      author_email='micro-python@googlegroups.com',
      maintainer='micropython-lib Developers',
      maintainer_email='micro-python@googlegroups.com',
      license='MIT',
      cmdclass={'sdist': sdist_upip.sdist},
      py_modules=['upip', 'upip_utarfile'])
setup(
    name="micropython-upip",
    version="1.2.4",
    description="Simple package manager for MicroPython.",
    long_description="Simple self-hosted package manager for MicroPython (requires usocket, ussl, uzlib, uctypes builtin modules). Compatible only with packages without custom setup.py code.",
    url="https://github.com/micropython/micropython-lib",
    author="Paul Sokolovsky",
    author_email="micro-python@googlegroups.com",
    maintainer="micropython-lib Developers",
    maintainer_email="micro-python@googlegroups.com",
    license="MIT",
    cmdclass={"sdist": sdist_upip.sdist},
    py_modules=["upip", "upip_utarfile"],
)

@@ -12,6 +12,7 @@ import uerrno as errno
import ujson as json
import uzlib
import upip_utarfile as tarfile

gc.collect()


@@ -22,9 +23,11 @@ gzdict_sz = 16 + 15

file_buf = bytearray(512)


class NotFoundError(Exception):
    pass


def op_split(path):
    if path == "":
        return ("", "")
@@ -36,9 +39,11 @@ def op_split(path):
        head = "/"
    return (head, r[1])


def op_basename(path):
    return op_split(path)[1]


# Expects *file* name
def _makedirs(name, mode=0o777):
    ret = False
@@ -69,26 +74,27 @@ def save_file(fname, subf):
            break
        outf.write(file_buf, sz)


def install_tar(f, prefix):
    meta = {}
    for info in f:
        #print(info)
        # print(info)
        fname = info.name
        try:
            fname = fname[fname.index("/") + 1:]
            fname = fname[fname.index("/") + 1 :]
        except ValueError:
            fname = ""

        save = True
        for p in ("setup.", "PKG-INFO", "README"):
                #print(fname, p)
                if fname.startswith(p) or ".egg-info" in fname:
                    if fname.endswith("/requires.txt"):
                        meta["deps"] = f.extractfile(info).read()
                    save = False
                    if debug:
                        print("Skipping", fname)
                    break
            # print(fname, p)
            if fname.startswith(p) or ".egg-info" in fname:
                if fname.endswith("/requires.txt"):
                    meta["deps"] = f.extractfile(info).read()
                save = False
                if debug:
                    print("Skipping", fname)
                break

        if save:
            outfname = prefix + fname
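The slice in the hunk above strips the leading "<package>-<version>/" directory that sdist archives prepend to every member; black merely adds spaces inside the slice bounds. A tiny illustration (not repository code):

# Hypothetical member name for illustration only.
fname = "micropython-upip-1.2.4/upip.py"
assert fname[fname.index("/") + 1 :] == "upip.py"
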
@@ -100,32 +106,37 @@ def install_tar(f, prefix):
            save_file(outfname, subf)
    return meta


def expandhome(s):
    if "~/" in s:
        h = os.getenv("HOME")
        s = s.replace("~/", h + "/")
    return s


import ussl
import usocket

warn_ussl = True


def url_open(url):
    global warn_ussl

    if debug:
        print(url)

    proto, _, host, urlpath = url.split('/', 3)
    proto, _, host, urlpath = url.split("/", 3)
    try:
        ai = usocket.getaddrinfo(host, 443, 0, usocket.SOCK_STREAM)
    except OSError as e:
        fatal("Unable to resolve %s (no Internet?)" % host, e)
    #print("Address infos:", ai)
    # print("Address infos:", ai)
    ai = ai[0]

    s = usocket.socket(ai[0], ai[1], ai[2])
    try:
        #print("Connect address:", addr)
        # print("Connect address:", addr)
        s.connect(ai[-1])

        if proto == "https:":
@@ -146,7 +157,7 @@ def url_open(url):
            l = s.readline()
            if not l:
                raise ValueError("Unexpected EOF in HTTP headers")
            if l == b'\r\n':
            if l == b"\r\n":
                break
    except Exception as e:
        s.close()
@@ -169,6 +180,7 @@ def fatal(msg, exc=None):
        raise exc
    sys.exit(1)


def install_pkg(pkg_spec, install_path):
    data = get_pkg_metadata(pkg_spec)

@@ -192,6 +204,7 @@ def install_pkg(pkg_spec, install_path):
    gc.collect()
    return meta


def install(to_install, install_path=None):
    # Calculate gzip dictionary size to use
    global gzdict_sz
@@ -224,9 +237,11 @@ def install(to_install, install_path=None):
                deps = deps.decode("utf-8").split("\n")
                to_install.extend(deps)
        except Exception as e:
            print("Error installing '{}': {}, packages may be partially installed".format(
                pkg_spec, e),
                file=sys.stderr)
            print(
                "Error installing '{}': {}, packages may be partially installed".format(pkg_spec, e),
                file=sys.stderr,
            )


def get_install_path():
    global install_path
@@ -236,6 +251,7 @@ def get_install_path():
    install_path = expandhome(install_path)
    return install_path


def cleanup():
    for fname in cleanup_files:
        try:
@@ -243,21 +259,27 @@ def cleanup():
        except OSError:
            print("Warning: Cannot delete " + fname)


def help():
    print("""\
    print(
        """\
upip - Simple PyPI package manager for MicroPython
Usage: micropython -m upip install [-p <path>] <package>... | -r <requirements.txt>
import upip; upip.install(package_or_list, [<path>])

If <path> is not given, packages will be installed into sys.path[1]
(can be set from MICROPYPATH environment variable, if current system
supports that).""")
supports that)."""
    )
    print("Current value of sys.path[1]:", sys.path[1])
    print("""\
    print(
        """\

Note: only MicroPython packages (usually, named micropython-*) are supported
for installation, upip does not support arbitrary code in setup.py.
""")
"""
    )


def main():
    global debug

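The help text above already names both entry points; for reference, the two invocations side by side (package name and target path are illustrative placeholders):

# From a host shell:
#   micropython -m upip install -p ./lib micropython-logging
# Or from the MicroPython REPL, as the help text suggests:
import upip
upip.install("micropython-logging", "./lib")
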
@@ -9,11 +9,12 @@ TAR_HEADER = {
DIRTYPE = "dir"
REGTYPE = "file"


def roundup(val, align):
    return (val + align - 1) & ~(align - 1)

class FileSection:

class FileSection:
    def __init__(self, f, content_len, aligned_len):
        self.f = f
        self.content_len = content_len
@@ -33,7 +34,7 @@ class FileSection:
        if self.content_len == 0:
            return 0
        if len(buf) > self.content_len:
            buf = memoryview(buf)[:self.content_len]
            buf = memoryview(buf)[: self.content_len]
        sz = self.f.readinto(buf)
        self.content_len -= sz
        return sz
@@ -47,13 +48,13 @@ class FileSection:
            self.f.readinto(buf, s)
            sz -= s

class TarInfo:

class TarInfo:
    def __str__(self):
        return "TarInfo(%r, %s, %d)" % (self.name, self.type, self.size)

class TarFile:

class TarFile:
    def __init__(self, name=None, fileobj=None):
        if fileobj:
            self.f = fileobj
@@ -62,24 +63,24 @@ class TarFile:
        self.subf = None

    def next(self):
        if self.subf:
            self.subf.skip()
        buf = self.f.read(512)
        if not buf:
            return None
        if self.subf:
            self.subf.skip()
        buf = self.f.read(512)
        if not buf:
            return None

        h = uctypes.struct(uctypes.addressof(buf), TAR_HEADER, uctypes.LITTLE_ENDIAN)
        h = uctypes.struct(uctypes.addressof(buf), TAR_HEADER, uctypes.LITTLE_ENDIAN)

        # Empty block means end of archive
        if h.name[0] == 0:
            return None
        # Empty block means end of archive
        if h.name[0] == 0:
            return None

        d = TarInfo()
        d.name = str(h.name, "utf-8").rstrip("\0")
        d.size = int(bytes(h.size), 8)
        d.type = [REGTYPE, DIRTYPE][d.name[-1] == "/"]
        self.subf = d.subf = FileSection(self.f, d.size, roundup(d.size, 512))
        return d
        d = TarInfo()
        d.name = str(h.name, "utf-8").rstrip("\0")
        d.size = int(bytes(h.size), 8)
        d.type = [REGTYPE, DIRTYPE][d.name[-1] == "/"]
        self.subf = d.subf = FileSection(self.f, d.size, roundup(d.size, 512))
        return d

    def __iter__(self):
        return self

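For context on the TarFile class being re-indented here, a minimal read-only walk over an archive (sketch only: the file name is a placeholder, and a plain uncompressed tar is assumed since this module does no decompression itself):

import upip_utarfile as tarfile

t = tarfile.TarFile("example.tar")
for info in t:                      # TarFile is its own iterator (see __iter__/next above)
    if info.type == tarfile.DIRTYPE:
        continue
    data = t.extractfile(info).read()
    print(info.name, info.size, len(data))
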
@ -1,20 +1,24 @@
|
|||
import sys
|
||||
|
||||
# Remove current dir from sys.path, otherwise setuptools will peek up our
|
||||
# module instead of system's.
|
||||
sys.path.pop(0)
|
||||
from setuptools import setup
|
||||
|
||||
sys.path.append("..")
|
||||
import sdist_upip
|
||||
|
||||
setup(name='micropython-upysh',
|
||||
version='0.6.1',
|
||||
description='Minimalistic file shell using native Python syntax.',
|
||||
long_description='Minimalistic file shell using native Python syntax.',
|
||||
url='https://github.com/micropython/micropython-lib',
|
||||
author='micropython-lib Developers',
|
||||
author_email='micro-python@googlegroups.com',
|
||||
maintainer='micropython-lib Developers',
|
||||
maintainer_email='micro-python@googlegroups.com',
|
||||
license='MIT',
|
||||
cmdclass={'sdist': sdist_upip.sdist},
|
||||
py_modules=['upysh'])
|
||||
setup(
|
||||
name="micropython-upysh",
|
||||
version="0.6.1",
|
||||
description="Minimalistic file shell using native Python syntax.",
|
||||
long_description="Minimalistic file shell using native Python syntax.",
|
||||
url="https://github.com/micropython/micropython-lib",
|
||||
author="micropython-lib Developers",
|
||||
author_email="micro-python@googlegroups.com",
|
||||
maintainer="micropython-lib Developers",
|
||||
maintainer_email="micro-python@googlegroups.com",
|
||||
license="MIT",
|
||||
cmdclass={"sdist": sdist_upip.sdist},
|
||||
py_modules=["upysh"],
|
||||
)
|
||||
|
|
|
@ -1,8 +1,8 @@
|
|||
import sys
|
||||
import os
|
||||
|
||||
class LS:
|
||||
|
||||
class LS:
|
||||
def __repr__(self):
|
||||
self.__call__()
|
||||
return ""
|
||||
|
@ -17,14 +17,15 @@ class LS:
|
|||
else:
|
||||
print("% 8d %s" % (st[6], f))
|
||||
|
||||
class PWD:
|
||||
|
||||
class PWD:
|
||||
def __repr__(self):
|
||||
return os.getcwd()
|
||||
|
||||
def __call__(self):
|
||||
return self.__repr__()
|
||||
|
||||
|
||||
class CLEAR:
|
||||
def __repr__(self):
|
||||
return "\x1b[2J\x1b[H"
|
||||
|
@ -43,16 +44,20 @@ mv = os.rename
|
|||
rm = os.remove
|
||||
rmdir = os.rmdir
|
||||
|
||||
|
||||
def head(f, n=10):
|
||||
with open(f) as f:
|
||||
for i in range(n):
|
||||
l = f.readline()
|
||||
if not l: break
|
||||
if not l:
|
||||
break
|
||||
sys.stdout.write(l)
|
||||
|
||||
|
||||
def cat(f):
|
||||
head(f, 1 << 30)
|
||||
|
||||
|
||||
def newfile(path):
|
||||
print("Type file contents line by line, finish with EOF (Ctrl+D).")
|
||||
with open(path, "w") as f:
|
||||
|
@ -64,10 +69,10 @@ def newfile(path):
|
|||
f.write(l)
|
||||
f.write("\n")
|
||||
|
||||
class Man():
|
||||
|
||||
class Man:
|
||||
def __repr__(self):
|
||||
return("""
|
||||
return """
|
||||
upysh is intended to be imported using:
|
||||
from upysh import *
|
||||
|
||||
|
@ -77,7 +82,8 @@ upysh commands:
|
|||
pwd, cd("new_dir"), ls, ls(...), head(...), cat(...)
|
||||
newfile(...), mv("old", "new"), rm(...), mkdir(...), rmdir(...),
|
||||
clear
|
||||
""")
|
||||
"""
|
||||
|
||||
|
||||
man = Man()
|
||||
|
||||
|
|
|
@ -1,20 +1,24 @@
|
|||
import sys
|
||||
|
||||
# Remove current dir from sys.path, otherwise setuptools will peek up our
|
||||
# module instead of system's.
|
||||
sys.path.pop(0)
|
||||
from setuptools import setup
|
||||
|
||||
sys.path.append("..")
|
||||
import sdist_upip
|
||||
|
||||
setup(name='micropython-urequests',
|
||||
version='0.6',
|
||||
description='urequests module for MicroPython',
|
||||
long_description="This is a module reimplemented specifically for MicroPython standard library,\nwith efficient and lean design in mind. Note that this module is likely work\nin progress and likely supports just a subset of CPython's corresponding\nmodule. Please help with the development if you are interested in this\nmodule.",
|
||||
url='https://github.com/micropython/micropython-lib',
|
||||
author='Paul Sokolovsky',
|
||||
author_email='micro-python@googlegroups.com',
|
||||
maintainer='micropython-lib Developers',
|
||||
maintainer_email='micro-python@googlegroups.com',
|
||||
license='MIT',
|
||||
cmdclass={'sdist': sdist_upip.sdist},
|
||||
py_modules=['urequests'])
|
||||
setup(
|
||||
name="micropython-urequests",
|
||||
version="0.6",
|
||||
description="urequests module for MicroPython",
|
||||
long_description="This is a module reimplemented specifically for MicroPython standard library,\nwith efficient and lean design in mind. Note that this module is likely work\nin progress and likely supports just a subset of CPython's corresponding\nmodule. Please help with the development if you are interested in this\nmodule.",
|
||||
url="https://github.com/micropython/micropython-lib",
|
||||
author="Paul Sokolovsky",
|
||||
author_email="micro-python@googlegroups.com",
|
||||
maintainer="micropython-lib Developers",
|
||||
maintainer_email="micro-python@googlegroups.com",
|
||||
license="MIT",
|
||||
cmdclass={"sdist": sdist_upip.sdist},
|
||||
py_modules=["urequests"],
|
||||
)
|
||||
|
|
|
@ -1,7 +1,7 @@
|
|||
import usocket
|
||||
|
||||
class Response:
|
||||
|
||||
class Response:
|
||||
def __init__(self, f):
|
||||
self.raw = f
|
||||
self.encoding = "utf-8"
|
||||
|
@ -29,6 +29,7 @@ class Response:
|
|||
|
||||
def json(self):
|
||||
import ujson
|
||||
|
||||
return ujson.loads(self.content)
|
||||
|
||||
|
||||
|
@ -42,6 +43,7 @@ def request(method, url, data=None, json=None, headers={}, stream=None):
|
|||
port = 80
|
||||
elif proto == "https:":
|
||||
import ussl
|
||||
|
||||
port = 443
|
||||
else:
|
||||
raise ValueError("Unsupported protocol: " + proto)
|
||||
|
@ -70,6 +72,7 @@ def request(method, url, data=None, json=None, headers={}, stream=None):
|
|||
if json is not None:
|
||||
assert data is None
|
||||
import ujson
|
||||
|
||||
data = ujson.dumps(json)
|
||||
s.write(b"Content-Type: application/json\r\n")
|
||||
if data:
|
||||
|
@ -79,7 +82,7 @@ def request(method, url, data=None, json=None, headers={}, stream=None):
|
|||
s.write(data)
|
||||
|
||||
l = s.readline()
|
||||
#print(l)
|
||||
# print(l)
|
||||
l = l.split(None, 2)
|
||||
status = int(l[1])
|
||||
reason = ""
|
||||
|
@ -89,7 +92,7 @@ def request(method, url, data=None, json=None, headers={}, stream=None):
|
|||
l = s.readline()
|
||||
if not l or l == b"\r\n":
|
||||
break
|
||||
#print(l)
|
||||
# print(l)
|
||||
if l.startswith(b"Transfer-Encoding:"):
|
||||
if b"chunked" in l:
|
||||
raise ValueError("Unsupported " + l)
|
||||
|
@ -108,17 +111,22 @@ def request(method, url, data=None, json=None, headers={}, stream=None):
|
|||
def head(url, **kw):
|
||||
return request("HEAD", url, **kw)
|
||||
|
||||
|
||||
def get(url, **kw):
|
||||
return request("GET", url, **kw)
|
||||
|
||||
|
||||
def post(url, **kw):
|
||||
return request("POST", url, **kw)
|
||||
|
||||
|
||||
def put(url, **kw):
|
||||
return request("PUT", url, **kw)
|
||||
|
||||
|
||||
def patch(url, **kw):
|
||||
return request("PATCH", url, **kw)
|
||||
|
||||
|
||||
def delete(url, **kw):
|
||||
return request("DELETE", url, **kw)
|
||||
|
|
|
@ -1,20 +1,24 @@
|
|||
import sys
|
||||
|
||||
# Remove current dir from sys.path, otherwise setuptools will peek up our
|
||||
# module instead of system's.
|
||||
sys.path.pop(0)
|
||||
from setuptools import setup
|
||||
|
||||
sys.path.append("..")
|
||||
import sdist_upip
|
||||
|
||||
setup(name='micropython-urllib.urequest',
|
||||
version='0.6',
|
||||
description='urllib.urequest module for MicroPython',
|
||||
long_description="This is a module reimplemented specifically for MicroPython standard library,\nwith efficient and lean design in mind. Note that this module is likely work\nin progress and likely supports just a subset of CPython's corresponding\nmodule. Please help with the development if you are interested in this\nmodule.",
|
||||
url='https://github.com/micropython/micropython-lib',
|
||||
author='Paul Sokolovsky',
|
||||
author_email='micro-python@googlegroups.com',
|
||||
maintainer='micropython-lib Developers',
|
||||
maintainer_email='micro-python@googlegroups.com',
|
||||
license='MIT',
|
||||
cmdclass={'sdist': sdist_upip.sdist},
|
||||
packages=['urllib'])
|
||||
setup(
|
||||
name="micropython-urllib.urequest",
|
||||
version="0.6",
|
||||
description="urllib.urequest module for MicroPython",
|
||||
long_description="This is a module reimplemented specifically for MicroPython standard library,\nwith efficient and lean design in mind. Note that this module is likely work\nin progress and likely supports just a subset of CPython's corresponding\nmodule. Please help with the development if you are interested in this\nmodule.",
|
||||
url="https://github.com/micropython/micropython-lib",
|
||||
author="Paul Sokolovsky",
|
||||
author_email="micro-python@googlegroups.com",
|
||||
maintainer="micropython-lib Developers",
|
||||
maintainer_email="micro-python@googlegroups.com",
|
||||
license="MIT",
|
||||
cmdclass={"sdist": sdist_upip.sdist},
|
||||
packages=["urllib"],
|
||||
)
|
||||
|
|
|
@ -1,5 +1,6 @@
|
|||
import usocket
|
||||
|
||||
|
||||
def urlopen(url, data=None, method="GET"):
|
||||
if data is not None and method == "GET":
|
||||
method = "POST"
|
||||
|
@ -12,6 +13,7 @@ def urlopen(url, data=None, method="GET"):
|
|||
port = 80
|
||||
elif proto == "https:":
|
||||
import ussl
|
||||
|
||||
port = 443
|
||||
else:
|
||||
raise ValueError("Unsupported protocol: " + proto)
|
||||
|
@ -46,13 +48,13 @@ def urlopen(url, data=None, method="GET"):
|
|||
|
||||
l = s.readline()
|
||||
l = l.split(None, 2)
|
||||
#print(l)
|
||||
# print(l)
|
||||
status = int(l[1])
|
||||
while True:
|
||||
l = s.readline()
|
||||
if not l or l == b"\r\n":
|
||||
break
|
||||
#print(l)
|
||||
# print(l)
|
||||
if l.startswith(b"Transfer-Encoding:"):
|
||||
if b"chunked" in l:
|
||||
raise ValueError("Unsupported " + l)
|
||||
|
|
|
@ -1,20 +1,24 @@
|
|||
import sys
|
||||
|
||||
# Remove current dir from sys.path, otherwise setuptools will peek up our
|
||||
# module instead of system's.
|
||||
sys.path.pop(0)
|
||||
from setuptools import setup
|
||||
|
||||
sys.path.append("..")
|
||||
import sdist_upip
|
||||
|
||||
setup(name='micropython-utarfile',
|
||||
version='0.3.2',
|
||||
description='utarfile module for MicroPython',
|
||||
long_description='Lightweight tarfile module subset',
|
||||
url='https://github.com/micropython/micropython-lib',
|
||||
author='Paul Sokolovsky',
|
||||
author_email='micro-python@googlegroups.com',
|
||||
maintainer='micropython-lib Developers',
|
||||
maintainer_email='micro-python@googlegroups.com',
|
||||
license='MIT',
|
||||
cmdclass={'sdist': sdist_upip.sdist},
|
||||
py_modules=['utarfile'])
|
||||
setup(
|
||||
name="micropython-utarfile",
|
||||
version="0.3.2",
|
||||
description="utarfile module for MicroPython",
|
||||
long_description="Lightweight tarfile module subset",
|
||||
url="https://github.com/micropython/micropython-lib",
|
||||
author="Paul Sokolovsky",
|
||||
author_email="micro-python@googlegroups.com",
|
||||
maintainer="micropython-lib Developers",
|
||||
maintainer_email="micro-python@googlegroups.com",
|
||||
license="MIT",
|
||||
cmdclass={"sdist": sdist_upip.sdist},
|
||||
py_modules=["utarfile"],
|
||||
)
|
||||
|
|
|
@ -9,11 +9,12 @@ TAR_HEADER = {
|
|||
DIRTYPE = "dir"
|
||||
REGTYPE = "file"
|
||||
|
||||
|
||||
def roundup(val, align):
|
||||
return (val + align - 1) & ~(align - 1)
|
||||
|
||||
class FileSection:
|
||||
|
||||
class FileSection:
|
||||
def __init__(self, f, content_len, aligned_len):
|
||||
self.f = f
|
||||
self.content_len = content_len
|
||||
|
@ -33,7 +34,7 @@ class FileSection:
|
|||
if self.content_len == 0:
|
||||
return 0
|
||||
if len(buf) > self.content_len:
|
||||
buf = memoryview(buf)[:self.content_len]
|
||||
buf = memoryview(buf)[: self.content_len]
|
||||
sz = self.f.readinto(buf)
|
||||
self.content_len -= sz
|
||||
return sz
|
||||
|
@ -47,13 +48,13 @@ class FileSection:
|
|||
self.f.readinto(buf, s)
|
||||
sz -= s
|
||||
|
||||
class TarInfo:
|
||||
|
||||
class TarInfo:
|
||||
def __str__(self):
|
||||
return "TarInfo(%r, %s, %d)" % (self.name, self.type, self.size)
|
||||
|
||||
class TarFile:
|
||||
|
||||
class TarFile:
|
||||
def __init__(self, name=None, fileobj=None):
|
||||
if fileobj:
|
||||
self.f = fileobj
|
||||
|
@ -62,24 +63,24 @@ class TarFile:
|
|||
self.subf = None
|
||||
|
||||
def next(self):
|
||||
if self.subf:
|
||||
self.subf.skip()
|
||||
buf = self.f.read(512)
|
||||
if not buf:
|
||||
return None
|
||||
if self.subf:
|
||||
self.subf.skip()
|
||||
buf = self.f.read(512)
|
||||
if not buf:
|
||||
return None
|
||||
|
||||
h = uctypes.struct(uctypes.addressof(buf), TAR_HEADER, uctypes.LITTLE_ENDIAN)
|
||||
h = uctypes.struct(uctypes.addressof(buf), TAR_HEADER, uctypes.LITTLE_ENDIAN)
|
||||
|
||||
# Empty block means end of archive
|
||||
if h.name[0] == 0:
|
||||
return None
|
||||
# Empty block means end of archive
|
||||
if h.name[0] == 0:
|
||||
return None
|
||||
|
||||
d = TarInfo()
|
||||
d.name = str(h.name, "utf-8").rstrip("\0")
|
||||
d.size = int(bytes(h.size), 8)
|
||||
d.type = [REGTYPE, DIRTYPE][d.name[-1] == "/"]
|
||||
self.subf = d.subf = FileSection(self.f, d.size, roundup(d.size, 512))
|
||||
return d
|
||||
d = TarInfo()
|
||||
d.name = str(h.name, "utf-8").rstrip("\0")
|
||||
d.size = int(bytes(h.size), 8)
|
||||
d.type = [REGTYPE, DIRTYPE][d.name[-1] == "/"]
|
||||
self.subf = d.subf = FileSection(self.f, d.size, roundup(d.size, 512))
|
||||
return d
|
||||
|
||||
def __iter__(self):
|
||||
return self
|
||||
|
|
|
@ -1,20 +1,24 @@
|
|||
import sys
|
||||
|
||||
# Remove current dir from sys.path, otherwise setuptools will peek up our
|
||||
# module instead of system's.
|
||||
sys.path.pop(0)
|
||||
from setuptools import setup
|
||||
|
||||
sys.path.append("..")
|
||||
import sdist_upip
|
||||
|
||||
setup(name='micropython-xmltok',
|
||||
version='0.2',
|
||||
description='xmltok module for MicroPython',
|
||||
long_description='Simple XML tokenizer',
|
||||
url='https://github.com/micropython/micropython-lib',
|
||||
author='Paul Sokolovsky',
|
||||
author_email='micro-python@googlegroups.com',
|
||||
maintainer='micropython-lib Developers',
|
||||
maintainer_email='micro-python@googlegroups.com',
|
||||
license='MIT',
|
||||
cmdclass={'sdist': sdist_upip.sdist},
|
||||
py_modules=['xmltok'])
|
||||
setup(
|
||||
name="micropython-xmltok",
|
||||
version="0.2",
|
||||
description="xmltok module for MicroPython",
|
||||
long_description="Simple XML tokenizer",
|
||||
url="https://github.com/micropython/micropython-lib",
|
||||
author="Paul Sokolovsky",
|
||||
author_email="micro-python@googlegroups.com",
|
||||
maintainer="micropython-lib Developers",
|
||||
maintainer_email="micro-python@googlegroups.com",
|
||||
license="MIT",
|
||||
cmdclass={"sdist": sdist_upip.sdist},
|
||||
py_modules=["xmltok"],
|
||||
)
|
||||
|
|
|
@ -1,18 +1,18 @@
|
|||
import xmltok
|
||||
|
||||
expected = [
|
||||
('PI', 'xml'),
|
||||
('ATTR', ('', 'version'), '1.0'),
|
||||
('START_TAG', ('s', 'Envelope')),
|
||||
('ATTR', ('xmlns', 's'), 'http://schemas.xmlsoap.org/soap/envelope/'),
|
||||
('ATTR', ('s', 'encodingStyle'), 'http://schemas.xmlsoap.org/soap/encoding/'),
|
||||
('START_TAG', ('s', 'Body')),
|
||||
('START_TAG', ('u', 'GetConnectionTypeInfo')),
|
||||
('ATTR', ('xmlns', 'u'), 'urn:schemas-upnp-org:service:WANIPConnection:1'),
|
||||
('TEXT', 'foo bar\n baz\n \n'),
|
||||
('END_TAG', ('u', 'GetConnectionTypeInfo')),
|
||||
('END_TAG', ('s', 'Body')),
|
||||
('END_TAG', ('s', 'Envelope')),
|
||||
("PI", "xml"),
|
||||
("ATTR", ("", "version"), "1.0"),
|
||||
("START_TAG", ("s", "Envelope")),
|
||||
("ATTR", ("xmlns", "s"), "http://schemas.xmlsoap.org/soap/envelope/"),
|
||||
("ATTR", ("s", "encodingStyle"), "http://schemas.xmlsoap.org/soap/encoding/"),
|
||||
("START_TAG", ("s", "Body")),
|
||||
("START_TAG", ("u", "GetConnectionTypeInfo")),
|
||||
("ATTR", ("xmlns", "u"), "urn:schemas-upnp-org:service:WANIPConnection:1"),
|
||||
("TEXT", "foo bar\n baz\n \n"),
|
||||
("END_TAG", ("u", "GetConnectionTypeInfo")),
|
||||
("END_TAG", ("s", "Body")),
|
||||
("END_TAG", ("s", "Envelope")),
|
||||
]
|
||||
|
||||
dir = "."
|
||||
|
@ -21,5 +21,5 @@ if "/" in __file__:
|
|||
|
||||
ex = iter(expected)
|
||||
for i in xmltok.tokenize(open(dir + "/test.xml")):
|
||||
#print(i)
|
||||
# print(i)
|
||||
assert i == next(ex)
|
||||
|
|
|
@ -1,17 +1,18 @@
|
|||
TEXT = "TEXT"
|
||||
START_TAG = "START_TAG"
|
||||
#START_TAG_DONE = "START_TAG_DONE"
|
||||
# START_TAG_DONE = "START_TAG_DONE"
|
||||
END_TAG = "END_TAG"
|
||||
PI = "PI"
|
||||
#PI_DONE = "PI_DONE"
|
||||
# PI_DONE = "PI_DONE"
|
||||
ATTR = "ATTR"
|
||||
#ATTR_VAL = "ATTR_VAL"
|
||||
# ATTR_VAL = "ATTR_VAL"
|
||||
|
||||
|
||||
class XMLSyntaxError(Exception):
|
||||
pass
|
||||
|
||||
class XMLTokenizer:
|
||||
|
||||
class XMLTokenizer:
|
||||
def __init__(self, f):
|
||||
self.f = f
|
||||
self.nextch()
|
||||
|
@ -46,7 +47,7 @@ class XMLTokenizer:
|
|||
ident = ""
|
||||
while True:
|
||||
c = self.curch()
|
||||
if not(c.isalpha() or c.isdigit() or c in "_-."):
|
||||
if not (c.isalpha() or c.isdigit() or c in "_-."):
|
||||
break
|
||||
ident += self.getch()
|
||||
return ident
|
||||
|
@ -74,13 +75,13 @@ class XMLTokenizer:
|
|||
def lex_attrs_till(self):
|
||||
while self.isident():
|
||||
attr = self.getnsident()
|
||||
#yield (ATTR, attr)
|
||||
# yield (ATTR, attr)
|
||||
self.expect("=")
|
||||
self.expect('"')
|
||||
val = ""
|
||||
while self.curch() != '"':
|
||||
val += self.getch()
|
||||
#yield (ATTR_VAL, val)
|
||||
# yield (ATTR_VAL, val)
|
||||
self.expect('"')
|
||||
yield (ATTR, attr, val)
|
||||
|
||||
|
@ -98,7 +99,7 @@ class XMLTokenizer:
|
|||
elif self.match("!"):
|
||||
self.expect("-")
|
||||
self.expect("-")
|
||||
last3 = ''
|
||||
last3 = ""
|
||||
while True:
|
||||
last3 = last3[-2:] + self.getch()
|
||||
if last3 == "-->":
|
||||
|
@ -123,6 +124,7 @@ def gfind(gen, pred):
|
|||
if pred(i):
|
||||
return i
|
||||
|
||||
|
||||
def text_of(gen, tag):
|
||||
# Return text content of a leaf tag
|
||||
def match_tag(t):
|
||||
|
@ -138,5 +140,6 @@ def text_of(gen, tag):
|
|||
assert t == TEXT
|
||||
return val
|
||||
|
||||
|
||||
def tokenize(file):
|
||||
return XMLTokenizer(file).tokenize()
|
||||
|
|
|
@ -33,6 +33,7 @@ def recompress(fname):
|
|||
with Popen(["gzip", "-d", "-c", fname], stdout=PIPE).stdout as inf:
|
||||
gzip_4k(inf, fname)
|
||||
|
||||
|
||||
def find_latest(dir):
|
||||
res = []
|
||||
for fname in glob.glob(dir + "/*.gz"):
|
||||
|
@ -59,11 +60,12 @@ FILTERS = [
|
|||
|
||||
outbuf = io.BytesIO()
|
||||
|
||||
|
||||
def filter_tar(name):
|
||||
fin = tarfile.open(name, "r:gz")
|
||||
fout = tarfile.open(fileobj=outbuf, mode="w")
|
||||
for info in fin:
|
||||
# print(info)
|
||||
# print(info)
|
||||
if not "/" in info.name:
|
||||
continue
|
||||
fname = info.name.split("/", 1)[1]
|
||||
|
@ -93,9 +95,9 @@ def filter_tar(name):
|
|||
fin.close()
|
||||
|
||||
|
||||
|
||||
from setuptools import Command
|
||||
|
||||
|
||||
class OptimizeUpip(Command):
|
||||
|
||||
user_options = []
|
||||
|
@ -115,7 +117,7 @@ class OptimizeUpip(Command):
|
|||
|
||||
# For testing only
|
||||
if __name__ == "__main__":
|
||||
# recompress_latest(sys.argv[1])
|
||||
# recompress_latest(sys.argv[1])
|
||||
filter_tar(sys.argv[1])
|
||||
outbuf.seek(0)
|
||||
gzip_4k(outbuf, sys.argv[1])
|
||||
|
|
|
@ -1,20 +1,24 @@
|
|||
import sys
|
||||
|
||||
# Remove current dir from sys.path, otherwise setuptools will peek up our
|
||||
# module instead of system's.
|
||||
sys.path.pop(0)
|
||||
from setuptools import setup
|
||||
|
||||
sys.path.append("..")
|
||||
import sdist_upip
|
||||
|
||||
setup(name='micropython-future',
|
||||
version='0.0.3',
|
||||
description='Dummy __future__ module for MicroPython',
|
||||
long_description='This is a dummy implementation of a module for MicroPython standard library.\nIt contains zero or very little functionality, and primarily intended to\navoid import errors (using idea that even if an application imports a\nmodule, it may be not using it onevery code path, so may work at least\npartially). It is expected that more complete implementation of the module\nwill be provided later. Please help with the development if you are\ninterested in this module.',
|
||||
url='https://github.com/micropython/micropython-lib',
|
||||
author='micropython-lib Developers',
|
||||
author_email='micro-python@googlegroups.com',
|
||||
maintainer='micropython-lib Developers',
|
||||
maintainer_email='micro-python@googlegroups.com',
|
||||
license='MIT',
|
||||
cmdclass={'sdist': sdist_upip.sdist},
|
||||
py_modules=['__future__'])
|
||||
setup(
|
||||
name="micropython-future",
|
||||
version="0.0.3",
|
||||
description="Dummy __future__ module for MicroPython",
|
||||
long_description="This is a dummy implementation of a module for MicroPython standard library.\nIt contains zero or very little functionality, and primarily intended to\navoid import errors (using idea that even if an application imports a\nmodule, it may be not using it onevery code path, so may work at least\npartially). It is expected that more complete implementation of the module\nwill be provided later. Please help with the development if you are\ninterested in this module.",
|
||||
url="https://github.com/micropython/micropython-lib",
|
||||
author="micropython-lib Developers",
|
||||
author_email="micro-python@googlegroups.com",
|
||||
maintainer="micropython-lib Developers",
|
||||
maintainer_email="micro-python@googlegroups.com",
|
||||
license="MIT",
|
||||
cmdclass={"sdist": sdist_upip.sdist},
|
||||
py_modules=["__future__"],
|
||||
)
|
||||
|
|
|
@ -7,15 +7,15 @@ documented public API and should not be used directly.
|
|||
|
||||
import re
|
||||
|
||||
_declname_match = re.compile(r'[a-zA-Z][-_.a-zA-Z0-9]*\s*').match
|
||||
_declname_match = re.compile(r"[a-zA-Z][-_.a-zA-Z0-9]*\s*").match
|
||||
_declstringlit_match = re.compile(r'(\'[^\']*\'|"[^"]*")\s*').match
|
||||
_commentclose = re.compile(r'--\s*>')
|
||||
_markedsectionclose = re.compile(r']\s*]\s*>')
|
||||
_commentclose = re.compile(r"--\s*>")
|
||||
_markedsectionclose = re.compile(r"]\s*]\s*>")
|
||||
|
||||
# An analysis of the MS-Word extensions is available at
|
||||
# http://www.planetpublish.com/xmlarena/xap/Thursday/WordtoXML.pdf
|
||||
|
||||
_msmarkedsectionclose = re.compile(r']\s*>')
|
||||
_msmarkedsectionclose = re.compile(r"]\s*>")
|
||||
|
||||
del re
|
||||
|
||||
|
@ -26,12 +26,10 @@ class ParserBase:
|
|||
|
||||
def __init__(self):
|
||||
if self.__class__ is ParserBase:
|
||||
raise RuntimeError(
|
||||
"_markupbase.ParserBase must be subclassed")
|
||||
raise RuntimeError("_markupbase.ParserBase must be subclassed")
|
||||
|
||||
def error(self, message):
|
||||
raise NotImplementedError(
|
||||
"subclasses of ParserBase must override error()")
|
||||
raise NotImplementedError("subclasses of ParserBase must override error()")
|
||||
|
||||
def reset(self):
|
||||
self.lineno = 1
|
||||
|
@ -52,13 +50,13 @@ class ParserBase:
|
|||
nlines = rawdata.count("\n", i, j)
|
||||
if nlines:
|
||||
self.lineno = self.lineno + nlines
|
||||
pos = rawdata.rindex("\n", i, j) # Should not fail
|
||||
self.offset = j-(pos+1)
|
||||
pos = rawdata.rindex("\n", i, j) # Should not fail
|
||||
self.offset = j - (pos + 1)
|
||||
else:
|
||||
self.offset = self.offset + j-i
|
||||
self.offset = self.offset + j - i
|
||||
return j
|
||||
|
||||
_decl_otherchars = ''
|
||||
_decl_otherchars = ""
|
||||
|
||||
# Internal -- parse declaration (for use by subclasses).
|
||||
def parse_declaration(self, i):
|
||||
|
@ -75,35 +73,35 @@ class ParserBase:
|
|||
rawdata = self.rawdata
|
||||
j = i + 2
|
||||
assert rawdata[i:j] == "<!", "unexpected call to parse_declaration"
|
||||
if rawdata[j:j+1] == ">":
|
||||
if rawdata[j : j + 1] == ">":
|
||||
# the empty comment <!>
|
||||
return j + 1
|
||||
if rawdata[j:j+1] in ("-", ""):
|
||||
if rawdata[j : j + 1] in ("-", ""):
|
||||
# Start of comment followed by buffer boundary,
|
||||
# or just a buffer boundary.
|
||||
return -1
|
||||
# A simple, practical version could look like: ((name|stringlit) S*) + '>'
|
||||
n = len(rawdata)
|
||||
if rawdata[j:j+2] == '--': #comment
|
||||
if rawdata[j : j + 2] == "--": # comment
|
||||
# Locate --.*-- as the body of the comment
|
||||
return self.parse_comment(i)
|
||||
elif rawdata[j] == '[': #marked section
|
||||
elif rawdata[j] == "[": # marked section
|
||||
# Locate [statusWord [...arbitrary SGML...]] as the body of the marked section
|
||||
# Where statusWord is one of TEMP, CDATA, IGNORE, INCLUDE, RCDATA
|
||||
# Note that this is extended by Microsoft Office "Save as Web" function
|
||||
# to include [if...] and [endif].
|
||||
return self.parse_marked_section(i)
|
||||
else: #all other declaration elements
|
||||
else: # all other declaration elements
|
||||
decltype, j = self._scan_name(j, i)
|
||||
if j < 0:
|
||||
return j
|
||||
if decltype == "doctype":
|
||||
self._decl_otherchars = ''
|
||||
self._decl_otherchars = ""
|
||||
while j < n:
|
||||
c = rawdata[j]
|
||||
if c == ">":
|
||||
# end of declaration syntax
|
||||
data = rawdata[i+2:j]
|
||||
data = rawdata[i + 2 : j]
|
||||
if decltype == "doctype":
|
||||
self.handle_decl(data)
|
||||
else:
|
||||
|
@ -116,7 +114,7 @@ class ParserBase:
|
|||
if c in "\"'":
|
||||
m = _declstringlit_match(rawdata, j)
|
||||
if not m:
|
||||
return -1 # incomplete
|
||||
return -1 # incomplete
|
||||
j = m.end()
|
||||
elif c in "abcdefghijklmnopqrstuvwxyzABCDEFGHIJKLMNOPQRSTUVWXYZ":
|
||||
name, j = self._scan_name(j, i)
|
||||
|
@ -135,46 +133,45 @@ class ParserBase:
|
|||
else:
|
||||
self.error("unexpected '[' char in declaration")
|
||||
else:
|
||||
self.error(
|
||||
"unexpected %r char in declaration" % rawdata[j])
|
||||
self.error("unexpected %r char in declaration" % rawdata[j])
|
||||
if j < 0:
|
||||
return j
|
||||
return -1 # incomplete
|
||||
return -1 # incomplete
|
||||
|
||||
# Internal -- parse a marked section
|
||||
# Override this to handle MS-word extension syntax <![if word]>content<![endif]>
|
||||
def parse_marked_section(self, i, report=1):
|
||||
rawdata= self.rawdata
|
||||
assert rawdata[i:i+3] == '<![', "unexpected call to parse_marked_section()"
|
||||
sectName, j = self._scan_name( i+3, i )
|
||||
rawdata = self.rawdata
|
||||
assert rawdata[i : i + 3] == "<![", "unexpected call to parse_marked_section()"
|
||||
sectName, j = self._scan_name(i + 3, i)
|
||||
if j < 0:
|
||||
return j
|
||||
if sectName in {"temp", "cdata", "ignore", "include", "rcdata"}:
|
||||
# look for standard ]]> ending
|
||||
match= _markedsectionclose.search(rawdata, i+3)
|
||||
match = _markedsectionclose.search(rawdata, i + 3)
|
||||
elif sectName in {"if", "else", "endif"}:
|
||||
# look for MS Office ]> ending
|
||||
match= _msmarkedsectionclose.search(rawdata, i+3)
|
||||
match = _msmarkedsectionclose.search(rawdata, i + 3)
|
||||
else:
|
||||
self.error('unknown status keyword %r in marked section' % rawdata[i+3:j])
|
||||
self.error("unknown status keyword %r in marked section" % rawdata[i + 3 : j])
|
||||
if not match:
|
||||
return -1
|
||||
if report:
|
||||
j = match.start(0)
|
||||
self.unknown_decl(rawdata[i+3: j])
|
||||
self.unknown_decl(rawdata[i + 3 : j])
|
||||
return match.end(0)
|
||||
|
||||
# Internal -- parse comment, return length or -1 if not terminated
|
||||
def parse_comment(self, i, report=1):
|
||||
rawdata = self.rawdata
|
||||
if rawdata[i:i+4] != '<!--':
|
||||
self.error('unexpected call to parse_comment()')
|
||||
match = _commentclose.search(rawdata, i+4)
|
||||
if rawdata[i : i + 4] != "<!--":
|
||||
self.error("unexpected call to parse_comment()")
|
||||
match = _commentclose.search(rawdata, i + 4)
|
||||
if not match:
|
||||
return -1
|
||||
if report:
|
||||
j = match.start(0)
|
||||
self.handle_comment(rawdata[i+4: j])
|
||||
self.handle_comment(rawdata[i + 4 : j])
|
||||
return match.end(0)
|
||||
|
||||
# Internal -- scan past the internal subset in a <!DOCTYPE declaration,
|
||||
|
@ -186,7 +183,7 @@ class ParserBase:
|
|||
while j < n:
|
||||
c = rawdata[j]
|
||||
if c == "<":
|
||||
s = rawdata[j:j+2]
|
||||
s = rawdata[j : j + 2]
|
||||
if s == "<":
|
||||
# end of buffer; incomplete
|
||||
return -1
|
||||
|
@ -199,7 +196,7 @@ class ParserBase:
|
|||
if (j + 4) > n:
|
||||
# end of buffer; incomplete
|
||||
return -1
|
||||
if rawdata[j:j+4] == "<!--":
|
||||
if rawdata[j : j + 4] == "<!--":
|
||||
j = self.parse_comment(j, report=0)
|
||||
if j < 0:
|
||||
return j
|
||||
|
@ -209,8 +206,7 @@ class ParserBase:
|
|||
return -1
|
||||
if name not in {"attlist", "element", "entity", "notation"}:
|
||||
self.updatepos(declstartpos, j + 2)
|
||||
self.error(
|
||||
"unknown declaration %r in internal subset" % name)
|
||||
self.error("unknown declaration %r in internal subset" % name)
|
||||
# handle the individual names
|
||||
meth = getattr(self, "_parse_doctype_" + name)
|
||||
j = meth(j, declstartpos)
|
||||
|
@ -252,7 +248,7 @@ class ParserBase:
|
|||
return -1
|
||||
# style content model; just skip until '>'
|
||||
rawdata = self.rawdata
|
||||
if '>' in rawdata[j:]:
|
||||
if ">" in rawdata[j:]:
|
||||
return rawdata.find(">", j) + 1
|
||||
return -1
|
||||
|
||||
|
@ -260,7 +256,7 @@ class ParserBase:
|
|||
def _parse_doctype_attlist(self, i, declstartpos):
|
||||
rawdata = self.rawdata
|
||||
name, j = self._scan_name(i, declstartpos)
|
||||
c = rawdata[j:j+1]
|
||||
c = rawdata[j : j + 1]
|
||||
if c == "":
|
||||
return -1
|
||||
if c == ">":
|
||||
|
@ -271,7 +267,7 @@ class ParserBase:
|
|||
name, j = self._scan_name(j, declstartpos)
|
||||
if j < 0:
|
||||
return j
|
||||
c = rawdata[j:j+1]
|
||||
c = rawdata[j : j + 1]
|
||||
if c == "":
|
||||
return -1
|
||||
if c == "(":
|
||||
|
@ -280,14 +276,14 @@ class ParserBase:
|
|||
j = rawdata.find(")", j) + 1
|
||||
else:
|
||||
return -1
|
||||
while rawdata[j:j+1].isspace():
|
||||
while rawdata[j : j + 1].isspace():
|
||||
j = j + 1
|
||||
if not rawdata[j:]:
|
||||
# end of buffer, incomplete
|
||||
return -1
|
||||
else:
|
||||
name, j = self._scan_name(j, declstartpos)
|
||||
c = rawdata[j:j+1]
|
||||
c = rawdata[j : j + 1]
|
||||
if not c:
|
||||
return -1
|
||||
if c in "'\"":
|
||||
|
@ -296,7 +292,7 @@ class ParserBase:
|
|||
j = m.end()
|
||||
else:
|
||||
return -1
|
||||
c = rawdata[j:j+1]
|
||||
c = rawdata[j : j + 1]
|
||||
if not c:
|
||||
return -1
|
||||
if c == "#":
|
||||
|
@ -306,10 +302,10 @@ class ParserBase:
|
|||
name, j = self._scan_name(j + 1, declstartpos)
|
||||
if j < 0:
|
||||
return j
|
||||
c = rawdata[j:j+1]
|
||||
c = rawdata[j : j + 1]
|
||||
if not c:
|
||||
return -1
|
||||
if c == '>':
|
||||
if c == ">":
|
||||
# all done
|
||||
return j + 1
|
||||
|
||||
|
@ -320,11 +316,11 @@ class ParserBase:
|
|||
return j
|
||||
rawdata = self.rawdata
|
||||
while 1:
|
||||
c = rawdata[j:j+1]
|
||||
c = rawdata[j : j + 1]
|
||||
if not c:
|
||||
# end of buffer; incomplete
|
||||
return -1
|
||||
if c == '>':
|
||||
if c == ">":
|
||||
return j + 1
|
||||
if c in "'\"":
|
||||
m = _declstringlit_match(rawdata, j)
|
||||
|
@ -339,10 +335,10 @@ class ParserBase:
|
|||
# Internal -- scan past <!ENTITY declarations
|
||||
def _parse_doctype_entity(self, i, declstartpos):
|
||||
rawdata = self.rawdata
|
||||
if rawdata[i:i+1] == "%":
|
||||
if rawdata[i : i + 1] == "%":
|
||||
j = i + 1
|
||||
while 1:
|
||||
c = rawdata[j:j+1]
|
||||
c = rawdata[j : j + 1]
|
||||
if not c:
|
||||
return -1
|
||||
if c.isspace():
|
||||
|
@ -355,7 +351,7 @@ class ParserBase:
|
|||
if j < 0:
|
||||
return j
|
||||
while 1:
|
||||
c = self.rawdata[j:j+1]
|
||||
c = self.rawdata[j : j + 1]
|
||||
if not c:
|
||||
return -1
|
||||
if c in "'\"":
|
||||
|
@ -363,7 +359,7 @@ class ParserBase:
|
|||
if m:
|
||||
j = m.end()
|
||||
else:
|
||||
return -1 # incomplete
|
||||
return -1 # incomplete
|
||||
elif c == ">":
|
||||
return j + 1
|
||||
else:
|
||||
|
@ -387,8 +383,7 @@ class ParserBase:
|
|||
return name.lower(), m.end()
|
||||
else:
|
||||
self.updatepos(declstartpos, i)
|
||||
self.error("expected name token at %r"
|
||||
% rawdata[declstartpos:declstartpos+20])
|
||||
self.error("expected name token at %r" % rawdata[declstartpos : declstartpos + 20])
|
||||
|
||||
# To be overridden -- handlers for unknown objects
|
||||
def unknown_decl(self, data):
|
||||
|
|
|
@ -1,21 +1,25 @@
|
|||
import sys
|
||||
|
||||
# Remove current dir from sys.path, otherwise setuptools will peek up our
|
||||
# module instead of system's.
|
||||
sys.path.pop(0)
|
||||
from setuptools import setup
|
||||
|
||||
sys.path.append("..")
|
||||
import sdist_upip
|
||||
|
||||
setup(name='micropython-_markupbase',
|
||||
version='3.3.3-1',
|
||||
description='CPython _markupbase module ported to MicroPython',
|
||||
long_description='This is a module ported from CPython standard library to be compatible with\nMicroPython interpreter. Usually, this means applying small patches for\nfeatures not supported (yet, or at all) in MicroPython. Sometimes, heavier\nchanges are required. Note that CPython modules are written with availability\nof vast resources in mind, and may not work for MicroPython ports with\nlimited heap. If you are affected by such a case, please help reimplement\nthe module from scratch.',
|
||||
url='https://github.com/micropython/micropython-lib',
|
||||
author='CPython Developers',
|
||||
author_email='python-dev@python.org',
|
||||
maintainer='micropython-lib Developers',
|
||||
maintainer_email='micro-python@googlegroups.com',
|
||||
license='Python',
|
||||
cmdclass={'sdist': sdist_upip.sdist},
|
||||
py_modules=['_markupbase'],
|
||||
install_requires=['micropython-re-pcre'])
|
||||
setup(
|
||||
name="micropython-_markupbase",
|
||||
version="3.3.3-1",
|
||||
description="CPython _markupbase module ported to MicroPython",
|
||||
long_description="This is a module ported from CPython standard library to be compatible with\nMicroPython interpreter. Usually, this means applying small patches for\nfeatures not supported (yet, or at all) in MicroPython. Sometimes, heavier\nchanges are required. Note that CPython modules are written with availability\nof vast resources in mind, and may not work for MicroPython ports with\nlimited heap. If you are affected by such a case, please help reimplement\nthe module from scratch.",
|
||||
url="https://github.com/micropython/micropython-lib",
|
||||
author="CPython Developers",
|
||||
author_email="python-dev@python.org",
|
||||
maintainer="micropython-lib Developers",
|
||||
maintainer_email="micro-python@googlegroups.com",
|
||||
license="Python",
|
||||
cmdclass={"sdist": sdist_upip.sdist},
|
||||
py_modules=["_markupbase"],
|
||||
install_requires=["micropython-re-pcre"],
|
||||
)
|
||||
|
|
|
@ -1,20 +1,24 @@
|
|||
import sys
|
||||
|
||||
# Remove current dir from sys.path, otherwise setuptools will peek up our
|
||||
# module instead of system's.
|
||||
sys.path.pop(0)
|
||||
from setuptools import setup
|
||||
|
||||
sys.path.append("..")
|
||||
import sdist_upip
|
||||
|
||||
setup(name='micropython-abc',
|
||||
version='0.0.1',
|
||||
description='Dummy abc module for MicroPython',
|
||||
long_description='This is a dummy implementation of a module for MicroPython standard library.\nIt contains zero or very little functionality, and primarily intended to\navoid import errors (using idea that even if an application imports a\nmodule, it may be not using it onevery code path, so may work at least\npartially). It is expected that more complete implementation of the module\nwill be provided later. Please help with the development if you are\ninterested in this module.',
|
||||
url='https://github.com/micropython/micropython-lib',
|
||||
author='micropython-lib Developers',
|
||||
author_email='micro-python@googlegroups.com',
|
||||
maintainer='micropython-lib Developers',
|
||||
maintainer_email='micro-python@googlegroups.com',
|
||||
license='MIT',
|
||||
cmdclass={'sdist': sdist_upip.sdist},
|
||||
py_modules=['abc'])
|
||||
setup(
|
||||
name="micropython-abc",
|
||||
version="0.0.1",
|
||||
description="Dummy abc module for MicroPython",
|
||||
long_description="This is a dummy implementation of a module for MicroPython standard library.\nIt contains zero or very little functionality, and primarily intended to\navoid import errors (using idea that even if an application imports a\nmodule, it may be not using it onevery code path, so may work at least\npartially). It is expected that more complete implementation of the module\nwill be provided later. Please help with the development if you are\ninterested in this module.",
|
||||
url="https://github.com/micropython/micropython-lib",
|
||||
author="micropython-lib Developers",
|
||||
author_email="micro-python@googlegroups.com",
|
||||
maintainer="micropython-lib Developers",
|
||||
maintainer_email="micro-python@googlegroups.com",
|
||||
license="MIT",
|
||||
cmdclass={"sdist": sdist_upip.sdist},
|
||||
py_modules=["abc"],
|
||||
)
|
||||
|
|
|
@ -104,8 +104,16 @@ class ArgumentParser:
|
|||
if not args:
|
||||
args = [dest]
|
||||
list.append(
|
||||
_Arg(args, dest, action, kwargs.get("nargs", None),
|
||||
const, default, kwargs.get("help", "")))
|
||||
_Arg(
|
||||
args,
|
||||
dest,
|
||||
action,
|
||||
kwargs.get("nargs", None),
|
||||
const,
|
||||
default,
|
||||
kwargs.get("help", ""),
|
||||
)
|
||||
)
|
||||
|
||||
def usage(self, full):
|
||||
# print short usage
|
||||
|
@ -121,8 +129,9 @@ class ArgumentParser:
|
|||
return " %s%s" % (arg.dest, arg.nargs)
|
||||
else:
|
||||
return ""
|
||||
|
||||
for opt in self.opt:
|
||||
print(" [%s%s]" % (', '.join(opt.names), render_arg(opt)), end="")
|
||||
print(" [%s%s]" % (", ".join(opt.names), render_arg(opt)), end="")
|
||||
for pos in self.pos:
|
||||
print(render_arg(pos), end="")
|
||||
print()
|
||||
|
@ -141,7 +150,7 @@ class ArgumentParser:
|
|||
print("\noptional args:")
|
||||
print(" -h, --help show this message and exit")
|
||||
for opt in self.opt:
|
||||
print(" %-16s%s" % (', '.join(opt.names) + render_arg(opt), opt.help))
|
||||
print(" %-16s%s" % (", ".join(opt.names) + render_arg(opt), opt.help))
|
||||
|
||||
def parse_args(self, args=None):
|
||||
return self._parse_args_impl(args, False)
|
||||
|
@ -171,6 +180,7 @@ class ArgumentParser:
|
|||
|
||||
# deal with unknown arguments, if needed
|
||||
unknown = []
|
||||
|
||||
def consume_unknown():
|
||||
while args and not args[0].startswith("-"):
|
||||
unknown.append(args.pop(0))
|
||||
|
|
|
@ -1,20 +1,24 @@
|
|||
import sys
|
||||
|
||||
# Remove current dir from sys.path, otherwise setuptools will peek up our
|
||||
# module instead of system's.
|
||||
sys.path.pop(0)
|
||||
from setuptools import setup
|
||||
|
||||
sys.path.append("..")
|
||||
import sdist_upip
|
||||
|
||||
setup(name='micropython-argparse',
|
||||
version='0.4',
|
||||
description='argparse module for MicroPython',
|
||||
long_description="This is a module reimplemented specifically for MicroPython standard library,\nwith efficient and lean design in mind. Note that this module is likely work\nin progress and likely supports just a subset of CPython's corresponding\nmodule. Please help with the development if you are interested in this\nmodule.",
|
||||
url='https://github.com/micropython/micropython-lib',
|
||||
author='Damien George',
|
||||
author_email='micro-python@googlegroups.com',
|
||||
maintainer='micropython-lib Developers',
|
||||
maintainer_email='micro-python@googlegroups.com',
|
||||
license='MIT',
|
||||
cmdclass={'sdist': sdist_upip.sdist},
|
||||
py_modules=['argparse'])
|
||||
setup(
|
||||
name="micropython-argparse",
|
||||
version="0.4",
|
||||
description="argparse module for MicroPython",
|
||||
long_description="This is a module reimplemented specifically for MicroPython standard library,\nwith efficient and lean design in mind. Note that this module is likely work\nin progress and likely supports just a subset of CPython's corresponding\nmodule. Please help with the development if you are interested in this\nmodule.",
|
||||
url="https://github.com/micropython/micropython-lib",
|
||||
author="Damien George",
|
||||
author_email="micro-python@googlegroups.com",
|
||||
maintainer="micropython-lib Developers",
|
||||
maintainer_email="micro-python@googlegroups.com",
|
||||
license="MIT",
|
||||
cmdclass={"sdist": sdist_upip.sdist},
|
||||
py_modules=["argparse"],
|
||||
)
|
||||
|
|
|
@ -13,38 +13,48 @@ import binascii
|
|||
|
||||
__all__ = [
|
||||
# Legacy interface exports traditional RFC 1521 Base64 encodings
|
||||
'encode', 'decode', 'encodebytes', 'decodebytes',
|
||||
"encode",
|
||||
"decode",
|
||||
"encodebytes",
|
||||
"decodebytes",
|
||||
# Generalized interface for other encodings
|
||||
'b64encode', 'b64decode', 'b32encode', 'b32decode',
|
||||
'b16encode', 'b16decode',
|
||||
"b64encode",
|
||||
"b64decode",
|
||||
"b32encode",
|
||||
"b32decode",
|
||||
"b16encode",
|
||||
"b16decode",
|
||||
# Standard Base64 encoding
|
||||
'standard_b64encode', 'standard_b64decode',
|
||||
"standard_b64encode",
|
||||
"standard_b64decode",
|
||||
# Some common Base64 alternatives. As referenced by RFC 3458, see thread
|
||||
# starting at:
|
||||
#
|
||||
# http://zgp.org/pipermail/p2p-hackers/2001-September/000316.html
|
||||
'urlsafe_b64encode', 'urlsafe_b64decode',
|
||||
]
|
||||
"urlsafe_b64encode",
|
||||
"urlsafe_b64decode",
|
||||
]
|
||||
|
||||
|
||||
bytes_types = (bytes, bytearray) # Types acceptable as binary data
|
||||
|
||||
|
||||
def _bytes_from_decode_data(s):
|
||||
if isinstance(s, str):
|
||||
try:
|
||||
return s.encode('ascii')
|
||||
# except UnicodeEncodeError:
|
||||
return s.encode("ascii")
|
||||
# except UnicodeEncodeError:
|
||||
except:
|
||||
raise ValueError('string argument should contain only ASCII characters')
|
||||
raise ValueError("string argument should contain only ASCII characters")
|
||||
elif isinstance(s, bytes_types):
|
||||
return s
|
||||
else:
|
||||
raise TypeError("argument should be bytes or ASCII string, not %s" % s.__class__.__name__)
|
||||
|
||||
|
||||
|
||||
# Base64 encoding/decoding uses binascii
|
||||
|
||||
|
||||
def b64encode(s, altchars=None):
|
||||
"""Encode a byte string using Base64.
|
||||
|
||||
|
@ -61,10 +71,9 @@ def b64encode(s, altchars=None):
|
|||
encoded = binascii.b2a_base64(s)[:-1]
|
||||
if altchars is not None:
|
||||
if not isinstance(altchars, bytes_types):
|
||||
raise TypeError("expected bytes, not %s"
|
||||
% altchars.__class__.__name__)
|
||||
raise TypeError("expected bytes, not %s" % altchars.__class__.__name__)
|
||||
assert len(altchars) == 2, repr(altchars)
|
||||
return encoded.translate(bytes.maketrans(b'+/', altchars))
|
||||
return encoded.translate(bytes.maketrans(b"+/", altchars))
|
||||
return encoded
|
||||
|
||||
|
||||
|
@ -86,9 +95,9 @@ def b64decode(s, altchars=None, validate=False):
|
|||
if altchars is not None:
|
||||
altchars = _bytes_from_decode_data(altchars)
|
||||
assert len(altchars) == 2, repr(altchars)
|
||||
s = s.translate(bytes.maketrans(altchars, b'+/'))
|
||||
if validate and not re.match(b'^[A-Za-z0-9+/]*={0,2}$', s):
|
||||
raise binascii.Error('Non-base64 digit found')
|
||||
s = s.translate(bytes.maketrans(altchars, b"+/"))
|
||||
if validate and not re.match(b"^[A-Za-z0-9+/]*={0,2}$", s):
|
||||
raise binascii.Error("Non-base64 digit found")
|
||||
return binascii.a2b_base64(s)
|
||||
|
||||
|
||||
|
@ -99,6 +108,7 @@ def standard_b64encode(s):
|
|||
"""
|
||||
return b64encode(s)
|
||||
|
||||
|
||||
def standard_b64decode(s):
|
||||
"""Decode a byte string encoded with the standard Base64 alphabet.
|
||||
|
||||
|
@ -110,8 +120,9 @@ def standard_b64decode(s):
|
|||
return b64decode(s)
|
||||
|
||||
|
||||
#_urlsafe_encode_translation = bytes.maketrans(b'+/', b'-_')
|
||||
#_urlsafe_decode_translation = bytes.maketrans(b'-_', b'+/')
|
||||
# _urlsafe_encode_translation = bytes.maketrans(b'+/', b'-_')
|
||||
# _urlsafe_decode_translation = bytes.maketrans(b'-_', b'+/')
|
||||
|
||||
|
||||
def urlsafe_b64encode(s):
|
||||
"""Encode a byte string using a url-safe Base64 alphabet.
|
||||
|
@ -120,9 +131,10 @@ def urlsafe_b64encode(s):
returned. The alphabet uses '-' instead of '+' and '_' instead of
'/'.
"""
# return b64encode(s).translate(_urlsafe_encode_translation)
# return b64encode(s).translate(_urlsafe_encode_translation)
raise NotImplementedError()


def urlsafe_b64decode(s):
"""Decode a byte string encoded with the standard Base64 alphabet.

@ -133,25 +145,47 @@ def urlsafe_b64decode(s):

The alphabet uses '-' instead of '+' and '_' instead of '/'.
"""
# s = _bytes_from_decode_data(s)
# s = s.translate(_urlsafe_decode_translation)
# return b64decode(s)
# s = _bytes_from_decode_data(s)
# s = s.translate(_urlsafe_decode_translation)
# return b64decode(s)
raise NotImplementedError()



# Base32 encoding/decoding must be done in Python
_b32alphabet = {
0: b'A', 9: b'J', 18: b'S', 27: b'3',
1: b'B', 10: b'K', 19: b'T', 28: b'4',
2: b'C', 11: b'L', 20: b'U', 29: b'5',
3: b'D', 12: b'M', 21: b'V', 30: b'6',
4: b'E', 13: b'N', 22: b'W', 31: b'7',
5: b'F', 14: b'O', 23: b'X',
6: b'G', 15: b'P', 24: b'Y',
7: b'H', 16: b'Q', 25: b'Z',
8: b'I', 17: b'R', 26: b'2',
}
0: b"A",
9: b"J",
18: b"S",
27: b"3",
1: b"B",
10: b"K",
19: b"T",
28: b"4",
2: b"C",
11: b"L",
20: b"U",
29: b"5",
3: b"D",
12: b"M",
21: b"V",
30: b"6",
4: b"E",
13: b"N",
22: b"W",
31: b"7",
5: b"F",
14: b"O",
23: b"X",
6: b"G",
15: b"P",
24: b"Y",
7: b"H",
16: b"Q",
25: b"Z",
8: b"I",
17: b"R",
26: b"2",
}

_b32tab = [v[0] for k, v in sorted(_b32alphabet.items())]
_b32rev = dict([(v[0], k) for k, v in _b32alphabet.items()])
@ -176,27 +210,30 @@ def b32encode(s):
# leftover bit of c1 and tack it onto c2. Then we take the 2 leftover
# bits of c2 and tack them onto c3. The shifts and masks are intended
# to give us values of exactly 5 bits in width.
c1, c2, c3 = struct.unpack('!HHB', s[i*5:(i+1)*5])
c2 += (c1 & 1) << 16 # 17 bits wide
c1, c2, c3 = struct.unpack("!HHB", s[i * 5 : (i + 1) * 5])
c2 += (c1 & 1) << 16 # 17 bits wide
c3 += (c2 & 3) << 8 # 10 bits wide
encoded += bytes([_b32tab[c1 >> 11], # bits 1 - 5
_b32tab[(c1 >> 6) & 0x1f], # bits 6 - 10
_b32tab[(c1 >> 1) & 0x1f], # bits 11 - 15
_b32tab[c2 >> 12], # bits 16 - 20 (1 - 5)
_b32tab[(c2 >> 7) & 0x1f], # bits 21 - 25 (6 - 10)
_b32tab[(c2 >> 2) & 0x1f], # bits 26 - 30 (11 - 15)
_b32tab[c3 >> 5], # bits 31 - 35 (1 - 5)
_b32tab[c3 & 0x1f], # bits 36 - 40 (1 - 5)
])
encoded += bytes(
[
_b32tab[c1 >> 11], # bits 1 - 5
_b32tab[(c1 >> 6) & 0x1F], # bits 6 - 10
_b32tab[(c1 >> 1) & 0x1F], # bits 11 - 15
_b32tab[c2 >> 12], # bits 16 - 20 (1 - 5)
_b32tab[(c2 >> 7) & 0x1F], # bits 21 - 25 (6 - 10)
_b32tab[(c2 >> 2) & 0x1F], # bits 26 - 30 (11 - 15)
_b32tab[c3 >> 5], # bits 31 - 35 (1 - 5)
_b32tab[c3 & 0x1F], # bits 36 - 40 (1 - 5)
]
)
# Adjust for any leftover partial quanta
if leftover == 1:
encoded = encoded[:-6] + b'======'
encoded = encoded[:-6] + b"======"
elif leftover == 2:
encoded = encoded[:-4] + b'===='
encoded = encoded[:-4] + b"===="
elif leftover == 3:
encoded = encoded[:-3] + b'==='
encoded = encoded[:-3] + b"==="
elif leftover == 4:
encoded = encoded[:-1] + b'='
encoded = encoded[:-1] + b"="
return bytes(encoded)


@ -222,20 +259,20 @@ def b32decode(s, casefold=False, map01=None):
s = _bytes_from_decode_data(s)
quanta, leftover = divmod(len(s), 8)
if leftover:
raise binascii.Error('Incorrect padding')
raise binascii.Error("Incorrect padding")
# Handle section 2.4 zero and one mapping. The flag map01 will be either
# False, or the character to map the digit 1 (one) to. It should be
# either L (el) or I (eye).
if map01 is not None:
map01 = _bytes_from_decode_data(map01)
assert len(map01) == 1, repr(map01)
s = s.translate(bytes.maketrans(b'01', b'O' + map01))
s = s.translate(bytes.maketrans(b"01", b"O" + map01))
if casefold:
s = s.upper()
# Strip off pad characters from the right. We need to count the pad
# characters because this will tell us how many null bytes to remove from
# the end of the decoded string.
padchars = s.find(b'=')
padchars = s.find(b"=")
if padchars > 0:
padchars = len(s) - padchars
s = s[:-padchars]
@ -249,17 +286,17 @@ def b32decode(s, casefold=False, map01=None):
for c in s:
val = _b32rev.get(c)
if val is None:
raise binascii.Error('Non-base32 digit found')
raise binascii.Error("Non-base32 digit found")
acc += _b32rev[c] << shift
shift -= 5
if shift < 0:
parts.append(binascii.unhexlify(bytes('%010x' % acc, "ascii")))
parts.append(binascii.unhexlify(bytes("%010x" % acc, "ascii")))
acc = 0
shift = 35
# Process the last, partial quanta
last = binascii.unhexlify(bytes('%010x' % acc, "ascii"))
last = binascii.unhexlify(bytes("%010x" % acc, "ascii"))
if padchars == 0:
last = b'' # No characters
last = b"" # No characters
elif padchars == 1:
last = last[:-1]
elif padchars == 3:
@ -269,10 +306,9 @@ def b32decode(s, casefold=False, map01=None):
elif padchars == 6:
last = last[:-4]
else:
raise binascii.Error('Incorrect padding')
raise binascii.Error("Incorrect padding")
parts.append(last)
return b''.join(parts)

return b"".join(parts)


# RFC 3548, Base 16 Alphabet specifies uppercase, but hexlify() returns
@ -302,18 +338,18 @@ def b16decode(s, casefold=False):
s = _bytes_from_decode_data(s)
if casefold:
s = s.upper()
if re.search(b'[^0-9A-F]', s):
raise binascii.Error('Non-base16 digit found')
if re.search(b"[^0-9A-F]", s):
raise binascii.Error("Non-base16 digit found")
return binascii.unhexlify(s)



# Legacy interface. This code could be cleaned up since I don't believe
# binascii has any line length limitations. It just doesn't seem worth it
# though. The files should be opened in binary mode.

MAXLINESIZE = 76 # Excluding the CRLF
MAXBINSIZE = (MAXLINESIZE//4)*3
MAXLINESIZE = 76 # Excluding the CRLF
MAXBINSIZE = (MAXLINESIZE // 4) * 3


def encode(input, output):
"""Encode a file; input and output are binary files."""
@ -322,7 +358,7 @@ def encode(input, output):
if not s:
break
while len(s) < MAXBINSIZE:
ns = input.read(MAXBINSIZE-len(s))
ns = input.read(MAXBINSIZE - len(s))
if not ns:
break
s += ns
@ -351,11 +387,12 @@ def encodebytes(s):
pieces.append(binascii.b2a_base64(chunk))
return b"".join(pieces)


def encodestring(s):
"""Legacy alias of encodebytes()."""
import warnings
warnings.warn("encodestring() is a deprecated alias, use encodebytes()",
DeprecationWarning, 2)

warnings.warn("encodestring() is a deprecated alias, use encodebytes()", DeprecationWarning, 2)
return encodebytes(s)


@ -365,11 +402,12 @@ def decodebytes(s):
raise TypeError("expected bytes, not %s" % s.__class__.__name__)
return binascii.a2b_base64(s)


def decodestring(s):
"""Legacy alias of decodebytes()."""
import warnings
warnings.warn("decodestring() is a deprecated alias, use decodebytes()",
DeprecationWarning, 2)

warnings.warn("decodestring() is a deprecated alias, use decodebytes()", DeprecationWarning, 2)
return decodebytes(s)


@ -377,24 +415,33 @@ def decodestring(s):
def main():
"""Small main program"""
import sys, getopt

try:
opts, args = getopt.getopt(sys.argv[1:], 'deut')
opts, args = getopt.getopt(sys.argv[1:], "deut")
except getopt.error as msg:
sys.stdout = sys.stderr
print(msg)
print("""usage: %s [-d|-e|-u|-t] [file|-]
print(
"""usage: %s [-d|-e|-u|-t] [file|-]
-d, -u: decode
-e: encode (default)
-t: encode and decode string 'Aladdin:open sesame'"""%sys.argv[0])
-t: encode and decode string 'Aladdin:open sesame'"""
% sys.argv[0]
)
sys.exit(2)
func = encode
for o, a in opts:
if o == '-e': func = encode
if o == '-d': func = decode
if o == '-u': func = decode
if o == '-t': test(); return
if args and args[0] != '-':
with open(args[0], 'rb') as f:
if o == "-e":
func = encode
if o == "-d":
func = decode
if o == "-u":
func = decode
if o == "-t":
test()
return
if args and args[0] != "-":
with open(args[0], "rb") as f:
func(f, sys.stdout.buffer)
else:
func(sys.stdin.buffer, sys.stdout.buffer)
@ -410,5 +457,5 @@ def test():
assert s0 == s2


if __name__ == '__main__':
if __name__ == "__main__":
main()
@ -1,21 +1,25 @@
import sys

# Remove current dir from sys.path, otherwise setuptools will peek up our
# module instead of system's.
sys.path.pop(0)
from setuptools import setup

sys.path.append("..")
import sdist_upip

setup(name='micropython-base64',
version='3.3.3-4',
description='CPython base64 module ported to MicroPython',
long_description='This is a module ported from CPython standard library to be compatible with\nMicroPython interpreter. Usually, this means applying small patches for\nfeatures not supported (yet, or at all) in MicroPython. Sometimes, heavier\nchanges are required. Note that CPython modules are written with availability\nof vast resources in mind, and may not work for MicroPython ports with\nlimited heap. If you are affected by such a case, please help reimplement\nthe module from scratch.',
url='https://github.com/micropython/micropython-lib',
author='CPython Developers',
author_email='python-dev@python.org',
maintainer='micropython-lib Developers',
maintainer_email='micro-python@googlegroups.com',
license='Python',
cmdclass={'sdist': sdist_upip.sdist},
py_modules=['base64'],
install_requires=['micropython-binascii', 'micropython-re-pcre', 'micropython-struct'])
setup(
name="micropython-base64",
version="3.3.3-4",
description="CPython base64 module ported to MicroPython",
long_description="This is a module ported from CPython standard library to be compatible with\nMicroPython interpreter. Usually, this means applying small patches for\nfeatures not supported (yet, or at all) in MicroPython. Sometimes, heavier\nchanges are required. Note that CPython modules are written with availability\nof vast resources in mind, and may not work for MicroPython ports with\nlimited heap. If you are affected by such a case, please help reimplement\nthe module from scratch.",
url="https://github.com/micropython/micropython-lib",
author="CPython Developers",
author_email="python-dev@python.org",
maintainer="micropython-lib Developers",
maintainer_email="micro-python@googlegroups.com",
license="Python",
cmdclass={"sdist": sdist_upip.sdist},
py_modules=["base64"],
install_requires=["micropython-binascii", "micropython-re-pcre", "micropython-struct"],
)
@ -1,22 +1,22 @@
import base64

b = base64.b64encode(b'zlutoucky kun upel dabelske ody')
b = base64.b64encode(b"zlutoucky kun upel dabelske ody")
print(b)

if b != b'emx1dG91Y2t5IGt1biB1cGVsIGRhYmVsc2tlIG9keQ==':
if b != b"emx1dG91Y2t5IGt1biB1cGVsIGRhYmVsc2tlIG9keQ==":
raise Exception("Error")

d = base64.b64decode(b)
print(d)

if d != b'zlutoucky kun upel dabelske ody':
if d != b"zlutoucky kun upel dabelske ody":
raise Exception("Error")

base64.test()

binary = b'\x99\x10\xaa'
binary = b"\x99\x10\xaa"
b = base64.b64encode(binary)
if b != b'mRCq':
if b != b"mRCq":
raise Exception("Error")

d = base64.b64decode(b)
@ -24,13 +24,13 @@ print(d)
if d != binary:
raise Exception("Error")

d = base64.b32encode(b'zlutoucky kun upel dabelske ody')
if d != b'PJWHK5DPOVRWW6JANN2W4IDVOBSWYIDEMFRGK3DTNNSSA33EPE======':
d = base64.b32encode(b"zlutoucky kun upel dabelske ody")
if d != b"PJWHK5DPOVRWW6JANN2W4IDVOBSWYIDEMFRGK3DTNNSSA33EPE======":
raise Exception("Error")

print(d)
b = base64.b32decode(d)
if b != b'zlutoucky kun upel dabelske ody':
if b != b"zlutoucky kun upel dabelske ody":
raise Exception("Error")

print("OK")
@ -1,45 +1,292 @@
|
|||
from ubinascii import *
|
||||
|
||||
if not "unhexlify" in globals():
|
||||
|
||||
def unhexlify(data):
|
||||
if len(data) % 2 != 0:
|
||||
raise ValueError("Odd-length string")
|
||||
|
||||
return bytes([ int(data[i:i+2], 16) for i in range(0, len(data), 2) ])
|
||||
return bytes([int(data[i : i + 2], 16) for i in range(0, len(data), 2)])
|
||||
|
||||
|
||||
b2a_hex = hexlify
|
||||
a2b_hex = unhexlify
|
||||
|
||||
# ____________________________________________________________
|
||||
|
||||
PAD = '='
|
||||
PAD = "="
|
||||
|
||||
table_a2b_base64 = [
|
||||
-1,-1,-1,-1, -1,-1,-1,-1, -1,-1,-1,-1, -1,-1,-1,-1,
|
||||
-1,-1,-1,-1, -1,-1,-1,-1, -1,-1,-1,-1, -1,-1,-1,-1,
|
||||
-1,-1,-1,-1, -1,-1,-1,-1, -1,-1,-1,62, -1,-1,-1,63,
|
||||
52,53,54,55, 56,57,58,59, 60,61,-1,-1, -1,-1,-1,-1, # Note PAD->-1 here
|
||||
-1, 0, 1, 2, 3, 4, 5, 6, 7, 8, 9,10, 11,12,13,14,
|
||||
15,16,17,18, 19,20,21,22, 23,24,25,-1, -1,-1,-1,-1,
|
||||
-1,26,27,28, 29,30,31,32, 33,34,35,36, 37,38,39,40,
|
||||
41,42,43,44, 45,46,47,48, 49,50,51,-1, -1,-1,-1,-1,
|
||||
-1,-1,-1,-1, -1,-1,-1,-1, -1,-1,-1,-1, -1,-1,-1,-1,
|
||||
-1,-1,-1,-1, -1,-1,-1,-1, -1,-1,-1,-1, -1,-1,-1,-1,
|
||||
-1,-1,-1,-1, -1,-1,-1,-1, -1,-1,-1,-1, -1,-1,-1,-1,
|
||||
-1,-1,-1,-1, -1,-1,-1,-1, -1,-1,-1,-1, -1,-1,-1,-1,
|
||||
-1,-1,-1,-1, -1,-1,-1,-1, -1,-1,-1,-1, -1,-1,-1,-1,
|
||||
-1,-1,-1,-1, -1,-1,-1,-1, -1,-1,-1,-1, -1,-1,-1,-1,
|
||||
-1,-1,-1,-1, -1,-1,-1,-1, -1,-1,-1,-1, -1,-1,-1,-1,
|
||||
-1,-1,-1,-1, -1,-1,-1,-1, -1,-1,-1,-1, -1,-1,-1,-1,
|
||||
-1,
|
||||
-1,
|
||||
-1,
|
||||
-1,
|
||||
-1,
|
||||
-1,
|
||||
-1,
|
||||
-1,
|
||||
-1,
|
||||
-1,
|
||||
-1,
|
||||
-1,
|
||||
-1,
|
||||
-1,
|
||||
-1,
|
||||
-1,
|
||||
-1,
|
||||
-1,
|
||||
-1,
|
||||
-1,
|
||||
-1,
|
||||
-1,
|
||||
-1,
|
||||
-1,
|
||||
-1,
|
||||
-1,
|
||||
-1,
|
||||
-1,
|
||||
-1,
|
||||
-1,
|
||||
-1,
|
||||
-1,
|
||||
-1,
|
||||
-1,
|
||||
-1,
|
||||
-1,
|
||||
-1,
|
||||
-1,
|
||||
-1,
|
||||
-1,
|
||||
-1,
|
||||
-1,
|
||||
-1,
|
||||
62,
|
||||
-1,
|
||||
-1,
|
||||
-1,
|
||||
63,
|
||||
52,
|
||||
53,
|
||||
54,
|
||||
55,
|
||||
56,
|
||||
57,
|
||||
58,
|
||||
59,
|
||||
60,
|
||||
61,
|
||||
-1,
|
||||
-1,
|
||||
-1,
|
||||
-1,
|
||||
-1,
|
||||
-1, # Note PAD->-1 here
|
||||
-1,
|
||||
0,
|
||||
1,
|
||||
2,
|
||||
3,
|
||||
4,
|
||||
5,
|
||||
6,
|
||||
7,
|
||||
8,
|
||||
9,
|
||||
10,
|
||||
11,
|
||||
12,
|
||||
13,
|
||||
14,
|
||||
15,
|
||||
16,
|
||||
17,
|
||||
18,
|
||||
19,
|
||||
20,
|
||||
21,
|
||||
22,
|
||||
23,
|
||||
24,
|
||||
25,
|
||||
-1,
|
||||
-1,
|
||||
-1,
|
||||
-1,
|
||||
-1,
|
||||
-1,
|
||||
26,
|
||||
27,
|
||||
28,
|
||||
29,
|
||||
30,
|
||||
31,
|
||||
32,
|
||||
33,
|
||||
34,
|
||||
35,
|
||||
36,
|
||||
37,
|
||||
38,
|
||||
39,
|
||||
40,
|
||||
41,
|
||||
42,
|
||||
43,
|
||||
44,
|
||||
45,
|
||||
46,
|
||||
47,
|
||||
48,
|
||||
49,
|
||||
50,
|
||||
51,
|
||||
-1,
|
||||
-1,
|
||||
-1,
|
||||
-1,
|
||||
-1,
|
||||
-1,
|
||||
-1,
|
||||
-1,
|
||||
-1,
|
||||
-1,
|
||||
-1,
|
||||
-1,
|
||||
-1,
|
||||
-1,
|
||||
-1,
|
||||
-1,
|
||||
-1,
|
||||
-1,
|
||||
-1,
|
||||
-1,
|
||||
-1,
|
||||
-1,
|
||||
-1,
|
||||
-1,
|
||||
-1,
|
||||
-1,
|
||||
-1,
|
||||
-1,
|
||||
-1,
|
||||
-1,
|
||||
-1,
|
||||
-1,
|
||||
-1,
|
||||
-1,
|
||||
-1,
|
||||
-1,
|
||||
-1,
|
||||
-1,
|
||||
-1,
|
||||
-1,
|
||||
-1,
|
||||
-1,
|
||||
-1,
|
||||
-1,
|
||||
-1,
|
||||
-1,
|
||||
-1,
|
||||
-1,
|
||||
-1,
|
||||
-1,
|
||||
-1,
|
||||
-1,
|
||||
-1,
|
||||
-1,
|
||||
-1,
|
||||
-1,
|
||||
-1,
|
||||
-1,
|
||||
-1,
|
||||
-1,
|
||||
-1,
|
||||
-1,
|
||||
-1,
|
||||
-1,
|
||||
-1,
|
||||
-1,
|
||||
-1,
|
||||
-1,
|
||||
-1,
|
||||
-1,
|
||||
-1,
|
||||
-1,
|
||||
-1,
|
||||
-1,
|
||||
-1,
|
||||
-1,
|
||||
-1,
|
||||
-1,
|
||||
-1,
|
||||
-1,
|
||||
-1,
|
||||
-1,
|
||||
-1,
|
||||
-1,
|
||||
-1,
|
||||
-1,
|
||||
-1,
|
||||
-1,
|
||||
-1,
|
||||
-1,
|
||||
-1,
|
||||
-1,
|
||||
-1,
|
||||
-1,
|
||||
-1,
|
||||
-1,
|
||||
-1,
|
||||
-1,
|
||||
-1,
|
||||
-1,
|
||||
-1,
|
||||
-1,
|
||||
-1,
|
||||
-1,
|
||||
-1,
|
||||
-1,
|
||||
-1,
|
||||
-1,
|
||||
-1,
|
||||
-1,
|
||||
-1,
|
||||
-1,
|
||||
-1,
|
||||
-1,
|
||||
-1,
|
||||
-1,
|
||||
-1,
|
||||
-1,
|
||||
-1,
|
||||
-1,
|
||||
-1,
|
||||
-1,
|
||||
-1,
|
||||
-1,
|
||||
-1,
|
||||
-1,
|
||||
-1,
|
||||
-1,
|
||||
-1,
|
||||
-1,
|
||||
-1,
|
||||
-1,
|
||||
-1,
|
||||
]
|
||||
|
||||
|
||||
def _transform(n):
|
||||
if n == -1:
|
||||
return '\xff'
|
||||
return "\xff"
|
||||
else:
|
||||
return chr(n)
|
||||
table_a2b_base64 = ''.join(map(_transform, table_a2b_base64))
|
||||
|
||||
|
||||
table_a2b_base64 = "".join(map(_transform, table_a2b_base64))
|
||||
assert len(table_a2b_base64) == 256
|
||||
|
||||
|
||||
def a2b_base64(ascii):
|
||||
"Decode a line of base64 data."
|
||||
|
||||
|
@ -53,12 +300,12 @@ def a2b_base64(ascii):
|
|||
c = chr(c)
|
||||
if c == PAD:
|
||||
if quad_pos > 2 or (quad_pos == 2 and last_char_was_a_pad):
|
||||
break # stop on 'xxx=' or on 'xx=='
|
||||
break # stop on 'xxx=' or on 'xx=='
|
||||
last_char_was_a_pad = True
|
||||
else:
|
||||
n = ord(table_a2b_base64[ord(c)])
|
||||
if n == 0xff:
|
||||
continue # ignore strange characters
|
||||
if n == 0xFF:
|
||||
continue # ignore strange characters
|
||||
#
|
||||
# Shift it in on the low end, and see if there's
|
||||
# a byte ready for output.
|
||||
|
@ -68,20 +315,21 @@ def a2b_base64(ascii):
|
|||
#
|
||||
if leftbits >= 8:
|
||||
leftbits -= 8
|
||||
res.append((leftchar >> leftbits).to_bytes(1, 'big'))
|
||||
leftchar &= ((1 << leftbits) - 1)
|
||||
res.append((leftchar >> leftbits).to_bytes(1, "big"))
|
||||
leftchar &= (1 << leftbits) - 1
|
||||
#
|
||||
last_char_was_a_pad = False
|
||||
else:
|
||||
if leftbits != 0:
|
||||
raise Exception("Incorrect padding")
|
||||
|
||||
return b''.join(res)
|
||||
return b"".join(res)
|
||||
|
||||
|
||||
# ____________________________________________________________
|
||||
|
||||
table_b2a_base64 = (
|
||||
"ABCDEFGHIJKLMNOPQRSTUVWXYZabcdefghijklmnopqrstuvwxyz0123456789+/")
|
||||
table_b2a_base64 = "ABCDEFGHIJKLMNOPQRSTUVWXYZabcdefghijklmnopqrstuvwxyz0123456789+/"
|
||||
|
||||
|
||||
def b2a_base64(bin):
|
||||
"Base64-code line of data."
|
||||
|
@ -96,10 +344,10 @@ def b2a_base64(bin):
|
|||
# Shift into our buffer, and output any 6bits ready
|
||||
leftchar = (leftchar << 8) | c
|
||||
leftbits += 8
|
||||
res.append(table_b2a_base64[(leftchar >> (leftbits-6)) & 0x3f])
|
||||
res.append(table_b2a_base64[(leftchar >> (leftbits - 6)) & 0x3F])
|
||||
leftbits -= 6
|
||||
if leftbits >= 6:
|
||||
res.append(table_b2a_base64[(leftchar >> (leftbits-6)) & 0x3f])
|
||||
res.append(table_b2a_base64[(leftchar >> (leftbits - 6)) & 0x3F])
|
||||
leftbits -= 6
|
||||
#
|
||||
if leftbits == 2:
|
||||
|
@ -107,7 +355,7 @@ def b2a_base64(bin):
|
|||
res.append(PAD)
|
||||
res.append(PAD)
|
||||
elif leftbits == 4:
|
||||
res.append(table_b2a_base64[(leftchar & 0xf) << 2])
|
||||
res.append(table_b2a_base64[(leftchar & 0xF) << 2])
|
||||
res.append(PAD)
|
||||
res.append('\n')
|
||||
return ''.join(res).encode('ascii')
|
||||
res.append("\n")
|
||||
return "".join(res).encode("ascii")
|
||||
|
|
|
@ -1,20 +1,24 @@
|
|||
import sys
|
||||
|
||||
# Remove current dir from sys.path, otherwise setuptools will peek up our
|
||||
# module instead of system's.
|
||||
sys.path.pop(0)
|
||||
from setuptools import setup
|
||||
|
||||
sys.path.append("..")
|
||||
import sdist_upip
|
||||
|
||||
setup(name='micropython-binascii',
|
||||
version='2.4.0-5',
|
||||
description='PyPy binascii module ported to MicroPython',
|
||||
long_description='This is a module ported from PyPy standard library to be compatible with\nMicroPython interpreter. Usually, this means applying small patches for\nfeatures not supported (yet, or at all) in MicroPython. Sometimes, heavier\nchanges are required. Note that CPython modules are written with availability\nof vast resources in mind, and may not work for MicroPython ports with\nlimited heap. If you are affected by such a case, please help reimplement\nthe module from scratch.',
|
||||
url='https://github.com/micropython/micropython-lib',
|
||||
author='PyPy Developers',
|
||||
author_email='pypy-dev@python.org',
|
||||
maintainer='micropython-lib Developers',
|
||||
maintainer_email='micro-python@googlegroups.com',
|
||||
license='MIT',
|
||||
cmdclass={'sdist': sdist_upip.sdist},
|
||||
py_modules=['binascii'])
|
||||
setup(
|
||||
name="micropython-binascii",
|
||||
version="2.4.0-5",
|
||||
description="PyPy binascii module ported to MicroPython",
|
||||
long_description="This is a module ported from PyPy standard library to be compatible with\nMicroPython interpreter. Usually, this means applying small patches for\nfeatures not supported (yet, or at all) in MicroPython. Sometimes, heavier\nchanges are required. Note that CPython modules are written with availability\nof vast resources in mind, and may not work for MicroPython ports with\nlimited heap. If you are affected by such a case, please help reimplement\nthe module from scratch.",
|
||||
url="https://github.com/micropython/micropython-lib",
|
||||
author="PyPy Developers",
|
||||
author_email="pypy-dev@python.org",
|
||||
maintainer="micropython-lib Developers",
|
||||
maintainer_email="micro-python@googlegroups.com",
|
||||
license="MIT",
|
||||
cmdclass={"sdist": sdist_upip.sdist},
|
||||
py_modules=["binascii"],
|
||||
)
|
||||
|
|
|
@ -1,10 +1,10 @@
|
|||
from binascii import *
|
||||
import utime
|
||||
|
||||
data = b'zlutoucky kun upel dabelske ody'
|
||||
data = b"zlutoucky kun upel dabelske ody"
|
||||
h = hexlify(data)
|
||||
|
||||
if h != b'7a6c75746f75636b79206b756e207570656c20646162656c736b65206f6479':
|
||||
if h != b"7a6c75746f75636b79206b756e207570656c20646162656c736b65206f6479":
|
||||
raise Exception("Error")
|
||||
|
||||
data2 = unhexlify(h)
|
||||
|
@ -12,7 +12,7 @@ data2 = unhexlify(h)
|
|||
if data2 != data:
|
||||
raise Exception("Error")
|
||||
|
||||
a2b_base64(b"as==") == b'j'
|
||||
a2b_base64(b"as==") == b"j"
|
||||
|
||||
start = utime.time()
|
||||
for x in range(100000):
|
||||
|
|
|
@ -1,5 +1,6 @@
|
|||
"""Bisection algorithms."""
|
||||
|
||||
|
||||
def insort_right(a, x, lo=0, hi=None):
|
||||
"""Insert item x in list a, and keep it sorted assuming a is sorted.
|
||||
|
||||
|
@ -10,16 +11,20 @@ def insort_right(a, x, lo=0, hi=None):
|
|||
"""
|
||||
|
||||
if lo < 0:
|
||||
raise ValueError('lo must be non-negative')
|
||||
raise ValueError("lo must be non-negative")
|
||||
if hi is None:
|
||||
hi = len(a)
|
||||
while lo < hi:
|
||||
mid = (lo+hi)//2
|
||||
if x < a[mid]: hi = mid
|
||||
else: lo = mid+1
|
||||
mid = (lo + hi) // 2
|
||||
if x < a[mid]:
|
||||
hi = mid
|
||||
else:
|
||||
lo = mid + 1
|
||||
a.insert(lo, x)
|
||||
|
||||
insort = insort_right # backward compatibility
|
||||
|
||||
insort = insort_right # backward compatibility
|
||||
|
||||
|
||||
def bisect_right(a, x, lo=0, hi=None):
|
||||
"""Return the index where to insert item x in list a, assuming a is sorted.
|
||||
|
@ -33,16 +38,20 @@ def bisect_right(a, x, lo=0, hi=None):
|
|||
"""
|
||||
|
||||
if lo < 0:
|
||||
raise ValueError('lo must be non-negative')
|
||||
raise ValueError("lo must be non-negative")
|
||||
if hi is None:
|
||||
hi = len(a)
|
||||
while lo < hi:
|
||||
mid = (lo+hi)//2
|
||||
if x < a[mid]: hi = mid
|
||||
else: lo = mid+1
|
||||
mid = (lo + hi) // 2
|
||||
if x < a[mid]:
|
||||
hi = mid
|
||||
else:
|
||||
lo = mid + 1
|
||||
return lo
|
||||
|
||||
bisect = bisect_right # backward compatibility
|
||||
|
||||
bisect = bisect_right # backward compatibility
|
||||
|
||||
|
||||
def insort_left(a, x, lo=0, hi=None):
|
||||
"""Insert item x in list a, and keep it sorted assuming a is sorted.
|
||||
|
@ -54,13 +63,15 @@ def insort_left(a, x, lo=0, hi=None):
|
|||
"""
|
||||
|
||||
if lo < 0:
|
||||
raise ValueError('lo must be non-negative')
|
||||
raise ValueError("lo must be non-negative")
|
||||
if hi is None:
|
||||
hi = len(a)
|
||||
while lo < hi:
|
||||
mid = (lo+hi)//2
|
||||
if a[mid] < x: lo = mid+1
|
||||
else: hi = mid
|
||||
mid = (lo + hi) // 2
|
||||
if a[mid] < x:
|
||||
lo = mid + 1
|
||||
else:
|
||||
hi = mid
|
||||
a.insert(lo, x)
|
||||
|
||||
|
||||
|
@ -76,15 +87,18 @@ def bisect_left(a, x, lo=0, hi=None):
|
|||
"""
|
||||
|
||||
if lo < 0:
|
||||
raise ValueError('lo must be non-negative')
|
||||
raise ValueError("lo must be non-negative")
|
||||
if hi is None:
|
||||
hi = len(a)
|
||||
while lo < hi:
|
||||
mid = (lo+hi)//2
|
||||
if a[mid] < x: lo = mid+1
|
||||
else: hi = mid
|
||||
mid = (lo + hi) // 2
|
||||
if a[mid] < x:
|
||||
lo = mid + 1
|
||||
else:
|
||||
hi = mid
|
||||
return lo
|
||||
|
||||
|
||||
# Overwrite above definitions with a fast C implementation
|
||||
try:
|
||||
from _bisect import *
|
||||
|
|
|
@ -1,4 +1,5 @@
|
|||
import sys
|
||||
|
||||
# Remove current dir from sys.path, otherwise distutils will peek up our
|
||||
# module instead of system.
|
||||
sys.path.pop(0)
|
||||
|
@ -6,18 +7,23 @@ from setuptools import setup
|
|||
|
||||
|
||||
def desc_dummy(name):
|
||||
return 'Dummy %s module to MicroPython' % name
|
||||
return "Dummy %s module to MicroPython" % name
|
||||
|
||||
|
||||
def desc_cpython(name):
|
||||
return 'CPython %s module ported to MicroPython' % name
|
||||
return "CPython %s module ported to MicroPython" % name
|
||||
|
||||
NAME = 'bisect'
|
||||
|
||||
setup(name='micropython-' + NAME,
|
||||
version='0.5',
|
||||
description=desc_cpython(NAME),
|
||||
url='https://github.com/micropython/micropython/issues/405',
|
||||
author='CPython Developers',
|
||||
maintainer='MicroPython Developers',
|
||||
maintainer_email='micro-python@googlegroups.com',
|
||||
license='Python',
|
||||
py_modules=[NAME])
|
||||
NAME = "bisect"
|
||||
|
||||
setup(
|
||||
name="micropython-" + NAME,
|
||||
version="0.5",
|
||||
description=desc_cpython(NAME),
|
||||
url="https://github.com/micropython/micropython/issues/405",
|
||||
author="CPython Developers",
|
||||
maintainer="MicroPython Developers",
|
||||
maintainer_email="micro-python@googlegroups.com",
|
||||
license="Python",
|
||||
py_modules=[NAME],
|
||||
)
|
||||
|
|
|
@ -41,17 +41,29 @@ import html
|
|||
import locale
|
||||
import tempfile
|
||||
|
||||
__all__ = ["MiniFieldStorage", "FieldStorage",
|
||||
"parse", "parse_qs", "parse_qsl", "parse_multipart",
|
||||
"parse_header", "print_exception", "print_environ",
|
||||
"print_form", "print_directory", "print_arguments",
|
||||
"print_environ_usage", "escape"]
|
||||
__all__ = [
|
||||
"MiniFieldStorage",
|
||||
"FieldStorage",
|
||||
"parse",
|
||||
"parse_qs",
|
||||
"parse_qsl",
|
||||
"parse_multipart",
|
||||
"parse_header",
|
||||
"print_exception",
|
||||
"print_environ",
|
||||
"print_form",
|
||||
"print_directory",
|
||||
"print_arguments",
|
||||
"print_environ_usage",
|
||||
"escape",
|
||||
]
|
||||
|
||||
# Logging support
|
||||
# ===============
|
||||
|
||||
logfile = "" # Filename to log to, if not empty
|
||||
logfp = None # File object to log to, if not None
|
||||
logfile = "" # Filename to log to, if not empty
|
||||
logfp = None # File object to log to, if not None
|
||||
|
||||
|
||||
def initlog(*allargs):
|
||||
"""Write a log message, if there is a log file.
|
||||
|
@ -88,24 +100,28 @@ def initlog(*allargs):
|
|||
log = dolog
|
||||
log(*allargs)
|
||||
|
||||
|
||||
def dolog(fmt, *args):
|
||||
"""Write a log message to the log file. See initlog() for docs."""
|
||||
logfp.write(fmt%args + "\n")
|
||||
logfp.write(fmt % args + "\n")
|
||||
|
||||
|
||||
def nolog(*allargs):
|
||||
"""Dummy function, assigned to log when logging is disabled."""
|
||||
pass
|
||||
|
||||
|
||||
def closelog():
|
||||
"""Close the log file."""
|
||||
global log, logfile, logfp
|
||||
logfile = ''
|
||||
logfile = ""
|
||||
if logfp:
|
||||
logfp.close()
|
||||
logfp = None
|
||||
log = initlog
|
||||
|
||||
log = initlog # The current logging function
|
||||
|
||||
log = initlog # The current logging function
|
||||
|
||||
|
||||
# Parsing functions
|
||||
|
@ -115,87 +131,90 @@ log = initlog # The current logging function
|
|||
# 0 ==> unlimited input
|
||||
maxlen = 0
|
||||
|
||||
|
||||
def parse(fp=None, environ=os.environ, keep_blank_values=0, strict_parsing=0):
|
||||
"""Parse a query in the environment or from a file (default stdin)
|
||||
|
||||
Arguments, all optional:
|
||||
Arguments, all optional:
|
||||
|
||||
fp : file pointer; default: sys.stdin.buffer
|
||||
fp : file pointer; default: sys.stdin.buffer
|
||||
|
||||
environ : environment dictionary; default: os.environ
|
||||
environ : environment dictionary; default: os.environ
|
||||
|
||||
keep_blank_values: flag indicating whether blank values in
|
||||
percent-encoded forms should be treated as blank strings.
|
||||
A true value indicates that blanks should be retained as
|
||||
blank strings. The default false value indicates that
|
||||
blank values are to be ignored and treated as if they were
|
||||
not included.
|
||||
keep_blank_values: flag indicating whether blank values in
|
||||
percent-encoded forms should be treated as blank strings.
|
||||
A true value indicates that blanks should be retained as
|
||||
blank strings. The default false value indicates that
|
||||
blank values are to be ignored and treated as if they were
|
||||
not included.
|
||||
|
||||
strict_parsing: flag indicating what to do with parsing errors.
|
||||
If false (the default), errors are silently ignored.
|
||||
If true, errors raise a ValueError exception.
|
||||
strict_parsing: flag indicating what to do with parsing errors.
|
||||
If false (the default), errors are silently ignored.
|
||||
If true, errors raise a ValueError exception.
|
||||
"""
|
||||
if fp is None:
|
||||
fp = sys.stdin
|
||||
|
||||
# field keys and values (except for files) are returned as strings
|
||||
# an encoding is required to decode the bytes read from self.fp
|
||||
if hasattr(fp,'encoding'):
|
||||
if hasattr(fp, "encoding"):
|
||||
encoding = fp.encoding
|
||||
else:
|
||||
encoding = 'latin-1'
|
||||
encoding = "latin-1"
|
||||
|
||||
# fp.read() must return bytes
|
||||
if isinstance(fp, TextIOWrapper):
|
||||
fp = fp.buffer
|
||||
|
||||
if not 'REQUEST_METHOD' in environ:
|
||||
environ['REQUEST_METHOD'] = 'GET' # For testing stand-alone
|
||||
if environ['REQUEST_METHOD'] == 'POST':
|
||||
ctype, pdict = parse_header(environ['CONTENT_TYPE'])
|
||||
if ctype == 'multipart/form-data':
|
||||
if not "REQUEST_METHOD" in environ:
|
||||
environ["REQUEST_METHOD"] = "GET" # For testing stand-alone
|
||||
if environ["REQUEST_METHOD"] == "POST":
|
||||
ctype, pdict = parse_header(environ["CONTENT_TYPE"])
|
||||
if ctype == "multipart/form-data":
|
||||
return parse_multipart(fp, pdict)
|
||||
elif ctype == 'application/x-www-form-urlencoded':
|
||||
clength = int(environ['CONTENT_LENGTH'])
|
||||
elif ctype == "application/x-www-form-urlencoded":
|
||||
clength = int(environ["CONTENT_LENGTH"])
|
||||
if maxlen and clength > maxlen:
|
||||
raise ValueError('Maximum content length exceeded')
|
||||
raise ValueError("Maximum content length exceeded")
|
||||
qs = fp.read(clength).decode(encoding)
|
||||
else:
|
||||
qs = '' # Unknown content-type
|
||||
if 'QUERY_STRING' in environ:
|
||||
if qs: qs = qs + '&'
|
||||
qs = qs + environ['QUERY_STRING']
|
||||
qs = "" # Unknown content-type
|
||||
if "QUERY_STRING" in environ:
|
||||
if qs:
|
||||
qs = qs + "&"
|
||||
qs = qs + environ["QUERY_STRING"]
|
||||
elif sys.argv[1:]:
|
||||
if qs: qs = qs + '&'
|
||||
if qs:
|
||||
qs = qs + "&"
|
||||
qs = qs + sys.argv[1]
|
||||
environ['QUERY_STRING'] = qs # XXX Shouldn't, really
|
||||
elif 'QUERY_STRING' in environ:
|
||||
qs = environ['QUERY_STRING']
|
||||
environ["QUERY_STRING"] = qs # XXX Shouldn't, really
|
||||
elif "QUERY_STRING" in environ:
|
||||
qs = environ["QUERY_STRING"]
|
||||
else:
|
||||
if sys.argv[1:]:
|
||||
qs = sys.argv[1]
|
||||
else:
|
||||
qs = ""
|
||||
environ['QUERY_STRING'] = qs # XXX Shouldn't, really
|
||||
return urllib.parse.parse_qs(qs, keep_blank_values, strict_parsing,
|
||||
encoding=encoding)
|
||||
environ["QUERY_STRING"] = qs # XXX Shouldn't, really
|
||||
return urllib.parse.parse_qs(qs, keep_blank_values, strict_parsing, encoding=encoding)
|
||||
|
||||
|
||||
# parse query string function called from urlparse,
|
||||
# this is done in order to maintain backward compatiblity.
|
||||
|
||||
|
||||
def parse_qs(qs, keep_blank_values=0, strict_parsing=0):
|
||||
"""Parse a query given as a string argument."""
|
||||
warn("cgi.parse_qs is deprecated, use urllib.parse.parse_qs instead",
|
||||
DeprecationWarning, 2)
|
||||
warn("cgi.parse_qs is deprecated, use urllib.parse.parse_qs instead", DeprecationWarning, 2)
|
||||
return urllib.parse.parse_qs(qs, keep_blank_values, strict_parsing)
|
||||
|
||||
|
||||
def parse_qsl(qs, keep_blank_values=0, strict_parsing=0):
|
||||
"""Parse a query given as a string argument."""
|
||||
warn("cgi.parse_qsl is deprecated, use urllib.parse.parse_qsl instead",
|
||||
DeprecationWarning, 2)
|
||||
warn("cgi.parse_qsl is deprecated, use urllib.parse.parse_qsl instead", DeprecationWarning, 2)
|
||||
return urllib.parse.parse_qsl(qs, keep_blank_values, strict_parsing)
|
||||
|
||||
|
||||
def parse_multipart(fp, pdict):
|
||||
"""Parse multipart input.
|
||||
|
||||
|
@ -224,11 +243,10 @@ def parse_multipart(fp, pdict):
|
|||
import http.client
|
||||
|
||||
boundary = b""
|
||||
if 'boundary' in pdict:
|
||||
boundary = pdict['boundary']
|
||||
if "boundary" in pdict:
|
||||
boundary = pdict["boundary"]
|
||||
if not valid_boundary(boundary):
|
||||
raise ValueError('Invalid boundary in multipart form: %r'
|
||||
% (boundary,))
|
||||
raise ValueError("Invalid boundary in multipart form: %r" % (boundary,))
|
||||
|
||||
nextpart = b"--" + boundary
|
||||
lastpart = b"--" + boundary + b"--"
|
||||
|
@ -241,7 +259,7 @@ def parse_multipart(fp, pdict):
|
|||
if terminator:
|
||||
# At start of next part. Read headers first.
|
||||
headers = http.client.parse_headers(fp)
|
||||
clength = headers.get('content-length')
|
||||
clength = headers.get("content-length")
|
||||
if clength:
|
||||
try:
|
||||
bytes = int(clength)
|
||||
|
@ -249,7 +267,7 @@ def parse_multipart(fp, pdict):
|
|||
pass
|
||||
if bytes > 0:
|
||||
if maxlen and bytes > maxlen:
|
||||
raise ValueError('Maximum content length exceeded')
|
||||
raise ValueError("Maximum content length exceeded")
|
||||
data = fp.read(bytes)
|
||||
else:
|
||||
data = b""
|
||||
|
@ -258,7 +276,7 @@ def parse_multipart(fp, pdict):
|
|||
while 1:
|
||||
line = fp.readline()
|
||||
if not line:
|
||||
terminator = lastpart # End outer loop
|
||||
terminator = lastpart # End outer loop
|
||||
break
|
||||
if line.startswith(b"--"):
|
||||
terminator = line.rstrip()
|
||||
|
@ -278,14 +296,14 @@ def parse_multipart(fp, pdict):
|
|||
line = line[:-1]
|
||||
lines[-1] = line
|
||||
data = b"".join(lines)
|
||||
line = headers['content-disposition']
|
||||
line = headers["content-disposition"]
|
||||
if not line:
|
||||
continue
|
||||
key, params = parse_header(line)
|
||||
if key != 'form-data':
|
||||
if key != "form-data":
|
||||
continue
|
||||
if 'name' in params:
|
||||
name = params['name']
|
||||
if "name" in params:
|
||||
name = params["name"]
|
||||
else:
|
||||
continue
|
||||
if name in partdict:
|
||||
|
@ -297,34 +315,35 @@ def parse_multipart(fp, pdict):
|
|||
|
||||
|
||||
def _parseparam(s):
|
||||
while s[:1] == ';':
|
||||
while s[:1] == ";":
|
||||
s = s[1:]
|
||||
end = s.find(';')
|
||||
end = s.find(";")
|
||||
while end > 0 and (s.count('"', 0, end) - s.count('\\"', 0, end)) % 2:
|
||||
end = s.find(';', end + 1)
|
||||
end = s.find(";", end + 1)
|
||||
if end < 0:
|
||||
end = len(s)
|
||||
f = s[:end]
|
||||
yield f.strip()
|
||||
s = s[end:]
|
||||
|
||||
|
||||
def parse_header(line):
|
||||
"""Parse a Content-type like header.
|
||||
|
||||
Return the main content-type and a dictionary of options.
|
||||
|
||||
"""
|
||||
parts = _parseparam(';' + line)
|
||||
parts = _parseparam(";" + line)
|
||||
key = parts.__next__()
|
||||
pdict = {}
|
||||
for p in parts:
|
||||
i = p.find('=')
|
||||
i = p.find("=")
|
||||
if i >= 0:
|
||||
name = p[:i].strip().lower()
|
||||
value = p[i+1:].strip()
|
||||
value = p[i + 1 :].strip()
|
||||
if len(value) >= 2 and value[0] == value[-1] == '"':
|
||||
value = value[1:-1]
|
||||
value = value.replace('\\\\', '\\').replace('\\"', '"')
|
||||
value = value.replace("\\\\", "\\").replace('\\"', '"')
|
||||
pdict[name] = value
|
||||
return key, pdict
|
||||
|
||||
|
@ -332,6 +351,7 @@ def parse_header(line):
|
|||
# Classes for field storage
|
||||
# =========================
|
||||
|
||||
|
||||
class MiniFieldStorage:
|
||||
|
||||
"""Like FieldStorage, for use when no file uploads are possible."""
|
||||
|
@ -400,9 +420,19 @@ class FieldStorage:
|
|||
directory and unlinking them as soon as they have been opened.
|
||||
|
||||
"""
|
||||
def __init__(self, fp=None, headers=None, outerboundary=b'',
|
||||
environ=os.environ, keep_blank_values=0, strict_parsing=0,
|
||||
limit=None, encoding='utf-8', errors='replace'):
|
||||
|
||||
def __init__(
|
||||
self,
|
||||
fp=None,
|
||||
headers=None,
|
||||
outerboundary=b"",
|
||||
environ=os.environ,
|
||||
keep_blank_values=0,
|
||||
strict_parsing=0,
|
||||
limit=None,
|
||||
encoding="utf-8",
|
||||
errors="replace",
|
||||
):
|
||||
"""Constructor. Read multipart/* until last part.
|
||||
|
||||
Arguments, all optional:
|
||||
|
@ -443,35 +473,34 @@ class FieldStorage:
|
|||
header)
|
||||
|
||||
"""
|
||||
method = 'GET'
|
||||
method = "GET"
|
||||
self.keep_blank_values = keep_blank_values
|
||||
self.strict_parsing = strict_parsing
|
||||
if 'REQUEST_METHOD' in environ:
|
||||
method = environ['REQUEST_METHOD'].upper()
|
||||
if "REQUEST_METHOD" in environ:
|
||||
method = environ["REQUEST_METHOD"].upper()
|
||||
self.qs_on_post = None
|
||||
if method == 'GET' or method == 'HEAD':
|
||||
if 'QUERY_STRING' in environ:
|
||||
qs = environ['QUERY_STRING']
|
||||
if method == "GET" or method == "HEAD":
|
||||
if "QUERY_STRING" in environ:
|
||||
qs = environ["QUERY_STRING"]
|
||||
elif sys.argv[1:]:
|
||||
qs = sys.argv[1]
|
||||
else:
|
||||
qs = ""
|
||||
qs = qs.encode(locale.getpreferredencoding(), 'surrogateescape')
|
||||
qs = qs.encode(locale.getpreferredencoding(), "surrogateescape")
|
||||
fp = BytesIO(qs)
|
||||
if headers is None:
|
||||
headers = {'content-type':
|
||||
"application/x-www-form-urlencoded"}
|
||||
headers = {"content-type": "application/x-www-form-urlencoded"}
|
||||
if headers is None:
|
||||
headers = {}
|
||||
if method == 'POST':
|
||||
if method == "POST":
|
||||
# Set default content-type for POST to what's traditional
|
||||
headers['content-type'] = "application/x-www-form-urlencoded"
|
||||
if 'CONTENT_TYPE' in environ:
|
||||
headers['content-type'] = environ['CONTENT_TYPE']
|
||||
if 'QUERY_STRING' in environ:
|
||||
self.qs_on_post = environ['QUERY_STRING']
|
||||
if 'CONTENT_LENGTH' in environ:
|
||||
headers['content-length'] = environ['CONTENT_LENGTH']
|
||||
headers["content-type"] = "application/x-www-form-urlencoded"
|
||||
if "CONTENT_TYPE" in environ:
|
||||
headers["content-type"] = environ["CONTENT_TYPE"]
|
||||
if "QUERY_STRING" in environ:
|
||||
self.qs_on_post = environ["QUERY_STRING"]
|
||||
if "CONTENT_LENGTH" in environ:
|
||||
headers["content-length"] = environ["CONTENT_LENGTH"]
|
||||
if fp is None:
|
||||
self.fp = sys.stdin.buffer
|
||||
# self.fp.read() must return bytes
|
||||
|
@ -485,8 +514,7 @@ class FieldStorage:
|
|||
|
||||
self.headers = headers
|
||||
if not isinstance(outerboundary, bytes):
|
||||
raise TypeError('outerboundary must be bytes, not %s'
|
||||
% type(outerboundary).__name__)
|
||||
raise TypeError("outerboundary must be bytes, not %s" % type(outerboundary).__name__)
|
||||
self.outerboundary = outerboundary
|
||||
|
||||
self.bytes_read = 0
|
||||
|
@ -494,16 +522,16 @@ class FieldStorage:
|
|||
|
||||
# Process content-disposition header
|
||||
cdisp, pdict = "", {}
|
||||
if 'content-disposition' in self.headers:
|
||||
cdisp, pdict = parse_header(self.headers['content-disposition'])
|
||||
if "content-disposition" in self.headers:
|
||||
cdisp, pdict = parse_header(self.headers["content-disposition"])
|
||||
self.disposition = cdisp
|
||||
self.disposition_options = pdict
|
||||
self.name = None
|
||||
if 'name' in pdict:
|
||||
self.name = pdict['name']
|
||||
if "name" in pdict:
|
||||
self.name = pdict["name"]
|
||||
self.filename = None
|
||||
if 'filename' in pdict:
|
||||
self.filename = pdict['filename']
|
||||
if "filename" in pdict:
|
||||
self.filename = pdict["filename"]
|
||||
self._binary_file = self.filename is not None
|
||||
|
||||
# Process content-type header
|
||||
|
@ -518,50 +546,49 @@ class FieldStorage:
|
|||
#
|
||||
# See below for what we do if there does exist a content-type header,
|
||||
# but it happens to be something we don't understand.
|
||||
if 'content-type' in self.headers:
|
||||
ctype, pdict = parse_header(self.headers['content-type'])
|
||||
elif self.outerboundary or method != 'POST':
|
||||
if "content-type" in self.headers:
|
||||
ctype, pdict = parse_header(self.headers["content-type"])
|
||||
elif self.outerboundary or method != "POST":
|
||||
ctype, pdict = "text/plain", {}
|
||||
else:
|
||||
ctype, pdict = 'application/x-www-form-urlencoded', {}
|
||||
ctype, pdict = "application/x-www-form-urlencoded", {}
|
||||
self.type = ctype
|
||||
self.type_options = pdict
|
||||
if 'boundary' in pdict:
|
||||
self.innerboundary = pdict['boundary'].encode(self.encoding)
|
||||
if "boundary" in pdict:
|
||||
self.innerboundary = pdict["boundary"].encode(self.encoding)
|
||||
else:
|
||||
self.innerboundary = b""
|
||||
|
||||
clen = -1
|
||||
if 'content-length' in self.headers:
|
||||
if "content-length" in self.headers:
|
||||
try:
|
||||
clen = int(self.headers['content-length'])
|
||||
clen = int(self.headers["content-length"])
|
||||
except ValueError:
|
||||
pass
|
||||
if maxlen and clen > maxlen:
|
||||
raise ValueError('Maximum content length exceeded')
|
||||
raise ValueError("Maximum content length exceeded")
|
||||
self.length = clen
|
||||
if self.limit is None and clen:
|
||||
self.limit = clen
|
||||
|
||||
self.list = self.file = None
|
||||
self.done = 0
|
||||
if ctype == 'application/x-www-form-urlencoded':
|
||||
if ctype == "application/x-www-form-urlencoded":
|
||||
self.read_urlencoded()
|
||||
elif ctype[:10] == 'multipart/':
|
||||
elif ctype[:10] == "multipart/":
|
||||
self.read_multi(environ, keep_blank_values, strict_parsing)
|
||||
else:
|
||||
self.read_single()
|
||||
|
||||
def __repr__(self):
|
||||
"""Return a printable representation."""
|
||||
return "FieldStorage(%r, %r, %r)" % (
|
||||
self.name, self.filename, self.value)
|
||||
return "FieldStorage(%r, %r, %r)" % (self.name, self.filename, self.value)
|
||||
|
||||
def __iter__(self):
|
||||
return iter(self.keys())
|
||||
|
||||
def __getattr__(self, name):
|
||||
if name != 'value':
|
||||
if name != "value":
|
||||
raise AttributeError(name)
|
||||
if self.file:
|
||||
self.file.seek(0)
|
||||
|
@ -579,7 +606,8 @@ class FieldStorage:
|
|||
raise TypeError("not indexable")
|
||||
found = []
|
||||
for item in self.list:
|
||||
if item.name == key: found.append(item)
|
||||
if item.name == key:
|
||||
found.append(item)
|
||||
if not found:
|
||||
raise KeyError(key)
|
||||
if len(found) == 1:
|
||||
|
@ -643,15 +671,18 @@ class FieldStorage:
|
|||
"""Internal: read data in query string format."""
|
||||
qs = self.fp.read(self.length)
|
||||
if not isinstance(qs, bytes):
|
||||
raise ValueError("%s should return bytes, got %s" \
|
||||
% (self.fp, type(qs).__name__))
|
||||
raise ValueError("%s should return bytes, got %s" % (self.fp, type(qs).__name__))
|
||||
qs = qs.decode(self.encoding, self.errors)
|
||||
if self.qs_on_post:
|
||||
qs += '&' + self.qs_on_post
|
||||
qs += "&" + self.qs_on_post
|
||||
self.list = []
|
||||
query = urllib.parse.parse_qsl(
|
||||
qs, self.keep_blank_values, self.strict_parsing,
|
||||
encoding=self.encoding, errors=self.errors)
|
||||
qs,
|
||||
self.keep_blank_values,
|
||||
self.strict_parsing,
|
||||
encoding=self.encoding,
|
||||
errors=self.errors,
|
||||
)
|
||||
for key, value in query:
|
||||
self.list.append(MiniFieldStorage(key, value))
|
||||
self.skip_lines()
|
||||
|
@ -662,21 +693,26 @@ class FieldStorage:
|
|||
"""Internal: read a part that is itself multipart."""
|
||||
ib = self.innerboundary
|
||||
if not valid_boundary(ib):
|
||||
raise ValueError('Invalid boundary in multipart form: %r' % (ib,))
|
||||
raise ValueError("Invalid boundary in multipart form: %r" % (ib,))
|
||||
self.list = []
|
||||
if self.qs_on_post:
|
||||
query = urllib.parse.parse_qsl(
|
||||
self.qs_on_post, self.keep_blank_values, self.strict_parsing,
|
||||
encoding=self.encoding, errors=self.errors)
|
||||
self.qs_on_post,
|
||||
self.keep_blank_values,
|
||||
self.strict_parsing,
|
||||
encoding=self.encoding,
|
||||
errors=self.errors,
|
||||
)
|
||||
for key, value in query:
|
||||
self.list.append(MiniFieldStorage(key, value))
|
||||
FieldStorageClass = None
|
||||
|
||||
klass = self.FieldStorageClass or self.__class__
|
||||
first_line = self.fp.readline() # bytes
|
||||
first_line = self.fp.readline() # bytes
|
||||
if not isinstance(first_line, bytes):
|
||||
raise ValueError("%s should return bytes, got %s" \
|
||||
% (self.fp, type(first_line).__name__))
|
||||
raise ValueError(
|
||||
"%s should return bytes, got %s" % (self.fp, type(first_line).__name__)
|
||||
)
|
||||
self.bytes_read += len(first_line)
|
||||
# first line holds boundary ; ignore it, or check that
|
||||
# b"--" + ib == first_line.strip() ?
|
||||
|
@ -694,9 +730,17 @@ class FieldStorage:
|
|||
self.bytes_read += len(hdr_text)
|
||||
parser.feed(hdr_text.decode(self.encoding, self.errors))
|
||||
headers = parser.close()
|
||||
part = klass(self.fp, headers, ib, environ, keep_blank_values,
|
||||
strict_parsing,self.limit-self.bytes_read,
|
||||
self.encoding, self.errors)
|
||||
part = klass(
|
||||
self.fp,
|
||||
headers,
|
||||
ib,
|
||||
environ,
|
||||
keep_blank_values,
|
||||
strict_parsing,
|
||||
self.limit - self.bytes_read,
|
||||
self.encoding,
|
||||
self.errors,
|
||||
)
|
||||
self.bytes_read += part.bytes_read
|
||||
self.list.append(part)
|
||||
if part.done or self.bytes_read >= self.length > 0:
|
||||
|
@ -712,7 +756,7 @@ class FieldStorage:
|
|||
self.read_lines()
|
||||
self.file.seek(0)
|
||||
|
||||
bufsize = 8*1024 # I/O buffering size for copy to file
|
||||
bufsize = 8 * 1024 # I/O buffering size for copy to file
|
||||
|
||||
def read_binary(self):
|
||||
"""Internal: read binary data."""
|
||||
|
@ -720,10 +764,11 @@ class FieldStorage:
|
|||
todo = self.length
|
||||
if todo >= 0:
|
||||
while todo > 0:
|
||||
data = self.fp.read(min(todo, self.bufsize)) # bytes
|
||||
data = self.fp.read(min(todo, self.bufsize)) # bytes
|
||||
if not isinstance(data, bytes):
|
||||
raise ValueError("%s should return bytes, got %s"
|
||||
% (self.fp, type(data).__name__))
|
||||
raise ValueError(
|
||||
"%s should return bytes, got %s" % (self.fp, type(data).__name__)
|
||||
)
|
||||
self.bytes_read += len(data)
|
||||
if not data:
|
||||
self.done = -1
|
||||
|
@ -734,9 +779,9 @@ class FieldStorage:
|
|||
def read_lines(self):
|
||||
"""Internal: read lines until EOF or outerboundary."""
|
||||
if self._binary_file:
|
||||
self.file = self.__file = BytesIO() # store data as bytes for files
|
||||
self.file = self.__file = BytesIO() # store data as bytes for files
|
||||
else:
|
||||
self.file = self.__file = StringIO() # as strings for other fields
|
||||
self.file = self.__file = StringIO() # as strings for other fields
|
||||
if self.outerboundary:
|
||||
self.read_lines_to_outerboundary()
|
||||
else:
|
||||
|
@ -760,7 +805,7 @@ class FieldStorage:
|
|||
def read_lines_to_eof(self):
|
||||
"""Internal: read lines until EOF."""
|
||||
while 1:
|
||||
line = self.fp.readline(1<<16) # bytes
|
||||
line = self.fp.readline(1 << 16) # bytes
|
||||
self.bytes_read += len(line)
|
||||
if not line:
|
||||
self.done = -1
|
||||
|
@ -780,7 +825,7 @@ class FieldStorage:
|
|||
while 1:
|
||||
if _read >= self.limit:
|
||||
break
|
||||
line = self.fp.readline(1<<16) # bytes
|
||||
line = self.fp.readline(1 << 16) # bytes
|
||||
self.bytes_read += len(line)
|
||||
_read += len(line)
|
||||
if not line:
|
||||
|
@ -824,7 +869,7 @@ class FieldStorage:
|
|||
last_boundary = next_boundary + b"--"
|
||||
last_line_lfend = True
|
||||
while True:
|
||||
line = self.fp.readline(1<<16)
|
||||
line = self.fp.readline(1 << 16)
|
||||
self.bytes_read += len(line)
|
||||
if not line:
|
||||
self.done = -1
|
||||
|
@ -836,7 +881,7 @@ class FieldStorage:
|
|||
if strippedline == last_boundary:
|
||||
self.done = 1
|
||||
break
|
||||
last_line_lfend = line.endswith(b'\n')
|
||||
last_line_lfend = line.endswith(b"\n")
|
||||
|
||||
def make_file(self):
|
||||
"""Overridable: return a readable & writable file.
|
||||
|
@ -865,13 +910,13 @@ class FieldStorage:
|
|||
if self._binary_file:
|
||||
return tempfile.TemporaryFile("wb+")
|
||||
else:
|
||||
return tempfile.TemporaryFile("w+",
|
||||
encoding=self.encoding, newline = '\n')
|
||||
return tempfile.TemporaryFile("w+", encoding=self.encoding, newline="\n")
|
||||
|
||||
|
||||
# Test/debug code
|
||||
# ===============
|
||||
|
||||
|
||||
def test(environ=os.environ):
|
||||
"""Robust test CGI script, usable as main program.
|
||||
|
||||
|
@ -881,18 +926,21 @@ def test(environ=os.environ):
|
|||
"""
|
||||
print("Content-type: text/html")
|
||||
print()
|
||||
#sys.stderr = sys.stdout
|
||||
# sys.stderr = sys.stdout
|
||||
try:
|
||||
form = FieldStorage() # Replace with other classes to test those
|
||||
form = FieldStorage() # Replace with other classes to test those
|
||||
print_directory()
|
||||
print_arguments()
|
||||
print_form(form)
|
||||
print_environ(environ)
|
||||
print_environ_usage()
|
||||
|
||||
def f():
|
||||
exec("testing print_exception() -- <I>italics?</I>")
|
||||
|
||||
def g(f=f):
|
||||
f()
|
||||
|
||||
print("<H3>What follows is a test, not an actual exception:</H3>")
|
||||
g()
|
||||
except:
|
||||
|
@ -903,7 +951,7 @@ def test(environ=os.environ):
|
|||
global maxlen
|
||||
maxlen = 50
|
||||
try:
|
||||
form = FieldStorage() # Replace with other classes to test those
|
||||
form = FieldStorage() # Replace with other classes to test those
|
||||
print_directory()
|
||||
print_arguments()
|
||||
print_form(form)
|
||||
|
@ -911,20 +959,25 @@ def test(environ=os.environ):
|
|||
except:
|
||||
print_exception()
|
||||
|
||||
|
||||
def print_exception(type=None, value=None, tb=None, limit=None):
|
||||
if type is None:
|
||||
type, value, tb = sys.exc_info()
|
||||
import traceback
|
||||
|
||||
print()
|
||||
print("<H3>Traceback (most recent call last):</H3>")
|
||||
list = traceback.format_tb(tb, limit) + \
|
||||
traceback.format_exception_only(type, value)
|
||||
print("<PRE>%s<B>%s</B></PRE>" % (
|
||||
html.escape("".join(list[:-1])),
|
||||
html.escape(list[-1]),
|
||||
))
|
||||
list = traceback.format_tb(tb, limit) + traceback.format_exception_only(type, value)
|
||||
print(
|
||||
"<PRE>%s<B>%s</B></PRE>"
|
||||
% (
|
||||
html.escape("".join(list[:-1])),
|
||||
html.escape(list[-1]),
|
||||
)
|
||||
)
|
||||
del tb
|
||||
|
||||
|
||||
def print_environ(environ=os.environ):
|
||||
"""Dump the shell environment as HTML."""
|
||||
keys = sorted(environ.keys())
|
||||
|
@ -936,6 +989,7 @@ def print_environ(environ=os.environ):
|
|||
print("</DL>")
|
||||
print()
|
||||
|
||||
|
||||
def print_form(form):
|
||||
"""Dump the contents of a form as HTML."""
|
||||
keys = sorted(form.keys())
|
||||
|
@ -945,13 +999,14 @@ def print_form(form):
|
|||
print("<P>No form fields.")
|
||||
print("<DL>")
|
||||
for key in keys:
|
||||
print("<DT>" + html.escape(key) + ":", end=' ')
|
||||
print("<DT>" + html.escape(key) + ":", end=" ")
|
||||
value = form[key]
|
||||
print("<i>" + html.escape(repr(type(value))) + "</i>")
|
||||
print("<DD>" + html.escape(repr(value)))
|
||||
print("</DL>")
|
||||
print()
|
||||
|
||||
|
||||
def print_directory():
|
||||
"""Dump the current directory as HTML."""
|
||||
print()
|
||||
|
@ -964,6 +1019,7 @@ def print_directory():
|
|||
print(html.escape(pwd))
|
||||
print()
|
||||
|
||||
|
||||
def print_arguments():
|
||||
print()
|
||||
print("<H3>Command Line Arguments:</H3>")
|
||||
|
@ -971,9 +1027,11 @@ def print_arguments():
|
|||
print(sys.argv)
|
||||
print()
|
||||
|
||||
|
||||
def print_environ_usage():
|
||||
"""Dump a list of environment variables used by CGI as HTML."""
|
||||
print("""
|
||||
print(
|
||||
"""
|
||||
<H3>These environment variables could have been set:</H3>
|
||||
<UL>
|
||||
<LI>AUTH_TYPE
|
||||
|
@ -1012,17 +1070,18 @@ environment as well. Here are some common variable names:
|
|||
<LI>HTTP_REFERER
|
||||
<LI>HTTP_USER_AGENT
|
||||
</UL>
|
||||
""")
|
||||
"""
|
||||
)
|
||||
|
||||
|
||||
# Utilities
|
||||
# =========
|
||||
|
||||
|
||||
def escape(s, quote=None):
|
||||
"""Deprecated API."""
|
||||
warn("cgi.escape is deprecated, use html.escape instead",
|
||||
DeprecationWarning, stacklevel=2)
|
||||
s = s.replace("&", "&") # Must be done first!
|
||||
warn("cgi.escape is deprecated, use html.escape instead", DeprecationWarning, stacklevel=2)
|
||||
s = s.replace("&", "&") # Must be done first!
|
||||
s = s.replace("<", "<")
|
||||
s = s.replace(">", ">")
|
||||
if quote:
|
||||
|
@ -1032,15 +1091,17 @@ def escape(s, quote=None):
|
|||
|
||||
def valid_boundary(s, _vb_pattern=None):
|
||||
import re
|
||||
|
||||
if isinstance(s, bytes):
|
||||
_vb_pattern = b"^[ -~]{0,200}[!-~]$"
|
||||
else:
|
||||
_vb_pattern = "^[ -~]{0,200}[!-~]$"
|
||||
return re.match(_vb_pattern, s)
|
||||
|
||||
|
||||
# Invoke mainline
|
||||
# ===============
|
||||
|
||||
# Call test() when this file is run as a script (not imported as a module)
|
||||
if __name__ == '__main__':
|
||||
if __name__ == "__main__":
|
||||
test()
|
||||
|
|
|
@ -1,20 +1,24 @@
|
|||
import sys
|
||||
|
||||
# Remove current dir from sys.path, otherwise setuptools will peek up our
|
||||
# module instead of system's.
|
||||
sys.path.pop(0)
|
||||
from setuptools import setup
|
||||
|
||||
sys.path.append("..")
|
||||
import sdist_upip
|
||||
|
||||
setup(name='micropython-cgi',
|
||||
version='3.3.3-2',
|
||||
description='CPython cgi module ported to MicroPython',
|
||||
long_description='This is a module ported from CPython standard library to be compatible with\nMicroPython interpreter. Usually, this means applying small patches for\nfeatures not supported (yet, or at all) in MicroPython. Sometimes, heavier\nchanges are required. Note that CPython modules are written with availability\nof vast resources in mind, and may not work for MicroPython ports with\nlimited heap. If you are affected by such a case, please help reimplement\nthe module from scratch.',
|
||||
url='https://github.com/micropython/micropython-lib',
|
||||
author='CPython Developers',
|
||||
author_email='python-dev@python.org',
|
||||
maintainer='micropython-lib Developers',
|
||||
maintainer_email='micro-python@googlegroups.com',
|
||||
license='Python',
|
||||
cmdclass={'sdist': sdist_upip.sdist},
|
||||
py_modules=['cgi'])
|
||||
setup(
|
||||
name="micropython-cgi",
|
||||
version="3.3.3-2",
|
||||
description="CPython cgi module ported to MicroPython",
|
||||
long_description="This is a module ported from CPython standard library to be compatible with\nMicroPython interpreter. Usually, this means applying small patches for\nfeatures not supported (yet, or at all) in MicroPython. Sometimes, heavier\nchanges are required. Note that CPython modules are written with availability\nof vast resources in mind, and may not work for MicroPython ports with\nlimited heap. If you are affected by such a case, please help reimplement\nthe module from scratch.",
|
||||
url="https://github.com/micropython/micropython-lib",
|
||||
author="CPython Developers",
|
||||
author_email="python-dev@python.org",
|
||||
maintainer="micropython-lib Developers",
|
||||
maintainer_email="micro-python@googlegroups.com",
|
||||
license="Python",
|
||||
cmdclass={"sdist": sdist_upip.sdist},
|
||||
py_modules=["cgi"],
|
||||
)
|
||||
|
|
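Every hunk in this commit is mechanical Black output, as in the setup.py rewrite above. For illustration only (the line length shown is an assumption, not necessarily the project's real setting), the same quote and call-layout normalisation can be reproduced with Black's Python API:

import black

# Hypothetical pre-Black snippet, in the style of the old setup.py files.
SRC = "setup(name='micropython-cgi',\n      version='3.3.3-2')\n"

formatted = black.format_str(SRC, mode=black.Mode(line_length=99))
print(formatted)  # setup(name="micropython-cgi", version="3.3.3-2")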
|
@ -51,14 +51,15 @@ this means that that help by doc string feature doesn't work.
completions have also been stripped out.
"""

#import string, sys
import sys # MicroPython doesn't yet have a string module
# import string, sys
import sys  # MicroPython doesn't yet have a string module

__all__ = ["Cmd"]

PROMPT = '(Cmd) '
#IDENTCHARS = string.ascii_letters + string.digits + '_'
IDENTCHARS = 'abcdefghijklmnopqrstuvwxyzABCDEFGHIJKLMNOPQRSTUVWXYZ0123456789_'
PROMPT = "(Cmd) "
# IDENTCHARS = string.ascii_letters + string.digits + '_'
IDENTCHARS = "abcdefghijklmnopqrstuvwxyzABCDEFGHIJKLMNOPQRSTUVWXYZ0123456789_"


class Cmd:
    """A simple framework for writing line-oriented command interpreters.
@ -72,10 +73,11 @@ class Cmd:
    in order to inherit Cmd's methods and encapsulate action methods.

    """

    prompt = PROMPT
    identchars = IDENTCHARS
    ruler = '='
    lastcmd = ''
    ruler = "="
    lastcmd = ""
    intro = None
    doc_leader = ""
    doc_header = "Documented commands (type help <topic>):"
|
||||
|
@ -114,7 +116,7 @@ class Cmd:
|
|||
if intro is not None:
|
||||
self.intro = intro
|
||||
if self.intro:
|
||||
self.stdout.write(str(self.intro)+"\n")
|
||||
self.stdout.write(str(self.intro) + "\n")
|
||||
stop = None
|
||||
while not stop:
|
||||
if self.cmdqueue:
|
||||
|
@ -124,15 +126,15 @@ class Cmd:
|
|||
try:
|
||||
line = input(self.prompt)
|
||||
except EOFError:
|
||||
line = 'EOF'
|
||||
line = "EOF"
|
||||
else:
|
||||
self.stdout.write(self.prompt)
|
||||
self.stdout.flush()
|
||||
line = self.stdin.readline()
|
||||
if not len(line):
|
||||
line = 'EOF'
|
||||
line = "EOF"
|
||||
else:
|
||||
line = line.rstrip('\r\n')
|
||||
line = line.rstrip("\r\n")
|
||||
line = self.precmd(line)
|
||||
stop = self.onecmd(line)
|
||||
stop = self.postcmd(stop, line)
|
||||
|
@ -170,15 +172,16 @@ class Cmd:
|
|||
line = line.strip()
|
||||
if not line:
|
||||
return None, None, line
|
||||
elif line[0] == '?':
|
||||
line = 'help ' + line[1:]
|
||||
elif line[0] == '!':
|
||||
if hasattr(self, 'do_shell'):
|
||||
line = 'shell ' + line[1:]
|
||||
elif line[0] == "?":
|
||||
line = "help " + line[1:]
|
||||
elif line[0] == "!":
|
||||
if hasattr(self, "do_shell"):
|
||||
line = "shell " + line[1:]
|
||||
else:
|
||||
return None, None, line
|
||||
i, n = 0, len(line)
|
||||
while i < n and line[i] in self.identchars: i = i+1
|
||||
while i < n and line[i] in self.identchars:
|
||||
i = i + 1
|
||||
cmd, arg = line[:i], line[i:].strip()
|
||||
return cmd, arg, line
|
||||
|
||||
|
@ -198,13 +201,13 @@ class Cmd:
|
|||
if cmd is None:
|
||||
return self.default(line)
|
||||
self.lastcmd = line
|
||||
if line == 'EOF' :
|
||||
self.lastcmd = ''
|
||||
if cmd == '':
|
||||
if line == "EOF":
|
||||
self.lastcmd = ""
|
||||
if cmd == "":
|
||||
return self.default(line)
|
||||
else:
|
||||
try:
|
||||
func = getattr(self, 'do_' + cmd)
|
||||
func = getattr(self, "do_" + cmd)
|
||||
except AttributeError:
|
||||
return self.default(line)
|
||||
return func(arg)
|
||||
|
@ -226,7 +229,7 @@ class Cmd:
|
|||
returns.
|
||||
|
||||
"""
|
||||
self.stdout.write('*** Unknown syntax: %s\n'%line)
|
||||
self.stdout.write("*** Unknown syntax: %s\n" % line)
|
||||
|
||||
def get_names(self):
|
||||
# This method used to pull in base class attributes
|
||||
|
@ -238,9 +241,9 @@ class Cmd:
|
|||
if arg:
|
||||
# XXX check arg syntax
|
||||
try:
|
||||
func = getattr(self, 'help_' + arg)
|
||||
func = getattr(self, "help_" + arg)
|
||||
except AttributeError:
|
||||
self.stdout.write("%s\n"%str(self.nohelp % (arg,)))
|
||||
self.stdout.write("%s\n" % str(self.nohelp % (arg,)))
|
||||
return
|
||||
func()
|
||||
else:
|
||||
|
@ -249,33 +252,33 @@ class Cmd:
|
|||
cmds_undoc = []
|
||||
help = {}
|
||||
for name in names:
|
||||
if name[:5] == 'help_':
|
||||
help[name[5:]]=1
|
||||
if name[:5] == "help_":
|
||||
help[name[5:]] = 1
|
||||
names.sort()
|
||||
# There can be duplicates if routines overridden
|
||||
prevname = ''
|
||||
prevname = ""
|
||||
for name in names:
|
||||
if name[:3] == 'do_':
|
||||
if name[:3] == "do_":
|
||||
if name == prevname:
|
||||
continue
|
||||
prevname = name
|
||||
cmd=name[3:]
|
||||
cmd = name[3:]
|
||||
if cmd in help:
|
||||
cmds_doc.append(cmd)
|
||||
del help[cmd]
|
||||
else:
|
||||
cmds_undoc.append(cmd)
|
||||
self.stdout.write("%s\n"%str(self.doc_leader))
|
||||
self.print_topics(self.doc_header, cmds_doc, 15,80)
|
||||
self.print_topics(self.misc_header, list(help.keys()),15,80)
|
||||
self.print_topics(self.undoc_header, cmds_undoc, 15,80)
|
||||
self.stdout.write("%s\n" % str(self.doc_leader))
|
||||
self.print_topics(self.doc_header, cmds_doc, 15, 80)
|
||||
self.print_topics(self.misc_header, list(help.keys()), 15, 80)
|
||||
self.print_topics(self.undoc_header, cmds_undoc, 15, 80)
|
||||
|
||||
def print_topics(self, header, cmds, cmdlen, maxcol):
|
||||
if cmds:
|
||||
self.stdout.write("%s\n"%str(header))
|
||||
self.stdout.write("%s\n" % str(header))
|
||||
if self.ruler:
|
||||
self.stdout.write("%s\n"%str(self.ruler * len(header)))
|
||||
self.columnize(cmds, maxcol-1)
|
||||
self.stdout.write("%s\n" % str(self.ruler * len(header)))
|
||||
self.columnize(cmds, maxcol - 1)
|
||||
self.stdout.write("\n")
|
||||
|
||||
def columnize(self, list, displaywidth=80):
|
||||
|
@ -288,24 +291,22 @@ class Cmd:
|
|||
self.stdout.write("<empty>\n")
|
||||
return
|
||||
|
||||
nonstrings = [i for i in range(len(list))
|
||||
if not isinstance(list[i], str)]
|
||||
nonstrings = [i for i in range(len(list)) if not isinstance(list[i], str)]
|
||||
if nonstrings:
|
||||
raise TypeError("list[i] not a string for i in %s"
|
||||
% ", ".join(map(str, nonstrings)))
|
||||
raise TypeError("list[i] not a string for i in %s" % ", ".join(map(str, nonstrings)))
|
||||
size = len(list)
|
||||
if size == 1:
|
||||
self.stdout.write('%s\n'%str(list[0]))
|
||||
self.stdout.write("%s\n" % str(list[0]))
|
||||
return
|
||||
# Try every row count from 1 upwards
|
||||
for nrows in range(1, len(list)):
|
||||
ncols = (size+nrows-1) // nrows
|
||||
ncols = (size + nrows - 1) // nrows
|
||||
colwidths = []
|
||||
totwidth = -2
|
||||
for col in range(ncols):
|
||||
colwidth = 0
|
||||
for row in range(nrows):
|
||||
i = row + nrows*col
|
||||
i = row + nrows * col
|
||||
if i >= size:
|
||||
break
|
||||
x = list[i]
|
||||
|
@ -323,7 +324,7 @@ class Cmd:
|
|||
for row in range(nrows):
|
||||
texts = []
|
||||
for col in range(ncols):
|
||||
i = row + nrows*col
|
||||
i = row + nrows * col
|
||||
if i >= size:
|
||||
x = ""
|
||||
else:
|
||||
|
@ -332,6 +333,6 @@ class Cmd:
|
|||
while texts and not texts[-1]:
|
||||
del texts[-1]
|
||||
for col in range(len(texts)):
|
||||
#texts[col] = texts[col].ljust(colwidths[col])
|
||||
texts[col] = '%-*s' % (colwidths[col], texts[col])
|
||||
self.stdout.write("%s\n"%str(" ".join(texts)))
|
||||
# texts[col] = texts[col].ljust(colwidths[col])
|
||||
texts[col] = "%-*s" % (colwidths[col], texts[col])
|
||||
self.stdout.write("%s\n" % str(" ".join(texts)))
|
||||
|
|
|
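The cmd.Cmd changes above are purely cosmetic; command dispatch still works by looking up do_<name> methods. A small usage sketch (GreeterShell and do_greet are illustrative names, and docstring-based help is unavailable in this stripped-down port, as its module docstring notes):

from cmd import Cmd


class GreeterShell(Cmd):
    prompt = "(greet) "

    def do_greet(self, arg):
        # "greet NAME" ends up here via onecmd()/parseline().
        print("Hello,", arg or "world")

    def do_EOF(self, arg):
        # Returning a true value from a do_* handler stops cmdloop().
        return True


if __name__ == "__main__":
    GreeterShell().cmdloop()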
@ -1,20 +1,24 @@
|
|||
import sys
|
||||
|
||||
# Remove current dir from sys.path, otherwise setuptools will peek up our
|
||||
# module instead of system's.
|
||||
sys.path.pop(0)
|
||||
from setuptools import setup
|
||||
|
||||
sys.path.append("..")
|
||||
import sdist_upip
|
||||
|
||||
setup(name='micropython-cmd',
|
||||
version='3.4.0-2',
|
||||
description='CPython cmd module ported to MicroPython',
|
||||
long_description='This is a module ported from CPython standard library to be compatible with\nMicroPython interpreter. Usually, this means applying small patches for\nfeatures not supported (yet, or at all) in MicroPython. Sometimes, heavier\nchanges are required. Note that CPython modules are written with availability\nof vast resources in mind, and may not work for MicroPython ports with\nlimited heap. If you are affected by such a case, please help reimplement\nthe module from scratch.',
|
||||
url='https://github.com/micropython/micropython-lib',
|
||||
author='CPython Developers',
|
||||
author_email='python-dev@python.org',
|
||||
maintainer='micropython-lib Developers',
|
||||
maintainer_email='micro-python@googlegroups.com',
|
||||
license='Python',
|
||||
cmdclass={'sdist': sdist_upip.sdist},
|
||||
py_modules=['cmd'])
|
||||
setup(
|
||||
name="micropython-cmd",
|
||||
version="3.4.0-2",
|
||||
description="CPython cmd module ported to MicroPython",
|
||||
long_description="This is a module ported from CPython standard library to be compatible with\nMicroPython interpreter. Usually, this means applying small patches for\nfeatures not supported (yet, or at all) in MicroPython. Sometimes, heavier\nchanges are required. Note that CPython modules are written with availability\nof vast resources in mind, and may not work for MicroPython ports with\nlimited heap. If you are affected by such a case, please help reimplement\nthe module from scratch.",
|
||||
url="https://github.com/micropython/micropython-lib",
|
||||
author="CPython Developers",
|
||||
author_email="python-dev@python.org",
|
||||
maintainer="micropython-lib Developers",
|
||||
maintainer_email="micro-python@googlegroups.com",
|
||||
license="Python",
|
||||
cmdclass={"sdist": sdist_upip.sdist},
|
||||
py_modules=["cmd"],
|
||||
)
|
||||
|
|
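All of these setup.py files route the sdist command through sdist_upip via cmdclass; the hook itself is plain setuptools. A hedged sketch with a stand-in command class (MySdist and the metadata below are illustrative, not the real sdist_upip.sdist):

from setuptools import setup
from setuptools.command.sdist import sdist as _sdist


class MySdist(_sdist):
    # Stand-in for sdist_upip.sdist; shown only to illustrate where such a
    # custom source-distribution hook attaches.
    def run(self):
        print("building source distribution")
        super().run()


setup(
    name="example-package",  # illustrative metadata
    version="0.0.1",
    py_modules=["example"],
    cmdclass={"sdist": MySdist},
)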
|
@ -1,5 +1,4 @@
class defaultdict:

    @staticmethod
    def __new__(cls, default_factory=None, **kwargs):
        # Some code (e.g. urllib.urlparse) expects that basic defaultdict
|
@ -1,20 +1,24 @@
|
|||
import sys
|
||||
|
||||
# Remove current dir from sys.path, otherwise setuptools will peek up our
|
||||
# module instead of system's.
|
||||
sys.path.pop(0)
|
||||
from setuptools import setup
|
||||
|
||||
sys.path.append("..")
|
||||
import sdist_upip
|
||||
|
||||
setup(name='micropython-collections.defaultdict',
|
||||
version='0.3',
|
||||
description='collections.defaultdict module for MicroPython',
|
||||
long_description="This is a module reimplemented specifically for MicroPython standard library,\nwith efficient and lean design in mind. Note that this module is likely work\nin progress and likely supports just a subset of CPython's corresponding\nmodule. Please help with the development if you are interested in this\nmodule.",
|
||||
url='https://github.com/micropython/micropython-lib',
|
||||
author='Paul Sokolovsky',
|
||||
author_email='micro-python@googlegroups.com',
|
||||
maintainer='micropython-lib Developers',
|
||||
maintainer_email='micro-python@googlegroups.com',
|
||||
license='MIT',
|
||||
cmdclass={'sdist': sdist_upip.sdist},
|
||||
packages=['collections'])
|
||||
setup(
|
||||
name="micropython-collections.defaultdict",
|
||||
version="0.3",
|
||||
description="collections.defaultdict module for MicroPython",
|
||||
long_description="This is a module reimplemented specifically for MicroPython standard library,\nwith efficient and lean design in mind. Note that this module is likely work\nin progress and likely supports just a subset of CPython's corresponding\nmodule. Please help with the development if you are interested in this\nmodule.",
|
||||
url="https://github.com/micropython/micropython-lib",
|
||||
author="Paul Sokolovsky",
|
||||
author_email="micro-python@googlegroups.com",
|
||||
maintainer="micropython-lib Developers",
|
||||
maintainer_email="micro-python@googlegroups.com",
|
||||
license="MIT",
|
||||
cmdclass={"sdist": sdist_upip.sdist},
|
||||
packages=["collections"],
|
||||
)
|
||||
|
|
|
@ -1,6 +1,6 @@
from collections import defaultdict

d = defaultdict.defaultdict(lambda:42)
d = defaultdict.defaultdict(lambda: 42)
assert d[1] == 42
d[2] = 3
assert d[2] == 3
|
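The test above drives the factory path directly. For reference, the same behaviour with CPython's collections.defaultdict (shown here because the import path differs between CPython and this package layout):

from collections import defaultdict

counts = defaultdict(int)  # missing keys are created via int() -> 0
for word in ["spam", "eggs", "spam"]:
    counts[word] += 1
print(dict(counts))  # {'spam': 2, 'eggs': 1}

groups = defaultdict(list)  # missing keys start as empty lists
groups["even"].append(2)
print(groups["odd"], groups["even"])  # [] [2]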
@ -1,5 +1,4 @@
class deque:

    def __init__(self, iterable=None):
        if iterable is None:
            self.q = []
@ -34,4 +33,4 @@ class deque:
        yield from self.q

    def __str__(self):
        return 'deque({})'.format(self.q)
        return "deque({})".format(self.q)
|
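The deque above is a small list-backed port and this hunk only reformats its __str__. Typical FIFO use is the same as with CPython's collections.deque, which is what the sketch below assumes:

from collections import deque

q = deque()
q.append("first")
q.append("second")
print(q.popleft())  # first -- FIFO order
print(q.popleft())  # second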
@ -1,20 +1,24 @@
|
|||
import sys
|
||||
|
||||
# Remove current dir from sys.path, otherwise setuptools will peek up our
|
||||
# module instead of system's.
|
||||
sys.path.pop(0)
|
||||
from setuptools import setup
|
||||
|
||||
sys.path.append("..")
|
||||
import sdist_upip
|
||||
|
||||
setup(name='micropython-collections.deque',
|
||||
version='0.1.3',
|
||||
description='collections.deque module for MicroPython',
|
||||
long_description="This is a module reimplemented specifically for MicroPython standard library,\nwith efficient and lean design in mind. Note that this module is likely work\nin progress and likely supports just a subset of CPython's corresponding\nmodule. Please help with the development if you are interested in this\nmodule.",
|
||||
url='https://github.com/micropython/micropython-lib',
|
||||
author='micropython-lib Developers',
|
||||
author_email='micro-python@googlegroups.com',
|
||||
maintainer='micropython-lib Developers',
|
||||
maintainer_email='micro-python@googlegroups.com',
|
||||
license='MIT',
|
||||
cmdclass={'sdist': sdist_upip.sdist},
|
||||
packages=['collections'])
|
||||
setup(
|
||||
name="micropython-collections.deque",
|
||||
version="0.1.3",
|
||||
description="collections.deque module for MicroPython",
|
||||
long_description="This is a module reimplemented specifically for MicroPython standard library,\nwith efficient and lean design in mind. Note that this module is likely work\nin progress and likely supports just a subset of CPython's corresponding\nmodule. Please help with the development if you are interested in this\nmodule.",
|
||||
url="https://github.com/micropython/micropython-lib",
|
||||
author="micropython-lib Developers",
|
||||
author_email="micro-python@googlegroups.com",
|
||||
maintainer="micropython-lib Developers",
|
||||
maintainer_email="micro-python@googlegroups.com",
|
||||
license="MIT",
|
||||
cmdclass={"sdist": sdist_upip.sdist},
|
||||
packages=["collections"],
|
||||
)
|
||||
|
|
|
@ -3,6 +3,7 @@
# CPython implementation brings in metaclasses and other bloat.
# This is going to be just import-all for other modules in a namespace package
from ucollections import *

try:
    from .defaultdict import defaultdict
except ImportError:
@ -12,5 +13,6 @@ try:
except ImportError:
    pass


class MutableMapping:
    pass
|
@ -1,20 +1,24 @@
|
|||
import sys
|
||||
|
||||
# Remove current dir from sys.path, otherwise setuptools will peek up our
|
||||
# module instead of system's.
|
||||
sys.path.pop(0)
|
||||
from setuptools import setup
|
||||
|
||||
sys.path.append("..")
|
||||
import sdist_upip
|
||||
|
||||
setup(name='micropython-collections',
|
||||
version='0.1.2',
|
||||
description='collections module for MicroPython',
|
||||
long_description="This is a module reimplemented specifically for MicroPython standard library,\nwith efficient and lean design in mind. Note that this module is likely work\nin progress and likely supports just a subset of CPython's corresponding\nmodule. Please help with the development if you are interested in this\nmodule.",
|
||||
url='https://github.com/micropython/micropython-lib',
|
||||
author='micropython-lib Developers',
|
||||
author_email='micro-python@googlegroups.com',
|
||||
maintainer='micropython-lib Developers',
|
||||
maintainer_email='micro-python@googlegroups.com',
|
||||
license='MIT',
|
||||
cmdclass={'sdist': sdist_upip.sdist},
|
||||
packages=['collections'])
|
||||
setup(
|
||||
name="micropython-collections",
|
||||
version="0.1.2",
|
||||
description="collections module for MicroPython",
|
||||
long_description="This is a module reimplemented specifically for MicroPython standard library,\nwith efficient and lean design in mind. Note that this module is likely work\nin progress and likely supports just a subset of CPython's corresponding\nmodule. Please help with the development if you are interested in this\nmodule.",
|
||||
url="https://github.com/micropython/micropython-lib",
|
||||
author="micropython-lib Developers",
|
||||
author_email="micro-python@googlegroups.com",
|
||||
maintainer="micropython-lib Developers",
|
||||
maintainer_email="micro-python@googlegroups.com",
|
||||
license="MIT",
|
||||
cmdclass={"sdist": sdist_upip.sdist},
|
||||
packages=["collections"],
|
||||
)
|
||||
|
|
|
@ -29,10 +29,13 @@ class closing(object):
        f.close()

    """

    def __init__(self, thing):
        self.thing = thing

    def __enter__(self):
        return self.thing

    def __exit__(self, *exc_info):
        self.thing.close()

@ -66,6 +69,7 @@ class suppress:
        # See http://bugs.python.org/issue12029 for more details
        return exctype is not None and issubclass(exctype, self._exceptions)


# Inspired by discussions on http://bugs.python.org/issue13585
class ExitStack(object):
    """Context manager for dynamic management of a stack of exit callbacks
@ -79,6 +83,7 @@ class ExitStack(object):
    # in the list raise an exception

    """

    def __init__(self):
        self._exit_callbacks = deque()

@ -91,8 +96,10 @@ class ExitStack(object):

    def _push_cm_exit(self, cm, cm_exit):
        """Helper to correctly register callbacks to __exit__ methods"""

        def _exit_wrapper(*exc_details):
            return cm_exit(cm, *exc_details)

        self.push(_exit_wrapper)

    def push(self, exit):
@ -113,17 +120,19 @@ class ExitStack(object):
            self._exit_callbacks.append(exit)
        else:
            self._push_cm_exit(exit, exit_method)
        return exit # Allow use as a decorator
        return exit  # Allow use as a decorator

    def callback(self, callback, *args, **kwds):
        """Registers an arbitrary callback and arguments.

        Cannot suppress exceptions.
        """

        def _exit_wrapper(exc_type, exc, tb):
            callback(*args, **kwds)

        self.push(_exit_wrapper)
        return callback # Allow use as a decorator
        return callback  # Allow use as a decorator

    def enter_context(self, cm):
        """Enters the supplied context manager
|
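ExitStack's behaviour is unchanged by the reformatting above; push(), callback() and enter_context() still register clean-up actions in LIFO order. A minimal usage sketch (the file names are illustrative):

from contextlib import ExitStack


def read_all(paths):
    # Open a variable number of files and guarantee all of them are closed,
    # even if one of the opens or reads raises.
    with ExitStack() as stack:
        files = [stack.enter_context(open(p)) for p in paths]
        return [f.read() for f in files]


# read_all(["a.txt", "b.txt"])  # illustrative call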
@ -1,21 +1,25 @@
|
|||
import sys
|
||||
|
||||
# Remove current dir from sys.path, otherwise setuptools will peek up our
|
||||
# module instead of system's.
|
||||
sys.path.pop(0)
|
||||
from setuptools import setup
|
||||
|
||||
sys.path.append("..")
|
||||
import sdist_upip
|
||||
|
||||
setup(name='micropython-contextlib',
|
||||
version='3.4.2-4',
|
||||
description='CPython contextlib module ported to MicroPython',
|
||||
long_description='This is a module ported from CPython standard library to be compatible with\nMicroPython interpreter. Usually, this means applying small patches for\nfeatures not supported (yet, or at all) in MicroPython. Sometimes, heavier\nchanges are required. Note that CPython modules are written with availability\nof vast resources in mind, and may not work for MicroPython ports with\nlimited heap. If you are affected by such a case, please help reimplement\nthe module from scratch.',
|
||||
url='https://github.com/micropython/micropython-lib',
|
||||
author='CPython Developers',
|
||||
author_email='python-dev@python.org',
|
||||
maintainer='micropython-lib Developers',
|
||||
maintainer_email='micro-python@googlegroups.com',
|
||||
license='Python',
|
||||
cmdclass={'sdist': sdist_upip.sdist},
|
||||
py_modules=['contextlib'],
|
||||
install_requires=['micropython-ucontextlib', 'micropython-collections'])
|
||||
setup(
|
||||
name="micropython-contextlib",
|
||||
version="3.4.2-4",
|
||||
description="CPython contextlib module ported to MicroPython",
|
||||
long_description="This is a module ported from CPython standard library to be compatible with\nMicroPython interpreter. Usually, this means applying small patches for\nfeatures not supported (yet, or at all) in MicroPython. Sometimes, heavier\nchanges are required. Note that CPython modules are written with availability\nof vast resources in mind, and may not work for MicroPython ports with\nlimited heap. If you are affected by such a case, please help reimplement\nthe module from scratch.",
|
||||
url="https://github.com/micropython/micropython-lib",
|
||||
author="CPython Developers",
|
||||
author_email="python-dev@python.org",
|
||||
maintainer="micropython-lib Developers",
|
||||
maintainer_email="micro-python@googlegroups.com",
|
||||
license="Python",
|
||||
cmdclass={"sdist": sdist_upip.sdist},
|
||||
py_modules=["contextlib"],
|
||||
install_requires=["micropython-ucontextlib", "micropython-collections"],
|
||||
)
|
||||
|
|
|
@ -4,7 +4,6 @@ from contextlib import closing, suppress, ExitStack
|
|||
|
||||
|
||||
class ClosingTestCase(unittest.TestCase):
|
||||
|
||||
class Closable:
|
||||
def __init__(self):
|
||||
self.closed = False
|
||||
|
@ -30,7 +29,6 @@ class ClosingTestCase(unittest.TestCase):
|
|||
|
||||
|
||||
class SuppressTestCase(unittest.TestCase):
|
||||
|
||||
def test_suppress(self):
|
||||
with suppress(ValueError, TypeError):
|
||||
raise ValueError()
|
||||
|
@ -55,15 +53,17 @@ class TestExitStack(unittest.TestCase):
|
|||
expected = [
|
||||
((), {}),
|
||||
((1,), {}),
|
||||
((1,2), {}),
|
||||
((1, 2), {}),
|
||||
((), dict(example=1)),
|
||||
((1,), dict(example=1)),
|
||||
((1,2), dict(example=1)),
|
||||
((1, 2), dict(example=1)),
|
||||
]
|
||||
result = []
|
||||
|
||||
def _exit(*args, **kwds):
|
||||
"""Test metadata propagation"""
|
||||
result.append((args, kwds))
|
||||
|
||||
with ExitStack() as stack:
|
||||
for args, kwds in reversed(expected):
|
||||
if args and kwds:
|
||||
|
@ -83,21 +83,28 @@ class TestExitStack(unittest.TestCase):
|
|||
|
||||
def test_push(self):
|
||||
exc_raised = ZeroDivisionError
|
||||
|
||||
def _expect_exc(exc_type, exc, exc_tb):
|
||||
self.assertIs(exc_type, exc_raised)
|
||||
|
||||
def _suppress_exc(*exc_details):
|
||||
return True
|
||||
|
||||
def _expect_ok(exc_type, exc, exc_tb):
|
||||
self.assertIsNone(exc_type)
|
||||
self.assertIsNone(exc)
|
||||
self.assertIsNone(exc_tb)
|
||||
|
||||
class ExitCM(object):
|
||||
def __init__(self, check_exc):
|
||||
self.check_exc = check_exc
|
||||
|
||||
def __enter__(self):
|
||||
self.fail("Should not be called!")
|
||||
|
||||
def __exit__(self, *exc_details):
|
||||
self.check_exc(*exc_details)
|
||||
|
||||
with ExitStack() as stack:
|
||||
stack.push(_expect_ok)
|
||||
self.assertIs(tuple(stack._exit_callbacks)[-1], _expect_ok)
|
||||
|
@ -113,21 +120,24 @@ class TestExitStack(unittest.TestCase):
|
|||
self.assertIs(tuple(stack._exit_callbacks)[-1], _expect_exc)
|
||||
stack.push(_expect_exc)
|
||||
self.assertIs(tuple(stack._exit_callbacks)[-1], _expect_exc)
|
||||
1/0
|
||||
1 / 0
|
||||
|
||||
def test_enter_context(self):
|
||||
class TestCM(object):
|
||||
def __enter__(self):
|
||||
result.append(1)
|
||||
|
||||
def __exit__(self, *exc_details):
|
||||
result.append(3)
|
||||
|
||||
result = []
|
||||
cm = TestCM()
|
||||
with ExitStack() as stack:
|
||||
|
||||
@stack.callback # Registered first => cleaned up last
|
||||
def _exit():
|
||||
result.append(4)
|
||||
|
||||
self.assertIsNotNone(_exit)
|
||||
stack.enter_context(cm)
|
||||
# self.assertIs(stack._exit_callbacks[-1].__self__, cm)
|
||||
|
@ -137,9 +147,11 @@ class TestExitStack(unittest.TestCase):
|
|||
def test_close(self):
|
||||
result = []
|
||||
with ExitStack() as stack:
|
||||
|
||||
@stack.callback
|
||||
def _exit():
|
||||
result.append(1)
|
||||
|
||||
self.assertIsNotNone(_exit)
|
||||
stack.close()
|
||||
result.append(2)
|
||||
|
@ -148,9 +160,11 @@ class TestExitStack(unittest.TestCase):
|
|||
def test_pop_all(self):
|
||||
result = []
|
||||
with ExitStack() as stack:
|
||||
|
||||
@stack.callback
|
||||
def _exit():
|
||||
result.append(3)
|
||||
|
||||
self.assertIsNotNone(_exit)
|
||||
new_stack = stack.pop_all()
|
||||
result.append(1)
|
||||
|
@ -162,22 +176,25 @@ class TestExitStack(unittest.TestCase):
|
|||
with self.assertRaises(ZeroDivisionError):
|
||||
with ExitStack() as stack:
|
||||
stack.push(lambda *exc: False)
|
||||
1/0
|
||||
1 / 0
|
||||
|
||||
def test_exit_suppress(self):
|
||||
with ExitStack() as stack:
|
||||
stack.push(lambda *exc: True)
|
||||
1/0
|
||||
1 / 0
|
||||
|
||||
def test_exit_exception_chaining_reference(self):
|
||||
# Sanity check to make sure that ExitStack chaining matches
|
||||
# actual nested with statements
|
||||
exc_chain = []
|
||||
|
||||
class RaiseExc:
|
||||
def __init__(self, exc):
|
||||
self.exc = exc
|
||||
|
||||
def __enter__(self):
|
||||
return self
|
||||
|
||||
def __exit__(self, *exc_details):
|
||||
exc_chain.append(exc_details[0])
|
||||
raise self.exc
|
||||
|
@ -186,8 +203,10 @@ class TestExitStack(unittest.TestCase):
|
|||
def __init__(self, outer, inner):
|
||||
self.outer = outer
|
||||
self.inner = inner
|
||||
|
||||
def __enter__(self):
|
||||
return self
|
||||
|
||||
def __exit__(self, *exc_details):
|
||||
try:
|
||||
exc_chain.append(exc_details[0])
|
||||
|
@ -199,6 +218,7 @@ class TestExitStack(unittest.TestCase):
|
|||
class SuppressExc:
|
||||
def __enter__(self):
|
||||
return self
|
||||
|
||||
def __exit__(self, *exc_details):
|
||||
type(self).saved_details = exc_details
|
||||
return True
|
||||
|
@ -215,7 +235,13 @@ class TestExitStack(unittest.TestCase):
|
|||
# Inner exceptions were suppressed
|
||||
# self.assertIsNone(exc.__context__.__context__.__context__)
|
||||
exc_chain.append(type(exc))
|
||||
assert tuple(exc_chain) == (ZeroDivisionError, None, AttributeError, KeyError, IndexError)
|
||||
assert tuple(exc_chain) == (
|
||||
ZeroDivisionError,
|
||||
None,
|
||||
AttributeError,
|
||||
KeyError,
|
||||
IndexError,
|
||||
)
|
||||
else:
|
||||
self.fail("Expected IndexError, but no exception was raised")
|
||||
# Check the inner exceptions
|
||||
|
@ -226,6 +252,7 @@ class TestExitStack(unittest.TestCase):
|
|||
def test_exit_exception_chaining(self):
|
||||
# Ensure exception chaining matches the reference behaviour
|
||||
exc_chain = []
|
||||
|
||||
def raise_exc(exc):
|
||||
frame_exc = sys.exc_info()[0]
|
||||
if frame_exc is not None:
|
||||
|
@ -234,6 +261,7 @@ class TestExitStack(unittest.TestCase):
|
|||
raise exc
|
||||
|
||||
saved_details = None
|
||||
|
||||
def suppress_exc(*exc_details):
|
||||
nonlocal saved_details
|
||||
saved_details = exc_details
|
||||
|
@ -254,7 +282,13 @@ class TestExitStack(unittest.TestCase):
|
|||
# self.assertIsInstance(exc.__context__.__context__, AttributeError)
|
||||
# Inner exceptions were suppressed
|
||||
# self.assertIsNone(exc.__context__.__context__.__context__)
|
||||
assert tuple(exc_chain) == (ZeroDivisionError, None, AttributeError, KeyError, IndexError)
|
||||
assert tuple(exc_chain) == (
|
||||
ZeroDivisionError,
|
||||
None,
|
||||
AttributeError,
|
||||
KeyError,
|
||||
IndexError,
|
||||
)
|
||||
else:
|
||||
self.fail("Expected IndexError, but no exception was raised")
|
||||
# Check the inner exceptions
|
||||
|
@ -317,8 +351,7 @@ class TestExitStack(unittest.TestCase):
|
|||
self.assertIs(exc.__context__, exc3)
|
||||
self.assertIs(exc.__context__.__context__, exc2)
|
||||
self.assertIs(exc.__context__.__context__.__context__, exc1)
|
||||
self.assertIsNone(
|
||||
exc.__context__.__context__.__context__.__context__)
|
||||
self.assertIsNone(exc.__context__.__context__.__context__.__context__)
|
||||
|
||||
def _test_exit_exception_with_existing_context(self):
|
||||
# Addresses a lack of test coverage discovered after checking in a
|
||||
|
@ -328,6 +361,7 @@ class TestExitStack(unittest.TestCase):
|
|||
raise inner_exc
|
||||
finally:
|
||||
raise outer_exc
|
||||
|
||||
exc1 = Exception(1)
|
||||
exc2 = Exception(2)
|
||||
exc3 = Exception(3)
|
||||
|
@ -343,27 +377,24 @@ class TestExitStack(unittest.TestCase):
|
|||
self.assertIs(exc.__context__, exc4)
|
||||
self.assertIs(exc.__context__.__context__, exc3)
|
||||
self.assertIs(exc.__context__.__context__.__context__, exc2)
|
||||
self.assertIs(
|
||||
exc.__context__.__context__.__context__.__context__, exc1)
|
||||
self.assertIsNone(
|
||||
exc.__context__.__context__.__context__.__context__.__context__)
|
||||
|
||||
|
||||
self.assertIs(exc.__context__.__context__.__context__.__context__, exc1)
|
||||
self.assertIsNone(exc.__context__.__context__.__context__.__context__.__context__)
|
||||
|
||||
def test_body_exception_suppress(self):
|
||||
def suppress_exc(*exc_details):
|
||||
return True
|
||||
|
||||
try:
|
||||
with ExitStack() as stack:
|
||||
stack.push(suppress_exc)
|
||||
1/0
|
||||
1 / 0
|
||||
except IndexError as exc:
|
||||
self.fail("Expected no exception, got IndexError")
|
||||
|
||||
def test_exit_exception_chaining_suppress(self):
|
||||
with ExitStack() as stack:
|
||||
stack.push(lambda *exc: True)
|
||||
stack.push(lambda *exc: 1/0)
|
||||
stack.push(lambda *exc: 1 / 0)
|
||||
stack.push(lambda *exc: {}[1])
|
||||
|
||||
def test_excessive_nesting(self):
|
||||
|
@ -373,7 +404,9 @@ class TestExitStack(unittest.TestCase):
|
|||
stack.callback(int)
|
||||
|
||||
def test_instance_bypass(self):
|
||||
class Example(object): pass
|
||||
class Example(object):
|
||||
pass
|
||||
|
||||
cm = Example()
|
||||
cm.__exit__ = object()
|
||||
stack = ExitStack()
|
||||
|
@ -382,5 +415,5 @@ class TestExitStack(unittest.TestCase):
|
|||
self.assertIs(tuple(stack._exit_callbacks)[-1], cm)
|
||||
|
||||
|
||||
if __name__ == '__main__':
|
||||
if __name__ == "__main__":
|
||||
unittest.main()
|
||||
|
|
|
@ -49,13 +49,17 @@ __getstate__() and __setstate__(). See the documentation for module
"""

import types
#import weakref
#from copyreg import dispatch_table
#import builtins

# import weakref
# from copyreg import dispatch_table
# import builtins


class Error(Exception):
    pass
error = Error # backward compatibility


error = Error  # backward compatibility

try:
    from ucollections import OrderedDict
|
@ -69,6 +73,7 @@ except ImportError:
|
|||
|
||||
__all__ = ["Error", "copy", "deepcopy"]
|
||||
|
||||
|
||||
def copy(x):
|
||||
"""Shallow copy operation on arbitrary Python objects.
|
||||
|
||||
|
@ -107,35 +112,54 @@ def copy(x):
|
|||
|
||||
_copy_dispatch = d = {}
|
||||
|
||||
|
||||
def _copy_immutable(x):
|
||||
return x
|
||||
for t in (type(None), int, float, bool, str, tuple,
|
||||
type, range,
|
||||
types.BuiltinFunctionType, type(Ellipsis),
|
||||
types.FunctionType):
|
||||
|
||||
|
||||
for t in (
|
||||
type(None),
|
||||
int,
|
||||
float,
|
||||
bool,
|
||||
str,
|
||||
tuple,
|
||||
type,
|
||||
range,
|
||||
types.BuiltinFunctionType,
|
||||
type(Ellipsis),
|
||||
types.FunctionType,
|
||||
):
|
||||
d[t] = _copy_immutable
|
||||
t = getattr(types, "CodeType", None)
|
||||
if t is not None:
|
||||
d[t] = _copy_immutable
|
||||
#for name in ("complex", "unicode"):
|
||||
# for name in ("complex", "unicode"):
|
||||
# t = getattr(builtins, name, None)
|
||||
# if t is not None:
|
||||
# d[t] = _copy_immutable
|
||||
|
||||
|
||||
def _copy_with_constructor(x):
|
||||
return type(x)(x)
|
||||
|
||||
|
||||
for t in (list, dict, set):
|
||||
d[t] = _copy_with_constructor
|
||||
if OrderedDict is not None:
|
||||
d[OrderedDict] = _copy_with_constructor
|
||||
|
||||
|
||||
def _copy_with_copy_method(x):
|
||||
return x.copy()
|
||||
|
||||
|
||||
if PyStringMap is not None:
|
||||
d[PyStringMap] = _copy_with_copy_method
|
||||
|
||||
del d
|
||||
|
||||
|
||||
def deepcopy(x, memo=None, _nil=[]):
|
||||
"""Deep copy operation on arbitrary Python objects.
|
||||
|
||||
|
@ -158,7 +182,7 @@ def deepcopy(x, memo=None, _nil=[]):
|
|||
else:
|
||||
try:
|
||||
issc = issubclass(cls, type)
|
||||
except TypeError: # cls is not a class (old Boost; see SF #502085)
|
||||
except TypeError: # cls is not a class (old Boost; see SF #502085)
|
||||
issc = 0
|
||||
if issc:
|
||||
y = _deepcopy_atomic(x, memo)
|
||||
|
@ -179,20 +203,23 @@ def deepcopy(x, memo=None, _nil=[]):
|
|||
if reductor:
|
||||
rv = reductor()
|
||||
else:
|
||||
raise Error(
|
||||
"un(deep)copyable object of type %s" % cls)
|
||||
raise Error("un(deep)copyable object of type %s" % cls)
|
||||
y = _reconstruct(x, rv, 1, memo)
|
||||
|
||||
# If is its own copy, don't memoize.
|
||||
if y is not x:
|
||||
memo[d] = y
|
||||
_keep_alive(x, memo) # Make sure x lives at least as long as d
|
||||
_keep_alive(x, memo) # Make sure x lives at least as long as d
|
||||
return y
|
||||
|
||||
|
||||
_deepcopy_dispatch = d = {}
|
||||
|
||||
|
||||
def _deepcopy_atomic(x, memo):
|
||||
return x
|
||||
|
||||
|
||||
d[type(None)] = _deepcopy_atomic
|
||||
d[type(Ellipsis)] = _deepcopy_atomic
|
||||
d[int] = _deepcopy_atomic
|
||||
|
@ -212,7 +239,8 @@ d[type] = _deepcopy_atomic
|
|||
d[range] = _deepcopy_atomic
|
||||
d[types.BuiltinFunctionType] = _deepcopy_atomic
|
||||
d[types.FunctionType] = _deepcopy_atomic
|
||||
#d[weakref.ref] = _deepcopy_atomic
|
||||
# d[weakref.ref] = _deepcopy_atomic
|
||||
|
||||
|
||||
def _deepcopy_list(x, memo):
|
||||
y = []
|
||||
|
@ -220,8 +248,11 @@ def _deepcopy_list(x, memo):
|
|||
for a in x:
|
||||
y.append(deepcopy(a, memo))
|
||||
return y
|
||||
|
||||
|
||||
d[list] = _deepcopy_list
|
||||
|
||||
|
||||
def _deepcopy_tuple(x, memo):
|
||||
y = []
|
||||
for a in x:
|
||||
|
@ -239,24 +270,33 @@ def _deepcopy_tuple(x, memo):
|
|||
else:
|
||||
y = x
|
||||
return y
|
||||
|
||||
|
||||
d[tuple] = _deepcopy_tuple
|
||||
|
||||
|
||||
def _deepcopy_dict(x, memo):
|
||||
y = type(x)()
|
||||
memo[id(x)] = y
|
||||
for key, value in x.items():
|
||||
y[deepcopy(key, memo)] = deepcopy(value, memo)
|
||||
return y
|
||||
|
||||
|
||||
d[dict] = _deepcopy_dict
|
||||
if OrderedDict is not None:
|
||||
d[OrderedDict] = _deepcopy_dict
|
||||
if PyStringMap is not None:
|
||||
d[PyStringMap] = _deepcopy_dict
|
||||
|
||||
def _deepcopy_method(x, memo): # Copy instance methods
|
||||
|
||||
def _deepcopy_method(x, memo): # Copy instance methods
|
||||
return type(x)(x.__func__, deepcopy(x.__self__, memo))
|
||||
|
||||
|
||||
_deepcopy_dispatch[types.MethodType] = _deepcopy_method
|
||||
|
||||
|
||||
def _keep_alive(x, memo):
|
||||
"""Keeps a reference to the object x in the memo.
|
||||
|
||||
|
@ -271,7 +311,8 @@ def _keep_alive(x, memo):
|
|||
memo[id(memo)].append(x)
|
||||
except KeyError:
|
||||
# aha, this is the first one :-)
|
||||
memo[id(memo)]=[x]
|
||||
memo[id(memo)] = [x]
|
||||
|
||||
|
||||
def _reconstruct(x, info, deep, memo=None):
|
||||
if isinstance(info, str):
|
||||
|
@ -302,7 +343,7 @@ def _reconstruct(x, info, deep, memo=None):
|
|||
if state:
|
||||
if deep:
|
||||
state = deepcopy(state, memo)
|
||||
if hasattr(y, '__setstate__'):
|
||||
if hasattr(y, "__setstate__"):
|
||||
y.__setstate__(state)
|
||||
else:
|
||||
if isinstance(state, tuple) and len(state) == 2:
|
||||
|
@ -328,6 +369,7 @@ def _reconstruct(x, info, deep, memo=None):
|
|||
y[key] = value
|
||||
return y
|
||||
|
||||
|
||||
del d
|
||||
|
||||
del types
|
||||
|
|
|
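The copy module's semantics are untouched here; only the layout changes. The usual shallow-versus-deep distinction, for reference:

import copy

original = {"nums": [1, 2, 3]}

shallow = copy.copy(original)   # new dict, but the inner list is shared
deep = copy.deepcopy(original)  # the inner list is copied recursively as well

original["nums"].append(4)
print(shallow["nums"])  # [1, 2, 3, 4] -- shared with the original
print(deep["nums"])     # [1, 2, 3]    -- independent copy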
@ -1,20 +1,24 @@
|
|||
import sys
|
||||
|
||||
# Remove current dir from sys.path, otherwise setuptools will peek up our
|
||||
# module instead of system's.
|
||||
sys.path.pop(0)
|
||||
from setuptools import setup
|
||||
|
||||
sys.path.append("..")
|
||||
import sdist_upip
|
||||
|
||||
setup(name='micropython-copy',
|
||||
version='3.3.3-2',
|
||||
description='CPython copy module ported to MicroPython',
|
||||
long_description='This is a module ported from CPython standard library to be compatible with\nMicroPython interpreter. Usually, this means applying small patches for\nfeatures not supported (yet, or at all) in MicroPython. Sometimes, heavier\nchanges are required. Note that CPython modules are written with availability\nof vast resources in mind, and may not work for MicroPython ports with\nlimited heap. If you are affected by such a case, please help reimplement\nthe module from scratch.',
|
||||
url='https://github.com/micropython/micropython-lib',
|
||||
author='CPython Developers',
|
||||
author_email='python-dev@python.org',
|
||||
maintainer='micropython-lib Developers',
|
||||
maintainer_email='micro-python@googlegroups.com',
|
||||
license='Python',
|
||||
cmdclass={'sdist': sdist_upip.sdist},
|
||||
py_modules=['copy'])
|
||||
setup(
|
||||
name="micropython-copy",
|
||||
version="3.3.3-2",
|
||||
description="CPython copy module ported to MicroPython",
|
||||
long_description="This is a module ported from CPython standard library to be compatible with\nMicroPython interpreter. Usually, this means applying small patches for\nfeatures not supported (yet, or at all) in MicroPython. Sometimes, heavier\nchanges are required. Note that CPython modules are written with availability\nof vast resources in mind, and may not work for MicroPython ports with\nlimited heap. If you are affected by such a case, please help reimplement\nthe module from scratch.",
|
||||
url="https://github.com/micropython/micropython-lib",
|
||||
author="CPython Developers",
|
||||
author_email="python-dev@python.org",
|
||||
maintainer="micropython-lib Developers",
|
||||
maintainer_email="micro-python@googlegroups.com",
|
||||
license="Python",
|
||||
cmdclass={"sdist": sdist_upip.sdist},
|
||||
py_modules=["copy"],
|
||||
)
|
||||
|
|
|
@ -1,84 +1,161 @@
|
|||
"""Constants and membership tests for ASCII characters"""
|
||||
|
||||
NUL = 0x00 # ^@
|
||||
SOH = 0x01 # ^A
|
||||
STX = 0x02 # ^B
|
||||
ETX = 0x03 # ^C
|
||||
EOT = 0x04 # ^D
|
||||
ENQ = 0x05 # ^E
|
||||
ACK = 0x06 # ^F
|
||||
BEL = 0x07 # ^G
|
||||
BS = 0x08 # ^H
|
||||
TAB = 0x09 # ^I
|
||||
HT = 0x09 # ^I
|
||||
LF = 0x0a # ^J
|
||||
NL = 0x0a # ^J
|
||||
VT = 0x0b # ^K
|
||||
FF = 0x0c # ^L
|
||||
CR = 0x0d # ^M
|
||||
SO = 0x0e # ^N
|
||||
SI = 0x0f # ^O
|
||||
DLE = 0x10 # ^P
|
||||
DC1 = 0x11 # ^Q
|
||||
DC2 = 0x12 # ^R
|
||||
DC3 = 0x13 # ^S
|
||||
DC4 = 0x14 # ^T
|
||||
NAK = 0x15 # ^U
|
||||
SYN = 0x16 # ^V
|
||||
ETB = 0x17 # ^W
|
||||
CAN = 0x18 # ^X
|
||||
EM = 0x19 # ^Y
|
||||
SUB = 0x1a # ^Z
|
||||
ESC = 0x1b # ^[
|
||||
FS = 0x1c # ^\
|
||||
GS = 0x1d # ^]
|
||||
RS = 0x1e # ^^
|
||||
US = 0x1f # ^_
|
||||
SP = 0x20 # space
|
||||
DEL = 0x7f # delete
|
||||
NUL = 0x00 # ^@
|
||||
SOH = 0x01 # ^A
|
||||
STX = 0x02 # ^B
|
||||
ETX = 0x03 # ^C
|
||||
EOT = 0x04 # ^D
|
||||
ENQ = 0x05 # ^E
|
||||
ACK = 0x06 # ^F
|
||||
BEL = 0x07 # ^G
|
||||
BS = 0x08 # ^H
|
||||
TAB = 0x09 # ^I
|
||||
HT = 0x09 # ^I
|
||||
LF = 0x0A # ^J
|
||||
NL = 0x0A # ^J
|
||||
VT = 0x0B # ^K
|
||||
FF = 0x0C # ^L
|
||||
CR = 0x0D # ^M
|
||||
SO = 0x0E # ^N
|
||||
SI = 0x0F # ^O
|
||||
DLE = 0x10 # ^P
|
||||
DC1 = 0x11 # ^Q
|
||||
DC2 = 0x12 # ^R
|
||||
DC3 = 0x13 # ^S
|
||||
DC4 = 0x14 # ^T
|
||||
NAK = 0x15 # ^U
|
||||
SYN = 0x16 # ^V
|
||||
ETB = 0x17 # ^W
|
||||
CAN = 0x18 # ^X
|
||||
EM = 0x19 # ^Y
|
||||
SUB = 0x1A # ^Z
|
||||
ESC = 0x1B # ^[
|
||||
FS = 0x1C # ^\
|
||||
GS = 0x1D # ^]
|
||||
RS = 0x1E # ^^
|
||||
US = 0x1F # ^_
|
||||
SP = 0x20 # space
|
||||
DEL = 0x7F # delete
|
||||
|
||||
controlnames = [
|
||||
"NUL", "SOH", "STX", "ETX", "EOT", "ENQ", "ACK", "BEL",
|
||||
"BS", "HT", "LF", "VT", "FF", "CR", "SO", "SI",
|
||||
"DLE", "DC1", "DC2", "DC3", "DC4", "NAK", "SYN", "ETB",
|
||||
"CAN", "EM", "SUB", "ESC", "FS", "GS", "RS", "US",
|
||||
"SP"
|
||||
"NUL",
|
||||
"SOH",
|
||||
"STX",
|
||||
"ETX",
|
||||
"EOT",
|
||||
"ENQ",
|
||||
"ACK",
|
||||
"BEL",
|
||||
"BS",
|
||||
"HT",
|
||||
"LF",
|
||||
"VT",
|
||||
"FF",
|
||||
"CR",
|
||||
"SO",
|
||||
"SI",
|
||||
"DLE",
|
||||
"DC1",
|
||||
"DC2",
|
||||
"DC3",
|
||||
"DC4",
|
||||
"NAK",
|
||||
"SYN",
|
||||
"ETB",
|
||||
"CAN",
|
||||
"EM",
|
||||
"SUB",
|
||||
"ESC",
|
||||
"FS",
|
||||
"GS",
|
||||
"RS",
|
||||
"US",
|
||||
"SP",
|
||||
]
|
||||
|
||||
|
||||
def _ctoi(c):
|
||||
if type(c) == type(""):
|
||||
return ord(c)
|
||||
else:
|
||||
return c
|
||||
|
||||
def isalnum(c): return isalpha(c) or isdigit(c)
|
||||
def isalpha(c): return isupper(c) or islower(c)
|
||||
def isascii(c): return _ctoi(c) <= 127 # ?
|
||||
def isblank(c): return _ctoi(c) in (8,32)
|
||||
def iscntrl(c): return _ctoi(c) <= 31
|
||||
def isdigit(c): return _ctoi(c) >= 48 and _ctoi(c) <= 57
|
||||
def isgraph(c): return _ctoi(c) >= 33 and _ctoi(c) <= 126
|
||||
def islower(c): return _ctoi(c) >= 97 and _ctoi(c) <= 122
|
||||
def isprint(c): return _ctoi(c) >= 32 and _ctoi(c) <= 126
|
||||
def ispunct(c): return _ctoi(c) != 32 and not isalnum(c)
|
||||
def isspace(c): return _ctoi(c) in (9, 10, 11, 12, 13, 32)
|
||||
def isupper(c): return _ctoi(c) >= 65 and _ctoi(c) <= 90
|
||||
def isxdigit(c): return isdigit(c) or \
|
||||
(_ctoi(c) >= 65 and _ctoi(c) <= 70) or (_ctoi(c) >= 97 and _ctoi(c) <= 102)
|
||||
def isctrl(c): return _ctoi(c) < 32
|
||||
def ismeta(c): return _ctoi(c) > 127
|
||||
|
||||
def isalnum(c):
|
||||
return isalpha(c) or isdigit(c)
|
||||
|
||||
|
||||
def isalpha(c):
|
||||
return isupper(c) or islower(c)
|
||||
|
||||
|
||||
def isascii(c):
|
||||
return _ctoi(c) <= 127 # ?
|
||||
|
||||
|
||||
def isblank(c):
|
||||
return _ctoi(c) in (8, 32)
|
||||
|
||||
|
||||
def iscntrl(c):
|
||||
return _ctoi(c) <= 31
|
||||
|
||||
|
||||
def isdigit(c):
|
||||
return _ctoi(c) >= 48 and _ctoi(c) <= 57
|
||||
|
||||
|
||||
def isgraph(c):
|
||||
return _ctoi(c) >= 33 and _ctoi(c) <= 126
|
||||
|
||||
|
||||
def islower(c):
|
||||
return _ctoi(c) >= 97 and _ctoi(c) <= 122
|
||||
|
||||
|
||||
def isprint(c):
|
||||
return _ctoi(c) >= 32 and _ctoi(c) <= 126
|
||||
|
||||
|
||||
def ispunct(c):
|
||||
return _ctoi(c) != 32 and not isalnum(c)
|
||||
|
||||
|
||||
def isspace(c):
|
||||
return _ctoi(c) in (9, 10, 11, 12, 13, 32)
|
||||
|
||||
|
||||
def isupper(c):
|
||||
return _ctoi(c) >= 65 and _ctoi(c) <= 90
|
||||
|
||||
|
||||
def isxdigit(c):
|
||||
return (
|
||||
isdigit(c) or (_ctoi(c) >= 65 and _ctoi(c) <= 70) or (_ctoi(c) >= 97 and _ctoi(c) <= 102)
|
||||
)
|
||||
|
||||
|
||||
def isctrl(c):
|
||||
return _ctoi(c) < 32
|
||||
|
||||
|
||||
def ismeta(c):
|
||||
return _ctoi(c) > 127
|
||||
|
||||
|
||||
def ascii(c):
|
||||
if type(c) == type(""):
|
||||
return chr(_ctoi(c) & 0x7f)
|
||||
return chr(_ctoi(c) & 0x7F)
|
||||
else:
|
||||
return _ctoi(c) & 0x7f
|
||||
return _ctoi(c) & 0x7F
|
||||
|
||||
|
||||
def ctrl(c):
|
||||
if type(c) == type(""):
|
||||
return chr(_ctoi(c) & 0x1f)
|
||||
return chr(_ctoi(c) & 0x1F)
|
||||
else:
|
||||
return _ctoi(c) & 0x1f
|
||||
return _ctoi(c) & 0x1F
|
||||
|
||||
|
||||
def alt(c):
|
||||
if type(c) == type(""):
|
||||
|
@ -86,14 +163,15 @@ def alt(c):
|
|||
else:
|
||||
return _ctoi(c) | 0x80
|
||||
|
||||
|
||||
def unctrl(c):
|
||||
bits = _ctoi(c)
|
||||
if bits == 0x7f:
|
||||
if bits == 0x7F:
|
||||
rep = "^?"
|
||||
elif isprint(bits & 0x7f):
|
||||
rep = chr(bits & 0x7f)
|
||||
elif isprint(bits & 0x7F):
|
||||
rep = chr(bits & 0x7F)
|
||||
else:
|
||||
rep = "^" + chr(((bits & 0x7f) | 0x20) + 0x20)
|
||||
rep = "^" + chr(((bits & 0x7F) | 0x20) + 0x20)
|
||||
if bits & 0x80:
|
||||
return "!" + rep
|
||||
return rep
|
||||
|
|
|
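curses.ascii is pure constants and predicates, so Black only reflows it. A short sketch of the helpers touched above, using CPython's curses.ascii (importable wherever the curses package is available; the MicroPython port provides the same names):

from curses import ascii

print(ascii.isprint("A"), ascii.iscntrl("\n"))  # True True
print(repr(ascii.ctrl("c")))    # '\x03' -- Ctrl-C
print(ascii.unctrl("\x03"))     # ^C     -- printable caret notation
print(repr(ascii.alt("a")))     # '\xe1' -- same character with the high bit set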
@ -1,20 +1,24 @@
|
|||
import sys
|
||||
|
||||
# Remove current dir from sys.path, otherwise setuptools will peek up our
|
||||
# module instead of system's.
|
||||
sys.path.pop(0)
|
||||
from setuptools import setup
|
||||
|
||||
sys.path.append("..")
|
||||
import sdist_upip
|
||||
|
||||
setup(name='micropython-curses.ascii',
|
||||
version='3.4.2-1',
|
||||
description='CPython curses.ascii module ported to MicroPython',
|
||||
long_description='This is a module ported from CPython standard library to be compatible with\nMicroPython interpreter. Usually, this means applying small patches for\nfeatures not supported (yet, or at all) in MicroPython. Sometimes, heavier\nchanges are required. Note that CPython modules are written with availability\nof vast resources in mind, and may not work for MicroPython ports with\nlimited heap. If you are affected by such a case, please help reimplement\nthe module from scratch.',
|
||||
url='https://github.com/micropython/micropython-lib',
|
||||
author='CPython Developers',
|
||||
author_email='python-dev@python.org',
|
||||
maintainer='micropython-lib Developers',
|
||||
maintainer_email='micro-python@googlegroups.com',
|
||||
license='Python',
|
||||
cmdclass={'sdist': sdist_upip.sdist},
|
||||
packages=['curses'])
|
||||
setup(
|
||||
name="micropython-curses.ascii",
|
||||
version="3.4.2-1",
|
||||
description="CPython curses.ascii module ported to MicroPython",
|
||||
long_description="This is a module ported from CPython standard library to be compatible with\nMicroPython interpreter. Usually, this means applying small patches for\nfeatures not supported (yet, or at all) in MicroPython. Sometimes, heavier\nchanges are required. Note that CPython modules are written with availability\nof vast resources in mind, and may not work for MicroPython ports with\nlimited heap. If you are affected by such a case, please help reimplement\nthe module from scratch.",
|
||||
url="https://github.com/micropython/micropython-lib",
|
||||
author="CPython Developers",
|
||||
author_email="python-dev@python.org",
|
||||
maintainer="micropython-lib Developers",
|
||||
maintainer_email="micro-python@googlegroups.com",
|
||||
license="Python",
|
||||
cmdclass={"sdist": sdist_upip.sdist},
|
||||
packages=["curses"],
|
||||
)
|
||||
|
|
|
@ -3,11 +3,11 @@
|
|||
# Contact: email-sig@python.org
|
||||
|
||||
__all__ = [
|
||||
'Charset',
|
||||
'add_alias',
|
||||
'add_charset',
|
||||
'add_codec',
|
||||
]
|
||||
"Charset",
|
||||
"add_alias",
|
||||
"add_charset",
|
||||
"add_codec",
|
||||
]
|
||||
|
||||
from functools import partial
|
||||
|
||||
|
@ -18,93 +18,90 @@ from email import errors
|
|||
from email.encoders import encode_7or8bit
|
||||
|
||||
|
||||
|
||||
# Flags for types of header encodings
|
||||
QP = 1 # Quoted-Printable
|
||||
BASE64 = 2 # Base64
|
||||
SHORTEST = 3 # the shorter of QP and base64, but only for headers
|
||||
QP = 1 # Quoted-Printable
|
||||
BASE64 = 2 # Base64
|
||||
SHORTEST = 3 # the shorter of QP and base64, but only for headers
|
||||
|
||||
# In "=?charset?q?hello_world?=", the =?, ?q?, and ?= add up to 7
|
||||
RFC2047_CHROME_LEN = 7
|
||||
|
||||
DEFAULT_CHARSET = 'us-ascii'
|
||||
UNKNOWN8BIT = 'unknown-8bit'
|
||||
EMPTYSTRING = ''
|
||||
DEFAULT_CHARSET = "us-ascii"
|
||||
UNKNOWN8BIT = "unknown-8bit"
|
||||
EMPTYSTRING = ""
|
||||
|
||||
|
||||
|
||||
# Defaults
|
||||
CHARSETS = {
|
||||
# input header enc body enc output conv
|
||||
'iso-8859-1': (QP, QP, None),
|
||||
'iso-8859-2': (QP, QP, None),
|
||||
'iso-8859-3': (QP, QP, None),
|
||||
'iso-8859-4': (QP, QP, None),
|
||||
"iso-8859-1": (QP, QP, None),
|
||||
"iso-8859-2": (QP, QP, None),
|
||||
"iso-8859-3": (QP, QP, None),
|
||||
"iso-8859-4": (QP, QP, None),
|
||||
# iso-8859-5 is Cyrillic, and not especially used
|
||||
# iso-8859-6 is Arabic, also not particularly used
|
||||
# iso-8859-7 is Greek, QP will not make it readable
|
||||
# iso-8859-8 is Hebrew, QP will not make it readable
|
||||
'iso-8859-9': (QP, QP, None),
|
||||
'iso-8859-10': (QP, QP, None),
|
||||
"iso-8859-9": (QP, QP, None),
|
||||
"iso-8859-10": (QP, QP, None),
|
||||
# iso-8859-11 is Thai, QP will not make it readable
|
||||
'iso-8859-13': (QP, QP, None),
|
||||
'iso-8859-14': (QP, QP, None),
|
||||
'iso-8859-15': (QP, QP, None),
|
||||
'iso-8859-16': (QP, QP, None),
|
||||
'windows-1252':(QP, QP, None),
|
||||
'viscii': (QP, QP, None),
|
||||
'us-ascii': (None, None, None),
|
||||
'big5': (BASE64, BASE64, None),
|
||||
'gb2312': (BASE64, BASE64, None),
|
||||
'euc-jp': (BASE64, None, 'iso-2022-jp'),
|
||||
'shift_jis': (BASE64, None, 'iso-2022-jp'),
|
||||
'iso-2022-jp': (BASE64, None, None),
|
||||
'koi8-r': (BASE64, BASE64, None),
|
||||
'utf-8': (SHORTEST, BASE64, 'utf-8'),
|
||||
}
|
||||
"iso-8859-13": (QP, QP, None),
|
||||
"iso-8859-14": (QP, QP, None),
|
||||
"iso-8859-15": (QP, QP, None),
|
||||
"iso-8859-16": (QP, QP, None),
|
||||
"windows-1252": (QP, QP, None),
|
||||
"viscii": (QP, QP, None),
|
||||
"us-ascii": (None, None, None),
|
||||
"big5": (BASE64, BASE64, None),
|
||||
"gb2312": (BASE64, BASE64, None),
|
||||
"euc-jp": (BASE64, None, "iso-2022-jp"),
|
||||
"shift_jis": (BASE64, None, "iso-2022-jp"),
|
||||
"iso-2022-jp": (BASE64, None, None),
|
||||
"koi8-r": (BASE64, BASE64, None),
|
||||
"utf-8": (SHORTEST, BASE64, "utf-8"),
|
||||
}
|
||||
|
||||
# Aliases for other commonly-used names for character sets. Map
|
||||
# them to the real ones used in email.
|
||||
ALIASES = {
|
||||
'latin_1': 'iso-8859-1',
|
||||
'latin-1': 'iso-8859-1',
|
||||
'latin_2': 'iso-8859-2',
|
||||
'latin-2': 'iso-8859-2',
|
||||
'latin_3': 'iso-8859-3',
|
||||
'latin-3': 'iso-8859-3',
|
||||
'latin_4': 'iso-8859-4',
|
||||
'latin-4': 'iso-8859-4',
|
||||
'latin_5': 'iso-8859-9',
|
||||
'latin-5': 'iso-8859-9',
|
||||
'latin_6': 'iso-8859-10',
|
||||
'latin-6': 'iso-8859-10',
|
||||
'latin_7': 'iso-8859-13',
|
||||
'latin-7': 'iso-8859-13',
|
||||
'latin_8': 'iso-8859-14',
|
||||
'latin-8': 'iso-8859-14',
|
||||
'latin_9': 'iso-8859-15',
|
||||
'latin-9': 'iso-8859-15',
|
||||
'latin_10':'iso-8859-16',
|
||||
'latin-10':'iso-8859-16',
|
||||
'cp949': 'ks_c_5601-1987',
|
||||
'euc_jp': 'euc-jp',
|
||||
'euc_kr': 'euc-kr',
|
||||
'ascii': 'us-ascii',
|
||||
}
|
||||
"latin_1": "iso-8859-1",
|
||||
"latin-1": "iso-8859-1",
|
||||
"latin_2": "iso-8859-2",
|
||||
"latin-2": "iso-8859-2",
|
||||
"latin_3": "iso-8859-3",
|
||||
"latin-3": "iso-8859-3",
|
||||
"latin_4": "iso-8859-4",
|
||||
"latin-4": "iso-8859-4",
|
||||
"latin_5": "iso-8859-9",
|
||||
"latin-5": "iso-8859-9",
|
||||
"latin_6": "iso-8859-10",
|
||||
"latin-6": "iso-8859-10",
|
||||
"latin_7": "iso-8859-13",
|
||||
"latin-7": "iso-8859-13",
|
||||
"latin_8": "iso-8859-14",
|
||||
"latin-8": "iso-8859-14",
|
||||
"latin_9": "iso-8859-15",
|
||||
"latin-9": "iso-8859-15",
|
||||
"latin_10": "iso-8859-16",
|
||||
"latin-10": "iso-8859-16",
|
||||
"cp949": "ks_c_5601-1987",
|
||||
"euc_jp": "euc-jp",
|
||||
"euc_kr": "euc-kr",
|
||||
"ascii": "us-ascii",
|
||||
}
|
||||
|
||||
|
||||
# Map charsets to their Unicode codec strings.
|
||||
CODEC_MAP = {
|
||||
'gb2312': 'eucgb2312_cn',
|
||||
'big5': 'big5_tw',
|
||||
"gb2312": "eucgb2312_cn",
|
||||
"big5": "big5_tw",
|
||||
# Hack: We don't want *any* conversion for stuff marked us-ascii, as all
|
||||
# sorts of garbage might be sent to us in the guise of 7-bit us-ascii.
|
||||
# Let that stuff pass through without conversion to/from Unicode.
|
||||
'us-ascii': None,
|
||||
}
|
||||
"us-ascii": None,
|
||||
}
|
||||
|
||||
|
||||
|
||||
# Convenience functions for extending the above mappings
|
||||
def add_charset(charset, header_enc=None, body_enc=None, output_charset=None):
|
||||
"""Add character set properties to the global registry.
|
||||
|
@ -130,7 +127,7 @@ def add_charset(charset, header_enc=None, body_enc=None, output_charset=None):
|
|||
documentation for more information.
|
||||
"""
|
||||
if body_enc == SHORTEST:
|
||||
raise ValueError('SHORTEST not allowed for body_enc')
|
||||
raise ValueError("SHORTEST not allowed for body_enc")
|
||||
CHARSETS[charset] = (header_enc, body_enc, output_charset)
|
||||
|
||||
|
||||
|
@ -153,17 +150,15 @@ def add_codec(charset, codecname):
|
|||
CODEC_MAP[charset] = codecname
|
||||
|
||||
|
||||
|
||||
# Convenience function for encoding strings, taking into account
|
||||
# that they might be unknown-8bit (ie: have surrogate-escaped bytes)
|
||||
def _encode(string, codec):
|
||||
if codec == UNKNOWN8BIT:
|
||||
return string.encode('ascii', 'surrogateescape')
|
||||
return string.encode("ascii", "surrogateescape")
|
||||
else:
|
||||
return string.encode(codec)
|
||||
|
||||
|
||||
|
||||
class Charset:
|
||||
"""Map character sets to their email properties.
|
||||
|
||||
|
@ -208,6 +203,7 @@ class Charset:
|
|||
to the output_charset. If no conversion codec is necessary,
|
||||
this attribute will have the same value as the input_codec.
|
||||
"""
|
||||
|
||||
def __init__(self, input_charset=DEFAULT_CHARSET):
|
||||
# RFC 2046, $4.1.2 says charsets are not case sensitive. We coerce to
|
||||
# unicode because its .lower() is locale insensitive. If the argument
|
||||
|
@ -215,9 +211,9 @@ class Charset:
|
|||
# charset is ASCII, as the standard (RFC XXX) requires.
|
||||
try:
|
||||
if isinstance(input_charset, str):
|
||||
input_charset.encode('ascii')
|
||||
input_charset.encode("ascii")
|
||||
else:
|
||||
input_charset = str(input_charset, 'ascii')
|
||||
input_charset = str(input_charset, "ascii")
|
||||
except UnicodeError:
|
||||
raise errors.CharsetError(input_charset)
|
||||
input_charset = input_charset.lower()
|
||||
|
@ -226,8 +222,7 @@ class Charset:
|
|||
# We can try to guess which encoding and conversion to use by the
|
||||
# charset_map dictionary. Try that first, but let the user override
|
||||
# it.
|
||||
henc, benc, conv = CHARSETS.get(self.input_charset,
|
||||
(SHORTEST, BASE64, None))
|
||||
henc, benc, conv = CHARSETS.get(self.input_charset, (SHORTEST, BASE64, None))
|
||||
if not conv:
|
||||
conv = self.input_charset
|
||||
# Set the attributes, allowing the arguments to override the default.
|
||||
|
@ -236,10 +231,8 @@ class Charset:
|
|||
self.output_charset = ALIASES.get(conv, conv)
|
||||
# Now set the codecs. If one isn't defined for input_charset,
|
||||
# guess and try a Unicode codec with the same name as input_codec.
|
||||
self.input_codec = CODEC_MAP.get(self.input_charset,
|
||||
self.input_charset)
|
||||
self.output_codec = CODEC_MAP.get(self.output_charset,
|
||||
self.output_charset)
|
||||
self.input_codec = CODEC_MAP.get(self.input_charset, self.input_charset)
|
||||
self.output_codec = CODEC_MAP.get(self.output_charset, self.output_charset)
|
||||
|
||||
def __str__(self):
|
||||
return self.input_charset.lower()
|
||||
|
@ -267,9 +260,9 @@ class Charset:
|
|||
"""
|
||||
assert self.body_encoding != SHORTEST
|
||||
if self.body_encoding == QP:
|
||||
return 'quoted-printable'
|
||||
return "quoted-printable"
|
||||
elif self.body_encoding == BASE64:
|
||||
return 'base64'
|
||||
return "base64"
|
||||
else:
|
||||
return encode_7or8bit
|
||||
|
||||
|
@ -292,7 +285,7 @@ class Charset:
|
|||
output codec.
|
||||
:return: The encoded string, with RFC 2047 chrome.
|
||||
"""
|
||||
codec = self.output_codec or 'us-ascii'
|
||||
codec = self.output_codec or "us-ascii"
|
||||
header_bytes = _encode(string, codec)
|
||||
# 7bit/8bit encodings return the string unchanged (modulo conversions)
|
||||
encoder_module = self._get_encoder(header_bytes)
|
||||
|
@ -318,7 +311,7 @@ class Charset:
        :return: Lines of encoded strings, each with RFC 2047 chrome.
        """
        # See which encoding we should use.
        codec = self.output_codec or 'us-ascii'
        codec = self.output_codec or "us-ascii"
        header_bytes = _encode(string, codec)
        encoder_module = self._get_encoder(header_bytes)
        encoder = partial(encoder_module.header_encode, charset=codec)
@ -351,7 +344,7 @@ class Charset:
        if not lines and not current_line:
            lines.append(None)
        else:
            separator = (' ' if lines else '')
            separator = " " if lines else ""
            joined_line = EMPTYSTRING.join(current_line)
            header_bytes = _encode(joined_line, codec)
            lines.append(encoder(header_bytes))
@ -404,9 +397,9 @@ class Charset:
            # being bytes has never been nailed down, so fixing that is a
            # longer term TODO.
            if isinstance(string, str):
                string = string.encode(self.output_charset).decode('latin1')
                string = string.encode(self.output_charset).decode("latin1")
            return email.quoprimime.body_encode(string)
        else:
            if isinstance(string, str):
                string = string.encode(self.output_charset).decode('ascii')
                string = string.encode(self.output_charset).decode("ascii")
            return string

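
This hunk is the quoted-printable branch of Charset.body_encode(); depending on the
charset's body encoding it yields quoted-printable or base64 text. The outputs below are
what CPython produces and are shown for orientation only (not part of the diff):

# --- illustrative example, not part of the diff ---
from email.charset import Charset
print(Charset("iso-8859-1").body_encode("Grüße"))  # e.g. 'Gr=FC=DFe'      (quoted-printable)
print(Charset("utf-8").body_encode("Grüße"))       # e.g. 'R3LDvMOfZQ==\n' (base64)
# --- end example ---
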
@ -1,21 +1,29 @@
import sys

# Remove current dir from sys.path, otherwise setuptools will peek up our
# module instead of system's.
sys.path.pop(0)
from setuptools import setup

sys.path.append("..")
import sdist_upip

setup(name='micropython-email.charset',
      version='0.5.1',
      description='CPython email.charset module ported to MicroPython',
      long_description='This is a module ported from CPython standard library to be compatible with\nMicroPython interpreter. Usually, this means applying small patches for\nfeatures not supported (yet, or at all) in MicroPython. Sometimes, heavier\nchanges are required. Note that CPython modules are written with availability\nof vast resources in mind, and may not work for MicroPython ports with\nlimited heap. If you are affected by such a case, please help reimplement\nthe module from scratch.',
      url='https://github.com/micropython/micropython-lib',
      author='CPython Developers',
      author_email='python-dev@python.org',
      maintainer='micropython-lib Developers',
      maintainer_email='micro-python@googlegroups.com',
      license='Python',
      cmdclass={'sdist': sdist_upip.sdist},
      packages=['email'],
      install_requires=['micropython-functools', 'micropython-email.encoders', 'micropython-email.errors'])
setup(
    name="micropython-email.charset",
    version="0.5.1",
    description="CPython email.charset module ported to MicroPython",
    long_description="This is a module ported from CPython standard library to be compatible with\nMicroPython interpreter. Usually, this means applying small patches for\nfeatures not supported (yet, or at all) in MicroPython. Sometimes, heavier\nchanges are required. Note that CPython modules are written with availability\nof vast resources in mind, and may not work for MicroPython ports with\nlimited heap. If you are affected by such a case, please help reimplement\nthe module from scratch.",
    url="https://github.com/micropython/micropython-lib",
    author="CPython Developers",
    author_email="python-dev@python.org",
    maintainer="micropython-lib Developers",
    maintainer_email="micro-python@googlegroups.com",
    license="Python",
    cmdclass={"sdist": sdist_upip.sdist},
    packages=["email"],
    install_requires=[
        "micropython-functools",
        "micropython-email.encoders",
        "micropython-email.errors",
    ],
)

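
The setup.py change is purely black's reflow of the setup() call; metadata and
dependencies are untouched. For context, on a device with network access the package and
its install_requires entries would typically be pulled in with upip, roughly like this
(illustrative only; connection setup omitted):

# --- illustrative example, not part of the diff ---
import upip
upip.install("micropython-email.charset")   # also pulls in the listed dependencies
# --- end example ---
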
@ -25,27 +25,26 @@ module.
"""

__all__ = [
    'body_decode',
    'body_encode',
    'decode',
    'decodestring',
    'header_encode',
    'header_length',
    ]
    "body_decode",
    "body_encode",
    "decode",
    "decodestring",
    "header_encode",
    "header_length",
]


from base64 import b64encode
from binascii import b2a_base64, a2b_base64

CRLF = '\r\n'
NL = '\n'
EMPTYSTRING = ''
CRLF = "\r\n"
NL = "\n"
EMPTYSTRING = ""

# See also Charset.py
MISC_LEN = 7



# Helpers
def header_length(bytearray):
    """Return the length of s when it is encoded with base64."""
@ -57,8 +56,7 @@ def header_length(bytearray):
    return n



def header_encode(header_bytes, charset='iso-8859-1'):
def header_encode(header_bytes, charset="iso-8859-1"):
    """Encode a single header line with Base64 encoding in a given charset.

    charset names the character set to use to encode the header. It defaults
@ -69,10 +67,9 @@ def header_encode(header_bytes, charset='iso-8859-1'):
    if isinstance(header_bytes, str):
        header_bytes = header_bytes.encode(charset)
    encoded = b64encode(header_bytes).decode("ascii")
    return '=?%s?b?%s?=' % (charset, encoded)
    return "=?%s?b?%s?=" % (charset, encoded)



def body_encode(s, maxlinelen=76, eol=NL):
    r"""Encode a string with base64.

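
For reference, header_encode() as shown accepts either bytes or a str (a str is first
encoded with the given charset) and wraps the base64 payload in the RFC 2047 marker; a
small check against CPython, which this port tracks (not part of the diff):

# --- illustrative example, not part of the diff ---
from email.base64mime import header_encode
assert header_encode(b"Hello") == "=?iso-8859-1?b?SGVsbG8=?="
assert header_encode("Hello", charset="utf-8") == "=?utf-8?b?SGVsbG8=?="
# --- end example ---
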
@ -91,14 +88,13 @@ def body_encode(s, maxlinelen=76, eol=NL):
    for i in range(0, len(s), max_unencoded):
        # BAW: should encode() inherit b2a_base64()'s dubious behavior in
        # adding a newline to the encoded string?
        enc = b2a_base64(s[i:i + max_unencoded]).decode("ascii")
        enc = b2a_base64(s[i : i + max_unencoded]).decode("ascii")
        if enc.endswith(NL) and eol != NL:
            enc = enc[:-1] + eol
        encvec.append(enc)
    return EMPTYSTRING.join(encvec)



def decode(string):
    """Decode a raw base64 string, returning a bytes object.

@ -109,7 +105,7 @@ def decode(string):
    if not string:
        return bytes()
    elif isinstance(string, str):
        return a2b_base64(string.encode('raw-unicode-escape'))
        return a2b_base64(string.encode("raw-unicode-escape"))
    else:
        return a2b_base64(string)

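
decode() accepts either a str (re-encoded with raw-unicode-escape first) or bytes and
always returns bytes; together with body_encode() it round-trips, e.g. under CPython
(illustrative, not part of the diff):

# --- illustrative example, not part of the diff ---
from email.base64mime import body_encode, decode
assert body_encode(b"Hello") == "SGVsbG8=\n"
assert decode("SGVsbG8=") == b"Hello"
# --- end example ---
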
@ -5,22 +5,21 @@
"""Encodings and related functions."""

__all__ = [
    'encode_7or8bit',
    'encode_base64',
    'encode_noop',
    'encode_quopri',
    ]
    "encode_7or8bit",
    "encode_base64",
    "encode_noop",
    "encode_quopri",
]


from base64 import encodebytes as _bencode
from quopri import encodestring as _encodestring



def _qencode(s):
    enc = _encodestring(s, quotetabs=True)
    # Must encode spaces, which quopri.encodestring() doesn't do
    return enc.replace(b' ', b'=20')
    return enc.replace(b" ", b"=20")


def encode_base64(msg):
@ -29,12 +28,11 @@ def encode_base64(msg):
    Also, add an appropriate Content-Transfer-Encoding header.
    """
    orig = msg.get_payload(decode=True)
    encdata = str(_bencode(orig), 'ascii')
    encdata = str(_bencode(orig), "ascii")
    msg.set_payload(encdata)
    msg['Content-Transfer-Encoding'] = 'base64'
    msg["Content-Transfer-Encoding"] = "base64"



def encode_quopri(msg):
    """Encode the message's payload in quoted-printable.

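
encode_base64() replaces the message's payload with its base64 text and stamps the
matching Content-Transfer-Encoding header. A minimal sketch against the CPython Message
API, which the port aims to mirror (illustrative, not part of the diff):

# --- illustrative example, not part of the diff ---
from email.message import Message
from email import encoders

msg = Message()
msg.set_payload("hello")                  # stored as str; decode=True yields b"hello"
encoders.encode_base64(msg)
print(msg["Content-Transfer-Encoding"])   # -> base64
print(msg.get_payload())                  # -> 'aGVsbG8=\n'
# --- end example ---
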
@ -43,36 +41,34 @@ def encode_quopri(msg):
    orig = msg.get_payload(decode=True)
    encdata = _qencode(orig)
    msg.set_payload(encdata)
    msg['Content-Transfer-Encoding'] = 'quoted-printable'
    msg["Content-Transfer-Encoding"] = "quoted-printable"



def encode_7or8bit(msg):
    """Set the Content-Transfer-Encoding header to 7bit or 8bit."""
    orig = msg.get_payload(decode=True)
    if orig is None:
        # There's no payload. For backwards compatibility we use 7bit
        msg['Content-Transfer-Encoding'] = '7bit'
        msg["Content-Transfer-Encoding"] = "7bit"
        return
    # We play a trick to make this go fast. If encoding/decode to ASCII
    # succeeds, we know the data must be 7bit, otherwise treat it as 8bit.
    try:
        if isinstance(orig, str):
            orig.encode('ascii')
            orig.encode("ascii")
        else:
            orig.decode('ascii')
            orig.decode("ascii")
    except UnicodeError:
        charset = msg.get_charset()
        output_cset = charset and charset.output_charset
        # iso-2022-* is non-ASCII but encodes to a 7-bit representation
        if output_cset and output_cset.lower().startswith('iso-2022-'):
            msg['Content-Transfer-Encoding'] = '7bit'
        if output_cset and output_cset.lower().startswith("iso-2022-"):
            msg["Content-Transfer-Encoding"] = "7bit"
        else:
            msg['Content-Transfer-Encoding'] = '8bit'
            msg["Content-Transfer-Encoding"] = "8bit"
    else:
        msg['Content-Transfer-Encoding'] = '7bit'
        msg["Content-Transfer-Encoding"] = "7bit"



def encode_noop(msg):
    """Do nothing."""

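
To summarise encode_7or8bit(): pure-ASCII payloads get "7bit"; anything else gets "8bit"
unless the charset encodes to a 7-bit iso-2022-* form. A quick check of the two common
cases under CPython (illustrative, not part of the diff):

# --- illustrative example, not part of the diff ---
from email.message import Message
from email import encoders

ascii_msg = Message()
ascii_msg.set_payload("plain ASCII text")
encoders.encode_7or8bit(ascii_msg)
print(ascii_msg["Content-Transfer-Encoding"])   # -> 7bit

other_msg = Message()
other_msg.set_payload("café")                   # non-ASCII payload
encoders.encode_7or8bit(other_msg)
print(other_msg["Content-Transfer-Encoding"])   # -> 8bit
# --- end example ---
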
Some files were not shown because too many files have changed in this diff.