Add picoweb installation guide.

pull/12/head
Peter Hinch 2019-05-14 16:48:38 +01:00
parent 8953cbf519
commit b0bd198c70
14 changed files with 1415 additions and 1 deletion

PICOWEB.md 100644

@ -0,0 +1,94 @@
# Running Picoweb on hardware devices
Installing Picoweb on hardware devices has regularly caused difficulty on the
MicroPython forum. This repo aims to clarify the installation process. The
target hardware is assumed to be running official MicroPython firmware.

Paul Sokolovsky's Picoweb code is unchanged except for the name of the logging
library. The demos are trivially changed to use IP '0.0.0.0' and port 80.
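In the demos this amounts to the final `app.run()` call, for example:
```
app.run(debug=1, host='0.0.0.0', port=80)
```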
To install on a hardware platform such as ESP32 or Pyboard D, it is necessary
to copy this directory and its contents (including subdirectories) to the
target.
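One way to do this is with `rshell`, started with the board's serial device
given explicitly. The port name below is an assumption for a typical ESP32 and
may differ on your system:
```
$ rshell -p /dev/ttyUSB0
```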
Once connected, change to this directory and, at the `rshell` prompt, issue:
```
/my/tree/PicoWeb> rsync . /pyboard
```
This may take some time.
At the REPL, connect to the network and determine your IP address:
```
>>> import network
>>> w = network.WLAN()
>>> w.ifconfig()
```
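If the board does not already join your WiFi network at boot, a station-mode
connection can be established first. This is a minimal sketch: the SSID and
password are placeholders for your own credentials:
```
>>> import network
>>> wlan = network.WLAN(network.STA_IF)
>>> wlan.active(True)
>>> wlan.connect('my_ssid', 'my_password')
>>> wlan.isconnected()  # poll until this returns True
True
>>> wlan.ifconfig()  # the first field is the board's IP address
```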
Then issue:
```
>>> from picoweb import example_webapp
```
or
```
>>> from picoweb import example_webapp2
```
Then point your browser at the IP address determined above.
Note that some platforms will have `uasyncio` installed as frozen bytecode: in
such cases there is no need to copy the `uasyncio` subdirectory (if you do, it
will be ignored).
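A quick way to check whether `uasyncio` (or any other module) is already frozen
into your firmware is to list the built-in and frozen modules at the REPL:
```
>>> help('modules')
```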
# ESP8266
RAM limitations require the use of frozen bytecode, and getting the examples
running is a little more involved. Create a directory on your PC and copy the
contents of this directory to it. Then add the files `inisetup.py`, `_boot.py`
and `flashbdev.py`, which may be found in the MicroPython source tree under
`ports/esp8266/modules`. You may also want to add a custom connect module to
simplify connection to your WiFi; a minimal sketch of such a module is shown
after the build script below. Then build the firmware. The script I used was:
```bash
#! /bin/bash
# Test picoweb on ESP8266
DIRECTORY='/home/adminpete/temp/picoweb'
cd /mnt/qnap2/data/Projects/MicroPython/micropython/ports/esp8266
make clean
esptool.py --port /dev/ttyUSB0 erase_flash
if make -j 8 FROZEN_MPY_DIR=$DIRECTORY
then
sleep 1
esptool.py --port /dev/ttyUSB0 --baud 115200 write_flash --flash_size=detect -fm dio 0 build/firmware-combined.bin
sleep 4
rshell -p /dev/ttyUSB0 --buffer-size=30 --editor nano
else
echo Build failure
fi
```
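For reference, a WiFi connect module along the following lines could be frozen
alongside the examples. This is only a sketch: the module name, SSID and
password are placeholders.
```python
# connect.py - sample WiFi connection helper (placeholder credentials)
import network
import time

def connect(ssid='my_ssid', password='my_password'):
    wlan = network.WLAN(network.STA_IF)
    wlan.active(True)
    if not wlan.isconnected():
        wlan.connect(ssid, password)
        while not wlan.isconnected():
            time.sleep(1)
    print('Network config:', wlan.ifconfig())
```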
For the demos you will need to make the `example_webapp.py` source file and
`squares.tpl` accessible in the filesystem. The following `rshell` commands,
executed from this directory or the one created above, will make these
available.
```
path/to/repo> mkdir /pyboard/picoweb
path/to/repo> mkdir /pyboard/picoweb/templates
path/to/repo> cp picoweb/example_webapp.py /pyboard/picoweb/
path/to/repo> cp picoweb/templates/squares.tpl /pyboard/picoweb/templates/
```
# Documentation and further examples
See [the PicoWeb docs](https://github.com/pfalcon/picoweb).
Note that to run under official MicroPython firmware, references to `ulogging`
in these demos must be changed to `logging`. You may also want to change the IP
address and port as described above.
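For example, a demo which imports the logger under its upstream name would be
changed along these lines (the exact import line varies between demos):
```
# upstream demo:
import ulogging as logging
# under official firmware, using the logging.py provided here:
import logging
```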

PicoWeb/logging.py 100644

@ -0,0 +1,94 @@
import sys

CRITICAL = 50
ERROR = 40
WARNING = 30
INFO = 20
DEBUG = 10
NOTSET = 0

_level_dict = {
    CRITICAL: "CRIT",
    ERROR: "ERROR",
    WARNING: "WARN",
    INFO: "INFO",
    DEBUG: "DEBUG",
}

_stream = sys.stderr


class Logger:

    level = NOTSET

    def __init__(self, name):
        self.name = name

    def _level_str(self, level):
        l = _level_dict.get(level)
        if l is not None:
            return l
        return "LVL%s" % level

    def setLevel(self, level):
        self.level = level

    def isEnabledFor(self, level):
        return level >= (self.level or _level)

    def log(self, level, msg, *args):
        if level >= (self.level or _level):
            _stream.write("%s:%s:" % (self._level_str(level), self.name))
            if not args:
                print(msg, file=_stream)
            else:
                print(msg % args, file=_stream)

    def debug(self, msg, *args):
        self.log(DEBUG, msg, *args)

    def info(self, msg, *args):
        self.log(INFO, msg, *args)

    def warning(self, msg, *args):
        self.log(WARNING, msg, *args)

    def error(self, msg, *args):
        self.log(ERROR, msg, *args)

    def critical(self, msg, *args):
        self.log(CRITICAL, msg, *args)

    def exc(self, e, msg, *args):
        self.log(ERROR, msg, *args)
        sys.print_exception(e, _stream)

    def exception(self, msg, *args):
        self.exc(sys.exc_info()[1], msg, *args)


_level = INFO
_loggers = {}


def getLogger(name):
    if name in _loggers:
        return _loggers[name]
    l = Logger(name)
    _loggers[name] = l
    return l


def info(msg, *args):
    getLogger(None).info(msg, *args)


def debug(msg, *args):
    getLogger(None).debug(msg, *args)


def basicConfig(level=INFO, filename=None, stream=None, format=None):
    global _level, _stream
    _level = level
    if stream:
        _stream = stream
    if filename is not None:
        print("logging.basicConfig: filename arg is not supported")
    if format is not None:
        print("logging.basicConfig: format arg is not supported")

PicoWeb/picoweb/__init__.py 100644

@ -0,0 +1,300 @@
# Picoweb web pico-framework for MicroPython
# Copyright (c) 2014-2018 Paul Sokolovsky
# SPDX-License-Identifier: MIT
import sys
import gc
import micropython
import utime
import uio
import ure as re
import uerrno
import uasyncio as asyncio
import pkg_resources
from .utils import parse_qs
def get_mime_type(fname):
# Provide minimal detection of important file
# types to keep browsers happy
if fname.endswith(".html"):
return "text/html"
if fname.endswith(".css"):
return "text/css"
if fname.endswith(".png") or fname.endswith(".jpg"):
return "image"
return "text/plain"
def sendstream(writer, f):
buf = bytearray(64)
while True:
l = f.readinto(buf)
if not l:
break
yield from writer.awrite(buf, 0, l)
def jsonify(writer, dict):
import ujson
yield from start_response(writer, "application/json")
yield from writer.awrite(ujson.dumps(dict))
def start_response(writer, content_type="text/html", status="200", headers=None):
yield from writer.awrite("HTTP/1.0 %s NA\r\n" % status)
yield from writer.awrite("Content-Type: ")
yield from writer.awrite(content_type)
if not headers:
yield from writer.awrite("\r\n\r\n")
return
yield from writer.awrite("\r\n")
if isinstance(headers, bytes) or isinstance(headers, str):
yield from writer.awrite(headers)
else:
for k, v in headers.items():
yield from writer.awrite(k)
yield from writer.awrite(": ")
yield from writer.awrite(v)
yield from writer.awrite("\r\n")
yield from writer.awrite("\r\n")
def http_error(writer, status):
yield from start_response(writer, status=status)
yield from writer.awrite(status)
class HTTPRequest:
def __init__(self):
pass
def read_form_data(self):
size = int(self.headers[b"Content-Length"])
data = yield from self.reader.read(size)
form = parse_qs(data.decode())
self.form = form
def parse_qs(self):
form = parse_qs(self.qs)
self.form = form
class WebApp:
def __init__(self, pkg, routes=None, serve_static=True):
if routes:
self.url_map = routes
else:
self.url_map = []
if pkg and pkg != "__main__":
self.pkg = pkg.split(".", 1)[0]
else:
self.pkg = None
if serve_static:
self.url_map.append((re.compile("^/(static/.+)"), self.handle_static))
self.mounts = []
self.inited = False
# Instantiated lazily
self.template_loader = None
self.headers_mode = "parse"
def parse_headers(self, reader):
headers = {}
while True:
l = yield from reader.readline()
if l == b"\r\n":
break
k, v = l.split(b":", 1)
headers[k] = v.strip()
return headers
def _handle(self, reader, writer):
if self.debug > 1:
micropython.mem_info()
close = True
req = None
try:
request_line = yield from reader.readline()
if request_line == b"":
if self.debug >= 0:
self.log.error("%s: EOF on request start" % reader)
yield from writer.aclose()
return
req = HTTPRequest()
# TODO: bytes vs str
request_line = request_line.decode()
method, path, proto = request_line.split()
if self.debug >= 0:
self.log.info('%.3f %s %s "%s %s"' % (utime.time(), req, writer, method, path))
path = path.split("?", 1)
qs = ""
if len(path) > 1:
qs = path[1]
path = path[0]
#print("================")
#print(req, writer)
#print(req, (method, path, qs, proto), req.headers)
# Find which mounted subapp (if any) should handle this request
app = self
while True:
found = False
for subapp in app.mounts:
root = subapp.url
#print(path, "vs", root)
if path[:len(root)] == root:
app = subapp
found = True
path = path[len(root):]
if not path.startswith("/"):
path = "/" + path
break
if not found:
break
# We initialize apps on demand, when they really get requests
if not app.inited:
app.init()
# Find handler to serve this request in app's url_map
found = False
for e in app.url_map:
pattern = e[0]
handler = e[1]
extra = {}
if len(e) > 2:
extra = e[2]
if path == pattern:
found = True
break
elif not isinstance(pattern, str):
# Anything which is non-string assumed to be a ducktype
# pattern matcher, whose .match() method is called. (Note:
# Django uses .search() instead, but .match() is more
# efficient and we're not exactly compatible with Django
# URL matching anyway.)
m = pattern.match(path)
if m:
req.url_match = m
found = True
break
if not found:
headers_mode = "skip"
else:
headers_mode = extra.get("headers", self.headers_mode)
if headers_mode == "skip":
while True:
l = yield from reader.readline()
if l == b"\r\n":
break
elif headers_mode == "parse":
req.headers = yield from self.parse_headers(reader)
else:
assert headers_mode == "leave"
if found:
req.method = method
req.path = path
req.qs = qs
req.reader = reader
close = yield from handler(req, writer)
else:
yield from start_response(writer, status="404")
yield from writer.awrite("404\r\n")
#print(req, "After response write")
except Exception as e:
if self.debug >= 0:
self.log.exc(e, "%.3f %s %s %r" % (utime.time(), req, writer, e))
if close is not False:
yield from writer.aclose()
if __debug__ and self.debug > 1:
self.log.debug("%.3f %s Finished processing request", utime.time(), req)
def mount(self, url, app):
"Mount a sub-app at the url of current app."
# Inspired by Bottle. It might seem that dispatching to
# subapps would rather be handled by normal routes, but
# arguably, that's less efficient. Taking into account
# that paradigmatically there's difference between handing
# an action and delegating responisibilities to another
# app, Bottle's way was followed.
app.url = url
self.mounts.append(app)
def route(self, url, **kwargs):
def _route(f):
self.url_map.append((url, f, kwargs))
return f
return _route
def add_url_rule(self, url, func, **kwargs):
# Note: this method skips Flask's "endpoint" argument,
# because it's alleged bloat.
self.url_map.append((url, func, kwargs))
def _load_template(self, tmpl_name):
if self.template_loader is None:
import utemplate.source
self.template_loader = utemplate.source.Loader(self.pkg, "templates")
return self.template_loader.load(tmpl_name)
def render_template(self, writer, tmpl_name, args=()):
tmpl = self._load_template(tmpl_name)
for s in tmpl(*args):
yield from writer.awrite(s)
def render_str(self, tmpl_name, args=()):
#TODO: bloat
tmpl = self._load_template(tmpl_name)
return ''.join(tmpl(*args))
def sendfile(self, writer, fname, content_type=None, headers=None):
if not content_type:
content_type = get_mime_type(fname)
try:
with pkg_resources.resource_stream(self.pkg, fname) as f:
yield from start_response(writer, content_type, "200", headers)
yield from sendstream(writer, f)
except OSError as e:
if e.args[0] == uerrno.ENOENT:
yield from http_error(writer, "404")
else:
raise
def handle_static(self, req, resp):
path = req.url_match.group(1)
print(path)
if ".." in path:
yield from http_error(resp, "403")
return
yield from self.sendfile(resp, path)
def init(self):
"""Initialize a web application. This is for overriding by subclasses.
This is good place to connect to/initialize a database, for example."""
self.inited = True
def run(self, host="127.0.0.1", port=8081, debug=False, lazy_init=False, log=None):
if log is None and debug >= 0:
import logging
log = logging.getLogger("picoweb")
if debug > 0:
log.setLevel(logging.DEBUG)
self.log = log
gc.collect()
self.debug = int(debug)
self.init()
if not lazy_init:
for app in self.mounts:
app.init()
loop = asyncio.get_event_loop()
if debug > 0:
print("* Running on http://%s:%s/" % (host, port))
loop.create_task(asyncio.start_server(self._handle, host, port))
loop.run_forever()
loop.close()

PicoWeb/picoweb/example_webapp.py 100644

@ -0,0 +1,55 @@
#
# This is a picoweb example showing a centralized web page route
# specification (classical Django style).
#
import ure as re
import picoweb


def index(req, resp):
    # You can construct an HTTP response completely yourself, having
    # a full control of headers sent...
    yield from resp.awrite("HTTP/1.0 200 OK\r\n")
    yield from resp.awrite("Content-Type: text/html\r\n")
    yield from resp.awrite("\r\n")
    yield from resp.awrite("I can show you a table of <a href='squares'>squares</a>.<br/>")
    yield from resp.awrite("Or my <a href='file'>source</a>.<br/>")
    yield from resp.awrite("Or enter /iam/Mickey Mouse after the URL for regexp match.")


def squares(req, resp):
    # Or can use a convenience function start_response() (see its source for
    # extra params it takes).
    yield from picoweb.start_response(resp)
    yield from app.render_template(resp, "squares.tpl", (req,))


def hello(req, resp):
    yield from picoweb.start_response(resp)
    # Here's how you extract matched groups from a regex URI match
    yield from resp.awrite("Hello " + req.url_match.group(1))


ROUTES = [
    # You can specify exact URI string matches...
    ("/", index),
    ("/squares", squares),
    ("/file", lambda req, resp: (yield from app.sendfile(resp, "example_webapp.py"))),
    # ... or match using a regex, the match result available as req.url_match
    # for match group extraction in your view.
    (re.compile("^/iam/(.+)"), hello),
]


import logging
logging.basicConfig(level=logging.INFO)
#logging.basicConfig(level=logging.DEBUG)

app = picoweb.WebApp(__name__, ROUTES)
# debug values:
# -1 disable all logging
# 0 (False) normal logging: requests and errors
# 1 (True) debug logging
# 2 extra debug logging
app.run(debug=1, host='0.0.0.0', port=80)

PicoWeb/picoweb/example_webapp2.py 100644

@ -0,0 +1,25 @@
#
# This is a picoweb example showing a web page route
# specification using view decorators (Flask style).
#
import picoweb

app = picoweb.WebApp(__name__)


@app.route("/")
def index(req, resp):
    yield from picoweb.start_response(resp)
    yield from resp.awrite("I can show you a table of <a href='squares'>squares</a>.")


@app.route("/squares")
def squares(req, resp):
    yield from picoweb.start_response(resp)
    yield from app.render_template(resp, "squares.tpl", (req,))


import logging
logging.basicConfig(level=logging.INFO)

app.run(debug=True, host='0.0.0.0', port=80)

PicoWeb/picoweb/templates/squares.tpl 100644

@ -0,0 +1,9 @@
{% args req %}
<html>
Request path: '{{req.path}}'<br>
<table border="1">
{% for i in range(5) %}
<tr><td> {{i}} </td><td> {{"%2d" % i ** 2}} </td></tr>
{% endfor %}
</table>
</html>

PicoWeb/picoweb/utils.py 100644

@ -0,0 +1,28 @@
def unquote_plus(s):
    # TODO: optimize
    s = s.replace("+", " ")
    arr = s.split("%")
    arr2 = [chr(int(x[:2], 16)) + x[2:] for x in arr[1:]]
    return arr[0] + "".join(arr2)


def parse_qs(s):
    res = {}
    if s:
        pairs = s.split("&")
        for p in pairs:
            vals = [unquote_plus(x) for x in p.split("=", 1)]
            if len(vals) == 1:
                vals.append(True)
            old = res.get(vals[0])
            if old is not None:
                if not isinstance(old, list):
                    old = [old]
                    res[vals[0]] = old
                old.append(vals[1])
            else:
                res[vals[0]] = vals[1]
    return res
#print(parse_qs("foo"))
#print(parse_qs("fo%41o+bar=+++1"))
#print(parse_qs("foo=1&foo=2"))

PicoWeb/pkg_resources.py 100644

@ -0,0 +1,27 @@
import uio

c = {}


def resource_stream(package, resource):
    if package not in c:
        try:
            if package:
                p = __import__(package + ".R", None, None, True)
            else:
                p = __import__("R")
            c[package] = p.R
        except ImportError:
            if package:
                p = __import__(package)
                d = p.__path__
            else:
                d = "."
#            if d[0] != "/":
#                import uos
#                d = uos.getcwd() + "/" + d
            c[package] = d + "/"

    p = c[package]
    if isinstance(p, dict):
        return uio.BytesIO(p[resource])
    return open(p + resource, "rb")

PicoWeb/uasyncio/__init__.py 100644

@ -0,0 +1,258 @@
import uerrno
import uselect as select
import usocket as _socket
from uasyncio.core import *
DEBUG = 0
log = None
def set_debug(val):
global DEBUG, log
DEBUG = val
if val:
import logging
log = logging.getLogger("uasyncio")
class PollEventLoop(EventLoop):
def __init__(self, runq_len=16, waitq_len=16):
EventLoop.__init__(self, runq_len, waitq_len)
self.poller = select.poll()
self.objmap = {}
def add_reader(self, sock, cb, *args):
if DEBUG and __debug__:
log.debug("add_reader%s", (sock, cb, args))
if args:
self.poller.register(sock, select.POLLIN)
self.objmap[id(sock)] = (cb, args)
else:
self.poller.register(sock, select.POLLIN)
self.objmap[id(sock)] = cb
def remove_reader(self, sock):
if DEBUG and __debug__:
log.debug("remove_reader(%s)", sock)
self.poller.unregister(sock)
del self.objmap[id(sock)]
def add_writer(self, sock, cb, *args):
if DEBUG and __debug__:
log.debug("add_writer%s", (sock, cb, args))
if args:
self.poller.register(sock, select.POLLOUT)
self.objmap[id(sock)] = (cb, args)
else:
self.poller.register(sock, select.POLLOUT)
self.objmap[id(sock)] = cb
def remove_writer(self, sock):
if DEBUG and __debug__:
log.debug("remove_writer(%s)", sock)
try:
self.poller.unregister(sock)
self.objmap.pop(id(sock), None)
except OSError as e:
# StreamWriter.awrite() first tries to write to a socket,
# and if that succeeds, yield IOWrite may never be called
# for that socket, and it will never be added to poller. So,
# ignore such error.
if e.args[0] != uerrno.ENOENT:
raise
def wait(self, delay):
if DEBUG and __debug__:
log.debug("poll.wait(%d)", delay)
# We need one-shot behavior (second arg of 1 to .poll())
res = self.poller.ipoll(delay, 1)
#log.debug("poll result: %s", res)
# Remove "if res" workaround after
# https://github.com/micropython/micropython/issues/2716 fixed.
if res:
for sock, ev in res:
cb = self.objmap[id(sock)]
if ev & (select.POLLHUP | select.POLLERR):
# These events are returned even if not requested, and
# are sticky, i.e. will be returned again and again.
# If the caller doesn't do proper error handling and
# unregister this sock, we'll busy-loop on it, so we
# as well can unregister it now "just in case".
self.remove_reader(sock)
if DEBUG and __debug__:
log.debug("Calling IO callback: %r", cb)
if isinstance(cb, tuple):
cb[0](*cb[1])
else:
cb.pend_throw(None)
self.call_soon(cb)
class StreamReader:
def __init__(self, polls, ios=None):
if ios is None:
ios = polls
self.polls = polls
self.ios = ios
def read(self, n=-1):
while True:
yield IORead(self.polls)
res = self.ios.read(n)
if res is not None:
break
# This should not happen for real sockets, but can easily
# happen for stream wrappers (ssl, websockets, etc.)
#log.warn("Empty read")
if not res:
yield IOReadDone(self.polls)
return res
def readexactly(self, n):
buf = b""
while n:
yield IORead(self.polls)
res = self.ios.read(n)
assert res is not None
if not res:
yield IOReadDone(self.polls)
break
buf += res
n -= len(res)
return buf
def readline(self):
if DEBUG and __debug__:
log.debug("StreamReader.readline()")
buf = b""
while True:
yield IORead(self.polls)
res = self.ios.readline()
assert res is not None
if not res:
yield IOReadDone(self.polls)
break
buf += res
if buf[-1] == 0x0a:
break
if DEBUG and __debug__:
log.debug("StreamReader.readline(): %s", buf)
return buf
def aclose(self):
yield IOReadDone(self.polls)
self.ios.close()
def __repr__(self):
return "<StreamReader %r %r>" % (self.polls, self.ios)
class StreamWriter:
def __init__(self, s, extra):
self.s = s
self.extra = extra
def awrite(self, buf, off=0, sz=-1):
# This method is called awrite (async write) to not proliferate
# incompatibility with original asyncio. Unlike original asyncio
# whose .write() method is both not a coroutine and guaranteed
# to return immediately (which means it has to buffer all the
# data), this method is a coroutine.
if sz == -1:
sz = len(buf) - off
if DEBUG and __debug__:
log.debug("StreamWriter.awrite(): spooling %d bytes", sz)
while True:
res = self.s.write(buf, off, sz)
# If we spooled everything, return immediately
if res == sz:
if DEBUG and __debug__:
log.debug("StreamWriter.awrite(): completed spooling %d bytes", res)
return
if res is None:
res = 0
if DEBUG and __debug__:
log.debug("StreamWriter.awrite(): spooled partial %d bytes", res)
assert res < sz
off += res
sz -= res
yield IOWrite(self.s)
#assert s2.fileno() == self.s.fileno()
if DEBUG and __debug__:
log.debug("StreamWriter.awrite(): can write more")
# Write piecewise content from iterable (usually, a generator)
def awriteiter(self, iterable):
for buf in iterable:
yield from self.awrite(buf)
def aclose(self):
yield IOWriteDone(self.s)
self.s.close()
def get_extra_info(self, name, default=None):
return self.extra.get(name, default)
def __repr__(self):
return "<StreamWriter %r>" % self.s
def open_connection(host, port, ssl=False):
if DEBUG and __debug__:
log.debug("open_connection(%s, %s)", host, port)
ai = _socket.getaddrinfo(host, port, 0, _socket.SOCK_STREAM)
ai = ai[0]
s = _socket.socket(ai[0], ai[1], ai[2])
s.setblocking(False)
try:
s.connect(ai[-1])
except OSError as e:
if e.args[0] != uerrno.EINPROGRESS:
raise
if DEBUG and __debug__:
log.debug("open_connection: After connect")
yield IOWrite(s)
# if __debug__:
# assert s2.fileno() == s.fileno()
if DEBUG and __debug__:
log.debug("open_connection: After iowait: %s", s)
if ssl:
print("Warning: uasyncio SSL support is alpha")
import ussl
s.setblocking(True)
s2 = ussl.wrap_socket(s)
s.setblocking(False)
return StreamReader(s, s2), StreamWriter(s2, {})
return StreamReader(s), StreamWriter(s, {})
def start_server(client_coro, host, port, backlog=10):
if DEBUG and __debug__:
log.debug("start_server(%s, %s)", host, port)
ai = _socket.getaddrinfo(host, port, 0, _socket.SOCK_STREAM)
ai = ai[0]
s = _socket.socket(ai[0], ai[1], ai[2])
s.setblocking(False)
s.setsockopt(_socket.SOL_SOCKET, _socket.SO_REUSEADDR, 1)
s.bind(ai[-1])
s.listen(backlog)
while True:
if DEBUG and __debug__:
log.debug("start_server: Before accept")
yield IORead(s)
if DEBUG and __debug__:
log.debug("start_server: After iowait")
s2, client_addr = s.accept()
s2.setblocking(False)
if DEBUG and __debug__:
log.debug("start_server: After accept: %s", s2)
extra = {"peername": client_addr}
yield client_coro(StreamReader(s2), StreamWriter(s2, extra))
import uasyncio.core
uasyncio.core._event_loop_class = PollEventLoop

PicoWeb/uasyncio/core.py 100644

@ -0,0 +1,315 @@
import utime as time
import utimeq
import ucollections
type_gen = type((lambda: (yield))())
DEBUG = 0
log = None
def set_debug(val):
global DEBUG, log
DEBUG = val
if val:
import logging
log = logging.getLogger("uasyncio.core")
class CancelledError(Exception):
pass
class TimeoutError(CancelledError):
pass
class EventLoop:
def __init__(self, runq_len=16, waitq_len=16):
self.runq = ucollections.deque((), runq_len, True)
self.waitq = utimeq.utimeq(waitq_len)
# Current task being run. Task is a top-level coroutine scheduled
# in the event loop (sub-coroutines executed transparently by
# yield from/await, event loop "doesn't see" them).
self.cur_task = None
def time(self):
return time.ticks_ms()
def create_task(self, coro):
# CPython 3.4.2
self.call_later_ms(0, coro)
# CPython asyncio incompatibility: we don't return Task object
def call_soon(self, callback, *args):
if __debug__ and DEBUG:
log.debug("Scheduling in runq: %s", (callback, args))
self.runq.append(callback)
if not isinstance(callback, type_gen):
self.runq.append(args)
def call_later(self, delay, callback, *args):
self.call_at_(time.ticks_add(self.time(), int(delay * 1000)), callback, args)
def call_later_ms(self, delay, callback, *args):
if not delay:
return self.call_soon(callback, *args)
self.call_at_(time.ticks_add(self.time(), delay), callback, args)
def call_at_(self, time, callback, args=()):
if __debug__ and DEBUG:
log.debug("Scheduling in waitq: %s", (time, callback, args))
self.waitq.push(time, callback, args)
def wait(self, delay):
# Default wait implementation, to be overriden in subclasses
# with IO scheduling
if __debug__ and DEBUG:
log.debug("Sleeping for: %s", delay)
time.sleep_ms(delay)
def run_forever(self):
cur_task = [0, 0, 0]
while True:
# Expire entries in waitq and move them to runq
tnow = self.time()
while self.waitq:
t = self.waitq.peektime()
delay = time.ticks_diff(t, tnow)
if delay > 0:
break
self.waitq.pop(cur_task)
if __debug__ and DEBUG:
log.debug("Moving from waitq to runq: %s", cur_task[1])
self.call_soon(cur_task[1], *cur_task[2])
# Process runq
l = len(self.runq)
if __debug__ and DEBUG:
log.debug("Entries in runq: %d", l)
while l:
cb = self.runq.popleft()
l -= 1
args = ()
if not isinstance(cb, type_gen):
args = self.runq.popleft()
l -= 1
if __debug__ and DEBUG:
log.info("Next callback to run: %s", (cb, args))
cb(*args)
continue
if __debug__ and DEBUG:
log.info("Next coroutine to run: %s", (cb, args))
self.cur_task = cb
delay = 0
try:
if args is ():
ret = next(cb)
else:
ret = cb.send(*args)
if __debug__ and DEBUG:
log.info("Coroutine %s yield result: %s", cb, ret)
if isinstance(ret, SysCall1):
arg = ret.arg
if isinstance(ret, SleepMs):
delay = arg
elif isinstance(ret, IORead):
cb.pend_throw(False)
self.add_reader(arg, cb)
continue
elif isinstance(ret, IOWrite):
cb.pend_throw(False)
self.add_writer(arg, cb)
continue
elif isinstance(ret, IOReadDone):
self.remove_reader(arg)
elif isinstance(ret, IOWriteDone):
self.remove_writer(arg)
elif isinstance(ret, StopLoop):
return arg
else:
assert False, "Unknown syscall yielded: %r (of type %r)" % (ret, type(ret))
elif isinstance(ret, type_gen):
self.call_soon(ret)
elif isinstance(ret, int):
# Delay
delay = ret
elif ret is None:
# Just reschedule
pass
elif ret is False:
# Don't reschedule
continue
else:
assert False, "Unsupported coroutine yield value: %r (of type %r)" % (ret, type(ret))
except StopIteration as e:
if __debug__ and DEBUG:
log.debug("Coroutine finished: %s", cb)
continue
except CancelledError as e:
if __debug__ and DEBUG:
log.debug("Coroutine cancelled: %s", cb)
continue
# Currently all syscalls don't return anything, so we don't
# need to feed anything to the next invocation of coroutine.
# If that changes, need to pass that value below.
if delay:
self.call_later_ms(delay, cb)
else:
self.call_soon(cb)
# Wait until next waitq task or I/O availability
delay = 0
if not self.runq:
delay = -1
if self.waitq:
tnow = self.time()
t = self.waitq.peektime()
delay = time.ticks_diff(t, tnow)
if delay < 0:
delay = 0
self.wait(delay)
def run_until_complete(self, coro):
def _run_and_stop():
yield from coro
yield StopLoop(0)
self.call_soon(_run_and_stop())
self.run_forever()
def stop(self):
self.call_soon((lambda: (yield StopLoop(0)))())
def close(self):
pass
class SysCall:
def __init__(self, *args):
self.args = args
def handle(self):
raise NotImplementedError
# Optimized syscall with 1 arg
class SysCall1(SysCall):
def __init__(self, arg):
self.arg = arg
class StopLoop(SysCall1):
pass
class IORead(SysCall1):
pass
class IOWrite(SysCall1):
pass
class IOReadDone(SysCall1):
pass
class IOWriteDone(SysCall1):
pass
_event_loop = None
_event_loop_class = EventLoop
def get_event_loop(runq_len=16, waitq_len=16):
global _event_loop
if _event_loop is None:
_event_loop = _event_loop_class(runq_len, waitq_len)
return _event_loop
def sleep(secs):
yield int(secs * 1000)
# Implementation of sleep_ms awaitable with zero heap memory usage
class SleepMs(SysCall1):
def __init__(self):
self.v = None
self.arg = None
def __call__(self, arg):
self.v = arg
#print("__call__")
return self
def __iter__(self):
#print("__iter__")
return self
def __next__(self):
if self.v is not None:
#print("__next__ syscall enter")
self.arg = self.v
self.v = None
return self
#print("__next__ syscall exit")
_stop_iter.__traceback__ = None
raise _stop_iter
_stop_iter = StopIteration()
sleep_ms = SleepMs()
def cancel(coro):
prev = coro.pend_throw(CancelledError())
if prev is False:
_event_loop.call_soon(coro)
class TimeoutObj:
def __init__(self, coro):
self.coro = coro
def wait_for_ms(coro, timeout):
def waiter(coro, timeout_obj):
res = yield from coro
if __debug__ and DEBUG:
log.debug("waiter: cancelling %s", timeout_obj)
timeout_obj.coro = None
return res
def timeout_func(timeout_obj):
if timeout_obj.coro:
if __debug__ and DEBUG:
log.debug("timeout_func: cancelling %s", timeout_obj.coro)
prev = timeout_obj.coro.pend_throw(TimeoutError())
#print("prev pend", prev)
if prev is False:
_event_loop.call_soon(timeout_obj.coro)
timeout_obj = TimeoutObj(_event_loop.cur_task)
_event_loop.call_later_ms(timeout, timeout_func, timeout_obj)
return (yield from waiter(coro, timeout_obj))
def wait_for(coro, timeout):
return wait_for_ms(coro, int(timeout * 1000))
def coroutine(f):
return f
#
# The functions below are deprecated in uasyncio, and provided only
# for compatibility with CPython asyncio
#
def ensure_future(coro, loop=_event_loop):
_event_loop.call_soon(coro)
# CPython asyncio incompatibility: we don't return Task object
return coro
# CPython asyncio incompatibility: Task is a function, not a class (for efficiency)
def Task(coro, loop=_event_loop):
# Same as async()
_event_loop.call_soon(coro)

PicoWeb/utemplate/compiled.py 100644

@ -0,0 +1,14 @@
class Loader:

    def __init__(self, pkg, dir):
        if dir == ".":
            dir = ""
        else:
            dir = dir.replace("/", ".") + "."

        if pkg and pkg != "__main__":
            dir = pkg + "." + dir

        self.p = dir

    def load(self, name):
        name = name.replace(".", "_")
        return __import__(self.p + name, None, None, (name,)).render

PicoWeb/utemplate/source.py 100644

@ -0,0 +1,190 @@
# os module is loaded on demand
#import os
from . import compiled
class Compiler:
START_CHAR = "{"
STMNT = "%"
STMNT_END = "%}"
EXPR = "{"
EXPR_END = "}}"
def __init__(self, file_in, file_out, indent=0, seq=0, loader=None):
self.file_in = file_in
self.file_out = file_out
self.loader = loader
self.seq = seq
self._indent = indent
self.stack = []
self.in_literal = False
self.flushed_header = False
self.args = "*a, **d"
def indent(self, adjust=0):
if not self.flushed_header:
self.flushed_header = True
self.indent()
self.file_out.write("def render%s(%s):\n" % (str(self.seq) if self.seq else "", self.args))
self.stack.append("def")
self.file_out.write(" " * (len(self.stack) + self._indent + adjust))
def literal(self, s):
if not s:
return
if not self.in_literal:
self.indent()
self.file_out.write('yield """')
self.in_literal = True
self.file_out.write(s.replace('"', '\\"'))
def close_literal(self):
if self.in_literal:
self.file_out.write('"""\n')
self.in_literal = False
def render_expr(self, e):
self.indent()
self.file_out.write('yield str(' + e + ')\n')
def parse_statement(self, stmt):
tokens = stmt.split(None, 1)
if tokens[0] == "args":
if len(tokens) > 1:
self.args = tokens[1]
else:
self.args = ""
elif tokens[0] == "set":
self.indent()
self.file_out.write(stmt[3:].strip() + "\n")
elif tokens[0] == "include":
if not self.flushed_header:
# If there was no other output, we still need a header now
self.indent()
tokens = tokens[1].split(None, 1)
args = ""
if len(tokens) > 1:
args = tokens[1]
if tokens[0][0] == "{":
self.indent()
# "1" as fromlist param is uPy hack
self.file_out.write('_ = __import__(%s.replace(".", "_"), None, None, 1)\n' % tokens[0][2:-2])
self.indent()
self.file_out.write("yield from _.render(%s)\n" % args)
return
with self.loader.input_open(tokens[0][1:-1]) as inc:
self.seq += 1
c = Compiler(inc, self.file_out, len(self.stack) + self._indent, self.seq)
inc_id = self.seq
self.seq = c.compile()
self.indent()
self.file_out.write("yield from render%d(%s)\n" % (inc_id, args))
elif len(tokens) > 1:
if tokens[0] == "elif":
assert self.stack[-1] == "if"
self.indent(-1)
self.file_out.write(stmt + ":\n")
else:
self.indent()
self.file_out.write(stmt + ":\n")
self.stack.append(tokens[0])
else:
if stmt.startswith("end"):
assert self.stack[-1] == stmt[3:]
self.stack.pop(-1)
elif stmt == "else":
assert self.stack[-1] == "if"
self.indent(-1)
self.file_out.write("else:\n")
else:
assert False
def parse_line(self, l):
while l:
start = l.find(self.START_CHAR)
if start == -1:
self.literal(l)
return
self.literal(l[:start])
self.close_literal()
sel = l[start + 1]
#print("*%s=%s=" % (sel, EXPR))
if sel == self.STMNT:
end = l.find(self.STMNT_END)
assert end > 0
stmt = l[start + len(self.START_CHAR + self.STMNT):end].strip()
self.parse_statement(stmt)
end += len(self.STMNT_END)
l = l[end:]
if not self.in_literal and l == "\n":
break
elif sel == self.EXPR:
# print("EXPR")
end = l.find(self.EXPR_END)
assert end > 0
expr = l[start + len(self.START_CHAR + self.EXPR):end].strip()
self.render_expr(expr)
end += len(self.EXPR_END)
l = l[end:]
else:
self.literal(l[start])
l = l[start + 1:]
def header(self):
self.file_out.write("# Autogenerated file\n")
def compile(self):
self.header()
for l in self.file_in:
self.parse_line(l)
self.close_literal()
return self.seq
class Loader(compiled.Loader):
def __init__(self, pkg, dir):
super().__init__(pkg, dir)
self.dir = dir
if pkg == "__main__":
# if pkg isn't really a package, don't bother to use it
# it means we're running from "filesystem directory", not
# from a package.
pkg = None
self.pkg_path = ""
if pkg:
p = __import__(pkg)
if isinstance(p.__path__, str):
# uPy
self.pkg_path = p.__path__
else:
# CPy
self.pkg_path = p.__path__[0]
self.pkg_path += "/"
def input_open(self, template):
path = self.pkg_path + self.dir + "/" + template
return open(path)
def compiled_path(self, template):
return self.dir + "/" + template.replace(".", "_") + ".py"
def load(self, name):
try:
return super().load(name)
except (OSError, ImportError):
pass
compiled_path = self.pkg_path + self.compiled_path(name)
f_in = self.input_open(name)
f_out = open(compiled_path, "w")
c = Compiler(f_in, f_out, loader=self)
c.compile()
f_in.close()
f_out.close()
return super().load(name)

README.md

@ -13,6 +13,11 @@ utilities for users of official MicroPython firmware to simplify installation.
Scripts for building MicroPython for various target hardware types and for
updating your local source. See [docs](./fastbuild/README.md)
# PicoWeb
An [easy installation](./PICOWEB.md) guide for running Picoweb on MicroPython
hardware platforms under official MicroPython firmware.
# SSD1306
A means of rendering multiple larger fonts to the SSD1306 OLED display. The


@ -1,4 +1,4 @@
MY_ID = '2\n'
#_SERVER = '192.168.0.35' # Laptop
-SERVER = '192.168.0.33' # Pi
+SERVER = '192.168.0.10' # Pi
PORT = 8123