From 8aa7fb473bfbe474af47460945ecbbe650e57196 Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?=C3=81d=C3=A1m=20Kov=C3=A1cs?= Date: Fri, 10 Nov 2023 15:09:19 +0100 Subject: [PATCH] Micropython project base --- .gitignore | 161 ++ .micropico | 3 + .vscode/Pico-W-Stub/__builtins__.pyi | 27 + .vscode/Pico-W-Stub/_asyncio.pyi | 11 + .vscode/Pico-W-Stub/_boot.pyi | 4 + .vscode/Pico-W-Stub/_boot_fat.pyi | 4 + .vscode/Pico-W-Stub/_onewire.pyi | 8 + .vscode/Pico-W-Stub/_rp2.pyi | 42 + .vscode/Pico-W-Stub/_thread.pyi | 25 + .vscode/Pico-W-Stub/aioble/__init__.pyi | 15 + .vscode/Pico-W-Stub/aioble/central.pyi | 71 + .vscode/Pico-W-Stub/aioble/client.pyi | 100 + .vscode/Pico-W-Stub/aioble/core.pyi | 23 + .vscode/Pico-W-Stub/aioble/device.pyi | 62 + .vscode/Pico-W-Stub/aioble/l2cap.pyi | 39 + .vscode/Pico-W-Stub/aioble/peripheral.pyi | 43 + .vscode/Pico-W-Stub/aioble/security.pyi | 26 + .vscode/Pico-W-Stub/aioble/server.pyi | 100 + .vscode/Pico-W-Stub/array.pyi | 33 + .vscode/Pico-W-Stub/asyncio/__init__.pyi | 7 + .vscode/Pico-W-Stub/asyncio/core.pyi | 52 + .vscode/Pico-W-Stub/asyncio/event.pyi | 21 + .vscode/Pico-W-Stub/asyncio/funcs.pyi | 13 + .vscode/Pico-W-Stub/asyncio/lock.pyi | 13 + .vscode/Pico-W-Stub/asyncio/stream.pyi | 36 + .vscode/Pico-W-Stub/binascii.pyi | 46 + .vscode/Pico-W-Stub/bluetooth.pyi | 592 +++++ .vscode/Pico-W-Stub/cmath.pyi | 72 + .vscode/Pico-W-Stub/collections.pyi | 105 + .vscode/Pico-W-Stub/cryptolib.pyi | 42 + .vscode/Pico-W-Stub/deflate.pyi | 76 + .vscode/Pico-W-Stub/dht.pyi | 15 + .vscode/Pico-W-Stub/ds18x20.pyi | 15 + .vscode/Pico-W-Stub/errno.pyi | 36 + .vscode/Pico-W-Stub/framebuf.pyi | 142 ++ .vscode/Pico-W-Stub/gc.pyi | 75 + .vscode/Pico-W-Stub/hashlib.pyi | 45 + .vscode/Pico-W-Stub/heapq.pyi | 35 + .vscode/Pico-W-Stub/io.pyi | 64 + .vscode/Pico-W-Stub/json.pyi | 47 + .vscode/Pico-W-Stub/lwip.pyi | 39 + .vscode/Pico-W-Stub/machine.pyi | 1193 ++++++++++ .vscode/Pico-W-Stub/math.pyi | 257 +++ .vscode/Pico-W-Stub/micropython.pyi | 190 ++ .../INSTALLER | 1 + .../LICENSE.md | 22 + .../METADATA | 67 + .../RECORD | 99 + .../REQUESTED | 0 .../WHEEL | 4 + .../INSTALLER | 1 + .../LICENSE.md | 239 ++ .../METADATA | 39 + .../RECORD | 68 + .../WHEEL | 4 + .vscode/Pico-W-Stub/mip/__init__.pyi | 15 + .vscode/Pico-W-Stub/neopixel.pyi | 15 + .vscode/Pico-W-Stub/network.pyi | 184 ++ .vscode/Pico-W-Stub/ntptime.pyi | 5 + .vscode/Pico-W-Stub/onewire.pyi | 21 + .vscode/Pico-W-Stub/os.pyi | 253 +++ .vscode/Pico-W-Stub/platform.pyi | 43 + .vscode/Pico-W-Stub/random.pyi | 84 + .vscode/Pico-W-Stub/requests.pyi | 16 + .vscode/Pico-W-Stub/requests/__init__.pyi | 31 + .vscode/Pico-W-Stub/rp2.pyi | 62 + .vscode/Pico-W-Stub/select.pyi | 103 + .vscode/Pico-W-Stub/socket.pyi | 271 +++ .vscode/Pico-W-Stub/ssl.pyi | 74 + .vscode/Pico-W-Stub/stdlib/__future__.pyi | 36 + .vscode/Pico-W-Stub/stdlib/_ast.pyi | 573 +++++ .vscode/Pico-W-Stub/stdlib/_codecs.pyi | 136 ++ .../Pico-W-Stub/stdlib/_collections_abc.pyi | 81 + .vscode/Pico-W-Stub/stdlib/_decimal.pyi | 281 +++ .../Pico-W-Stub/stdlib/_typeshed/__init__.pyi | 318 +++ .../Pico-W-Stub/stdlib/_typeshed/dbapi.pyi | 37 + .vscode/Pico-W-Stub/stdlib/_typeshed/wsgi.pyi | 44 + .vscode/Pico-W-Stub/stdlib/_typeshed/xml.pyi | 9 + .vscode/Pico-W-Stub/stdlib/abc.pyi | 57 + .../Pico-W-Stub/stdlib/asyncio/__init__.pyi | 45 + .../stdlib/asyncio/base_events.pyi | 521 +++++ .../stdlib/asyncio/base_futures.pyi | 20 + .../Pico-W-Stub/stdlib/asyncio/base_tasks.pyi | 9 + .../Pico-W-Stub/stdlib/asyncio/constants.pyi | 20 + .../Pico-W-Stub/stdlib/asyncio/coroutines.pyi | 28 + 
.vscode/Pico-W-Stub/stdlib/asyncio/events.pyi | 687 ++++++ .../Pico-W-Stub/stdlib/asyncio/exceptions.pyi | 38 + .../stdlib/asyncio/format_helpers.pyi | 20 + .../Pico-W-Stub/stdlib/asyncio/futures.pyi | 66 + .vscode/Pico-W-Stub/stdlib/asyncio/locks.pyi | 116 + .vscode/Pico-W-Stub/stdlib/asyncio/log.pyi | 3 + .vscode/Pico-W-Stub/stdlib/asyncio/mixins.pyi | 10 + .../stdlib/asyncio/proactor_events.pyi | 74 + .../Pico-W-Stub/stdlib/asyncio/protocols.pyi | 34 + .vscode/Pico-W-Stub/stdlib/asyncio/queues.pyi | 40 + .../Pico-W-Stub/stdlib/asyncio/runners.pyi | 35 + .../stdlib/asyncio/selector_events.pyi | 8 + .../Pico-W-Stub/stdlib/asyncio/sslproto.pyi | 176 ++ .../Pico-W-Stub/stdlib/asyncio/staggered.pyi | 10 + .../Pico-W-Stub/stdlib/asyncio/streams.pyi | 179 ++ .../Pico-W-Stub/stdlib/asyncio/taskgroups.pyi | 20 + .vscode/Pico-W-Stub/stdlib/asyncio/tasks.pyi | 348 +++ .../Pico-W-Stub/stdlib/asyncio/threads.pyi | 9 + .../Pico-W-Stub/stdlib/asyncio/timeouts.pyi | 18 + .../Pico-W-Stub/stdlib/asyncio/transports.pyi | 47 + .vscode/Pico-W-Stub/stdlib/asyncio/trsock.pyi | 122 + .../stdlib/asyncio/unix_events.pyi | 127 ++ .vscode/Pico-W-Stub/stdlib/builtins.pyi | 1958 +++++++++++++++++ .vscode/Pico-W-Stub/stdlib/codecs.pyi | 277 +++ .../stdlib/collections/__init__.pyi | 432 ++++ .../Pico-W-Stub/stdlib/collections/abc.pyi | 2 + .vscode/Pico-W-Stub/stdlib/contextlib.pyi | 207 ++ .vscode/Pico-W-Stub/stdlib/contextvars.pyi | 70 + .vscode/Pico-W-Stub/stdlib/dataclasses.pyi | 322 +++ .vscode/Pico-W-Stub/stdlib/decimal.pyi | 2 + .vscode/Pico-W-Stub/stdlib/enum.pyi | 299 +++ .vscode/Pico-W-Stub/stdlib/fractions.pyi | 162 ++ .vscode/Pico-W-Stub/stdlib/functools.pyi | 222 ++ .vscode/Pico-W-Stub/stdlib/io.pyi | 196 ++ .vscode/Pico-W-Stub/stdlib/numbers.pyi | 129 ++ .vscode/Pico-W-Stub/stdlib/os/__init__.pyi | 1132 ++++++++++ .vscode/Pico-W-Stub/stdlib/queue.pyi | 58 + .vscode/Pico-W-Stub/stdlib/re.pyi | 270 +++ .vscode/Pico-W-Stub/stdlib/selectors.pyi | 73 + .vscode/Pico-W-Stub/stdlib/socket.pyi | 831 +++++++ .vscode/Pico-W-Stub/stdlib/sre_compile.pyi | 11 + .vscode/Pico-W-Stub/stdlib/sre_constants.pyi | 130 ++ .vscode/Pico-W-Stub/stdlib/sre_parse.pyi | 125 ++ .vscode/Pico-W-Stub/stdlib/sys.pyi | 369 ++++ .vscode/Pico-W-Stub/stdlib/types.pyi | 631 ++++++ .vscode/Pico-W-Stub/stdlib/typing.pyi | 840 +++++++ .../Pico-W-Stub/stdlib/typing_extensions.pyi | 495 +++++ .vscode/Pico-W-Stub/struct.pyi | 93 + .vscode/Pico-W-Stub/sys.pyi | 49 + .vscode/Pico-W-Stub/time.pyi | 290 +++ .vscode/Pico-W-Stub/uarray.pyi | 33 + .vscode/Pico-W-Stub/uasyncio.pyi | 1 + .vscode/Pico-W-Stub/uasyncio/__init__.pyi | 45 + .vscode/Pico-W-Stub/uasyncio/core.pyi | 25 + .vscode/Pico-W-Stub/uasyncio/event.pyi | 15 + .vscode/Pico-W-Stub/uasyncio/funcs.pyi | 6 + .vscode/Pico-W-Stub/uasyncio/lock.pyi | 7 + .vscode/Pico-W-Stub/uasyncio/stream.pyi | 56 + .vscode/Pico-W-Stub/ubinascii.pyi | 46 + .vscode/Pico-W-Stub/ubluetooth.pyi | 592 +++++ .vscode/Pico-W-Stub/ucollections.pyi | 105 + .vscode/Pico-W-Stub/ucryptolib.pyi | 42 + .vscode/Pico-W-Stub/uctypes.pyi | 88 + .vscode/Pico-W-Stub/uerrno.pyi | 36 + .vscode/Pico-W-Stub/uhashlib.pyi | 45 + .vscode/Pico-W-Stub/uheapq.pyi | 35 + .vscode/Pico-W-Stub/uio.pyi | 64 + .vscode/Pico-W-Stub/ujson.pyi | 47 + .vscode/Pico-W-Stub/umachine.pyi | 1193 ++++++++++ .vscode/Pico-W-Stub/uos.pyi | 253 +++ .vscode/Pico-W-Stub/uplatform.pyi | 43 + .vscode/Pico-W-Stub/urandom.pyi | 84 + .vscode/Pico-W-Stub/ure.pyi | 6 + .vscode/Pico-W-Stub/urequests.pyi | 1 + .vscode/Pico-W-Stub/uselect.pyi | 103 + 
.vscode/Pico-W-Stub/usocket.pyi | 271 +++ .vscode/Pico-W-Stub/ussl.pyi | 74 + .vscode/Pico-W-Stub/ustruct.pyi | 93 + .vscode/Pico-W-Stub/usys.pyi | 49 + .vscode/Pico-W-Stub/utime.pyi | 290 +++ .vscode/Pico-W-Stub/uwebsocket.pyi | 10 + .vscode/Pico-W-Stub/webrepl.pyi | 15 + .vscode/Pico-W-Stub/webrepl_setup.pyi | 10 + .vscode/Pico-W-Stub/websocket.pyi | 10 + .vscode/extensions.json | 8 + .vscode/settings.json | 16 + 171 files changed, 23640 insertions(+) create mode 100644 .gitignore create mode 100644 .micropico create mode 100644 .vscode/Pico-W-Stub/__builtins__.pyi create mode 100644 .vscode/Pico-W-Stub/_asyncio.pyi create mode 100644 .vscode/Pico-W-Stub/_boot.pyi create mode 100644 .vscode/Pico-W-Stub/_boot_fat.pyi create mode 100644 .vscode/Pico-W-Stub/_onewire.pyi create mode 100644 .vscode/Pico-W-Stub/_rp2.pyi create mode 100644 .vscode/Pico-W-Stub/_thread.pyi create mode 100644 .vscode/Pico-W-Stub/aioble/__init__.pyi create mode 100644 .vscode/Pico-W-Stub/aioble/central.pyi create mode 100644 .vscode/Pico-W-Stub/aioble/client.pyi create mode 100644 .vscode/Pico-W-Stub/aioble/core.pyi create mode 100644 .vscode/Pico-W-Stub/aioble/device.pyi create mode 100644 .vscode/Pico-W-Stub/aioble/l2cap.pyi create mode 100644 .vscode/Pico-W-Stub/aioble/peripheral.pyi create mode 100644 .vscode/Pico-W-Stub/aioble/security.pyi create mode 100644 .vscode/Pico-W-Stub/aioble/server.pyi create mode 100644 .vscode/Pico-W-Stub/array.pyi create mode 100644 .vscode/Pico-W-Stub/asyncio/__init__.pyi create mode 100644 .vscode/Pico-W-Stub/asyncio/core.pyi create mode 100644 .vscode/Pico-W-Stub/asyncio/event.pyi create mode 100644 .vscode/Pico-W-Stub/asyncio/funcs.pyi create mode 100644 .vscode/Pico-W-Stub/asyncio/lock.pyi create mode 100644 .vscode/Pico-W-Stub/asyncio/stream.pyi create mode 100644 .vscode/Pico-W-Stub/binascii.pyi create mode 100644 .vscode/Pico-W-Stub/bluetooth.pyi create mode 100644 .vscode/Pico-W-Stub/cmath.pyi create mode 100644 .vscode/Pico-W-Stub/collections.pyi create mode 100644 .vscode/Pico-W-Stub/cryptolib.pyi create mode 100644 .vscode/Pico-W-Stub/deflate.pyi create mode 100644 .vscode/Pico-W-Stub/dht.pyi create mode 100644 .vscode/Pico-W-Stub/ds18x20.pyi create mode 100644 .vscode/Pico-W-Stub/errno.pyi create mode 100644 .vscode/Pico-W-Stub/framebuf.pyi create mode 100644 .vscode/Pico-W-Stub/gc.pyi create mode 100644 .vscode/Pico-W-Stub/hashlib.pyi create mode 100644 .vscode/Pico-W-Stub/heapq.pyi create mode 100644 .vscode/Pico-W-Stub/io.pyi create mode 100644 .vscode/Pico-W-Stub/json.pyi create mode 100644 .vscode/Pico-W-Stub/lwip.pyi create mode 100644 .vscode/Pico-W-Stub/machine.pyi create mode 100644 .vscode/Pico-W-Stub/math.pyi create mode 100644 .vscode/Pico-W-Stub/micropython.pyi create mode 100644 .vscode/Pico-W-Stub/micropython_rp2_rpi_pico_w_stubs-1.21.0.post1.dist-info/INSTALLER create mode 100644 .vscode/Pico-W-Stub/micropython_rp2_rpi_pico_w_stubs-1.21.0.post1.dist-info/LICENSE.md create mode 100644 .vscode/Pico-W-Stub/micropython_rp2_rpi_pico_w_stubs-1.21.0.post1.dist-info/METADATA create mode 100644 .vscode/Pico-W-Stub/micropython_rp2_rpi_pico_w_stubs-1.21.0.post1.dist-info/RECORD create mode 100644 .vscode/Pico-W-Stub/micropython_rp2_rpi_pico_w_stubs-1.21.0.post1.dist-info/REQUESTED create mode 100644 .vscode/Pico-W-Stub/micropython_rp2_rpi_pico_w_stubs-1.21.0.post1.dist-info/WHEEL create mode 100644 .vscode/Pico-W-Stub/micropython_stdlib_stubs-1.0.0.dist-info/INSTALLER create mode 100644 .vscode/Pico-W-Stub/micropython_stdlib_stubs-1.0.0.dist-info/LICENSE.md 
create mode 100644 .vscode/Pico-W-Stub/micropython_stdlib_stubs-1.0.0.dist-info/METADATA create mode 100644 .vscode/Pico-W-Stub/micropython_stdlib_stubs-1.0.0.dist-info/RECORD create mode 100644 .vscode/Pico-W-Stub/micropython_stdlib_stubs-1.0.0.dist-info/WHEEL create mode 100644 .vscode/Pico-W-Stub/mip/__init__.pyi create mode 100644 .vscode/Pico-W-Stub/neopixel.pyi create mode 100644 .vscode/Pico-W-Stub/network.pyi create mode 100644 .vscode/Pico-W-Stub/ntptime.pyi create mode 100644 .vscode/Pico-W-Stub/onewire.pyi create mode 100644 .vscode/Pico-W-Stub/os.pyi create mode 100644 .vscode/Pico-W-Stub/platform.pyi create mode 100644 .vscode/Pico-W-Stub/random.pyi create mode 100644 .vscode/Pico-W-Stub/requests.pyi create mode 100644 .vscode/Pico-W-Stub/requests/__init__.pyi create mode 100644 .vscode/Pico-W-Stub/rp2.pyi create mode 100644 .vscode/Pico-W-Stub/select.pyi create mode 100644 .vscode/Pico-W-Stub/socket.pyi create mode 100644 .vscode/Pico-W-Stub/ssl.pyi create mode 100644 .vscode/Pico-W-Stub/stdlib/__future__.pyi create mode 100644 .vscode/Pico-W-Stub/stdlib/_ast.pyi create mode 100644 .vscode/Pico-W-Stub/stdlib/_codecs.pyi create mode 100644 .vscode/Pico-W-Stub/stdlib/_collections_abc.pyi create mode 100644 .vscode/Pico-W-Stub/stdlib/_decimal.pyi create mode 100644 .vscode/Pico-W-Stub/stdlib/_typeshed/__init__.pyi create mode 100644 .vscode/Pico-W-Stub/stdlib/_typeshed/dbapi.pyi create mode 100644 .vscode/Pico-W-Stub/stdlib/_typeshed/wsgi.pyi create mode 100644 .vscode/Pico-W-Stub/stdlib/_typeshed/xml.pyi create mode 100644 .vscode/Pico-W-Stub/stdlib/abc.pyi create mode 100644 .vscode/Pico-W-Stub/stdlib/asyncio/__init__.pyi create mode 100644 .vscode/Pico-W-Stub/stdlib/asyncio/base_events.pyi create mode 100644 .vscode/Pico-W-Stub/stdlib/asyncio/base_futures.pyi create mode 100644 .vscode/Pico-W-Stub/stdlib/asyncio/base_tasks.pyi create mode 100644 .vscode/Pico-W-Stub/stdlib/asyncio/constants.pyi create mode 100644 .vscode/Pico-W-Stub/stdlib/asyncio/coroutines.pyi create mode 100644 .vscode/Pico-W-Stub/stdlib/asyncio/events.pyi create mode 100644 .vscode/Pico-W-Stub/stdlib/asyncio/exceptions.pyi create mode 100644 .vscode/Pico-W-Stub/stdlib/asyncio/format_helpers.pyi create mode 100644 .vscode/Pico-W-Stub/stdlib/asyncio/futures.pyi create mode 100644 .vscode/Pico-W-Stub/stdlib/asyncio/locks.pyi create mode 100644 .vscode/Pico-W-Stub/stdlib/asyncio/log.pyi create mode 100644 .vscode/Pico-W-Stub/stdlib/asyncio/mixins.pyi create mode 100644 .vscode/Pico-W-Stub/stdlib/asyncio/proactor_events.pyi create mode 100644 .vscode/Pico-W-Stub/stdlib/asyncio/protocols.pyi create mode 100644 .vscode/Pico-W-Stub/stdlib/asyncio/queues.pyi create mode 100644 .vscode/Pico-W-Stub/stdlib/asyncio/runners.pyi create mode 100644 .vscode/Pico-W-Stub/stdlib/asyncio/selector_events.pyi create mode 100644 .vscode/Pico-W-Stub/stdlib/asyncio/sslproto.pyi create mode 100644 .vscode/Pico-W-Stub/stdlib/asyncio/staggered.pyi create mode 100644 .vscode/Pico-W-Stub/stdlib/asyncio/streams.pyi create mode 100644 .vscode/Pico-W-Stub/stdlib/asyncio/taskgroups.pyi create mode 100644 .vscode/Pico-W-Stub/stdlib/asyncio/tasks.pyi create mode 100644 .vscode/Pico-W-Stub/stdlib/asyncio/threads.pyi create mode 100644 .vscode/Pico-W-Stub/stdlib/asyncio/timeouts.pyi create mode 100644 .vscode/Pico-W-Stub/stdlib/asyncio/transports.pyi create mode 100644 .vscode/Pico-W-Stub/stdlib/asyncio/trsock.pyi create mode 100644 .vscode/Pico-W-Stub/stdlib/asyncio/unix_events.pyi create mode 100644 .vscode/Pico-W-Stub/stdlib/builtins.pyi 
create mode 100644 .vscode/Pico-W-Stub/stdlib/codecs.pyi create mode 100644 .vscode/Pico-W-Stub/stdlib/collections/__init__.pyi create mode 100644 .vscode/Pico-W-Stub/stdlib/collections/abc.pyi create mode 100644 .vscode/Pico-W-Stub/stdlib/contextlib.pyi create mode 100644 .vscode/Pico-W-Stub/stdlib/contextvars.pyi create mode 100644 .vscode/Pico-W-Stub/stdlib/dataclasses.pyi create mode 100644 .vscode/Pico-W-Stub/stdlib/decimal.pyi create mode 100644 .vscode/Pico-W-Stub/stdlib/enum.pyi create mode 100644 .vscode/Pico-W-Stub/stdlib/fractions.pyi create mode 100644 .vscode/Pico-W-Stub/stdlib/functools.pyi create mode 100644 .vscode/Pico-W-Stub/stdlib/io.pyi create mode 100644 .vscode/Pico-W-Stub/stdlib/numbers.pyi create mode 100644 .vscode/Pico-W-Stub/stdlib/os/__init__.pyi create mode 100644 .vscode/Pico-W-Stub/stdlib/queue.pyi create mode 100644 .vscode/Pico-W-Stub/stdlib/re.pyi create mode 100644 .vscode/Pico-W-Stub/stdlib/selectors.pyi create mode 100644 .vscode/Pico-W-Stub/stdlib/socket.pyi create mode 100644 .vscode/Pico-W-Stub/stdlib/sre_compile.pyi create mode 100644 .vscode/Pico-W-Stub/stdlib/sre_constants.pyi create mode 100644 .vscode/Pico-W-Stub/stdlib/sre_parse.pyi create mode 100644 .vscode/Pico-W-Stub/stdlib/sys.pyi create mode 100644 .vscode/Pico-W-Stub/stdlib/types.pyi create mode 100644 .vscode/Pico-W-Stub/stdlib/typing.pyi create mode 100644 .vscode/Pico-W-Stub/stdlib/typing_extensions.pyi create mode 100644 .vscode/Pico-W-Stub/struct.pyi create mode 100644 .vscode/Pico-W-Stub/sys.pyi create mode 100644 .vscode/Pico-W-Stub/time.pyi create mode 100644 .vscode/Pico-W-Stub/uarray.pyi create mode 100644 .vscode/Pico-W-Stub/uasyncio.pyi create mode 100644 .vscode/Pico-W-Stub/uasyncio/__init__.pyi create mode 100644 .vscode/Pico-W-Stub/uasyncio/core.pyi create mode 100644 .vscode/Pico-W-Stub/uasyncio/event.pyi create mode 100644 .vscode/Pico-W-Stub/uasyncio/funcs.pyi create mode 100644 .vscode/Pico-W-Stub/uasyncio/lock.pyi create mode 100644 .vscode/Pico-W-Stub/uasyncio/stream.pyi create mode 100644 .vscode/Pico-W-Stub/ubinascii.pyi create mode 100644 .vscode/Pico-W-Stub/ubluetooth.pyi create mode 100644 .vscode/Pico-W-Stub/ucollections.pyi create mode 100644 .vscode/Pico-W-Stub/ucryptolib.pyi create mode 100644 .vscode/Pico-W-Stub/uctypes.pyi create mode 100644 .vscode/Pico-W-Stub/uerrno.pyi create mode 100644 .vscode/Pico-W-Stub/uhashlib.pyi create mode 100644 .vscode/Pico-W-Stub/uheapq.pyi create mode 100644 .vscode/Pico-W-Stub/uio.pyi create mode 100644 .vscode/Pico-W-Stub/ujson.pyi create mode 100644 .vscode/Pico-W-Stub/umachine.pyi create mode 100644 .vscode/Pico-W-Stub/uos.pyi create mode 100644 .vscode/Pico-W-Stub/uplatform.pyi create mode 100644 .vscode/Pico-W-Stub/urandom.pyi create mode 100644 .vscode/Pico-W-Stub/ure.pyi create mode 100644 .vscode/Pico-W-Stub/urequests.pyi create mode 100644 .vscode/Pico-W-Stub/uselect.pyi create mode 100644 .vscode/Pico-W-Stub/usocket.pyi create mode 100644 .vscode/Pico-W-Stub/ussl.pyi create mode 100644 .vscode/Pico-W-Stub/ustruct.pyi create mode 100644 .vscode/Pico-W-Stub/usys.pyi create mode 100644 .vscode/Pico-W-Stub/utime.pyi create mode 100644 .vscode/Pico-W-Stub/uwebsocket.pyi create mode 100644 .vscode/Pico-W-Stub/webrepl.pyi create mode 100644 .vscode/Pico-W-Stub/webrepl_setup.pyi create mode 100644 .vscode/Pico-W-Stub/websocket.pyi create mode 100644 .vscode/extensions.json create mode 100644 .vscode/settings.json diff --git a/.gitignore b/.gitignore new file mode 100644 index 0000000..25f2c25 --- /dev/null +++ 
b/.gitignore @@ -0,0 +1,161 @@ +# Byte-compiled / optimized / DLL files +__pycache__/ +*.py[cod] +*$py.class + +# C extensions +*.so + +# Distribution / packaging +.Python +build/ +develop-eggs/ +dist/ +downloads/ +eggs/ +.eggs/ +lib/ +lib64/ +parts/ +sdist/ +var/ +wheels/ +share/python-wheels/ +*.egg-info/ +.installed.cfg +*.egg +MANIFEST + +# PyInstaller +# Usually these files are written by a python script from a template +# before PyInstaller builds the exe, so as to inject date/other infos into it. +*.manifest +*.spec + +# Installer logs +pip-log.txt +pip-delete-this-directory.txt + +# Unit test / coverage reports +htmlcov/ +.tox/ +.nox/ +.coverage +.coverage.* +.cache +nosetests.xml +coverage.xml +*.cover +*.py,cover +.hypothesis/ +.pytest_cache/ +cover/ + +# Translations +*.mo +*.pot + +# Django stuff: +*.log +local_settings.py +db.sqlite3 +db.sqlite3-journal + +# Flask stuff: +instance/ +.webassets-cache + +# Scrapy stuff: +.scrapy + +# Sphinx documentation +docs/_build/ + +# PyBuilder +.pybuilder/ +target/ + +# Jupyter Notebook +.ipynb_checkpoints + +# IPython +profile_default/ +ipython_config.py + +# pyenv +# For a library or package, you might want to ignore these files since the code is +# intended to run in multiple environments; otherwise, check them in: +# .python-version + +# pipenv +# According to pypa/pipenv#598, it is recommended to include Pipfile.lock in version control. +# However, in case of collaboration, if having platform-specific dependencies or dependencies +# having no cross-platform support, pipenv may install dependencies that don't work, or not +# install all needed dependencies. +#Pipfile.lock + +# poetry +# Similar to Pipfile.lock, it is generally recommended to include poetry.lock in version control. +# This is especially recommended for binary packages to ensure reproducibility, and is more +# commonly ignored for libraries. +# https://python-poetry.org/docs/basic-usage/#commit-your-poetrylock-file-to-version-control +#poetry.lock + +# pdm +# Similar to Pipfile.lock, it is generally recommended to include pdm.lock in version control. +#pdm.lock +# pdm stores project-wide configurations in .pdm.toml, but it is recommended to not include it +# in version control. +# https://pdm.fming.dev/#use-with-ide +.pdm.toml + +# PEP 582; used by e.g. github.com/David-OConnor/pyflow and github.com/pdm-project/pdm +__pypackages__/ + +# Celery stuff +celerybeat-schedule +celerybeat.pid + +# SageMath parsed files +*.sage.py + +# Environments +.env +.venv +env/ +venv/ +ENV/ +env.bak/ +venv.bak/ + +# Spyder project settings +.spyderproject +.spyproject + +# Rope project settings +.ropeproject + +# mkdocs documentation +/site + +# mypy +.mypy_cache/ +.dmypy.json +dmypy.json + +# Pyre type checker +.pyre/ + +# pytype static type analyzer +.pytype/ + +# Cython debug symbols +cython_debug/ + +# PyCharm +# JetBrains specific template is maintained in a separate JetBrains.gitignore that can +# be found at https://github.com/github/gitignore/blob/main/Global/JetBrains.gitignore +# and can be added to the global gitignore or merged into this file. For a more nuclear +# option (not recommended) you can uncomment the following to ignore the entire idea folder. +#.idea/ +secrets.py diff --git a/.micropico b/.micropico new file mode 100644 index 0000000..3de3977 --- /dev/null +++ b/.micropico @@ -0,0 +1,3 @@ +{ + "info": "This file is just used to identify a project folder." 
+} \ No newline at end of file diff --git a/.vscode/Pico-W-Stub/__builtins__.pyi b/.vscode/Pico-W-Stub/__builtins__.pyi new file mode 100644 index 0000000..54df69e --- /dev/null +++ b/.vscode/Pico-W-Stub/__builtins__.pyi @@ -0,0 +1,27 @@ +# allows for type checking of additional builtins by pyright + +from typing import Tuple, TypeVar + +Const_T = TypeVar("Const_T", int, float, str, bytes, Tuple) # constant + +def const(expr: Const_T) -> Const_T: + """ + Used to declare that the expression is a constant so that the compiler can + optimise it. The use of this function should be as follows:: + + from micropython import const + + CONST_X = const(123) + CONST_Y = const(2 * CONST_X + 1) + + Constants declared this way are still accessible as global variables from + outside the module they are declared in. On the other hand, if a constant + begins with an underscore then it is hidden, it is not available as a global + variable, and does not take up any memory during execution. + + This `const` function is recognised directly by the MicroPython parser and is + provided as part of the :mod:`micropython` module mainly so that scripts can be + written which run under both CPython and MicroPython, by following the above + pattern. + """ + ... diff --git a/.vscode/Pico-W-Stub/_asyncio.pyi b/.vscode/Pico-W-Stub/_asyncio.pyi new file mode 100644 index 0000000..bcaf545 --- /dev/null +++ b/.vscode/Pico-W-Stub/_asyncio.pyi @@ -0,0 +1,11 @@ +from _typeshed import Incomplete as Incomplete + +class TaskQueue: + def push(self, *args, **kwargs) -> Incomplete: ... + def peek(self, *args, **kwargs) -> Incomplete: ... + def remove(self, *args, **kwargs) -> Incomplete: ... + def pop(self, *args, **kwargs) -> Incomplete: ... + def __init__(self, *argv, **kwargs) -> None: ... + +class Task: + def __init__(self, *argv, **kwargs) -> None: ... diff --git a/.vscode/Pico-W-Stub/_boot.pyi b/.vscode/Pico-W-Stub/_boot.pyi new file mode 100644 index 0000000..4278ae7 --- /dev/null +++ b/.vscode/Pico-W-Stub/_boot.pyi @@ -0,0 +1,4 @@ +from _typeshed import Incomplete + +bdev: Incomplete +vfs: Incomplete diff --git a/.vscode/Pico-W-Stub/_boot_fat.pyi b/.vscode/Pico-W-Stub/_boot_fat.pyi new file mode 100644 index 0000000..4278ae7 --- /dev/null +++ b/.vscode/Pico-W-Stub/_boot_fat.pyi @@ -0,0 +1,4 @@ +from _typeshed import Incomplete + +bdev: Incomplete +vfs: Incomplete diff --git a/.vscode/Pico-W-Stub/_onewire.pyi b/.vscode/Pico-W-Stub/_onewire.pyi new file mode 100644 index 0000000..e55ebcb --- /dev/null +++ b/.vscode/Pico-W-Stub/_onewire.pyi @@ -0,0 +1,8 @@ +from _typeshed import Incomplete as Incomplete + +def reset(*args, **kwargs) -> Incomplete: ... +def writebyte(*args, **kwargs) -> Incomplete: ... +def writebit(*args, **kwargs) -> Incomplete: ... +def crc8(*args, **kwargs) -> Incomplete: ... +def readbyte(*args, **kwargs) -> Incomplete: ... +def readbit(*args, **kwargs) -> Incomplete: ... diff --git a/.vscode/Pico-W-Stub/_rp2.pyi b/.vscode/Pico-W-Stub/_rp2.pyi new file mode 100644 index 0000000..0603907 --- /dev/null +++ b/.vscode/Pico-W-Stub/_rp2.pyi @@ -0,0 +1,42 @@ +from _typeshed import Incomplete as Incomplete + +def country(*args, **kwargs) -> Incomplete: ... +def bootsel_button(*args, **kwargs) -> Incomplete: ... + +class Flash: + def readblocks(self, *args, **kwargs) -> Incomplete: ... + def writeblocks(self, *args, **kwargs) -> Incomplete: ... + def ioctl(self, *args, **kwargs) -> Incomplete: ... + def __init__(self, *argv, **kwargs) -> None: ... 
+ +class PIO: + JOIN_TX: int + JOIN_NONE: int + JOIN_RX: int + SHIFT_LEFT: int + OUT_HIGH: int + OUT_LOW: int + SHIFT_RIGHT: int + IN_LOW: int + IRQ_SM3: int + IN_HIGH: int + IRQ_SM2: int + IRQ_SM0: int + IRQ_SM1: int + def state_machine(self, *args, **kwargs) -> Incomplete: ... + def remove_program(self, *args, **kwargs) -> Incomplete: ... + def irq(self, *args, **kwargs) -> Incomplete: ... + def add_program(self, *args, **kwargs) -> Incomplete: ... + def __init__(self, *argv, **kwargs) -> None: ... + +class StateMachine: + def irq(self, *args, **kwargs) -> Incomplete: ... + def put(self, *args, **kwargs) -> Incomplete: ... + def restart(self, *args, **kwargs) -> Incomplete: ... + def rx_fifo(self, *args, **kwargs) -> Incomplete: ... + def tx_fifo(self, *args, **kwargs) -> Incomplete: ... + def init(self, *args, **kwargs) -> Incomplete: ... + def exec(self, *args, **kwargs) -> Incomplete: ... + def get(self, *args, **kwargs) -> Incomplete: ... + def active(self, *args, **kwargs) -> Incomplete: ... + def __init__(self, *argv, **kwargs) -> None: ... diff --git a/.vscode/Pico-W-Stub/_thread.pyi b/.vscode/Pico-W-Stub/_thread.pyi new file mode 100644 index 0000000..328d8f6 --- /dev/null +++ b/.vscode/Pico-W-Stub/_thread.pyi @@ -0,0 +1,25 @@ +""" +Multithreading support. + +MicroPython module: https://docs.micropython.org/en/v1.21.0/library/_thread.html + +CPython module: :mod:`python:_thread` https://docs.python.org/3/library/_thread.html . + +This module implements multithreading support. + +This module is highly experimental and its API is not yet fully settled +and not yet described in this documentation. +""" +from _typeshed import Incomplete, Incomplete as Incomplete + +def get_ident(*args, **kwargs) -> Incomplete: ... +def start_new_thread(*args, **kwargs) -> Incomplete: ... +def stack_size(*args, **kwargs) -> Incomplete: ... +def exit(*args, **kwargs) -> Incomplete: ... +def allocate_lock(*args, **kwargs) -> Incomplete: ... + +class LockType: + def locked(self, *args, **kwargs) -> Incomplete: ... + def release(self, *args, **kwargs) -> Incomplete: ... + def acquire(self, *args, **kwargs) -> Incomplete: ... + def __init__(self, *argv, **kwargs) -> None: ... 
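For orientation, a minimal sketch of how the `_thread` API stubbed above is typically used to run work on the RP2040's second core (the worker function, iteration count and delay are illustrative and not part of this patch)::

    import _thread
    import time

    lock = _thread.allocate_lock()
    counter = 0

    def worker():
        # Runs on the second core; guard shared state with the lock.
        global counter
        for _ in range(10):
            lock.acquire()
            counter += 1
            lock.release()
            time.sleep_ms(100)

    _thread.start_new_thread(worker, ())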
diff --git a/.vscode/Pico-W-Stub/aioble/__init__.pyi b/.vscode/Pico-W-Stub/aioble/__init__.pyi new file mode 100644 index 0000000..83f4348 --- /dev/null +++ b/.vscode/Pico-W-Stub/aioble/__init__.pyi @@ -0,0 +1,15 @@ +from .central import scan as scan +from .core import GattError as GattError, config as config, log_error as log_error, log_warn as log_warn, stop as stop +from .device import Device as Device, DeviceDisconnectedError as DeviceDisconnectedError +from .peripheral import advertise as advertise +from .server import ( + BufferedCharacteristic as BufferedCharacteristic, + Characteristic as Characteristic, + Descriptor as Descriptor, + Service as Service, + register_services as register_services, +) +from _typeshed import Incomplete + +ADDR_PUBLIC: Incomplete +ADDR_RANDOM: Incomplete diff --git a/.vscode/Pico-W-Stub/aioble/central.pyi b/.vscode/Pico-W-Stub/aioble/central.pyi new file mode 100644 index 0000000..984ea9f --- /dev/null +++ b/.vscode/Pico-W-Stub/aioble/central.pyi @@ -0,0 +1,71 @@ +from .core import ( + ble as ble, + ensure_active as ensure_active, + log_error as log_error, + log_info as log_info, + log_warn as log_warn, + register_irq_handler as register_irq_handler, +) +from .device import Device as Device, DeviceConnection as DeviceConnection, DeviceTimeout as DeviceTimeout +from _typeshed import Incomplete +from collections.abc import Generator + +_IRQ_SCAN_RESULT: Incomplete +_IRQ_SCAN_DONE: Incomplete +_IRQ_PERIPHERAL_CONNECT: Incomplete +_IRQ_PERIPHERAL_DISCONNECT: Incomplete +_ADV_IND: Incomplete +_ADV_DIRECT_IND: Incomplete +_ADV_SCAN_IND: Incomplete +_ADV_NONCONN_IND: Incomplete +_SCAN_RSP: Incomplete +_ADV_TYPE_FLAGS: Incomplete +_ADV_TYPE_NAME: Incomplete +_ADV_TYPE_SHORT_NAME: Incomplete +_ADV_TYPE_UUID16_INCOMPLETE: Incomplete +_ADV_TYPE_UUID16_COMPLETE: Incomplete +_ADV_TYPE_UUID32_INCOMPLETE: Incomplete +_ADV_TYPE_UUID32_COMPLETE: Incomplete +_ADV_TYPE_UUID128_INCOMPLETE: Incomplete +_ADV_TYPE_UUID128_COMPLETE: Incomplete +_ADV_TYPE_APPEARANCE: Incomplete +_ADV_TYPE_MANUFACTURER: Incomplete +_active_scanner: Incomplete +_connecting: Incomplete + +def _central_irq(event, data) -> None: ... +def _central_shutdown() -> None: ... +async def _cancel_pending() -> None: ... +async def _connect(connection, timeout_ms) -> None: ... + +class ScanResult: + device: Incomplete + adv_data: Incomplete + resp_data: Incomplete + rssi: Incomplete + connectable: bool + def __init__(self, device) -> None: ... + def _update(self, adv_type, rssi, adv_data): ... + def __str__(self): ... + def _decode_field(self, *adv_type) -> Generator[Incomplete, None, None]: ... + def name(self): ... + def services(self) -> Generator[Incomplete, None, None]: ... + def manufacturer(self, filter: Incomplete | None = ...) -> Generator[Incomplete, None, None]: ... + +class scan: + _queue: Incomplete + _event: Incomplete + _done: bool + _results: Incomplete + _duration_ms: Incomplete + _interval_us: Incomplete + _window_us: Incomplete + _active: Incomplete + def __init__( + self, duration_ms, interval_us: Incomplete | None = ..., window_us: Incomplete | None = ..., active: bool = ... + ) -> None: ... + async def __aenter__(self): ... + async def __aexit__(self, exc_type, exc_val, exc_traceback) -> None: ... + def __aiter__(self): ... + async def __anext__(self): ... + async def cancel(self) -> None: ... 
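The `scan` class stubbed above is an async context manager and iterator; a usage sketch along the lines of the upstream aioble examples (the duration and scan-window values are illustrative)::

    import asyncio
    import aioble

    async def find_devices():
        # Active scan for 5 seconds, yielding ScanResult objects as they arrive.
        async with aioble.scan(5000, interval_us=30000, window_us=30000, active=True) as scanner:
            async for result in scanner:
                print(result.device, result.name(), result.rssi)

    asyncio.run(find_devices())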
diff --git a/.vscode/Pico-W-Stub/aioble/client.pyi b/.vscode/Pico-W-Stub/aioble/client.pyi new file mode 100644 index 0000000..58da152 --- /dev/null +++ b/.vscode/Pico-W-Stub/aioble/client.pyi @@ -0,0 +1,100 @@ +from .core import GattError as GattError, ble as ble, register_irq_handler as register_irq_handler +from .device import DeviceConnection as DeviceConnection +from _typeshed import Incomplete + +_IRQ_GATTC_SERVICE_RESULT: Incomplete +_IRQ_GATTC_SERVICE_DONE: Incomplete +_IRQ_GATTC_CHARACTERISTIC_RESULT: Incomplete +_IRQ_GATTC_CHARACTERISTIC_DONE: Incomplete +_IRQ_GATTC_DESCRIPTOR_RESULT: Incomplete +_IRQ_GATTC_DESCRIPTOR_DONE: Incomplete +_IRQ_GATTC_READ_RESULT: Incomplete +_IRQ_GATTC_READ_DONE: Incomplete +_IRQ_GATTC_WRITE_DONE: Incomplete +_IRQ_GATTC_NOTIFY: Incomplete +_IRQ_GATTC_INDICATE: Incomplete +_CCCD_UUID: Incomplete +_CCCD_NOTIFY: Incomplete +_CCCD_INDICATE: Incomplete +_FLAG_READ: Incomplete +_FLAG_WRITE_NO_RESPONSE: Incomplete +_FLAG_WRITE: Incomplete +_FLAG_NOTIFY: Incomplete +_FLAG_INDICATE: Incomplete + +def _client_irq(event, data) -> None: ... + +class ClientDiscover: + _connection: Incomplete + _queue: Incomplete + _status: Incomplete + _event: Incomplete + _disc_type: Incomplete + _parent: Incomplete + _timeout_ms: Incomplete + _args: Incomplete + def __init__(self, connection, disc_type, parent, timeout_ms, *args) -> None: ... + async def _start(self) -> None: ... + def __aiter__(self): ... + async def __anext__(self): ... + def _discover_result(conn_handle, *args) -> None: ... + def _discover_done(conn_handle, status) -> None: ... + +class ClientService: + connection: Incomplete + _start_handle: Incomplete + _end_handle: Incomplete + uuid: Incomplete + def __init__(self, connection, start_handle, end_handle, uuid) -> None: ... + def __str__(self): ... + async def characteristic(self, uuid, timeout_ms: int = ...): ... + def characteristics(self, uuid: Incomplete | None = ..., timeout_ms: int = ...): ... + def _start_discovery(connection, uuid: Incomplete | None = ...) -> None: ... + +class BaseClientCharacteristic: + _value_handle: Incomplete + properties: Incomplete + uuid: Incomplete + _read_event: Incomplete + _read_data: Incomplete + _read_status: Incomplete + _write_event: Incomplete + _write_status: Incomplete + def __init__(self, value_handle, properties, uuid) -> None: ... + def _register_with_connection(self) -> None: ... + def _find(conn_handle, value_handle): ... + def _check(self, flag) -> None: ... + async def read(self, timeout_ms: int = ...): ... + def _read_result(conn_handle, value_handle, data) -> None: ... + def _read_done(conn_handle, value_handle, status) -> None: ... + async def write(self, data, response: Incomplete | None = ..., timeout_ms: int = ...) -> None: ... + def _write_done(conn_handle, value_handle, status) -> None: ... + +class ClientCharacteristic(BaseClientCharacteristic): + service: Incomplete + connection: Incomplete + _end_handle: Incomplete + _notify_event: Incomplete + _notify_queue: Incomplete + _indicate_event: Incomplete + _indicate_queue: Incomplete + def __init__(self, service, end_handle, value_handle, properties, uuid) -> None: ... + def __str__(self): ... + def _connection(self): ... + async def descriptor(self, uuid, timeout_ms: int = ...): ... + def descriptors(self, timeout_ms: int = ...): ... + def _start_discovery(service, uuid: Incomplete | None = ...) -> None: ... + async def _notified_indicated(self, queue, event, timeout_ms): ... + async def notified(self, timeout_ms: Incomplete | None = ...): ... 
+ def _on_notify_indicate(self, queue, event, data) -> None: ... + def _on_notify(conn_handle, value_handle, notify_data) -> None: ... + async def indicated(self, timeout_ms: Incomplete | None = ...): ... + def _on_indicate(conn_handle, value_handle, indicate_data) -> None: ... + async def subscribe(self, notify: bool = ..., indicate: bool = ...) -> None: ... + +class ClientDescriptor(BaseClientCharacteristic): + characteristic: Incomplete + def __init__(self, characteristic, dsc_handle, uuid) -> None: ... + def __str__(self): ... + def _connection(self): ... + def _start_discovery(characteristic, uuid: Incomplete | None = ...) -> None: ... diff --git a/.vscode/Pico-W-Stub/aioble/core.pyi b/.vscode/Pico-W-Stub/aioble/core.pyi new file mode 100644 index 0000000..51440ac --- /dev/null +++ b/.vscode/Pico-W-Stub/aioble/core.pyi @@ -0,0 +1,23 @@ +from _typeshed import Incomplete + +log_level: int + +def log_error(*args) -> None: ... +def log_warn(*args) -> None: ... +def log_info(*args) -> None: ... + +class GattError(Exception): + _status: Incomplete + def __init__(self, status) -> None: ... + +def ensure_active() -> None: ... +def config(*args, **kwargs): ... + +_irq_handlers: Incomplete +_shutdown_handlers: Incomplete + +def register_irq_handler(irq, shutdown) -> None: ... +def stop() -> None: ... +def ble_irq(event, data): ... + +ble: Incomplete diff --git a/.vscode/Pico-W-Stub/aioble/device.pyi b/.vscode/Pico-W-Stub/aioble/device.pyi new file mode 100644 index 0000000..07cfbc8 --- /dev/null +++ b/.vscode/Pico-W-Stub/aioble/device.pyi @@ -0,0 +1,62 @@ +from .core import ble as ble, log_error as log_error, register_irq_handler as register_irq_handler +from _typeshed import Incomplete + +_IRQ_MTU_EXCHANGED: Incomplete + +class DeviceDisconnectedError(Exception): ... + +def _device_irq(event, data) -> None: ... + +class DeviceTimeout: + _connection: Incomplete + _timeout_ms: Incomplete + _timeout_task: Incomplete + _task: Incomplete + def __init__(self, connection, timeout_ms) -> None: ... + async def _timeout_sleep(self) -> None: ... + def __enter__(self) -> None: ... + def __exit__(self, exc_type, exc_val, exc_traceback) -> None: ... + +class Device: + addr_type: Incomplete + addr: Incomplete + _connection: Incomplete + def __init__(self, addr_type, addr) -> None: ... + def __eq__(self, rhs): ... + def __hash__(self): ... + def __str__(self): ... + def addr_hex(self): ... + async def connect(self, timeout_ms: int = ...): ... + +class DeviceConnection: + _connected: Incomplete + device: Incomplete + encrypted: bool + authenticated: bool + bonded: bool + key_size: bool + mtu: Incomplete + _conn_handle: Incomplete + _event: Incomplete + _mtu_event: Incomplete + _discover: Incomplete + _characteristics: Incomplete + _task: Incomplete + _timeouts: Incomplete + _pair_event: Incomplete + _l2cap_channel: Incomplete + def __init__(self, device) -> None: ... + async def device_task(self) -> None: ... + def _run_task(self) -> None: ... + async def disconnect(self, timeout_ms: int = ...) -> None: ... + async def disconnected(self, timeout_ms: int = ..., disconnect: bool = ...) -> None: ... + async def service(self, uuid, timeout_ms: int = ...): ... + def services(self, uuid: Incomplete | None = ..., timeout_ms: int = ...): ... + async def pair(self, *args, **kwargs) -> None: ... + def is_connected(self): ... + def timeout(self, timeout_ms): ... + async def exchange_mtu(self, mtu: Incomplete | None = ..., timeout_ms: int = ...): ... 
+ async def l2cap_accept(self, psm, mtu, timeout_ms: Incomplete | None = ...): ... + async def l2cap_connect(self, psm, mtu, timeout_ms: int = ...): ... + async def __aenter__(self): ... + async def __aexit__(self, exc_type, exc_val, exc_traceback) -> None: ... diff --git a/.vscode/Pico-W-Stub/aioble/l2cap.pyi b/.vscode/Pico-W-Stub/aioble/l2cap.pyi new file mode 100644 index 0000000..ed4827e --- /dev/null +++ b/.vscode/Pico-W-Stub/aioble/l2cap.pyi @@ -0,0 +1,39 @@ +from .core import ble as ble, log_error as log_error, register_irq_handler as register_irq_handler +from .device import DeviceConnection as DeviceConnection +from _typeshed import Incomplete + +_IRQ_L2CAP_ACCEPT: Incomplete +_IRQ_L2CAP_CONNECT: Incomplete +_IRQ_L2CAP_DISCONNECT: Incomplete +_IRQ_L2CAP_RECV: Incomplete +_IRQ_L2CAP_SEND_READY: Incomplete +_listening: bool + +def _l2cap_irq(event, data) -> None: ... +def _l2cap_shutdown() -> None: ... + +class L2CAPDisconnectedError(Exception): ... +class L2CAPConnectionError(Exception): ... + +class L2CAPChannel: + _connection: Incomplete + our_mtu: int + peer_mtu: int + _cid: Incomplete + _status: int + _stalled: bool + _data_ready: bool + _event: Incomplete + def __init__(self, connection) -> None: ... + def _assert_connected(self) -> None: ... + async def recvinto(self, buf, timeout_ms: Incomplete | None = ...): ... + def available(self): ... + async def send(self, buf, timeout_ms: Incomplete | None = ..., chunk_size: Incomplete | None = ...) -> None: ... + async def flush(self, timeout_ms: Incomplete | None = ...) -> None: ... + async def disconnect(self, timeout_ms: int = ...) -> None: ... + async def disconnected(self, timeout_ms: int = ...) -> None: ... + async def __aenter__(self): ... + async def __aexit__(self, exc_type, exc_val, exc_traceback) -> None: ... + +async def accept(connection, psm, mtu, timeout_ms): ... +async def connect(connection, psm, mtu, timeout_ms): ... diff --git a/.vscode/Pico-W-Stub/aioble/peripheral.pyi b/.vscode/Pico-W-Stub/aioble/peripheral.pyi new file mode 100644 index 0000000..e8fa8e6 --- /dev/null +++ b/.vscode/Pico-W-Stub/aioble/peripheral.pyi @@ -0,0 +1,43 @@ +from .core import ( + ble as ble, + ensure_active as ensure_active, + log_error as log_error, + log_info as log_info, + log_warn as log_warn, + register_irq_handler as register_irq_handler, +) +from .device import Device as Device, DeviceConnection as DeviceConnection, DeviceTimeout as DeviceTimeout +from _typeshed import Incomplete + +_IRQ_CENTRAL_CONNECT: Incomplete +_IRQ_CENTRAL_DISCONNECT: Incomplete +_ADV_TYPE_FLAGS: Incomplete +_ADV_TYPE_NAME: Incomplete +_ADV_TYPE_UUID16_COMPLETE: Incomplete +_ADV_TYPE_UUID32_COMPLETE: Incomplete +_ADV_TYPE_UUID128_COMPLETE: Incomplete +_ADV_TYPE_UUID16_MORE: Incomplete +_ADV_TYPE_UUID32_MORE: Incomplete +_ADV_TYPE_UUID128_MORE: Incomplete +_ADV_TYPE_APPEARANCE: Incomplete +_ADV_TYPE_MANUFACTURER: Incomplete +_ADV_PAYLOAD_MAX_LEN: Incomplete +_incoming_connection: Incomplete +_connect_event: Incomplete + +def _peripheral_irq(event, data) -> None: ... +def _peripheral_shutdown() -> None: ... +def _append(adv_data, resp_data, adv_type, value): ... +async def advertise( + interval_us, + adv_data: Incomplete | None = ..., + resp_data: Incomplete | None = ..., + connectable: bool = ..., + limited_disc: bool = ..., + br_edr: bool = ..., + name: Incomplete | None = ..., + services: Incomplete | None = ..., + appearance: int = ..., + manufacturer: Incomplete | None = ..., + timeout_ms: Incomplete | None = ..., +): ... 
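A peripheral-side sketch pairing `advertise` above with the `DeviceConnection` stubs, in the spirit of the aioble examples (the device name and 16-bit service UUID are placeholders)::

    import asyncio
    import bluetooth
    import aioble

    _ENV_SENSE_UUID = bluetooth.UUID(0x181A)  # placeholder service UUID

    async def advertise_loop():
        while True:
            # advertise() resolves to a DeviceConnection once a central connects.
            connection = await aioble.advertise(
                250_000,  # advertising interval in microseconds
                name="pico-w",
                services=[_ENV_SENSE_UUID],
            )
            print("connected to", connection.device)
            await connection.disconnected(timeout_ms=None)

    asyncio.run(advertise_loop())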
diff --git a/.vscode/Pico-W-Stub/aioble/security.pyi b/.vscode/Pico-W-Stub/aioble/security.pyi new file mode 100644 index 0000000..d14965a --- /dev/null +++ b/.vscode/Pico-W-Stub/aioble/security.pyi @@ -0,0 +1,26 @@ +from .core import ble as ble, log_info as log_info, log_warn as log_warn, register_irq_handler as register_irq_handler +from .device import DeviceConnection as DeviceConnection +from _typeshed import Incomplete + +_IRQ_ENCRYPTION_UPDATE: Incomplete +_IRQ_GET_SECRET: Incomplete +_IRQ_SET_SECRET: Incomplete +_IRQ_PASSKEY_ACTION: Incomplete +_IO_CAPABILITY_DISPLAY_ONLY: Incomplete +_IO_CAPABILITY_DISPLAY_YESNO: Incomplete +_IO_CAPABILITY_KEYBOARD_ONLY: Incomplete +_IO_CAPABILITY_NO_INPUT_OUTPUT: Incomplete +_IO_CAPABILITY_KEYBOARD_DISPLAY: Incomplete +_PASSKEY_ACTION_INPUT: Incomplete +_PASSKEY_ACTION_DISP: Incomplete +_PASSKEY_ACTION_NUMCMP: Incomplete +_DEFAULT_PATH: str +_secrets: Incomplete +_modified: bool +_path: Incomplete + +def load_secrets(path: Incomplete | None = ...) -> None: ... +def _save_secrets(arg: Incomplete | None = ...) -> None: ... +def _security_irq(event, data): ... +def _security_shutdown() -> None: ... +async def pair(connection, bond: bool = ..., le_secure: bool = ..., mitm: bool = ..., io=..., timeout_ms: int = ...) -> None: ... diff --git a/.vscode/Pico-W-Stub/aioble/server.pyi b/.vscode/Pico-W-Stub/aioble/server.pyi new file mode 100644 index 0000000..3ea850e --- /dev/null +++ b/.vscode/Pico-W-Stub/aioble/server.pyi @@ -0,0 +1,100 @@ +from .core import ( + GattError as GattError, + ble as ble, + ensure_active as ensure_active, + log_error as log_error, + log_info as log_info, + log_warn as log_warn, + register_irq_handler as register_irq_handler, +) +from .device import DeviceConnection as DeviceConnection, DeviceTimeout as DeviceTimeout +from _typeshed import Incomplete + +_registered_characteristics: Incomplete +_IRQ_GATTS_WRITE: Incomplete +_IRQ_GATTS_READ_REQUEST: Incomplete +_IRQ_GATTS_INDICATE_DONE: Incomplete +_FLAG_READ: Incomplete +_FLAG_WRITE_NO_RESPONSE: Incomplete +_FLAG_WRITE: Incomplete +_FLAG_NOTIFY: Incomplete +_FLAG_INDICATE: Incomplete +_FLAG_READ_ENCRYPTED: Incomplete +_FLAG_READ_AUTHENTICATED: Incomplete +_FLAG_READ_AUTHORIZED: Incomplete +_FLAG_WRITE_ENCRYPTED: Incomplete +_FLAG_WRITE_AUTHENTICATED: Incomplete +_FLAG_WRITE_AUTHORIZED: Incomplete +_FLAG_WRITE_CAPTURE: Incomplete +_WRITE_CAPTURE_QUEUE_LIMIT: Incomplete + +def _server_irq(event, data): ... +def _server_shutdown() -> None: ... + +class Service: + uuid: Incomplete + characteristics: Incomplete + def __init__(self, uuid) -> None: ... + def _tuple(self): ... + +class BaseCharacteristic: + _value_handle: Incomplete + _initial: Incomplete + def _register(self, value_handle) -> None: ... + def read(self): ... + def write(self, data, send_update: bool = ...) -> None: ... + @staticmethod + def _init_capture() -> None: ... + @staticmethod + async def _run_capture_task() -> None: ... + _write_data: Incomplete + async def written(self, timeout_ms: Incomplete | None = ...): ... + def on_read(self, connection): ... + def _remote_write(conn_handle, value_handle) -> None: ... + def _remote_read(conn_handle, value_handle): ... 
+ +class Characteristic(BaseCharacteristic): + descriptors: Incomplete + _write_event: Incomplete + _write_data: Incomplete + _indicate_connection: Incomplete + _indicate_event: Incomplete + _indicate_status: Incomplete + uuid: Incomplete + flags: Incomplete + _value_handle: Incomplete + _initial: Incomplete + def __init__( + self, + service, + uuid, + read: bool = ..., + write: bool = ..., + write_no_response: bool = ..., + notify: bool = ..., + indicate: bool = ..., + initial: Incomplete | None = ..., + capture: bool = ..., + ) -> None: ... + def _tuple(self): ... + def notify(self, connection, data: Incomplete | None = ...) -> None: ... + async def indicate(self, connection, data: Incomplete | None = ..., timeout_ms: int = ...) -> None: ... + def _indicate_done(conn_handle, value_handle, status) -> None: ... + +class BufferedCharacteristic(Characteristic): + _max_len: Incomplete + _append: Incomplete + def __init__(self, *args, max_len: int = ..., append: bool = ..., **kwargs) -> None: ... + def _register(self, value_handle) -> None: ... + +class Descriptor(BaseCharacteristic): + _write_event: Incomplete + _write_data: Incomplete + uuid: Incomplete + flags: Incomplete + _value_handle: Incomplete + _initial: Incomplete + def __init__(self, characteristic, uuid, read: bool = ..., write: bool = ..., initial: Incomplete | None = ...) -> None: ... + def _tuple(self): ... + +def register_services(*services) -> None: ... diff --git a/.vscode/Pico-W-Stub/array.pyi b/.vscode/Pico-W-Stub/array.pyi new file mode 100644 index 0000000..8841b77 --- /dev/null +++ b/.vscode/Pico-W-Stub/array.pyi @@ -0,0 +1,33 @@ +""" +Efficient arrays of numeric data. + +MicroPython module: https://docs.micropython.org/en/v1.21.0/library/array.html + +CPython module: :mod:`python:array` https://docs.python.org/3/library/array.html . + +Supported format codes: ``b``, ``B``, ``h``, ``H``, ``i``, ``I``, ``l``, +``L``, ``q``, ``Q``, ``f``, ``d`` (the latter 2 depending on the +floating-point support). +""" +from _typeshed import Incomplete, Incomplete as Incomplete +from typing import Any, List, Optional + +class array: + """ + Create array with elements of given type. Initial contents of the + array are given by *iterable*. If it is not provided, an empty + array is created. + """ + + def extend(self, iterable) -> Incomplete: + """ + Append new elements as contained in *iterable* to the end of + array, growing it. + """ + ... + def append(self, val) -> Incomplete: + """ + Append new element *val* to the end of array, growing it. + """ + ... + def __init__(self, typecode, iterable: Optional[Any] = None) -> None: ... diff --git a/.vscode/Pico-W-Stub/asyncio/__init__.pyi b/.vscode/Pico-W-Stub/asyncio/__init__.pyi new file mode 100644 index 0000000..29d6c7c --- /dev/null +++ b/.vscode/Pico-W-Stub/asyncio/__init__.pyi @@ -0,0 +1,7 @@ +from .core import * +from _typeshed import Incomplete + +__version__: Incomplete +_attrs: Incomplete + +def __getattr__(attr): ... diff --git a/.vscode/Pico-W-Stub/asyncio/core.pyi b/.vscode/Pico-W-Stub/asyncio/core.pyi new file mode 100644 index 0000000..775c3af --- /dev/null +++ b/.vscode/Pico-W-Stub/asyncio/core.pyi @@ -0,0 +1,52 @@ +from .task import Task as Task, TaskQueue as TaskQueue +from _typeshed import Incomplete + +class CancelledError(BaseException): ... +class TimeoutError(Exception): ... + +_exc_context: Incomplete + +class SingletonGenerator: + state: Incomplete + exc: Incomplete + def __init__(self) -> None: ... + def __iter__(self): ... + def __next__(self) -> None: ... 
+ +def sleep_ms(t, sgen=...): ... +def sleep(t): ... + +class IOQueue: + poller: Incomplete + map: Incomplete + def __init__(self) -> None: ... + def _enqueue(self, s, idx) -> None: ... + def _dequeue(self, s) -> None: ... + def queue_read(self, s) -> None: ... + def queue_write(self, s) -> None: ... + def remove(self, task) -> None: ... + def wait_io_event(self, dt) -> None: ... + +def _promote_to_task(aw): ... +def create_task(coro): ... +def run_until_complete(main_task: Incomplete | None = ...): ... +def run(coro): ... +async def _stopper() -> None: ... + +_stop_task: Incomplete + +class Loop: + _exc_handler: Incomplete + def create_task(coro): ... + def run_forever() -> None: ... + def run_until_complete(aw): ... + def stop() -> None: ... + def close() -> None: ... + def set_exception_handler(handler) -> None: ... + def get_exception_handler(): ... + def default_exception_handler(loop, context) -> None: ... + def call_exception_handler(context) -> None: ... + +def get_event_loop(runq_len: int = ..., waitq_len: int = ...): ... +def current_task(): ... +def new_event_loop(): ... diff --git a/.vscode/Pico-W-Stub/asyncio/event.pyi b/.vscode/Pico-W-Stub/asyncio/event.pyi new file mode 100644 index 0000000..731c304 --- /dev/null +++ b/.vscode/Pico-W-Stub/asyncio/event.pyi @@ -0,0 +1,21 @@ +import io +from . import core as core +from _typeshed import Incomplete +from collections.abc import Generator + +class Event: + state: bool + waiting: Incomplete + def __init__(self) -> None: ... + def is_set(self): ... + def set(self) -> None: ... + def clear(self) -> None: ... + def wait(self) -> Generator[None, None, Incomplete]: ... + +class ThreadSafeFlag(io.IOBase): + state: int + def __init__(self) -> None: ... + def ioctl(self, req, flags): ... + def set(self) -> None: ... + def clear(self) -> None: ... + async def wait(self) -> Generator[Incomplete, None, None]: ... diff --git a/.vscode/Pico-W-Stub/asyncio/funcs.pyi b/.vscode/Pico-W-Stub/asyncio/funcs.pyi new file mode 100644 index 0000000..1e111c0 --- /dev/null +++ b/.vscode/Pico-W-Stub/asyncio/funcs.pyi @@ -0,0 +1,13 @@ +from . import core as core +from _typeshed import Incomplete +from collections.abc import Generator + +async def _run(waiter, aw) -> None: ... +async def wait_for(aw, timeout, sleep=...): ... +def wait_for_ms(aw, timeout): ... + +class _Remove: + @staticmethod + def remove(t) -> None: ... + +def gather(*aws, return_exceptions: bool = ...) -> Generator[None, None, Incomplete]: ... diff --git a/.vscode/Pico-W-Stub/asyncio/lock.pyi b/.vscode/Pico-W-Stub/asyncio/lock.pyi new file mode 100644 index 0000000..b89edeb --- /dev/null +++ b/.vscode/Pico-W-Stub/asyncio/lock.pyi @@ -0,0 +1,13 @@ +from . import core as core +from _typeshed import Incomplete +from collections.abc import Generator + +class Lock: + state: int + waiting: Incomplete + def __init__(self) -> None: ... + def locked(self): ... + def release(self) -> None: ... + def acquire(self) -> Generator[None, None, Incomplete]: ... + async def __aenter__(self): ... + async def __aexit__(self, exc_type, exc, tb): ... diff --git a/.vscode/Pico-W-Stub/asyncio/stream.pyi b/.vscode/Pico-W-Stub/asyncio/stream.pyi new file mode 100644 index 0000000..15521ab --- /dev/null +++ b/.vscode/Pico-W-Stub/asyncio/stream.pyi @@ -0,0 +1,36 @@ +from . import core as core +from _typeshed import Incomplete +from collections.abc import Generator + +class Stream: + s: Incomplete + e: Incomplete + out_buf: bytes + def __init__(self, s, e=...) -> None: ... + def get_extra_info(self, v): ... 
+ async def __aenter__(self): ... + async def __aexit__(self, exc_type, exc, tb) -> None: ... + def close(self) -> None: ... + async def wait_closed(self) -> None: ... + def read(self, n: int = ...) -> Generator[Incomplete, None, Incomplete]: ... + def readinto(self, buf) -> Generator[Incomplete, None, Incomplete]: ... + def readexactly(self, n) -> Generator[Incomplete, None, Incomplete]: ... + def readline(self) -> Generator[Incomplete, None, Incomplete]: ... + def write(self, buf) -> None: ... + def drain(self) -> Generator[Incomplete, None, Incomplete]: ... + +StreamReader = Stream +StreamWriter = Stream + +def open_connection(host, port) -> Generator[Incomplete, None, Incomplete]: ... + +class Server: + async def __aenter__(self): ... + async def __aexit__(self, exc_type, exc, tb) -> None: ... + state: bool + def close(self) -> None: ... + async def wait_closed(self) -> None: ... + async def _serve(self, s, cb) -> Generator[Incomplete, None, None]: ... + +async def start_server(cb, host, port, backlog: int = ...): ... +async def stream_awrite(self, buf, off: int = ..., sz: int = ...) -> None: ... diff --git a/.vscode/Pico-W-Stub/binascii.pyi b/.vscode/Pico-W-Stub/binascii.pyi new file mode 100644 index 0000000..4026fc1 --- /dev/null +++ b/.vscode/Pico-W-Stub/binascii.pyi @@ -0,0 +1,46 @@ +""" +Binary/ASCII conversions. + +MicroPython module: https://docs.micropython.org/en/v1.21.0/library/binascii.html + +CPython module: :mod:`python:binascii` https://docs.python.org/3/library/binascii.html . + +This module implements conversions between binary data and various +encodings of it in ASCII form (in both directions). +""" +from _typeshed import Incomplete, Incomplete as Incomplete +from typing import Any, Optional + +def crc32(*args, **kwargs) -> Incomplete: ... +def hexlify(data, sep: Optional[Any] = None) -> bytes: + """ + Convert the bytes in the *data* object to a hexadecimal representation. + Returns a bytes object. + + If the additional argument *sep* is supplied it is used as a separator + between hexadecimal values. + """ + ... + +def unhexlify(data) -> bytes: + """ + Convert hexadecimal data to binary representation. Returns bytes string. + (i.e. inverse of hexlify) + """ + ... + +def b2a_base64(data, *, newline=True) -> bytes: + """ + Encode binary data in base64 format, as in `RFC 3548 + `_. Returns the encoded data + followed by a newline character if newline is true, as a bytes object. + """ + ... + +def a2b_base64(data) -> bytes: + """ + Decode base64-encoded data, ignoring invalid characters in the input. + Conforms to `RFC 2045 s.6.8 `_. + Returns a bytes object. + """ + ... diff --git a/.vscode/Pico-W-Stub/bluetooth.pyi b/.vscode/Pico-W-Stub/bluetooth.pyi new file mode 100644 index 0000000..ab39369 --- /dev/null +++ b/.vscode/Pico-W-Stub/bluetooth.pyi @@ -0,0 +1,592 @@ +""" +Low-level Bluetooth radio functionality. + +MicroPython module: https://docs.micropython.org/en/v1.21.0/library/bluetooth.html + +This module provides an interface to a Bluetooth controller on a board. +Currently this supports Bluetooth Low Energy (BLE) in Central, Peripheral, +Broadcaster, and Observer roles, as well as GATT Server and Client and L2CAP +connection-oriented-channels. A device may operate in multiple roles +concurrently. Pairing (and bonding) is supported on some ports. + +This API is intended to match the low-level Bluetooth protocol and provide +building-blocks for higher-level abstractions such as specific device types. 
+ +``Note:`` For most applications, we recommend using the higher-level + `aioble library `_. + +``Note:`` This module is still under development and its classes, functions, + methods and constants are subject to change. +""" +from _typeshed import Incomplete, Incomplete as Incomplete +from typing import Any, Optional, Tuple + +FLAG_NOTIFY: int +FLAG_READ: int +FLAG_WRITE: int +FLAG_INDICATE: int +FLAG_WRITE_NO_RESPONSE: int + +class UUID: + """ + Creates a UUID instance with the specified **value**. + + The **value** can be either: + + - A 16-bit integer. e.g. ``0x2908``. + - A 128-bit UUID string. e.g. ``'6E400001-B5A3-F393-E0A9-E50E24DCCA9E'``. + """ + + def __init__(self, value, /) -> None: ... + +class BLE: + """ + Returns the singleton BLE object. + """ + + def gatts_notify(self, conn_handle, value_handle, data=None, /) -> None: + """ + Sends a notification request to a connected client. + + If *data* is ``None`` (the default), then the current local value (as set + with :meth:`gatts_write `) will be sent. + + Otherwise, if *data* is not ``None``, then that value is sent to the client + as part of the notification. The local value will not be modified. + + **Note:** The notification will be sent regardless of the subscription + status of the client to this characteristic. + """ + ... + def gatts_indicate(self, conn_handle, value_handle, data=None, /) -> None: + """ + Sends a indication request to a connected client. + + If *data* is ``None`` (the default), then the current local value (as set + with :meth:`gatts_write `) will be sent. + + Otherwise, if *data* is not ``None``, then that value is sent to the client + as part of the indication. The local value will not be modified. + + On acknowledgment (or failure, e.g. timeout), the + ``_IRQ_GATTS_INDICATE_DONE`` event will be raised. + + **Note:** The indication will be sent regardless of the subscription + status of the client to this characteristic. + """ + ... + def gattc_write(self, conn_handle, value_handle, data, mode=0, /) -> None: + """ + Issue a remote write to a connected server for the specified + characteristic or descriptor handle. + + The argument *mode* specifies the write behaviour, with the currently + supported values being: + + * ``mode=0`` (default) is a write-without-response: the write will + be sent to the remote server but no confirmation will be + returned, and no event will be raised. + * ``mode=1`` is a write-with-response: the remote server is + requested to send a response/acknowledgement that it received the + data. + + If a response is received from the remote server the + ``_IRQ_GATTC_WRITE_DONE`` event will be raised. + """ + ... + def gattc_read(self, conn_handle, value_handle, /) -> None: + """ + Issue a remote read to a connected server for the specified + characteristic or descriptor handle. + + When a value is available, the ``_IRQ_GATTC_READ_RESULT`` event will be + raised. Additionally, the ``_IRQ_GATTC_READ_DONE`` will be raised. + """ + ... + def gattc_exchange_mtu(self, conn_handle, /) -> Incomplete: + """ + Initiate MTU exchange with a connected server, using the preferred MTU + set using ``BLE.config(mtu=value)``. + + The ``_IRQ_MTU_EXCHANGED`` event will be raised when MTU exchange + completes. + + **Note:** MTU exchange is typically initiated by the central. When using + the BlueKitchen stack in the central role, it does not support a remote + peripheral initiating the MTU exchange. NimBLE works for both roles. + """ + ... 
+ def gatts_read(self, value_handle, /) -> Incomplete: + """ + Reads the local value for this handle (which has either been written by + :meth:`gatts_write ` or by a remote client). + """ + ... + def gatts_write(self, value_handle, data, send_update=False, /) -> None: + """ + Writes the local value for this handle, which can be read by a client. + + If *send_update* is ``True``, then any subscribed clients will be notified + (or indicated, depending on what they're subscribed to and which operations + the characteristic supports) about this write. + """ + ... + def gatts_set_buffer(self, value_handle, len, append=False, /) -> None: + """ + Sets the internal buffer size for a value in bytes. This will limit the + largest possible write that can be received. The default is 20. + + Setting *append* to ``True`` will make all remote writes append to, rather + than replace, the current value. At most *len* bytes can be buffered in + this way. When you use :meth:`gatts_read `, the value will + be cleared after reading. This feature is useful when implementing something + like the Nordic UART Service. + """ + ... + def gatts_register_services(self, services_definition, /) -> Incomplete: + """ + Configures the server with the specified services, replacing any + existing services. + + *services_definition* is a list of **services**, where each **service** is a + two-element tuple containing a UUID and a list of **characteristics**. + + Each **characteristic** is a two-or-three-element tuple containing a UUID, a + **flags** value, and optionally a list of *descriptors*. + + Each **descriptor** is a two-element tuple containing a UUID and a **flags** + value. + + The **flags** are a bitwise-OR combination of the flags defined below. These + set both the behaviour of the characteristic (or descriptor) as well as the + security and privacy requirements. + + The return value is a list (one element per service) of tuples (each element + is a value handle). Characteristics and descriptor handles are flattened + into the same tuple, in the order that they are defined. + + The following example registers two services (Heart Rate, and Nordic UART):: + + HR_UUID = bluetooth.UUID(0x180D) + HR_CHAR = (bluetooth.UUID(0x2A37), bluetooth.FLAG_READ | bluetooth.FLAG_NOTIFY,) + HR_SERVICE = (HR_UUID, (HR_CHAR,),) + UART_UUID = bluetooth.UUID('6E400001-B5A3-F393-E0A9-E50E24DCCA9E') + UART_TX = (bluetooth.UUID('6E400003-B5A3-F393-E0A9-E50E24DCCA9E'), bluetooth.FLAG_READ | bluetooth.FLAG_NOTIFY,) + UART_RX = (bluetooth.UUID('6E400002-B5A3-F393-E0A9-E50E24DCCA9E'), bluetooth.FLAG_WRITE,) + UART_SERVICE = (UART_UUID, (UART_TX, UART_RX,),) + SERVICES = (HR_SERVICE, UART_SERVICE,) + ( (hr,), (tx, rx,), ) = bt.gatts_register_services(SERVICES) + + The three value handles (``hr``, ``tx``, ``rx``) can be used with + :meth:`gatts_read `, :meth:`gatts_write `, :meth:`gatts_notify `, and + :meth:`gatts_indicate `. + + **Note:** Advertising must be stopped before registering services. 
+ + Available flags for characteristics and descriptors are:: + + from micropython import const + _FLAG_BROADCAST = const(0x0001) + _FLAG_READ = const(0x0002) + _FLAG_WRITE_NO_RESPONSE = const(0x0004) + _FLAG_WRITE = const(0x0008) + _FLAG_NOTIFY = const(0x0010) + _FLAG_INDICATE = const(0x0020) + _FLAG_AUTHENTICATED_SIGNED_WRITE = const(0x0040) + + _FLAG_AUX_WRITE = const(0x0100) + _FLAG_READ_ENCRYPTED = const(0x0200) + _FLAG_READ_AUTHENTICATED = const(0x0400) + _FLAG_READ_AUTHORIZED = const(0x0800) + _FLAG_WRITE_ENCRYPTED = const(0x1000) + _FLAG_WRITE_AUTHENTICATED = const(0x2000) + _FLAG_WRITE_AUTHORIZED = const(0x4000) + + As for the IRQs above, any required constants should be added to your Python code. + """ + ... + def irq(self, handler, /) -> int: + """ + Registers a callback for events from the BLE stack. The *handler* takes two + arguments, ``event`` (which will be one of the codes below) and ``data`` + (which is an event-specific tuple of values). + + **Note:** As an optimisation to prevent unnecessary allocations, the ``addr``, + ``adv_data``, ``char_data``, ``notify_data``, and ``uuid`` entries in the + tuples are read-only memoryview instances pointing to :mod:`bluetooth`'s internal + ringbuffer, and are only valid during the invocation of the IRQ handler + function. If your program needs to save one of these values to access after + the IRQ handler has returned (e.g. by saving it in a class instance or global + variable), then it needs to take a copy of the data, either by using ``bytes()`` + or ``bluetooth.UUID()``, like this:: + + connected_addr = bytes(addr) # equivalently: adv_data, char_data, or notify_data + matched_uuid = bluetooth.UUID(uuid) + + For example, the IRQ handler for a scan result might inspect the ``adv_data`` + to decide if it's the correct device, and only then copy the address data to be + used elsewhere in the program. And to print data from within the IRQ handler, + ``print(bytes(addr))`` will be needed. + + An event handler showing all possible events:: + + def bt_irq(event, data): + if event == _IRQ_CENTRAL_CONNECT: + # A central has connected to this peripheral. + conn_handle, addr_type, addr = data + elif event == _IRQ_CENTRAL_DISCONNECT: + # A central has disconnected from this peripheral. + conn_handle, addr_type, addr = data + elif event == _IRQ_GATTS_WRITE: + # A client has written to this characteristic or descriptor. + conn_handle, attr_handle = data + elif event == _IRQ_GATTS_READ_REQUEST: + # A client has issued a read. Note: this is only supported on STM32. + # Return a non-zero integer to deny the read (see below), or zero (or None) + # to accept the read. + conn_handle, attr_handle = data + elif event == _IRQ_SCAN_RESULT: + # A single scan result. + addr_type, addr, adv_type, rssi, adv_data = data + elif event == _IRQ_SCAN_DONE: + # Scan duration finished or manually stopped. + pass + elif event == _IRQ_PERIPHERAL_CONNECT: + # A successful gap_connect(). + conn_handle, addr_type, addr = data + elif event == _IRQ_PERIPHERAL_DISCONNECT: + # Connected peripheral has disconnected. + conn_handle, addr_type, addr = data + elif event == _IRQ_GATTC_SERVICE_RESULT: + # Called for each service found by gattc_discover_services(). + conn_handle, start_handle, end_handle, uuid = data + elif event == _IRQ_GATTC_SERVICE_DONE: + # Called once service discovery is complete. + # Note: Status will be zero on success, implementation-specific value otherwise. 
+ conn_handle, status = data + elif event == _IRQ_GATTC_CHARACTERISTIC_RESULT: + # Called for each characteristic found by gattc_discover_services(). + conn_handle, end_handle, value_handle, properties, uuid = data + elif event == _IRQ_GATTC_CHARACTERISTIC_DONE: + # Called once service discovery is complete. + # Note: Status will be zero on success, implementation-specific value otherwise. + conn_handle, status = data + elif event == _IRQ_GATTC_DESCRIPTOR_RESULT: + # Called for each descriptor found by gattc_discover_descriptors(). + conn_handle, dsc_handle, uuid = data + elif event == _IRQ_GATTC_DESCRIPTOR_DONE: + # Called once service discovery is complete. + # Note: Status will be zero on success, implementation-specific value otherwise. + conn_handle, status = data + elif event == _IRQ_GATTC_READ_RESULT: + # A gattc_read() has completed. + conn_handle, value_handle, char_data = data + elif event == _IRQ_GATTC_READ_DONE: + # A gattc_read() has completed. + # Note: Status will be zero on success, implementation-specific value otherwise. + conn_handle, value_handle, status = data + elif event == _IRQ_GATTC_WRITE_DONE: + # A gattc_write() has completed. + # Note: Status will be zero on success, implementation-specific value otherwise. + conn_handle, value_handle, status = data + elif event == _IRQ_GATTC_NOTIFY: + # A server has sent a notify request. + conn_handle, value_handle, notify_data = data + elif event == _IRQ_GATTC_INDICATE: + # A server has sent an indicate request. + conn_handle, value_handle, notify_data = data + elif event == _IRQ_GATTS_INDICATE_DONE: + # A client has acknowledged the indication. + # Note: Status will be zero on successful acknowledgment, implementation-specific value otherwise. + conn_handle, value_handle, status = data + elif event == _IRQ_MTU_EXCHANGED: + # ATT MTU exchange complete (either initiated by us or the remote device). + conn_handle, mtu = data + elif event == _IRQ_L2CAP_ACCEPT: + # A new channel has been accepted. + # Return a non-zero integer to reject the connection, or zero (or None) to accept. + conn_handle, cid, psm, our_mtu, peer_mtu = data + elif event == _IRQ_L2CAP_CONNECT: + # A new channel is now connected (either as a result of connecting or accepting). + conn_handle, cid, psm, our_mtu, peer_mtu = data + elif event == _IRQ_L2CAP_DISCONNECT: + # Existing channel has disconnected (status is zero), or a connection attempt failed (non-zero status). + conn_handle, cid, psm, status = data + elif event == _IRQ_L2CAP_RECV: + # New data is available on the channel. Use l2cap_recvinto to read. + conn_handle, cid = data + elif event == _IRQ_L2CAP_SEND_READY: + # A previous l2cap_send that returned False has now completed and the channel is ready to send again. + # If status is non-zero, then the transmit buffer overflowed and the application should re-send the data. + conn_handle, cid, status = data + elif event == _IRQ_CONNECTION_UPDATE: + # The remote device has updated connection parameters. + conn_handle, conn_interval, conn_latency, supervision_timeout, status = data + elif event == _IRQ_ENCRYPTION_UPDATE: + # The encryption state has changed (likely as a result of pairing or bonding). + conn_handle, encrypted, authenticated, bonded, key_size = data + elif event == _IRQ_GET_SECRET: + # Return a stored secret. + # If key is None, return the index'th value of this sec_type. + # Otherwise return the corresponding value for this sec_type and key. 
+ sec_type, index, key = data + return value + elif event == _IRQ_SET_SECRET: + # Save a secret to the store for this sec_type and key. + sec_type, key, value = data + return True + elif event == _IRQ_PASSKEY_ACTION: + # Respond to a passkey request during pairing. + # See gap_passkey() for details. + # action will be an action that is compatible with the configured "io" config. + # passkey will be non-zero if action is "numeric comparison". + conn_handle, action, passkey = data + + + The event codes are:: + + from micropython import const + _IRQ_CENTRAL_CONNECT = const(1) + _IRQ_CENTRAL_DISCONNECT = const(2) + _IRQ_GATTS_WRITE = const(3) + _IRQ_GATTS_READ_REQUEST = const(4) + _IRQ_SCAN_RESULT = const(5) + _IRQ_SCAN_DONE = const(6) + _IRQ_PERIPHERAL_CONNECT = const(7) + _IRQ_PERIPHERAL_DISCONNECT = const(8) + _IRQ_GATTC_SERVICE_RESULT = const(9) + _IRQ_GATTC_SERVICE_DONE = const(10) + _IRQ_GATTC_CHARACTERISTIC_RESULT = const(11) + _IRQ_GATTC_CHARACTERISTIC_DONE = const(12) + _IRQ_GATTC_DESCRIPTOR_RESULT = const(13) + _IRQ_GATTC_DESCRIPTOR_DONE = const(14) + _IRQ_GATTC_READ_RESULT = const(15) + _IRQ_GATTC_READ_DONE = const(16) + _IRQ_GATTC_WRITE_DONE = const(17) + _IRQ_GATTC_NOTIFY = const(18) + _IRQ_GATTC_INDICATE = const(19) + _IRQ_GATTS_INDICATE_DONE = const(20) + _IRQ_MTU_EXCHANGED = const(21) + _IRQ_L2CAP_ACCEPT = const(22) + _IRQ_L2CAP_CONNECT = const(23) + _IRQ_L2CAP_DISCONNECT = const(24) + _IRQ_L2CAP_RECV = const(25) + _IRQ_L2CAP_SEND_READY = const(26) + _IRQ_CONNECTION_UPDATE = const(27) + _IRQ_ENCRYPTION_UPDATE = const(28) + _IRQ_GET_SECRET = const(29) + _IRQ_SET_SECRET = const(30) + + For the ``_IRQ_GATTS_READ_REQUEST`` event, the available return codes are:: + + _GATTS_NO_ERROR = const(0x00) + _GATTS_ERROR_READ_NOT_PERMITTED = const(0x02) + _GATTS_ERROR_WRITE_NOT_PERMITTED = const(0x03) + _GATTS_ERROR_INSUFFICIENT_AUTHENTICATION = const(0x05) + _GATTS_ERROR_INSUFFICIENT_AUTHORIZATION = const(0x08) + _GATTS_ERROR_INSUFFICIENT_ENCRYPTION = const(0x0f) + + For the ``_IRQ_PASSKEY_ACTION`` event, the available actions are:: + + _PASSKEY_ACTION_NONE = const(0) + _PASSKEY_ACTION_INPUT = const(2) + _PASSKEY_ACTION_DISPLAY = const(3) + _PASSKEY_ACTION_NUMERIC_COMPARISON = const(4) + + In order to save space in the firmware, these constants are not included on the + :mod:`bluetooth` module. Add the ones that you need from the list above to your + program. + """ + ... + def gap_connect(self, addr_type, addr, scan_duration_ms=2000, min_conn_interval_us=None, max_conn_interval_us=None, /) -> None: + """ + Connect to a peripheral. + + See :meth:`gap_scan ` for details about address types. + + To cancel an outstanding connection attempt early, call + ``gap_connect(None)``. + + On success, the ``_IRQ_PERIPHERAL_CONNECT`` event will be raised. If + cancelling a connection attempt, the ``_IRQ_PERIPHERAL_DISCONNECT`` event + will be raised. + + The device will wait up to *scan_duration_ms* to receive an advertising + payload from the device. + + The connection interval can be configured in **micro** seconds using either + or both of *min_conn_interval_us* and *max_conn_interval_us*. Otherwise a + default interval will be chosen, typically between 30000 and 50000 + microseconds. A shorter interval will increase throughput, at the expense + of power usage. + """ + ... + def gap_advertise(self, interval_us, adv_data=None, *, resp_data=None, connectable=True) -> Incomplete: + """ + Starts advertising at the specified interval (in **micro** seconds). 
This + interval will be rounded down to the nearest 625us. To stop advertising, set + *interval_us* to ``None``. + + *adv_data* and *resp_data* can be any type that implements the buffer + protocol (e.g. ``bytes``, ``bytearray``, ``str``). *adv_data* is included + in all broadcasts, and *resp_data* is send in reply to an active scan. + + **Note:** if *adv_data* (or *resp_data*) is ``None``, then the data passed + to the previous call to ``gap_advertise`` will be re-used. This allows a + broadcaster to resume advertising with just ``gap_advertise(interval_us)``. + To clear the advertising payload pass an empty ``bytes``, i.e. ``b''``. + """ + ... + def config(self, param, /) -> Tuple: + """ + Get or set configuration values of the BLE interface. To get a value the + parameter name should be quoted as a string, and just one parameter is + queried at a time. To set values use the keyword syntax, and one or more + parameter can be set at a time. + + Currently supported values are: + + - ``'mac'``: The current address in use, depending on the current address mode. + This returns a tuple of ``(addr_type, addr)``. + + See :meth:`gatts_write ` for details about address type. + + This may only be queried while the interface is currently active. + + - ``'addr_mode'``: Sets the address mode. Values can be: + + * 0x00 - PUBLIC - Use the controller's public address. + * 0x01 - RANDOM - Use a generated static address. + * 0x02 - RPA - Use resolvable private addresses. + * 0x03 - NRPA - Use non-resolvable private addresses. + + By default the interface mode will use a PUBLIC address if available, otherwise + it will use a RANDOM address. + + - ``'gap_name'``: Get/set the GAP device name used by service 0x1800, + characteristic 0x2a00. This can be set at any time and changed multiple + times. + + - ``'rxbuf'``: Get/set the size in bytes of the internal buffer used to store + incoming events. This buffer is global to the entire BLE driver and so + handles incoming data for all events, including all characteristics. + Increasing this allows better handling of bursty incoming data (for + example scan results) and the ability to receive larger characteristic values. + + - ``'mtu'``: Get/set the MTU that will be used during a ATT MTU exchange. The + resulting MTU will be the minimum of this and the remote device's MTU. + ATT MTU exchange will not happen automatically (unless the remote device initiates + it), and must be manually initiated with + :meth:`gattc_exchange_mtu`. + Use the ``_IRQ_MTU_EXCHANGED`` event to discover the MTU for a given connection. + + - ``'bond'``: Sets whether bonding will be enabled during pairing. When + enabled, pairing requests will set the "bond" flag and the keys will be stored + by both devices. + + - ``'mitm'``: Sets whether MITM-protection is required for pairing. + + - ``'io'``: Sets the I/O capabilities of this device. + + Available options are:: + + _IO_CAPABILITY_DISPLAY_ONLY = const(0) + _IO_CAPABILITY_DISPLAY_YESNO = const(1) + _IO_CAPABILITY_KEYBOARD_ONLY = const(2) + _IO_CAPABILITY_NO_INPUT_OUTPUT = const(3) + _IO_CAPABILITY_KEYBOARD_DISPLAY = const(4) + + - ``'le_secure'``: Sets whether "LE Secure" pairing is required. Default is + false (i.e. allow "Legacy Pairing"). + """ + ... + def active(self, active: Optional[Any] = None, /) -> Incomplete: + """ + Optionally changes the active state of the BLE radio, and returns the + current state. + + The radio must be made active before using any other methods on this class. + """ + ... 
+ def gattc_discover_services(self, conn_handle, uuid=None, /) -> Incomplete: + """ + Query a connected server for its services. + + Optionally specify a service *uuid* to query for that service only. + + For each service discovered, the ``_IRQ_GATTC_SERVICE_RESULT`` event will + be raised, followed by ``_IRQ_GATTC_SERVICE_DONE`` on completion. + """ + ... + def gap_disconnect(self, conn_handle, /) -> bool: + """ + Disconnect the specified connection handle. This can either be a + central that has connected to this device (if acting as a peripheral) + or a peripheral that was previously connected to by this device (if acting + as a central). + + On success, the ``_IRQ_PERIPHERAL_DISCONNECT`` or ``_IRQ_CENTRAL_DISCONNECT`` + event will be raised. + + Returns ``False`` if the connection handle wasn't connected, and ``True`` + otherwise. + """ + ... + def gattc_discover_descriptors(self, conn_handle, start_handle, end_handle, /) -> Incomplete: + """ + Query a connected server for descriptors in the specified range. + + For each descriptor discovered, the ``_IRQ_GATTC_DESCRIPTOR_RESULT`` event + will be raised, followed by ``_IRQ_GATTC_DESCRIPTOR_DONE`` on completion. + """ + ... + def gattc_discover_characteristics(self, conn_handle, start_handle, end_handle, uuid=None, /) -> Incomplete: + """ + Query a connected server for characteristics in the specified range. + + Optionally specify a characteristic *uuid* to query for that + characteristic only. + + You can use ``start_handle=1``, ``end_handle=0xffff`` to search for a + characteristic in any service. + + For each characteristic discovered, the ``_IRQ_GATTC_CHARACTERISTIC_RESULT`` + event will be raised, followed by ``_IRQ_GATTC_CHARACTERISTIC_DONE`` on completion. + """ + ... + def gap_scan(self, duration_ms, interval_us=1280000, window_us=11250, active=False, /) -> Incomplete: + """ + Run a scan operation lasting for the specified duration (in **milli** seconds). + + To scan indefinitely, set *duration_ms* to ``0``. + + To stop scanning, set *duration_ms* to ``None``. + + Use *interval_us* and *window_us* to optionally configure the duty cycle. + The scanner will run for *window_us* **micro** seconds every *interval_us* + **micro** seconds for a total of *duration_ms* **milli** seconds. The default + interval and window are 1.28 seconds and 11.25 milliseconds respectively + (background scanning). + + For each scan result the ``_IRQ_SCAN_RESULT`` event will be raised, with event + data ``(addr_type, addr, adv_type, rssi, adv_data)``. + + ``addr_type`` values indicate public or random addresses: + * 0x00 - PUBLIC + * 0x01 - RANDOM (either static, RPA, or NRPA, the type is encoded in the address itself) + + ``adv_type`` values correspond to the Bluetooth Specification: + + * 0x00 - ADV_IND - connectable and scannable undirected advertising + * 0x01 - ADV_DIRECT_IND - connectable directed advertising + * 0x02 - ADV_SCAN_IND - scannable undirected advertising + * 0x03 - ADV_NONCONN_IND - non-connectable undirected advertising + * 0x04 - SCAN_RSP - scan response + + ``active`` can be set ``True`` if you want to receive scan responses in the results. + + When scanning is stopped (either due to the duration finishing or when + explicitly stopped), the ``_IRQ_SCAN_DONE`` event will be raised. + """ + ... + def __init__(self) -> None: ... 
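The ``bluetooth`` stub above mirrors the documented BLE API closely enough to sketch a complete peripheral: register the Nordic UART service from the ``gatts_register_services`` docstring, install an ``irq`` handler, and advertise. The sketch below is assembled from those docstrings only — the advertising payload builder, the ``pico-uart`` device name and the 250 ms interval are illustrative choices, and the IRQ constants are re-declared locally because, as noted above, the module does not export them::

    import bluetooth
    from micropython import const

    _IRQ_CENTRAL_CONNECT = const(1)
    _IRQ_CENTRAL_DISCONNECT = const(2)
    _IRQ_GATTS_WRITE = const(3)

    _UART_UUID = bluetooth.UUID('6E400001-B5A3-F393-E0A9-E50E24DCCA9E')
    _UART_TX = (bluetooth.UUID('6E400003-B5A3-F393-E0A9-E50E24DCCA9E'),
                bluetooth.FLAG_READ | bluetooth.FLAG_NOTIFY,)
    _UART_RX = (bluetooth.UUID('6E400002-B5A3-F393-E0A9-E50E24DCCA9E'),
                bluetooth.FLAG_WRITE,)
    _UART_SERVICE = (_UART_UUID, (_UART_TX, _UART_RX,),)

    def _irq(event, data):
        if event == _IRQ_CENTRAL_CONNECT:
            conn_handle, addr_type, addr = data
            print('connected', bytes(addr))            # copy the memoryview before keeping it
        elif event == _IRQ_CENTRAL_DISCONNECT:
            ble.gap_advertise(250_000, adv_data=_adv)  # resume advertising
        elif event == _IRQ_GATTS_WRITE:
            conn_handle, attr_handle = data
            print('rx:', ble.gatts_read(attr_handle))  # read what the client wrote

    ble = bluetooth.BLE()
    ble.active(True)
    # Register services before advertising, as required by the docstring above.
    ((tx_handle, rx_handle),) = ble.gatts_register_services((_UART_SERVICE,))

    # Advertising payload: flags record (0x06) followed by a complete local name.
    _name = b'pico-uart'
    _adv = b'\x02\x01\x06' + bytes((len(_name) + 1, 0x09)) + _name

    ble.irq(_irq)
    ble.gap_advertise(250_000, adv_data=_adv)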
diff --git a/.vscode/Pico-W-Stub/cmath.pyi b/.vscode/Pico-W-Stub/cmath.pyi new file mode 100644 index 0000000..87bed56 --- /dev/null +++ b/.vscode/Pico-W-Stub/cmath.pyi @@ -0,0 +1,72 @@ +""" +Mathematical functions for complex numbers. + +MicroPython module: https://docs.micropython.org/en/v1.21.0/library/cmath.html + +CPython module: :mod:`python:cmath` https://docs.python.org/3/library/cmath.html . + +The ``cmath`` module provides some basic mathematical functions for +working with complex numbers. + +Availability: not available on WiPy and ESP8266. Floating point support +required for this module. +""" +from _typeshed import Incomplete, Incomplete as Incomplete +from typing import Tuple + +e: float +pi: float + +def polar(z) -> Tuple: + """ + Returns, as a tuple, the polar form of ``z``. + """ + ... + +def sqrt(z) -> Incomplete: + """ + Return the square-root of ``z``. + """ + ... + +def rect(r, phi) -> float: + """ + Returns the complex number with modulus ``r`` and phase ``phi``. + """ + ... + +def sin(z) -> float: + """ + Return the sine of ``z``. + """ + ... + +def exp(z) -> float: + """ + Return the exponential of ``z``. + """ + ... + +def cos(z) -> float: + """ + Return the cosine of ``z``. + """ + ... + +def phase(z) -> float: + """ + Returns the phase of the number ``z``, in the range (-pi, +pi]. + """ + ... + +def log(z) -> float: + """ + Return the natural logarithm of ``z``. The branch cut is along the negative real axis. + """ + ... + +def log10(z) -> float: + """ + Return the base-10 logarithm of ``z``. The branch cut is along the negative real axis. + """ + ... diff --git a/.vscode/Pico-W-Stub/collections.pyi b/.vscode/Pico-W-Stub/collections.pyi new file mode 100644 index 0000000..b4597a6 --- /dev/null +++ b/.vscode/Pico-W-Stub/collections.pyi @@ -0,0 +1,105 @@ +""" +Collection and container types. + +MicroPython module: https://docs.micropython.org/en/v1.21.0/library/collections.html + +CPython module: :mod:`python:collections` https://docs.python.org/3/library/collections.html . + +This module implements advanced collection and container types to +hold/accumulate various objects. +""" +from _typeshed import Incomplete, Incomplete as Incomplete +from stdlib.collections import OrderedDict as stdlib_OrderedDict, deque as stdlib_deque +from typing_extensions import NamedTuple as stdlib_NamedTuple +from typing import Any, Optional + +def namedtuple(name, fields) -> stdlib_NamedTuple: + """ + This is factory function to create a new namedtuple type with a specific + name and set of fields. A namedtuple is a subclass of tuple which allows + to access its fields not just by numeric index, but also with an attribute + access syntax using symbolic field names. Fields is a sequence of strings + specifying field names. For compatibility with CPython it can also be a + a string with space-separated field named (but this is less efficient). + Example of use:: + + from collections import namedtuple + + MyTuple = namedtuple("MyTuple", ("id", "name")) + t1 = MyTuple(1, "foo") + t2 = MyTuple(2, "bar") + print(t1.name) + assert t2.name == t2[1] + """ + ... + +class OrderedDict(stdlib_OrderedDict): + """ + ``dict`` type subclass which remembers and preserves the order of keys + added. When ordered dict is iterated over, keys/items are returned in + the order they were added:: + + from collections import OrderedDict + + # To make benefit of ordered keys, OrderedDict should be initialized + # from sequence of (key, value) pairs. 
+ d = OrderedDict([("z", 1), ("a", 2)]) + # More items can be added as usual + d["w"] = 5 + d["b"] = 3 + for k, v in d.items(): + print(k, v) + + Output:: + + z 1 + a 2 + w 5 + b 3 + """ + + def popitem(self, *args, **kwargs) -> Incomplete: ... + def pop(self, *args, **kwargs) -> Incomplete: ... + def values(self, *args, **kwargs) -> Incomplete: ... + def setdefault(self, *args, **kwargs) -> Incomplete: ... + def update(self, *args, **kwargs) -> Incomplete: ... + def copy(self, *args, **kwargs) -> Incomplete: ... + def clear(self, *args, **kwargs) -> Incomplete: ... + def keys(self, *args, **kwargs) -> Incomplete: ... + def get(self, *args, **kwargs) -> Incomplete: ... + def items(self, *args, **kwargs) -> Incomplete: ... + @classmethod + def fromkeys(cls, *args, **kwargs) -> Incomplete: ... + def __init__(self, *args, **kwargs) -> None: ... + +class deque(stdlib_deque): + """ + Deques (double-ended queues) are a list-like container that support O(1) + appends and pops from either side of the deque. New deques are created + using the following arguments: + + - *iterable* must be the empty tuple, and the new deque is created empty. + + - *maxlen* must be specified and the deque will be bounded to this + maximum length. Once the deque is full, any new items added will + discard items from the opposite end. + + - The optional *flags* can be 1 to check for overflow when adding items. + + As well as supporting `bool` and `len`, deque objects have the following + methods: + """ + + def popleft(self) -> Incomplete: + """ + Remove and return an item from the left side of the deque. + Raises IndexError if no items are present. + """ + ... + def append(self, x) -> Incomplete: + """ + Add *x* to the right side of the deque. + Raises IndexError if overflow checking is enabled and there is no more room left. + """ + ... + def __init__(self, iterable, maxlen, flags: Optional[Any] = None) -> None: ... diff --git a/.vscode/Pico-W-Stub/cryptolib.pyi b/.vscode/Pico-W-Stub/cryptolib.pyi new file mode 100644 index 0000000..eeb2919 --- /dev/null +++ b/.vscode/Pico-W-Stub/cryptolib.pyi @@ -0,0 +1,42 @@ +""" +Cryptographic ciphers. + +MicroPython module: https://docs.micropython.org/en/v1.21.0/library/cryptolib.html +""" +from _typeshed import Incomplete, Incomplete as Incomplete +from typing import Any, Optional + +class aes: + def encrypt(self, in_buf, out_buf: Optional[Any] = None) -> Incomplete: + """ + Encrypt *in_buf*. If no *out_buf* is given result is returned as a + newly allocated `bytes` object. Otherwise, result is written into + mutable buffer *out_buf*. *in_buf* and *out_buf* can also refer + to the same mutable buffer, in which case data is encrypted in-place. + """ + ... + def decrypt(self, in_buf, out_buf: Optional[Any] = None) -> Incomplete: + """ + Like `encrypt()`, but for decryption. + """ + ... + def __init__(self, key, mode, IV: Optional[Any] = None) -> None: + """ + Initialize cipher object, suitable for encryption/decryption. Note: + after initialization, cipher object can be use only either for + encryption or decryption. Running decrypt() operation after encrypt() + or vice versa is not supported. + + Parameters are: + + * *key* is an encryption/decryption key (bytes-like). + * *mode* is: + + * ``1`` (or ``cryptolib.MODE_ECB`` if it exists) for Electronic Code Book (ECB). + * ``2`` (or ``cryptolib.MODE_CBC`` if it exists) for Cipher Block Chaining (CBC). + * ``6`` (or ``cryptolib.MODE_CTR`` if it exists) for Counter mode (CTR). + + * *IV* is an initialization vector for CBC mode. 
+ * For Counter mode, *IV* is the initial value for the counter. + """ + ... diff --git a/.vscode/Pico-W-Stub/deflate.pyi b/.vscode/Pico-W-Stub/deflate.pyi new file mode 100644 index 0000000..3bf856b --- /dev/null +++ b/.vscode/Pico-W-Stub/deflate.pyi @@ -0,0 +1,76 @@ +""" +Deflate compression & decompression. + +MicroPython module: https://docs.micropython.org/en/v1.21.0/library/deflate.html + +This module allows compression and decompression of binary data with the +`DEFLATE algorithm `_ +(commonly used in the zlib library and gzip archiver). + +**Availability:** + +* Added in MicroPython v1.21. + +* Decompression: Enabled via the ``MICROPY_PY_DEFLATE`` build option, on by default + on ports with the "extra features" level or higher (which is most boards). + +* Compression: Enabled via the ``MICROPY_PY_DEFLATE_COMPRESS`` build option, on + by default on ports with the "full features" level or higher (generally this means + you need to build your own firmware to enable this). +""" +from _typeshed import Incomplete, Incomplete as Incomplete + +GZIP: int +RAW: int +ZLIB: int +AUTO: int + +class DeflateIO: + """ + This class can be used to wrap a *stream* which is any + :term:`stream-like ` object such as a file, socket, or stream + (including :class:`io.BytesIO`). It is itself a stream and implements the + standard read/readinto/write/close methods. + + The *stream* must be a blocking stream. Non-blocking streams are currently + not supported. + + The *format* can be set to any of the constants defined below, and defaults + to ``AUTO`` which for decompressing will auto-detect gzip or zlib streams, + and for compressing it will generate a raw stream. + + The *wbits* parameter sets the base-2 logarithm of the DEFLATE dictionary + window size. So for example, setting *wbits* to ``10`` sets the window size + to 1024 bytes. Valid values are ``5`` to ``15`` inclusive (corresponding to + window sizes of 32 to 32k bytes). + + If *wbits* is set to ``0`` (the default), then for compression a window size + of 256 bytes will be used (as if *wbits* was set to 8). For decompression, it + depends on the format: + + * ``RAW`` will use 256 bytes (corresponding to *wbits* set to 8). + * ``ZLIB`` (or ``AUTO`` with zlib detected) will use the value from the zlib + header. + * ``GZIP`` (or ``AUTO`` with gzip detected) will use 32 kilobytes + (corresponding to *wbits* set to 15). + + See the :ref:`window size ` notes below for more information + about the window size, zlib, and gzip streams. + + If *close* is set to ``True`` then the underlying stream will be closed + automatically when the :class:`deflate.DeflateIO` stream is closed. This is + useful if you want to return a :class:`deflate.DeflateIO` stream that wraps + another stream and not have the caller need to know about managing the + underlying stream. + + If compression is enabled, a given :class:`deflate.DeflateIO` instance + supports both reading and writing. For example, a bidirectional stream like + a socket can be wrapped, which allows for compression/decompression in both + directions. + """ + + def readline(self, *args, **kwargs) -> Incomplete: ... + def readinto(self, *args, **kwargs) -> Incomplete: ... + def read(self, *args, **kwargs) -> Incomplete: ... + def close(self, *args, **kwargs) -> Incomplete: ... + def __init__(self, stream, format=AUTO, wbits=0, close=False, /) -> None: ... 
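``DeflateIO`` wraps any blocking stream, so a quick way to exercise it is an in-memory round trip. This is a sketch under the assumption that the firmware was built with ``MICROPY_PY_DEFLATE_COMPRESS`` (otherwise ``write()`` is unavailable and only the decompression half applies)::

    import io
    import deflate

    raw = b'hello hello hello hello'

    # Compress into an in-memory stream.
    buf = io.BytesIO()
    d = deflate.DeflateIO(buf, deflate.ZLIB)
    d.write(raw)
    d.close()                      # finishes the DEFLATE stream; buf stays open (close=False)
    compressed = buf.getvalue()

    # Decompress by wrapping a stream positioned at the compressed data.
    d = deflate.DeflateIO(io.BytesIO(compressed), deflate.ZLIB)
    assert d.read() == raw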
diff --git a/.vscode/Pico-W-Stub/dht.pyi b/.vscode/Pico-W-Stub/dht.pyi new file mode 100644 index 0000000..7ac764e --- /dev/null +++ b/.vscode/Pico-W-Stub/dht.pyi @@ -0,0 +1,15 @@ +from _typeshed import Incomplete + +class DHTBase: + pin: Incomplete + buf: Incomplete + def __init__(self, pin) -> None: ... + def measure(self) -> None: ... + +class DHT11(DHTBase): + def humidity(self): ... + def temperature(self): ... + +class DHT22(DHTBase): + def humidity(self): ... + def temperature(self): ... diff --git a/.vscode/Pico-W-Stub/ds18x20.pyi b/.vscode/Pico-W-Stub/ds18x20.pyi new file mode 100644 index 0000000..43b2015 --- /dev/null +++ b/.vscode/Pico-W-Stub/ds18x20.pyi @@ -0,0 +1,15 @@ +from _typeshed import Incomplete + +_CONVERT: Incomplete +_RD_SCRATCH: Incomplete +_WR_SCRATCH: Incomplete + +class DS18X20: + ow: Incomplete + buf: Incomplete + def __init__(self, onewire) -> None: ... + def scan(self): ... + def convert_temp(self) -> None: ... + def read_scratch(self, rom): ... + def write_scratch(self, rom, buf) -> None: ... + def read_temp(self, rom): ... diff --git a/.vscode/Pico-W-Stub/errno.pyi b/.vscode/Pico-W-Stub/errno.pyi new file mode 100644 index 0000000..b1c95b3 --- /dev/null +++ b/.vscode/Pico-W-Stub/errno.pyi @@ -0,0 +1,36 @@ +""" +System error codes. + +MicroPython module: https://docs.micropython.org/en/v1.21.0/library/errno.html + +CPython module: :mod:`python:errno` https://docs.python.org/3/library/errno.html . + +This module provides access to symbolic error codes for `OSError` exception. +A particular inventory of codes depends on :term:`MicroPython port`. +""" +from _typeshed import Incomplete, Incomplete as Incomplete +from typing import Dict + +ENOBUFS: int +ENODEV: int +ENOENT: int +EISDIR: int +EIO: int +EINVAL: int +EPERM: int +ETIMEDOUT: int +ENOMEM: int +EOPNOTSUPP: int +ENOTCONN: int +errorcode: dict +EAGAIN: int +EALREADY: int +EBADF: int +EADDRINUSE: int +EACCES: int +EINPROGRESS: int +EEXIST: int +EHOSTUNREACH: int +ECONNABORTED: int +ECONNRESET: int +ECONNREFUSED: int diff --git a/.vscode/Pico-W-Stub/framebuf.pyi b/.vscode/Pico-W-Stub/framebuf.pyi new file mode 100644 index 0000000..20cdfb0 --- /dev/null +++ b/.vscode/Pico-W-Stub/framebuf.pyi @@ -0,0 +1,142 @@ +""" +Frame buffer manipulation. + +MicroPython module: https://docs.micropython.org/en/v1.21.0/library/framebuf.html + +This module provides a general frame buffer which can be used to create +bitmap images, which can then be sent to a display. +""" +from _typeshed import Incomplete, Incomplete as Incomplete +from typing import Any, Optional + +MONO_HMSB: int +MONO_HLSB: int +RGB565: int +MONO_VLSB: int +MVLSB: int +GS2_HMSB: int +GS8: int +GS4_HMSB: int + +def FrameBuffer1(*args, **kwargs) -> Incomplete: ... + +class FrameBuffer: + """ + Construct a FrameBuffer object. The parameters are: + + - *buffer* is an object with a buffer protocol which must be large + enough to contain every pixel defined by the width, height and + format of the FrameBuffer. + - *width* is the width of the FrameBuffer in pixels + - *height* is the height of the FrameBuffer in pixels + - *format* specifies the type of pixel used in the FrameBuffer; + permissible values are listed under Constants below. These set the + number of bits used to encode a color value and the layout of these + bits in *buffer*. + Where a color value c is passed to a method, c is a small integer + with an encoding that is dependent on the format of the FrameBuffer. 
+ - *stride* is the number of pixels between each horizontal line + of pixels in the FrameBuffer. This defaults to *width* but may + need adjustments when implementing a FrameBuffer within another + larger FrameBuffer or screen. The *buffer* size must accommodate + an increased step size. + + One must specify valid *buffer*, *width*, *height*, *format* and + optionally *stride*. Invalid *buffer* size or dimensions may lead to + unexpected errors. + """ + + def poly(self, x, y, coords, c, f: Optional[Any] = None) -> Incomplete: + """ + Given a list of coordinates, draw an arbitrary (convex or concave) closed + polygon at the given x, y location using the given color. + + The *coords* must be specified as a :mod:`array` of integers, e.g. + ``array('h', [x0, y0, x1, y1, ... xn, yn])``. + + The optional *f* parameter can be set to ``True`` to fill the polygon. + Otherwise just a one pixel outline is drawn. + """ + ... + def vline(self, x, y, h, c) -> Incomplete: ... + def pixel(self, x, y, c: Optional[Any] = None) -> Incomplete: + """ + If *c* is not given, get the color value of the specified pixel. + If *c* is given, set the specified pixel to the given color. + """ + ... + def text(self, s, x, y, c: Optional[Any] = None) -> None: + """ + Write text to the FrameBuffer using the the coordinates as the upper-left + corner of the text. The color of the text can be defined by the optional + argument but is otherwise a default value of 1. All characters have + dimensions of 8x8 pixels and there is currently no way to change the font. + """ + ... + def rect(self, x, y, w, h, c, f: Optional[Any] = None) -> None: + """ + Draw a rectangle at the given location, size and color. + + The optional *f* parameter can be set to ``True`` to fill the rectangle. + Otherwise just a one pixel outline is drawn. + """ + ... + def scroll(self, xstep, ystep) -> Incomplete: + """ + Shift the contents of the FrameBuffer by the given vector. This may + leave a footprint of the previous colors in the FrameBuffer. + """ + ... + def ellipse(self, x, y, xr, yr, c, f, m: Optional[Any] = None) -> None: + """ + Draw an ellipse at the given location. Radii *xr* and *yr* define the + geometry; equal values cause a circle to be drawn. The *c* parameter + defines the color. + + The optional *f* parameter can be set to ``True`` to fill the ellipse. + Otherwise just a one pixel outline is drawn. + + The optional *m* parameter enables drawing to be restricted to certain + quadrants of the ellipse. The LS four bits determine which quadrants are + to be drawn, with bit 0 specifying Q1, b1 Q2, b2 Q3 and b3 Q4. Quadrants + are numbered counterclockwise with Q1 being top right. + """ + ... + def line(self, x1, y1, x2, y2, c) -> None: + """ + Draw a line from a set of coordinates using the given color and + a thickness of 1 pixel. The `line` method draws the line up to + a second set of coordinates whereas the `hline` and `vline` + methods draw horizontal and vertical lines respectively up to + a given length. + """ + ... + def blit(self, fbuf, x, y, key=-1, palette=None) -> None: + """ + Draw another FrameBuffer on top of the current one at the given coordinates. + If *key* is specified then it should be a color integer and the + corresponding color will be considered transparent: all pixels with that + color value will not be drawn. (If the *palette* is specified then the *key* + is compared to the value from *palette*, not to the value directly from + *fbuf*.) 
+ + The *palette* argument enables blitting between FrameBuffers with differing + formats. Typical usage is to render a monochrome or grayscale glyph/icon to + a color display. The *palette* is a FrameBuffer instance whose format is + that of the current FrameBuffer. The *palette* height is one pixel and its + pixel width is the number of colors in the source FrameBuffer. The *palette* + for an N-bit source needs 2**N pixels; the *palette* for a monochrome source + would have 2 pixels representing background and foreground colors. The + application assigns a color to each pixel in the *palette*. The color of the + current pixel will be that of that *palette* pixel whose x position is the + color of the corresponding source pixel. + """ + ... + def hline(self, x, y, w, c) -> Incomplete: ... + def fill(self, c) -> None: + """ + Fill the entire FrameBuffer with the specified color. + """ + ... + def fill_rect(self, *args, **kwargs) -> Incomplete: ... + def __init__(self, buffer, width, height, format, stride=-1, /) -> None: ... diff --git a/.vscode/Pico-W-Stub/gc.pyi b/.vscode/Pico-W-Stub/gc.pyi new file mode 100644 index 0000000..3f24727 --- /dev/null +++ b/.vscode/Pico-W-Stub/gc.pyi @@ -0,0 +1,75 @@ +""" +Control the garbage collector. + +MicroPython module: https://docs.micropython.org/en/v1.21.0/library/gc.html + +CPython module: :mod:`python:gc` https://docs.python.org/3/library/gc.html . +""" +from _typeshed import Incomplete, Incomplete as Incomplete +from typing import Any, Optional + +def mem_alloc() -> int: + """ + Return the number of bytes of heap RAM that are allocated by Python code. + + Difference to CPython + + This function is MicroPython extension. + """ + ... + +def isenabled(*args, **kwargs) -> Incomplete: ... +def mem_free() -> int: + """ + Return the number of bytes of heap RAM that is available for Python + code to allocate, or -1 if this amount is not known. + + Difference to CPython + + This function is MicroPython extension. + """ + ... + +def threshold(amount: Optional[Any] = None) -> Incomplete: + """ + Set or query the additional GC allocation threshold. Normally, a collection + is triggered only when a new allocation cannot be satisfied, i.e. on an + out-of-memory (OOM) condition. If this function is called, in addition to + OOM, a collection will be triggered each time after *amount* bytes have been + allocated (in total, since the previous time such an amount of bytes + have been allocated). *amount* is usually specified as less than the + full heap size, with the intention to trigger a collection earlier than when the + heap becomes exhausted, and in the hope that an early collection will prevent + excessive memory fragmentation. This is a heuristic measure, the effect + of which will vary from application to application, as well as + the optimal value of the *amount* parameter. + + Calling the function without argument will return the current value of + the threshold. A value of -1 means a disabled allocation threshold. + + Difference to CPython + + This function is a MicroPython extension. CPython has a similar + function - ``set_threshold()``, but due to different GC + implementations, its signature and semantics are different. + """ + ... + +def collect() -> None: + """ + Run a garbage collection. + """ + ... + +def enable() -> None: + """ + Enable automatic garbage collection. + """ + ... + +def disable() -> None: + """ + Disable automatic garbage collection. 
Heap memory can still be allocated, + and garbage collection can still be initiated manually using :meth:`gc.collect`. + """ + ... diff --git a/.vscode/Pico-W-Stub/hashlib.pyi b/.vscode/Pico-W-Stub/hashlib.pyi new file mode 100644 index 0000000..11bff03 --- /dev/null +++ b/.vscode/Pico-W-Stub/hashlib.pyi @@ -0,0 +1,45 @@ +""" +Hashing algorithms. + +MicroPython module: https://docs.micropython.org/en/v1.21.0/library/hashlib.html + +CPython module: :mod:`python:hashlib` https://docs.python.org/3/library/hashlib.html . + +This module implements binary data hashing algorithms. The exact inventory +of available algorithms depends on a board. Among the algorithms which may +be implemented: + +* SHA256 - The current generation, modern hashing algorithm (of SHA2 series). + It is suitable for cryptographically-secure purposes. Included in the + MicroPython core and any board is recommended to provide this, unless + it has particular code size constraints. + +* SHA1 - A previous generation algorithm. Not recommended for new usages, + but SHA1 is a part of number of Internet standards and existing + applications, so boards targeting network connectivity and + interoperability will try to provide this. + +* MD5 - A legacy algorithm, not considered cryptographically secure. Only + selected boards, targeting interoperability with legacy applications, + will offer this. +""" +from _typeshed import Incomplete, Incomplete as Incomplete +from typing import Any, Optional + +class sha256: + """ + Create an SHA256 hasher object and optionally feed ``data`` into it. + """ + + def digest(self, *args, **kwargs) -> Incomplete: ... + def update(self, *args, **kwargs) -> Incomplete: ... + def __init__(self, data: Optional[Any] = None) -> None: ... + +class sha1: + """ + Create an SHA1 hasher object and optionally feed ``data`` into it. + """ + + def digest(self, *args, **kwargs) -> Incomplete: ... + def update(self, *args, **kwargs) -> Incomplete: ... + def __init__(self, data: Optional[Any] = None) -> None: ... diff --git a/.vscode/Pico-W-Stub/heapq.pyi b/.vscode/Pico-W-Stub/heapq.pyi new file mode 100644 index 0000000..0ae5364 --- /dev/null +++ b/.vscode/Pico-W-Stub/heapq.pyi @@ -0,0 +1,35 @@ +""" +Heap queue algorithm. + +MicroPython module: https://docs.micropython.org/en/v1.21.0/library/heapq.html + +CPython module: :mod:`python:heapq` https://docs.python.org/3/library/heapq.html . + +This module implements the +`min heap queue algorithm `_. + +A heap queue is essentially a list that has its elements stored in such a way +that the first item of the list is always the smallest. +""" +from _typeshed import Incomplete, Incomplete as Incomplete + +def heappop(heap) -> Incomplete: + """ + Pop the first item from the ``heap``, and return it. Raise ``IndexError`` if + ``heap`` is empty. + + The returned item will be the smallest item in the ``heap``. + """ + ... + +def heappush(heap, item) -> Incomplete: + """ + Push the ``item`` onto the ``heap``. + """ + ... + +def heapify(x) -> Incomplete: + """ + Convert the list ``x`` into a heap. This is an in-place operation. + """ + ... diff --git a/.vscode/Pico-W-Stub/io.pyi b/.vscode/Pico-W-Stub/io.pyi new file mode 100644 index 0000000..343844d --- /dev/null +++ b/.vscode/Pico-W-Stub/io.pyi @@ -0,0 +1,64 @@ +""" +Input/output streams. + +MicroPython module: https://docs.micropython.org/en/v1.21.0/library/io.html + +CPython module: :mod:`python:io` https://docs.python.org/3/library/io.html . 
+ +This module contains additional types of `stream` (file-like) objects +and helper functions. +""" +from _typeshed import Incomplete, Incomplete as Incomplete +from stdlib.io import * +from typing import Any, IO, Optional + +def open(name, mode="r", **kwargs) -> Incomplete: + """ + Open a file. Builtin ``open()`` function is aliased to this function. + All ports (which provide access to file system) are required to support + *mode* parameter, but support for other arguments vary by port. + """ + ... + +class IOBase: + def __init__(self, *argv, **kwargs) -> None: ... + +class StringIO(IO): + def write(self, *args, **kwargs) -> Incomplete: ... + def flush(self, *args, **kwargs) -> Incomplete: ... + def getvalue(self, *args, **kwargs) -> Incomplete: ... + def seek(self, *args, **kwargs) -> Incomplete: ... + def tell(self, *args, **kwargs) -> Incomplete: ... + def readline(self, *args, **kwargs) -> Incomplete: ... + def close(self, *args, **kwargs) -> Incomplete: ... + def read(self, *args, **kwargs) -> Incomplete: ... + def readinto(self, *args, **kwargs) -> Incomplete: ... + def __init__(self, string: Optional[Any] = None) -> None: ... + +class BytesIO(IO): + """ + In-memory file-like objects for input/output. `StringIO` is used for + text-mode I/O (similar to a normal file opened with "t" modifier). + `BytesIO` is used for binary-mode I/O (similar to a normal file + opened with "b" modifier). Initial contents of file-like objects + can be specified with *string* parameter (should be normal string + for `StringIO` or bytes object for `BytesIO`). All the usual file + methods like ``read()``, ``write()``, ``seek()``, ``flush()``, + ``close()`` are available on these objects, and additionally, a + following method: + """ + + def write(self, *args, **kwargs) -> Incomplete: ... + def flush(self, *args, **kwargs) -> Incomplete: ... + def getvalue(self) -> Incomplete: + """ + Get the current contents of the underlying buffer which holds data. + """ + ... + def seek(self, *args, **kwargs) -> Incomplete: ... + def tell(self, *args, **kwargs) -> Incomplete: ... + def readline(self, *args, **kwargs) -> Incomplete: ... + def close(self, *args, **kwargs) -> Incomplete: ... + def read(self, *args, **kwargs) -> Incomplete: ... + def readinto(self, *args, **kwargs) -> Incomplete: ... + def __init__(self, string: Optional[Any] = None) -> None: ... diff --git a/.vscode/Pico-W-Stub/json.pyi b/.vscode/Pico-W-Stub/json.pyi new file mode 100644 index 0000000..f77f758 --- /dev/null +++ b/.vscode/Pico-W-Stub/json.pyi @@ -0,0 +1,47 @@ +""" +JSON encoding and decoding. + +MicroPython module: https://docs.micropython.org/en/v1.21.0/library/json.html + +CPython module: :mod:`python:json` https://docs.python.org/3/library/json.html . + +This modules allows to convert between Python objects and the JSON +data format. +""" +from _typeshed import Incomplete, Incomplete as Incomplete + +def loads(str) -> Incomplete: + """ + Parse the JSON *str* and return an object. Raises :exc:`ValueError` if the + string is not correctly formed. + """ + ... + +def load(stream) -> Incomplete: + """ + Parse the given *stream*, interpreting it as a JSON string and + deserialising the data to a Python object. The resulting object is + returned. + + Parsing continues until end-of-file is encountered. + A :exc:`ValueError` is raised if the data in *stream* is not correctly formed. + """ + ... + +def dumps(obj, separators=None) -> str: + """ + Return *obj* represented as a JSON string. + + The arguments have the same meaning as in `dump`. 
+ """ + ... + +def dump(obj, stream, separators=None) -> Incomplete: + """ + Serialise *obj* to a JSON string, writing it to the given *stream*. + + If specified, separators should be an ``(item_separator, key_separator)`` + tuple. The default is ``(', ', ': ')``. To get the most compact JSON + representation, you should specify ``(',', ':')`` to eliminate whitespace. + """ + ... diff --git a/.vscode/Pico-W-Stub/lwip.pyi b/.vscode/Pico-W-Stub/lwip.pyi new file mode 100644 index 0000000..f6ba846 --- /dev/null +++ b/.vscode/Pico-W-Stub/lwip.pyi @@ -0,0 +1,39 @@ +from _typeshed import Incomplete as Incomplete + +SOCK_STREAM: int +SOCK_RAW: int +SOCK_DGRAM: int +SOL_SOCKET: int +SO_BROADCAST: int +SO_REUSEADDR: int +AF_INET6: int +AF_INET: int +IP_DROP_MEMBERSHIP: int +IPPROTO_IP: int +IP_ADD_MEMBERSHIP: int + +def reset(*args, **kwargs) -> Incomplete: ... +def print_pcbs(*args, **kwargs) -> Incomplete: ... +def getaddrinfo(*args, **kwargs) -> Incomplete: ... +def callback(*args, **kwargs) -> Incomplete: ... + +class socket: + def recvfrom(self, *args, **kwargs) -> Incomplete: ... + def recv(self, *args, **kwargs) -> Incomplete: ... + def makefile(self, *args, **kwargs) -> Incomplete: ... + def listen(self, *args, **kwargs) -> Incomplete: ... + def settimeout(self, *args, **kwargs) -> Incomplete: ... + def sendall(self, *args, **kwargs) -> Incomplete: ... + def setsockopt(self, *args, **kwargs) -> Incomplete: ... + def setblocking(self, *args, **kwargs) -> Incomplete: ... + def sendto(self, *args, **kwargs) -> Incomplete: ... + def readline(self, *args, **kwargs) -> Incomplete: ... + def readinto(self, *args, **kwargs) -> Incomplete: ... + def read(self, *args, **kwargs) -> Incomplete: ... + def close(self, *args, **kwargs) -> Incomplete: ... + def connect(self, *args, **kwargs) -> Incomplete: ... + def send(self, *args, **kwargs) -> Incomplete: ... + def bind(self, *args, **kwargs) -> Incomplete: ... + def accept(self, *args, **kwargs) -> Incomplete: ... + def write(self, *args, **kwargs) -> Incomplete: ... + def __init__(self, *argv, **kwargs) -> None: ... diff --git a/.vscode/Pico-W-Stub/machine.pyi b/.vscode/Pico-W-Stub/machine.pyi new file mode 100644 index 0000000..66b5aa7 --- /dev/null +++ b/.vscode/Pico-W-Stub/machine.pyi @@ -0,0 +1,1193 @@ +""" +Functions related to the hardware. + +MicroPython module: https://docs.micropython.org/en/v1.21.0/library/machine.html + +The ``machine`` module contains specific functions related to the hardware +on a particular board. Most functions in this module allow to achieve direct +and unrestricted access to and control of hardware blocks on a system +(like CPU, timers, buses, etc.). Used incorrectly, this can lead to +malfunction, lockups, crashes of your board, and in extreme cases, hardware +damage. +""" +from _typeshed import Incomplete, Incomplete as Incomplete +from typing import Any, Callable, List, NoReturn, Optional, Tuple, Union + +WDT_RESET: int +PWRON_RESET: int + +def dht_readinto(*args, **kwargs) -> Incomplete: ... +def enable_irq(state) -> Incomplete: + """ + Re-enable interrupt requests. + The *state* parameter should be the value that was returned from the most + recent call to the `disable_irq()` function. + """ + ... + +def disable_irq() -> Incomplete: + """ + Disable interrupt requests. + Returns the previous IRQ state which should be considered an opaque value. + This return value should be passed to the `enable_irq()` function to restore + interrupts to their original state, before `disable_irq()` was called. + """ + ... 
+ +def bitstream(pin, encoding, timing, data, /) -> Incomplete: + """ + Transmits *data* by bit-banging the specified *pin*. The *encoding* argument + specifies how the bits are encoded, and *timing* is an encoding-specific timing + specification. + + The supported encodings are: + + - ``0`` for "high low" pulse duration modulation. This will transmit 0 and + 1 bits as timed pulses, starting with the most significant bit. + The *timing* must be a four-tuple of nanoseconds in the format + ``(high_time_0, low_time_0, high_time_1, low_time_1)``. For example, + ``(400, 850, 800, 450)`` is the timing specification for WS2812 RGB LEDs + at 800kHz. + + The accuracy of the timing varies between ports. On Cortex M0 at 48MHz, it is + at best +/- 120ns, however on faster MCUs (ESP8266, ESP32, STM32, Pyboard), it + will be closer to +/-30ns. + + ``Note:`` For controlling WS2812 / NeoPixel strips, see the :mod:`neopixel` + module for a higher-level API. + """ + ... + +def deepsleep(time_ms: Optional[Any] = None) -> NoReturn: + """ + Stops execution in an attempt to enter a low power state. + + If *time_ms* is specified then this will be the maximum time in milliseconds that + the sleep will last for. Otherwise the sleep can last indefinitely. + + With or without a timeout, execution may resume at any time if there are events + that require processing. Such events, or wake sources, should be configured before + sleeping, like `Pin` change or `RTC` timeout. + + The precise behaviour and power-saving capabilities of lightsleep and deepsleep is + highly dependent on the underlying hardware, but the general properties are: + + * A lightsleep has full RAM and state retention. Upon wake execution is resumed + from the point where the sleep was requested, with all subsystems operational. + + * A deepsleep may not retain RAM or any other state of the system (for example + peripherals or network interfaces). Upon wake execution is resumed from the main + script, similar to a hard or power-on reset. The `reset_cause()` function will + return `machine.DEEPSLEEP` and this can be used to distinguish a deepsleep wake + from other resets. + """ + ... + +def bootloader(value: Optional[Any] = None) -> None: + """ + Reset the device and enter its bootloader. This is typically used to put the + device into a state where it can be programmed with new firmware. + + Some ports support passing in an optional *value* argument which can control + which bootloader to enter, what to pass to it, or other things. + """ + ... + +def soft_reset() -> NoReturn: + """ + Performs a soft reset of the interpreter, deleting all Python objects and + resetting the Python heap. It tries to retain the method by which the user + is connected to the MicroPython REPL (eg serial, USB, Wifi). + """ + ... + +def reset() -> NoReturn: + """ + Resets the device in a manner similar to pushing the external RESET + button. + """ + ... + +def freq(hz: Optional[Any] = None) -> Incomplete: + """ + Returns the CPU frequency in hertz. + + On some ports this can also be used to set the CPU frequency by passing in *hz*. + """ + ... + +def reset_cause() -> int: + """ + Get the reset cause. See :ref:`constants ` for the possible return values. + """ + ... + +def idle() -> Incomplete: + """ + Gates the clock to the CPU, useful to reduce power consumption at any time during + short or long periods. 
Peripherals continue working and execution resumes as soon + as any interrupt is triggered (on many ports this includes system timer + interrupt occurring at regular intervals on the order of millisecond). + """ + ... + +def time_pulse_us(pin, pulse_level, timeout_us=1000000, /) -> int: + """ + Time a pulse on the given *pin*, and return the duration of the pulse in + microseconds. The *pulse_level* argument should be 0 to time a low pulse + or 1 to time a high pulse. + + If the current input value of the pin is different to *pulse_level*, + the function first (*) waits until the pin input becomes equal to *pulse_level*, + then (**) times the duration that the pin is equal to *pulse_level*. + If the pin is already equal to *pulse_level* then timing starts straight away. + + The function will return -2 if there was timeout waiting for condition marked + (*) above, and -1 if there was timeout during the main measurement, marked (**) + above. The timeout is the same for both cases and given by *timeout_us* (which + is in microseconds). + """ + ... + +def lightsleep(time_ms: Optional[Any] = None) -> Incomplete: + """ + Stops execution in an attempt to enter a low power state. + + If *time_ms* is specified then this will be the maximum time in milliseconds that + the sleep will last for. Otherwise the sleep can last indefinitely. + + With or without a timeout, execution may resume at any time if there are events + that require processing. Such events, or wake sources, should be configured before + sleeping, like `Pin` change or `RTC` timeout. + + The precise behaviour and power-saving capabilities of lightsleep and deepsleep is + highly dependent on the underlying hardware, but the general properties are: + + * A lightsleep has full RAM and state retention. Upon wake execution is resumed + from the point where the sleep was requested, with all subsystems operational. + + * A deepsleep may not retain RAM or any other state of the system (for example + peripherals or network interfaces). Upon wake execution is resumed from the main + script, similar to a hard or power-on reset. The `reset_cause()` function will + return `machine.DEEPSLEEP` and this can be used to distinguish a deepsleep wake + from other resets. + """ + ... + +def unique_id() -> bytes: + """ + Returns a byte string with a unique identifier of a board/SoC. It will vary + from a board/SoC instance to another, if underlying hardware allows. Length + varies by hardware (so use substring of a full value if you expect a short + ID). In some MicroPython ports, ID corresponds to the network MAC address. + """ + ... + +class WDT: + """ + Create a WDT object and start it. The timeout must be given in milliseconds. + Once it is running the timeout cannot be changed and the WDT cannot be stopped either. + + Notes: On the esp8266 a timeout cannot be specified, it is determined by the underlying system. + On rp2040 devices, the maximum timeout is 8388 ms. + """ + + def feed(self) -> None: + """ + Feed the WDT to prevent it from resetting the system. The application + should place this call in a sensible place ensuring that the WDT is + only fed after verifying that everything is functioning correctly. + """ + ... + def __init__(self, id=0, timeout=5000) -> None: ... 
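The ``WDT`` docstring above notes that the timeout is fixed at construction and capped at 8388 ms on rp2040. A common pattern, shown here as a sketch with a hypothetical ``do_one_iteration()`` standing in for the real work, is to feed the watchdog only after each loop iteration completes::

    import time
    import machine

    wdt = machine.WDT(timeout=5000)     # resets the board if not fed within 5 s

    while True:
        do_one_iteration()              # hypothetical application work
        wdt.feed()                      # only reached if the iteration completed
        time.sleep_ms(100)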
+ +mem8: Incomplete +mem32: Incomplete +mem16: Incomplete + +class PWM: + """ + Construct and return a new PWM object using the following parameters: + + - *dest* is the entity on which the PWM is output, which is usually a + :ref:`machine.Pin ` object, but a port may allow other values, + like integers. + - *freq* should be an integer which sets the frequency in Hz for the + PWM cycle. + - *duty_u16* sets the duty cycle as a ratio ``duty_u16 / 65535``. + - *duty_ns* sets the pulse width in nanoseconds. + - *invert* inverts the respective output if the value is True + + Setting *freq* may affect other PWM objects if the objects share the same + underlying PWM generator (this is hardware specific). + Only one of *duty_u16* and *duty_ns* should be specified at a time. + *invert* is not available at all ports. + """ + + def duty_u16(self, value: Optional[Any] = None) -> int: + """ + Get or set the current duty cycle of the PWM output, as an unsigned 16-bit + value in the range 0 to 65535 inclusive. + + With no arguments the duty cycle is returned. + + With a single *value* argument the duty cycle is set to that value, measured + as the ratio ``value / 65535``. + """ + ... + def freq(self, value: Optional[Any] = None) -> Incomplete: + """ + Get or set the current frequency of the PWM output. + + With no arguments the frequency in Hz is returned. + + With a single *value* argument the frequency is set to that value in Hz. The + method may raise a ``ValueError`` if the frequency is outside the valid range. + """ + ... + def init(self, *, freq, duty_u16, duty_ns) -> None: + """ + Modify settings for the PWM object. See the above constructor for details + about the parameters. + """ + ... + def duty_ns(self, value: Optional[Any] = None) -> int: + """ + Get or set the current pulse width of the PWM output, as a value in nanoseconds. + + With no arguments the pulse width in nanoseconds is returned. + + With a single *value* argument the pulse width is set to that value. + """ + ... + def deinit(self) -> None: + """ + Disable the PWM output. + """ + ... + def __init__(self, dest, *, freq=0, duty=0, duty_u16=0, duty_ns=0, invert=False) -> None: ... + +class ADC: + """ + Access the ADC associated with a source identified by *id*. This + *id* may be an integer (usually specifying a channel number), a + :ref:`Pin ` object, or other value supported by the + underlying machine. + + If additional keyword-arguments are given then they will configure + various aspects of the ADC. If not given, these settings will take + previous or default values. The settings are: + + - *sample_ns* is the sampling time in nanoseconds. + + - *atten* specifies the input attenuation. + """ + + CORE_TEMP: int + def read_u16(self) -> int: + """ + Take an analog reading and return an integer in the range 0-65535. + The return value represents the raw reading taken by the ADC, scaled + such that the minimum value is 0 and the maximum value is 65535. + """ + ... + def __init__(self, id, *, sample_ns: Optional[int] = 0, atten: Optional[int] = ATTN_0DB) -> None: ... + +class I2C: + """ + Construct and return a new I2C object using the following parameters: + + - *id* identifies a particular I2C peripheral. Allowed values for + depend on the particular port/board + - *scl* should be a pin object specifying the pin to use for SCL. + - *sda* should be a pin object specifying the pin to use for SDA. + - *freq* should be an integer which sets the maximum frequency + for SCL. 
+ - *timeout* is the maximum time in microseconds to allow for I2C + transactions. This parameter is not allowed on some ports. + + Note that some ports/boards will have default values of *scl* and *sda* + that can be changed in this constructor. Others will have fixed values + of *scl* and *sda* that cannot be changed. + """ + + def readfrom_mem_into(self, addr, memaddr, buf, *, addrsize=8) -> None: + """ + Read into *buf* from the peripheral specified by *addr* starting from the + memory address specified by *memaddr*. The number of bytes read is the + length of *buf*. + The argument *addrsize* specifies the address size in bits (on ESP8266 + this argument is not recognised and the address size is always 8 bits). + + The method returns ``None``. + """ + ... + def readfrom_into(self, addr, buf, stop=True, /) -> None: + """ + Read into *buf* from the peripheral specified by *addr*. + The number of bytes read will be the length of *buf*. + If *stop* is true then a STOP condition is generated at the end of the transfer. + + The method returns ``None``. + """ + ... + def readfrom_mem(self, addr, memaddr, nbytes, *, addrsize=8) -> bytes: + """ + Read *nbytes* from the peripheral specified by *addr* starting from the memory + address specified by *memaddr*. + The argument *addrsize* specifies the address size in bits. + Returns a `bytes` object with the data read. + """ + ... + def writeto_mem(self, addr, memaddr, buf, *, addrsize=8) -> None: + """ + Write *buf* to the peripheral specified by *addr* starting from the + memory address specified by *memaddr*. + The argument *addrsize* specifies the address size in bits (on ESP8266 + this argument is not recognised and the address size is always 8 bits). + + The method returns ``None``. + """ + ... + def scan(self) -> List: + """ + Scan all I2C addresses between 0x08 and 0x77 inclusive and return a list of + those that respond. A device responds if it pulls the SDA line low after + its address (including a write bit) is sent on the bus. + """ + ... + def writeto(self, addr, buf, stop=True, /) -> int: + """ + Write the bytes from *buf* to the peripheral specified by *addr*. If a + NACK is received following the write of a byte from *buf* then the + remaining bytes are not sent. If *stop* is true then a STOP condition is + generated at the end of the transfer, even if a NACK is received. + The function returns the number of ACKs that were received. + """ + ... + def writevto(self, addr, vector, stop=True, /) -> int: + """ + Write the bytes contained in *vector* to the peripheral specified by *addr*. + *vector* should be a tuple or list of objects with the buffer protocol. + The *addr* is sent once and then the bytes from each object in *vector* + are written out sequentially. The objects in *vector* may be zero bytes + in length in which case they don't contribute to the output. + + If a NACK is received following the write of a byte from one of the + objects in *vector* then the remaining bytes, and any remaining objects, + are not sent. If *stop* is true then a STOP condition is generated at + the end of the transfer, even if a NACK is received. The function + returns the number of ACKs that were received. + """ + ... + def start(self) -> None: + """ + Generate a START condition on the bus (SDA transitions to low while SCL is high). + """ + ... + def readfrom(self, addr, nbytes, stop=True, /) -> bytes: + """ + Read *nbytes* from the peripheral specified by *addr*. + If *stop* is true then a STOP condition is generated at the end of the transfer. 
+ Returns a `bytes` object with the data read. + """ + ... + def readinto(self, buf, nack=True, /) -> Incomplete: + """ + Reads bytes from the bus and stores them into *buf*. The number of bytes + read is the length of *buf*. An ACK will be sent on the bus after + receiving all but the last byte. After the last byte is received, if *nack* + is true then a NACK will be sent, otherwise an ACK will be sent (and in this + case the peripheral assumes more bytes are going to be read in a later call). + """ + ... + def init(self, scl, sda, *, freq=400000) -> None: + """ + Initialise the I2C bus with the given arguments: + + - *scl* is a pin object for the SCL line + - *sda* is a pin object for the SDA line + - *freq* is the SCL clock rate + + In the case of hardware I2C the actual clock frequency may be lower than the + requested frequency. This is dependent on the platform hardware. The actual + rate may be determined by printing the I2C object. + """ + ... + def stop(self) -> None: + """ + Generate a STOP condition on the bus (SDA transitions to high while SCL is high). + """ + ... + def write(self, buf) -> int: + """ + Write the bytes from *buf* to the bus. Checks that an ACK is received + after each byte and stops transmitting the remaining bytes if a NACK is + received. The function returns the number of ACKs that were received. + """ + ... + def __init__( + self, + id: Union[int, str] = -1, + *, + scl: Optional[Union[Pin, str]] = None, + sda: Optional[Union[Pin, str]] = None, + freq=400_000, + timeout=50000, + ) -> None: ... + +class I2S: + """ + Construct an I2S object of the given id: + + - ``id`` identifies a particular I2S bus; it is board and port specific + + Keyword-only parameters that are supported on all ports: + + - ``sck`` is a pin object for the serial clock line + - ``ws`` is a pin object for the word select line + - ``sd`` is a pin object for the serial data line + - ``mck`` is a pin object for the master clock line; + master clock frequency is sampling rate * 256 + - ``mode`` specifies receive or transmit + - ``bits`` specifies sample size (bits), 16 or 32 + - ``format`` specifies channel format, STEREO or MONO + - ``rate`` specifies audio sampling rate (Hz); + this is the frequency of the ``ws`` signal + - ``ibuf`` specifies internal buffer length (bytes) + + For all ports, DMA runs continuously in the background and allows user applications to perform other operations while + sample data is transferred between the internal buffer and the I2S peripheral unit. + Increasing the size of the internal buffer has the potential to increase the time that user applications can perform non-I2S operations + before underflow (e.g. ``write`` method) or overflow (e.g. ``readinto`` method). + """ + + RX: int + MONO: int + STEREO: int + TX: int + @staticmethod + def shift(*, buf, bits, shift) -> Incomplete: + """ + bitwise shift of all samples contained in ``buf``. ``bits`` specifies sample size in bits. ``shift`` specifies the number of bits to shift each sample. + Positive for left shift, negative for right shift. + Typically used for volume control. Each bit shift changes sample volume by 6dB. + """ + ... + def init(self, sck, *args, **kwargs) -> Incomplete: + """ + see Constructor for argument descriptions + """ + ... + def irq(self, handler) -> Incomplete: + """ + Set a callback. ``handler`` is called when ``buf`` is emptied (``write`` method) or becomes full (``readinto`` method). + Setting a callback changes the ``write`` and ``readinto`` methods to non-blocking operation. 
+ ``handler`` is called in the context of the MicroPython scheduler. + """ + ... + def readinto(self, buf) -> int: + """ + Read audio samples into the buffer specified by ``buf``. ``buf`` must support the buffer protocol, such as bytearray or array. + "buf" byte ordering is little-endian. For Stereo format, left channel sample precedes right channel sample. For Mono format, + the left channel sample data is used. + Returns number of bytes read + """ + ... + def deinit(self) -> Incomplete: + """ + Deinitialize the I2S bus + """ + ... + def write(self, buf) -> int: + """ + Write audio samples contained in ``buf``. ``buf`` must support the buffer protocol, such as bytearray or array. + "buf" byte ordering is little-endian. For Stereo format, left channel sample precedes right channel sample. For Mono format, + the sample data is written to both the right and left channels. + Returns number of bytes written + """ + ... + def __init__(self, id, *, sck, ws, sd, mck=None, mode, bits, format, rate, ibuf) -> None: ... + +class Pin: + """ + Access the pin peripheral (GPIO pin) associated with the given ``id``. If + additional arguments are given in the constructor then they are used to initialise + the pin. Any settings that are not specified will remain in their previous state. + + The arguments are: + + - ``id`` is mandatory and can be an arbitrary object. Among possible value + types are: int (an internal Pin identifier), str (a Pin name), and tuple + (pair of [port, pin]). + + - ``mode`` specifies the pin mode, which can be one of: + + - ``Pin.IN`` - Pin is configured for input. If viewed as an output the pin + is in high-impedance state. + + - ``Pin.OUT`` - Pin is configured for (normal) output. + + - ``Pin.OPEN_DRAIN`` - Pin is configured for open-drain output. Open-drain + output works in the following way: if the output value is set to 0 the pin + is active at a low level; if the output value is 1 the pin is in a high-impedance + state. Not all ports implement this mode, or some might only on certain pins. + + - ``Pin.ALT`` - Pin is configured to perform an alternative function, which is + port specific. For a pin configured in such a way any other Pin methods + (except :meth:`Pin.init`) are not applicable (calling them will lead to undefined, + or a hardware-specific, result). Not all ports implement this mode. + + - ``Pin.ALT_OPEN_DRAIN`` - The Same as ``Pin.ALT``, but the pin is configured as + open-drain. Not all ports implement this mode. + + - ``Pin.ANALOG`` - Pin is configured for analog input, see the :class:`ADC` class. + + - ``pull`` specifies if the pin has a (weak) pull resistor attached, and can be + one of: + + - ``None`` - No pull up or down resistor. + - ``Pin.PULL_UP`` - Pull up resistor enabled. + - ``Pin.PULL_DOWN`` - Pull down resistor enabled. + + - ``value`` is valid only for Pin.OUT and Pin.OPEN_DRAIN modes and specifies initial + output pin value if given, otherwise the state of the pin peripheral remains + unchanged. + + - ``drive`` specifies the output power of the pin and can be one of: ``Pin.DRIVE_0``, + ``Pin.DRIVE_1``, etc., increasing in drive strength. The actual current driving + capabilities are port dependent. Not all ports implement this argument. + + - ``alt`` specifies an alternate function for the pin and the values it can take are + port dependent. This argument is valid only for ``Pin.ALT`` and ``Pin.ALT_OPEN_DRAIN`` + modes. It may be used when a pin supports more than one alternate function. 
If only + one pin alternate function is supported the this argument is not required. Not all + ports implement this argument. + + As specified above, the Pin class allows to set an alternate function for a particular + pin, but it does not specify any further operations on such a pin. Pins configured in + alternate-function mode are usually not used as GPIO but are instead driven by other + hardware peripherals. The only operation supported on such a pin is re-initialising, + by calling the constructor or :meth:`Pin.init` method. If a pin that is configured in + alternate-function mode is re-initialised with ``Pin.IN``, ``Pin.OUT``, or + ``Pin.OPEN_DRAIN``, the alternate function will be removed from the pin. + """ + + ALT_SPI: int + IN: int + ALT_USB: int + ALT_UART: int + IRQ_FALLING: int + OUT: int + OPEN_DRAIN: int + IRQ_RISING: int + PULL_DOWN: int + ALT_SIO: int + ALT_GPCK: int + ALT: int + PULL_UP: int + ALT_I2C: int + ALT_PWM: int + ALT_PIO1: int + ALT_PIO0: int + def low(self) -> None: + """ + Set pin to "0" output level. + + Availability: nrf, rp2, stm32 ports. + """ + ... + def irq(self, handler=None, trigger=IRQ_FALLING, *, priority=1, wake=None, hard=False) -> Callable[..., Incomplete]: + """ + Configure an interrupt handler to be called when the trigger source of the + pin is active. If the pin mode is ``Pin.IN`` then the trigger source is + the external value on the pin. If the pin mode is ``Pin.OUT`` then the + trigger source is the output buffer of the pin. Otherwise, if the pin mode + is ``Pin.OPEN_DRAIN`` then the trigger source is the output buffer for + state '0' and the external pin value for state '1'. + + The arguments are: + + - ``handler`` is an optional function to be called when the interrupt + triggers. The handler must take exactly one argument which is the + ``Pin`` instance. + + - ``trigger`` configures the event which can generate an interrupt. + Possible values are: + + - ``Pin.IRQ_FALLING`` interrupt on falling edge. + - ``Pin.IRQ_RISING`` interrupt on rising edge. + - ``Pin.IRQ_LOW_LEVEL`` interrupt on low level. + - ``Pin.IRQ_HIGH_LEVEL`` interrupt on high level. + + These values can be OR'ed together to trigger on multiple events. + + - ``priority`` sets the priority level of the interrupt. The values it + can take are port-specific, but higher values always represent higher + priorities. + + - ``wake`` selects the power mode in which this interrupt can wake up the + system. It can be ``machine.IDLE``, ``machine.SLEEP`` or ``machine.DEEPSLEEP``. + These values can also be OR'ed together to make a pin generate interrupts in + more than one power mode. + + - ``hard`` if true a hardware interrupt is used. This reduces the delay + between the pin change and the handler being called. Hard interrupt + handlers may not allocate memory; see :ref:`isr_rules`. + Not all ports support this argument. + + This method returns a callback object. + + The following methods are not part of the core Pin API and only implemented on certain ports. + """ + ... + def toggle(self, *args, **kwargs) -> Incomplete: ... + def off(self) -> None: + """ + Set pin to "0" output level. + """ + ... + def on(self) -> None: + """ + Set pin to "1" output level. + """ + ... + def init(self, mode=-1, pull=-1, *, value=None, drive=0, alt=-1) -> None: + """ + Re-initialise the pin using the given parameters. Only those arguments that + are specified will be set. The rest of the pin peripheral state will remain + unchanged. See the constructor documentation for details of the arguments. 
+ + Returns ``None``. + """ + ... + def value(self, x: Optional[Any] = None) -> int: + """ + This method allows to set and get the value of the pin, depending on whether + the argument ``x`` is supplied or not. + + If the argument is omitted then this method gets the digital logic level of + the pin, returning 0 or 1 corresponding to low and high voltage signals + respectively. The behaviour of this method depends on the mode of the pin: + + - ``Pin.IN`` - The method returns the actual input value currently present + on the pin. + - ``Pin.OUT`` - The behaviour and return value of the method is undefined. + - ``Pin.OPEN_DRAIN`` - If the pin is in state '0' then the behaviour and + return value of the method is undefined. Otherwise, if the pin is in + state '1', the method returns the actual input value currently present + on the pin. + + If the argument is supplied then this method sets the digital logic level of + the pin. The argument ``x`` can be anything that converts to a boolean. + If it converts to ``True``, the pin is set to state '1', otherwise it is set + to state '0'. The behaviour of this method depends on the mode of the pin: + + - ``Pin.IN`` - The value is stored in the output buffer for the pin. The + pin state does not change, it remains in the high-impedance state. The + stored value will become active on the pin as soon as it is changed to + ``Pin.OUT`` or ``Pin.OPEN_DRAIN`` mode. + - ``Pin.OUT`` - The output buffer is set to the given value immediately. + - ``Pin.OPEN_DRAIN`` - If the value is '0' the pin is set to a low voltage + state. Otherwise the pin is set to high-impedance state. + + When setting the value this method returns ``None``. + """ + ... + def high(self) -> None: + """ + Set pin to "1" output level. + + Availability: nrf, rp2, stm32 ports. + """ + ... + + class cpu: + GPIO20: Incomplete + GPIO25: Incomplete + GPIO26: Incomplete + GPIO27: Incomplete + GPIO24: Incomplete + GPIO21: Incomplete + GPIO22: Incomplete + GPIO23: Incomplete + GPIO28: Incomplete + GPIO6: Incomplete + GPIO7: Incomplete + GPIO8: Incomplete + GPIO5: Incomplete + GPIO29: Incomplete + GPIO3: Incomplete + GPIO4: Incomplete + GPIO9: Incomplete + GPIO2: Incomplete + GPIO1: Incomplete + GPIO10: Incomplete + GPIO11: Incomplete + GPIO0: Incomplete + EXT_GPIO0: Incomplete + EXT_GPIO1: Incomplete + EXT_GPIO2: Incomplete + GPIO12: Incomplete + GPIO17: Incomplete + GPIO18: Incomplete + GPIO19: Incomplete + GPIO16: Incomplete + GPIO13: Incomplete + GPIO14: Incomplete + GPIO15: Incomplete + def __init__(self, *argv, **kwargs) -> None: ... + + class board: + GP3: Incomplete + GP28: Incomplete + GP4: Incomplete + GP5: Incomplete + GP22: Incomplete + GP27: Incomplete + GP26: Incomplete + WL_GPIO2: Incomplete + WL_GPIO0: Incomplete + LED: Incomplete + WL_GPIO1: Incomplete + GP6: Incomplete + GP7: Incomplete + GP9: Incomplete + GP8: Incomplete + GP12: Incomplete + GP11: Incomplete + GP13: Incomplete + GP14: Incomplete + GP0: Incomplete + GP10: Incomplete + GP1: Incomplete + GP21: Incomplete + GP2: Incomplete + GP19: Incomplete + GP20: Incomplete + GP15: Incomplete + GP16: Incomplete + GP18: Incomplete + GP17: Incomplete + def __init__(self, *argv, **kwargs) -> None: ... + + def __init__(self, id, mode=-1, pull=-1, *, value=None, drive=0, alt=-1) -> None: ... + def __call__(self, x: Optional[Any] = None) -> Incomplete: + """ + Pin objects are callable. The call method provides a (fast) shortcut to set + and get the value of the pin. It is equivalent to Pin.value([x]). 
+ See :meth:`Pin.value` for more details. + """ + ... + +class SoftSPI: + """ + Construct a new software SPI object. Additional parameters must be + given, usually at least *sck*, *mosi* and *miso*, and these are used + to initialise the bus. See `SPI.init` for a description of the parameters. + """ + + LSB: int + MSB: int + def deinit(self, *args, **kwargs) -> Incomplete: ... + def init(self, *args, **kwargs) -> Incomplete: ... + def write_readinto(self, *args, **kwargs) -> Incomplete: ... + def read(self, *args, **kwargs) -> Incomplete: ... + def write(self, *args, **kwargs) -> Incomplete: ... + def readinto(self, *args, **kwargs) -> Incomplete: ... + def __init__(self, baudrate=500000, *, polarity=0, phase=0, bits=8, firstbit=MSB, sck=None, mosi=None, miso=None) -> None: ... + +class Timer: + """ + Construct a new timer object of the given ``id``. ``id`` of -1 constructs a + virtual timer (if supported by a board). + ``id`` shall not be passed as a keyword argument. + + See ``init`` for parameters of initialisation. + """ + + PERIODIC: int + ONE_SHOT: int + def init(self, *, mode=PERIODIC, freq=-1, period=-1, callback=None) -> None: + """ + Initialise the timer. Example:: + + def mycallback(t): + pass + + # periodic at 1kHz + tim.init(mode=Timer.PERIODIC, freq=1000, callback=mycallback) + + # periodic with 100ms period + tim.init(period=100, callback=mycallback) + + # one shot firing after 1000ms + tim.init(mode=Timer.ONE_SHOT, period=1000, callback=mycallback) + + Keyword arguments: + + - ``mode`` can be one of: + + - ``Timer.ONE_SHOT`` - The timer runs once until the configured + period of the channel expires. + - ``Timer.PERIODIC`` - The timer runs periodically at the configured + frequency of the channel. + + - ``freq`` - The timer frequency, in units of Hz. The upper bound of + the frequency is dependent on the port. When both the ``freq`` and + ``period`` arguments are given, ``freq`` has a higher priority and + ``period`` is ignored. + + - ``period`` - The timer period, in milliseconds. + + - ``callback`` - The callable to call upon expiration of the timer period. + The callback must take one argument, which is passed the Timer object. + The ``callback`` argument shall be specified. Otherwise an exception + will occur upon timer expiration: + ``TypeError: 'NoneType' object isn't callable`` + """ + ... + def deinit(self) -> None: + """ + Deinitialises the timer. Stops the timer, and disables the timer peripheral. + """ + ... + def __init__(self, id=-1, *args, **kwargs) -> None: ... + +class UART: + """ + Construct a UART object of the given id. + """ + + INV_TX: int + RTS: int + CTS: int + INV_RX: int + def deinit(self) -> None: + """ + Turn off the UART bus. + + .. note:: + You will not be able to call ``init()`` on the object after ``deinit()``. + A new instance needs to be created in that case. + """ + ... + def sendbreak(self) -> None: + """ + Send a break condition on the bus. This drives the bus low for a duration + longer than required for a normal transmission of a character. + """ + ... + def init(self, baudrate=9600, bits=8, parity=None, stop=1, *args, **kwargs) -> None: + """ + Initialise the UART bus with the given parameters: + + - *baudrate* is the clock rate. + - *bits* is the number of bits per character, 7, 8 or 9. + - *parity* is the parity, ``None``, 0 (even) or 1 (odd). + - *stop* is the number of stop bits, 1 or 2. + + Additional keyword-only parameters that may be supported by a port are: + + - *tx* specifies the TX pin to use. 
+ - *rx* specifies the RX pin to use. + - *rts* specifies the RTS (output) pin to use for hardware receive flow control. + - *cts* specifies the CTS (input) pin to use for hardware transmit flow control. + - *txbuf* specifies the length in characters of the TX buffer. + - *rxbuf* specifies the length in characters of the RX buffer. + - *timeout* specifies the time to wait for the first character (in ms). + - *timeout_char* specifies the time to wait between characters (in ms). + - *invert* specifies which lines to invert. + + - ``0`` will not invert lines (idle state of both lines is logic high). + - ``UART.INV_TX`` will invert TX line (idle state of TX line now logic low). + - ``UART.INV_RX`` will invert RX line (idle state of RX line now logic low). + - ``UART.INV_TX | UART.INV_RX`` will invert both lines (idle state at logic low). + + - *flow* specifies which hardware flow control signals to use. The value + is a bitmask. + + - ``0`` will ignore hardware flow control signals. + - ``UART.RTS`` will enable receive flow control by using the RTS output pin to + signal if the receive FIFO has sufficient space to accept more data. + - ``UART.CTS`` will enable transmit flow control by pausing transmission when the + CTS input pin signals that the receiver is running low on buffer space. + - ``UART.RTS | UART.CTS`` will enable both, for full hardware flow control. + + On the WiPy only the following keyword-only parameter is supported: + + - *pins* is a 4 or 2 item list indicating the TX, RX, RTS and CTS pins (in that order). + Any of the pins can be None if one wants the UART to operate with limited functionality. + If the RTS pin is given the the RX pin must be given as well. The same applies to CTS. + When no pins are given, then the default set of TX and RX pins is taken, and hardware + flow control will be disabled. If *pins* is ``None``, no pin assignment will be made. + + .. note:: + It is possible to call ``init()`` multiple times on the same object in + order to reconfigure UART on the fly. That allows using single UART + peripheral to serve different devices attached to different GPIO pins. + Only one device can be served at a time in that case. + Also do not call ``deinit()`` as it will prevent calling ``init()`` + again. + """ + ... + def flush(self) -> Incomplete: + """ + Waits until all data has been sent. In case of a timeout, an exception is raised. The timeout + duration depends on the tx buffer size and the baud rate. Unless flow control is enabled, a timeout + should not occur. + + .. note:: + + For the rp2, esp8266 and nrf ports the call returns while the last byte is sent. + If required, a one character wait time has to be added in the calling script. + + Availability: rp2, esp32, esp8266, mimxrt, cc3200, stm32, nrf ports, renesas-ra + """ + ... + def txdone(self) -> bool: + """ + Tells whether all data has been sent or no data transfer is happening. In this case, + it returns ``True``. If a data transmission is ongoing it returns ``False``. + + .. note:: + + For the rp2, esp8266 and nrf ports the call may return ``True`` even if the last byte + of a transfer is still being sent. If required, a one character wait time has to be + added in the calling script. + + Availability: rp2, esp32, esp8266, mimxrt, cc3200, stm32, nrf ports, renesas-ra + """ + ... + def read(self, nbytes: Optional[Any] = None) -> bytes: + """ + Read characters. If ``nbytes`` is specified then read at most that many bytes, + otherwise read as much data as possible. 
It may return sooner if a timeout + is reached. The timeout is configurable in the constructor. + + Return value: a bytes object containing the bytes read in. Returns ``None`` + on timeout. + """ + ... + def any(self) -> int: + """ + Returns an integer counting the number of characters that can be read without + blocking. It will return 0 if there are no characters available and a positive + number if there are characters. The method may return 1 even if there is more + than one character available for reading. + + For more sophisticated querying of available characters use select.poll:: + + poll = select.poll() + poll.register(uart, select.POLLIN) + poll.poll(timeout) + """ + ... + def write(self, buf) -> int: + """ + Write the buffer of bytes to the bus. + + Return value: number of bytes written or ``None`` on timeout. + """ + ... + def readinto(self, buf, nbytes: Optional[Any] = None) -> int: + """ + Read bytes into the ``buf``. If ``nbytes`` is specified then read at most + that many bytes. Otherwise, read at most ``len(buf)`` bytes. It may return sooner if a timeout + is reached. The timeout is configurable in the constructor. + + Return value: number of bytes read and stored into ``buf`` or ``None`` on + timeout. + """ + ... + def readline(self) -> None: + """ + Read a line, ending in a newline character. It may return sooner if a timeout + is reached. The timeout is configurable in the constructor. + + Return value: the line read or ``None`` on timeout. + """ + ... + def __init__(self, id, *args, **kwargs) -> None: ... + +class SoftI2C(I2C): + """ + Construct a new software I2C object. The parameters are: + + - *scl* should be a pin object specifying the pin to use for SCL. + - *sda* should be a pin object specifying the pin to use for SDA. + - *freq* should be an integer which sets the maximum frequency + for SCL. + - *timeout* is the maximum time in microseconds to wait for clock + stretching (SCL held low by another device on the bus), after + which an ``OSError(ETIMEDOUT)`` exception is raised. + """ + + def readfrom_mem_into(self, *args, **kwargs) -> Incomplete: ... + def readfrom_into(self, *args, **kwargs) -> Incomplete: ... + def readfrom_mem(self, *args, **kwargs) -> Incomplete: ... + def writeto_mem(self, *args, **kwargs) -> Incomplete: ... + def scan(self, *args, **kwargs) -> Incomplete: ... + def writeto(self, *args, **kwargs) -> Incomplete: ... + def writevto(self, *args, **kwargs) -> Incomplete: ... + def start(self, *args, **kwargs) -> Incomplete: ... + def readfrom(self, *args, **kwargs) -> Incomplete: ... + def readinto(self, *args, **kwargs) -> Incomplete: ... + def init(self, *args, **kwargs) -> Incomplete: ... + def stop(self, *args, **kwargs) -> Incomplete: ... + def write(self, *args, **kwargs) -> Incomplete: ... + def __init__(self, scl, sda, *, freq=400000, timeout=50000) -> None: ... + +class RTC: + """ + Create an RTC object. See init for parameters of initialization. + """ + + def datetime(self, datetimetuple: Optional[Any] = None) -> Tuple: + """ + Get or set the date and time of the RTC. + + With no arguments, this method returns an 8-tuple with the current + date and time. With 1 argument (being an 8-tuple) it sets the date + and time. + + The 8-tuple has the following format: + + (year, month, day, weekday, hours, minutes, seconds, subseconds) + + The meaning of the ``subseconds`` field is hardware dependent. + """ + ... + def __init__(self, id=0, *args, **kwargs) -> None: ... + +class SPI: + """ + Construct an SPI object on the given bus, *id*. 
Values of *id* depend + on a particular port and its hardware. Values 0, 1, etc. are commonly used + to select hardware SPI block #0, #1, etc. + + With no additional parameters, the SPI object is created but not + initialised (it has the settings from the last initialisation of + the bus, if any). If extra arguments are given, the bus is initialised. + See ``init`` for parameters of initialisation. + """ + + LSB: int + MSB: int + def deinit(self) -> None: + """ + Turn off the SPI bus. + """ + ... + def init( + self, baudrate=1000000, *, polarity=0, phase=0, bits=8, firstbit=MSB, sck=None, mosi=None, miso=None, pins: Optional[Tuple] + ) -> None: + """ + Initialise the SPI bus with the given parameters: + + - ``baudrate`` is the SCK clock rate. + - ``polarity`` can be 0 or 1, and is the level the idle clock line sits at. + - ``phase`` can be 0 or 1 to sample data on the first or second clock edge + respectively. + - ``bits`` is the width in bits of each transfer. Only 8 is guaranteed to be supported by all hardware. + - ``firstbit`` can be ``SPI.MSB`` or ``SPI.LSB``. + - ``sck``, ``mosi``, ``miso`` are pins (machine.Pin) objects to use for bus signals. For most + hardware SPI blocks (as selected by ``id`` parameter to the constructor), pins are fixed + and cannot be changed. In some cases, hardware blocks allow 2-3 alternative pin sets for + a hardware SPI block. Arbitrary pin assignments are possible only for a bitbanging SPI driver + (``id`` = -1). + - ``pins`` - WiPy port doesn't ``sck``, ``mosi``, ``miso`` arguments, and instead allows to + specify them as a tuple of ``pins`` parameter. + + In the case of hardware SPI the actual clock frequency may be lower than the + requested baudrate. This is dependent on the platform hardware. The actual + rate may be determined by printing the SPI object. + """ + ... + def write_readinto(self, write_buf, read_buf) -> int: + """ + Write the bytes from ``write_buf`` while reading into ``read_buf``. The + buffers can be the same or different, but both buffers must have the + same length. + Returns ``None``. + + Note: on WiPy this function returns the number of bytes written. + """ + ... + def read(self, nbytes, write=0x00) -> bytes: + """ + Read a number of bytes specified by ``nbytes`` while continuously writing + the single byte given by ``write``. + Returns a ``bytes`` object with the data that was read. + """ + ... + def write(self, buf) -> int: + """ + Write the bytes contained in ``buf``. + Returns ``None``. + + Note: on WiPy this function returns the number of bytes written. + """ + ... + def readinto(self, buf, write=0x00) -> int: + """ + Read into the buffer specified by ``buf`` while continuously writing the + single byte given by ``write``. + Returns ``None``. + + Note: on WiPy this function returns the number of bytes read. + """ + ... + def __init__(self, id, *args, **kwargs) -> None: ... + +class Signal(Pin): + """ + Signal(pin_arguments..., *, invert=False) + + Create a Signal object. There're two ways to create it: + + * By wrapping existing Pin object - universal method which works for + any board. + * By passing required Pin parameters directly to Signal constructor, + skipping the need to create intermediate Pin object. Available on + many, but not all boards. + + The arguments are: + + - ``pin_obj`` is existing Pin object. + + - ``pin_arguments`` are the same arguments as can be passed to Pin constructor. + + - ``invert`` - if True, the signal will be inverted (active low). + """ + + def off(self) -> None: + """ + Deactivate signal. 
+ """ + ... + def on(self) -> None: + """ + Activate signal. + """ + ... + def value(self, x: Optional[Any] = None) -> int: + """ + This method allows to set and get the value of the signal, depending on whether + the argument ``x`` is supplied or not. + + If the argument is omitted then this method gets the signal level, 1 meaning + signal is asserted (active) and 0 - signal inactive. + + If the argument is supplied then this method sets the signal level. The + argument ``x`` can be anything that converts to a boolean. If it converts + to ``True``, the signal is active, otherwise it is inactive. + + Correspondence between signal being active and actual logic level on the + underlying pin depends on whether signal is inverted (active-low) or not. + For non-inverted signal, active status corresponds to logical 1, inactive - + to logical 0. For inverted/active-low signal, active status corresponds + to logical 0, while inactive - to logical 1. + """ + ... + def __init__(self, pin_obj, *args, invert=False) -> None: ... diff --git a/.vscode/Pico-W-Stub/math.pyi b/.vscode/Pico-W-Stub/math.pyi new file mode 100644 index 0000000..4a948e3 --- /dev/null +++ b/.vscode/Pico-W-Stub/math.pyi @@ -0,0 +1,257 @@ +""" +Mathematical functions. + +MicroPython module: https://docs.micropython.org/en/v1.21.0/library/math.html + +CPython module: :mod:`python:math` https://docs.python.org/3/library/math.html . + +The ``math`` module provides some basic mathematical functions for +working with floating-point numbers. + +*Note:* On the pyboard, floating-point numbers have 32-bit precision. + +Availability: not available on WiPy. Floating point support required +for this module. +""" +from _typeshed import Incomplete, Incomplete as Incomplete +from typing import Tuple + +inf: float +nan: float +pi: float +e: float +tau: float + +def ldexp(x, exp) -> Incomplete: + """ + Return ``x * (2**exp)``. + """ + ... + +def lgamma(x) -> float: + """ + Return the natural logarithm of the gamma function of ``x``. + """ + ... + +def trunc(x) -> int: + """ + Return an integer, being ``x`` rounded towards 0. + """ + ... + +def isclose(*args, **kwargs) -> Incomplete: ... +def gamma(x) -> Incomplete: + """ + Return the gamma function of ``x``. + """ + ... + +def isnan(x) -> bool: + """ + Return ``True`` if ``x`` is not-a-number + """ + ... + +def isfinite(x) -> bool: + """ + Return ``True`` if ``x`` is finite. + """ + ... + +def isinf(x) -> bool: + """ + Return ``True`` if ``x`` is infinite. + """ + ... + +def sqrt(x) -> Incomplete: + """ + Return the square root of ``x``. + """ + ... + +def sinh(x) -> float: + """ + Return the hyperbolic sine of ``x``. + """ + ... + +def log(x) -> float: + """ + Return the natural logarithm of ``x``. + """ + ... + +def tan(x) -> float: + """ + Return the tangent of ``x``. + """ + ... + +def tanh(x) -> float: + """ + Return the hyperbolic tangent of ``x``. + """ + ... + +def log2(x) -> float: + """ + Return the base-2 logarithm of ``x``. + """ + ... + +def log10(x) -> float: + """ + Return the base-10 logarithm of ``x``. + """ + ... + +def sin(x) -> float: + """ + Return the sine of ``x``. + """ + ... + +def modf(x) -> Tuple: + """ + Return a tuple of two floats, being the fractional and integral parts of + ``x``. Both return values have the same sign as ``x``. + """ + ... + +def radians(x) -> Incomplete: + """ + Return degrees ``x`` converted to radians. + """ + ... + +def atanh(x) -> float: + """ + Return the inverse hyperbolic tangent of ``x``. + """ + ... 
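+
+# Hedged usage sketch (not part of the generated stub): illustrative values for a few
+# of the helpers documented above, assuming ordinary IEEE float behaviour.
+#
+#   import math
+#
+#   math.modf(2.75)            # (0.75, 2.0): fractional and integral parts, same sign as x
+#   math.ldexp(0.75, 3)        # 6.0, i.e. 0.75 * 2**3
+#   math.isnan(float("nan"))   # True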
+ +def atan2(y, x) -> float: + """ + Return the principal value of the inverse tangent of ``y/x``. + """ + ... + +def atan(x) -> float: + """ + Return the inverse tangent of ``x``. + """ + ... + +def ceil(x) -> int: + """ + Return an integer, being ``x`` rounded towards positive infinity. + """ + ... + +def copysign(x, y) -> Incomplete: + """ + Return ``x`` with the sign of ``y``. + """ + ... + +def frexp(x) -> Incomplete: + """ + Decomposes a floating-point number into its mantissa and exponent. + The returned value is the tuple ``(m, e)`` such that ``x == m * 2**e`` + exactly. If ``x == 0`` then the function returns ``(0.0, 0)``, otherwise + the relation ``0.5 <= abs(m) < 1`` holds. + """ + ... + +def acos(x) -> float: + """ + Return the inverse cosine of ``x``. + """ + ... + +def pow(x, y) -> Incomplete: + """ + Returns ``x`` to the power of ``y``. + """ + ... + +def asinh(x) -> float: + """ + Return the inverse hyperbolic sine of ``x``. + """ + ... + +def acosh(x) -> float: + """ + Return the inverse hyperbolic cosine of ``x``. + """ + ... + +def asin(x) -> float: + """ + Return the inverse sine of ``x``. + """ + ... + +def factorial(*args, **kwargs) -> Incomplete: ... +def fabs(x) -> Incomplete: + """ + Return the absolute value of ``x``. + """ + ... + +def expm1(x) -> Incomplete: + """ + Return ``exp(x) - 1``. + """ + ... + +def floor(x) -> int: + """ + Return an integer, being ``x`` rounded towards negative infinity. + """ + ... + +def fmod(x, y) -> Incomplete: + """ + Return the remainder of ``x/y``. + """ + ... + +def cos(x) -> float: + """ + Return the cosine of ``x``. + """ + ... + +def degrees(x) -> Incomplete: + """ + Return radians ``x`` converted to degrees. + """ + ... + +def cosh(x) -> float: + """ + Return the hyperbolic cosine of ``x``. + """ + ... + +def exp(x) -> float: + """ + Return the exponential of ``x``. + """ + ... + +def erf(x) -> Incomplete: + """ + Return the error function of ``x``. + """ + ... + +def erfc(x) -> Incomplete: + """ + Return the complementary error function of ``x``. + """ + ... diff --git a/.vscode/Pico-W-Stub/micropython.pyi b/.vscode/Pico-W-Stub/micropython.pyi new file mode 100644 index 0000000..dfdc858 --- /dev/null +++ b/.vscode/Pico-W-Stub/micropython.pyi @@ -0,0 +1,190 @@ +""" +Access and control MicroPython internals. + +MicroPython module: https://docs.micropython.org/en/latest/library/micropython.html +""" + +# source version: v1_20_0 +# origin module:: repos/micropython/docs/library/micropython.rst + +from typing import Any, Callable, Optional, Tuple, TypeVar, Union + +from _typeshed import Incomplete + +Const_T = TypeVar("Const_T", int, float, str, bytes, Tuple) # constant + +def const(expr: Const_T) -> Const_T: + """ + Used to declare that the expression is a constant so that the compiler can + optimise it. The use of this function should be as follows:: + + from micropython import const + + CONST_X = const(123) + CONST_Y = const(2 * CONST_X + 1) + + Constants declared this way are still accessible as global variables from + outside the module they are declared in. On the other hand, if a constant + begins with an underscore then it is hidden, it is not available as a global + variable, and does not take up any memory during execution. + + This `const` function is recognised directly by the MicroPython parser and is + provided as part of the :mod:`micropython` module mainly so that scripts can be + written which run under both CPython and MicroPython, by following the above + pattern. + """ + ... 
+ +def opt_level(level: Optional[Any] = None) -> Incomplete: + """ + If *level* is given then this function sets the optimisation level for subsequent + compilation of scripts, and returns ``None``. Otherwise it returns the current + optimisation level. + + The optimisation level controls the following compilation features: + + - Assertions: at level 0 assertion statements are enabled and compiled into the + bytecode; at levels 1 and higher assertions are not compiled. + - Built-in ``__debug__`` variable: at level 0 this variable expands to ``True``; + at levels 1 and higher it expands to ``False``. + - Source-code line numbers: at levels 0, 1 and 2 source-code line number are + stored along with the bytecode so that exceptions can report the line number + they occurred at; at levels 3 and higher line numbers are not stored. + + The default optimisation level is usually level 0. + """ + ... + +def alloc_emergency_exception_buf(size) -> Incomplete: + """ + Allocate *size* bytes of RAM for the emergency exception buffer (a good + size is around 100 bytes). The buffer is used to create exceptions in cases + when normal RAM allocation would fail (eg within an interrupt handler) and + therefore give useful traceback information in these situations. + + A good way to use this function is to put it at the start of your main script + (eg ``boot.py`` or ``main.py``) and then the emergency exception buffer will be active + for all the code following it. + """ + ... + +def mem_info(verbose: Optional[Any] = None) -> None: + """ + Print information about currently used memory. If the *verbose* argument + is given then extra information is printed. + + The information that is printed is implementation dependent, but currently + includes the amount of stack and heap used. In verbose mode it prints out + the entire heap indicating which blocks are used and which are free. + """ + ... + +def qstr_info(verbose: Optional[Any] = None) -> None: + """ + Print information about currently interned strings. If the *verbose* + argument is given then extra information is printed. + + The information that is printed is implementation dependent, but currently + includes the number of interned strings and the amount of RAM they use. In + verbose mode it prints out the names of all RAM-interned strings. + """ + ... + +def stack_use() -> int: + """ + Return an integer representing the current amount of stack that is being + used. The absolute value of this is not particularly useful, rather it + should be used to compute differences in stack usage at different points. + """ + ... + +def heap_lock() -> int: ... +def heap_unlock() -> int: ... +def heap_locked() -> bool: + """ + Lock or unlock the heap. When locked no memory allocation can occur and a + `MemoryError` will be raised if any heap allocation is attempted. + `heap_locked()` returns a true value if the heap is currently locked. + + These functions can be nested, ie `heap_lock()` can be called multiple times + in a row and the lock-depth will increase, and then `heap_unlock()` must be + called the same number of times to make the heap available again. + + Both `heap_unlock()` and `heap_locked()` return the current lock depth + (after unlocking for the former) as a non-negative integer, with 0 meaning + the heap is not locked. + + If the REPL becomes active with the heap locked then it will be forcefully + unlocked. + + Note: `heap_locked()` is not enabled on most ports by default, + requires ``MICROPY_PY_MICROPYTHON_HEAP_LOCKED``. + """ + ... 
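+
+# Hedged usage sketch (not part of the generated stub): the nesting behaviour of the
+# heap lock functions described above.
+#
+#   import micropython
+#
+#   micropython.heap_lock()
+#   micropython.heap_lock()       # lock depth is now 2; any allocation raises MemoryError
+#   micropython.heap_unlock()     # depth back to 1, heap still locked
+#   micropython.heap_unlock()     # depth 0, allocation is allowed again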
+ +def kbd_intr(chr) -> None: + """ + Set the character that will raise a `KeyboardInterrupt` exception. By + default this is set to 3 during script execution, corresponding to Ctrl-C. + Passing -1 to this function will disable capture of Ctrl-C, and passing 3 + will restore it. + + This function can be used to prevent the capturing of Ctrl-C on the + incoming stream of characters that is usually used for the REPL, in case + that stream is used for other purposes. + """ + ... + +def schedule(func, arg) -> Incomplete: + """ + Schedule the function *func* to be executed "very soon". The function + is passed the value *arg* as its single argument. "Very soon" means that + the MicroPython runtime will do its best to execute the function at the + earliest possible time, given that it is also trying to be efficient, and + that the following conditions hold: + + - A scheduled function will never preempt another scheduled function. + - Scheduled functions are always executed "between opcodes" which means + that all fundamental Python operations (such as appending to a list) + are guaranteed to be atomic. + - A given port may define "critical regions" within which scheduled + functions will never be executed. Functions may be scheduled within + a critical region but they will not be executed until that region + is exited. An example of a critical region is a preempting interrupt + handler (an IRQ). + + A use for this function is to schedule a callback from a preempting IRQ. + Such an IRQ puts restrictions on the code that runs in the IRQ (for example + the heap may be locked) and scheduling a function to call later will lift + those restrictions. + + Note: If `schedule()` is called from a preempting IRQ, when memory + allocation is not allowed and the callback to be passed to `schedule()` is + a bound method, passing this directly will fail. This is because creating a + reference to a bound method causes memory allocation. A solution is to + create a reference to the method in the class constructor and to pass that + reference to `schedule()`. This is discussed in detail here + :ref:`reference documentation ` under "Creation of Python + objects". + + There is a finite queue to hold the scheduled functions and `schedule()` + will raise a `RuntimeError` if the queue is full. + """ + ... + +def viper(func: Callable) -> Callable: + """ + The Viper code emitter is not fully compliant. It supports special Viper native data types in pursuit of performance. + Integer processing is non-compliant because it uses machine words: arithmetic on 32 bit hardware is performed modulo 2**32. + Like the Native emitter Viper produces machine instructions but further optimisations are performed, substantially increasing + performance especially for integer arithmetic and bit manipulations. + """ + ... + +def native(func: Callable) -> Callable: + """ + This causes the MicroPython compiler to emit native CPU opcodes rather than bytecode. + It covers the bulk of the MicroPython functionality, so most functions will require no adaptation. + See: https://docs.micropython.org/en/latest/reference/speed_python.html?highlight=viper#the-native-code-emitter + """ + ... 
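+
+# Hedged usage sketch (not part of the generated stub): deferring work from a hard pin
+# IRQ with schedule(), following the bound-method note above. The pin number and the
+# class/method names are illustrative assumptions.
+#
+#   import micropython
+#   from machine import Pin
+#
+#   class Button:
+#       def __init__(self, pin_id):
+#           self.pin = Pin(pin_id, Pin.IN, Pin.PULL_UP)
+#           self._cb = self._process               # pre-made reference: no allocation in the IRQ
+#           self.pin.irq(self._on_irq, Pin.IRQ_FALLING, hard=True)
+#
+#       def _on_irq(self, pin):
+#           micropython.schedule(self._cb, pin)    # runs _process() soon, outside the IRQ
+#
+#       def _process(self, pin):
+#           print("button pressed on", pin)
+#
+#   btn = Button(14)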
diff --git a/.vscode/Pico-W-Stub/micropython_rp2_rpi_pico_w_stubs-1.21.0.post1.dist-info/INSTALLER b/.vscode/Pico-W-Stub/micropython_rp2_rpi_pico_w_stubs-1.21.0.post1.dist-info/INSTALLER new file mode 100644 index 0000000..a1b589e --- /dev/null +++ b/.vscode/Pico-W-Stub/micropython_rp2_rpi_pico_w_stubs-1.21.0.post1.dist-info/INSTALLER @@ -0,0 +1 @@ +pip diff --git a/.vscode/Pico-W-Stub/micropython_rp2_rpi_pico_w_stubs-1.21.0.post1.dist-info/LICENSE.md b/.vscode/Pico-W-Stub/micropython_rp2_rpi_pico_w_stubs-1.21.0.post1.dist-info/LICENSE.md new file mode 100644 index 0000000..15d4b46 --- /dev/null +++ b/.vscode/Pico-W-Stub/micropython_rp2_rpi_pico_w_stubs-1.21.0.post1.dist-info/LICENSE.md @@ -0,0 +1,22 @@ +MIT License + +Copyright (c) 2022 Jos Verlinde + +Permission is hereby granted, free of charge, to any person obtaining a copy +of this software and associated documentation files (the "Software"), to deal +in the Software without restriction, including without limitation the rights +to use, copy, modify, merge, publish, distribute, sublicense, and/or sell +copies of the Software, and to permit persons to whom the Software is +furnished to do so, subject to the following conditions: + +The above copyright notice and this permission notice shall be included in all +copies or substantial portions of the Software. + +THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR +IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, +FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE +AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER +LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, +OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE +SOFTWARE. + diff --git a/.vscode/Pico-W-Stub/micropython_rp2_rpi_pico_w_stubs-1.21.0.post1.dist-info/METADATA b/.vscode/Pico-W-Stub/micropython_rp2_rpi_pico_w_stubs-1.21.0.post1.dist-info/METADATA new file mode 100644 index 0000000..d9fe4d4 --- /dev/null +++ b/.vscode/Pico-W-Stub/micropython_rp2_rpi_pico_w_stubs-1.21.0.post1.dist-info/METADATA @@ -0,0 +1,67 @@ +Metadata-Version: 2.1 +Name: micropython-rp2-rpi_pico_w-stubs +Version: 1.21.0.post1 +Summary: MicroPython stubs +Home-page: https://github.com/josverl/micropython-stubs#micropython-stubs +License: MIT +Author: josverl +Author-email: josverl@users.noreply.github.com +Requires-Python: >=3.8,<4.0 +Classifier: Development Status :: 4 - Beta +Classifier: License :: OSI Approved :: MIT License +Classifier: Operating System :: OS Independent +Classifier: Programming Language :: Python +Classifier: Programming Language :: Python :: 3 +Classifier: Programming Language :: Python :: 3.8 +Classifier: Programming Language :: Python :: 3.9 +Classifier: Programming Language :: Python :: 3.10 +Classifier: Programming Language :: Python :: 3.11 +Classifier: Programming Language :: Python :: 3 :: Only +Classifier: Programming Language :: Python :: Implementation :: MicroPython +Classifier: Topic :: Software Development :: Build Tools +Classifier: Topic :: Text Editors :: Integrated Development Environments (IDE) +Classifier: Typing :: Typed +Requires-Dist: micropython-stdlib-stubs (>=0.9.0) +Project-URL: Documentation, https://micropython-stubs.readthedocs.io/ +Project-URL: Repository, https://github.com/josverl/micropython-stubs +Description-Content-Type: text/markdown + +# micropython-rp2-rpi_pico_w-stubs + + +This is a stub-only package for MicroPython. 
+It is intended to be installed in a projects virtual environment to allow static type checkers and intellisense features to be used while writing Micropython code. + +The version of this package is alligned the the version of the MicroPython firmware. + - Major, Minor and Patch levels are alligned to the same version as the firmware. + - The post release level is used to publish new releases of the stubs. + +For `Micropython 1.17` the stubs are published as `1.17.post1` ... `1.17.post2` +for `Micropython 1.18` the stubs are published as `1.18.post1` ... `1.18.post2` + +To install the latest stubs: +`pip install -I micropython--stubs` where port is the port of the MicroPython firmware. + +To install the stubs for an older version, such as MicroPython 1.17: +`pip install micropython-stm32-stubs==1.17.*` which will install the last post release of the stubs for MicroPython 1.17. + + +As the creation of the stubs, and merging of the different types is still going though improvements, the stub packages are marked as Beta. +To upgrade stubs to the latest stubs for a specific version use `pip install micropython-stm32-stubs==1.17.* --upgrade` + +If you have suggestions or find any issues with the stubs, please report them in the [MicroPython-stubs Discussions](https://github.com/Josverl/micropython-stubs/discussions) + +For an overview of Micropython Stubs please see: https://micropython-stubs.readthedocs.io/en/main/ + * List of all stubs : https://micropython-stubs.readthedocs.io/en/main/firmware_grp.html + +Included stubs: +* Merged stubs from `stubs/micropython-v1_21_0-rp2-rpi_pico_w-merged` +* Frozen stubs from `stubs/micropython-v1_21_0-frozen/rp2/RPI_PICO_W` +* Core stubs from `stubs/micropython-core` + + +origin | Family | Port | Board | Version +-------|--------|------|-------|-------- +Documentation | micropython | - | - | v1.21.0 +Core | micropython | rp2 | - | v1.21.0 + diff --git a/.vscode/Pico-W-Stub/micropython_rp2_rpi_pico_w_stubs-1.21.0.post1.dist-info/RECORD b/.vscode/Pico-W-Stub/micropython_rp2_rpi_pico_w_stubs-1.21.0.post1.dist-info/RECORD new file mode 100644 index 0000000..9bcb19a --- /dev/null +++ b/.vscode/Pico-W-Stub/micropython_rp2_rpi_pico_w_stubs-1.21.0.post1.dist-info/RECORD @@ -0,0 +1,99 @@ +__builtins__.pyi,sha256=SRa_5xpV_qKoNdAgXY1G1TGP59HAYvZxp-ew4M2s3dY,1068 +_asyncio.pyi,sha256=oJgEZ1Axm6cN0eO1Ke7oj5YckMoicmwKfUO9vL_Y810,416 +_boot.pyi,sha256=sE1k2jzwUqn1o5YjnKlavj6468D8XQNhJNJ69gVrdtg,71 +_boot_fat.pyi,sha256=sE1k2jzwUqn1o5YjnKlavj6468D8XQNhJNJ69gVrdtg,71 +_onewire.pyi,sha256=_BXH4KbavKcoxnFA9OF24eERDydSk7EfXlx6N2mlBw8,343 +_rp2.pyi,sha256=MCGcgPRjp_mQlh3SN7TWalfzSXP6GAC2ojkCFPpSQj4,1556 +_thread.pyi,sha256=8qtf48y90MbHs4wEY_eSDeZ7QP__qf5DnYSJa7r18Ps,953 +aioble/__init__.pyi,sha256=Wjhrff1BWzqTqQh5pMss5q9gT51FhkzMF3tN_QV1cGc,600 +aioble/central.pyi,sha256=hZmbJnu3ccyO9Z9zjd29KiW6CGTvBmsNJx2Nv1nzLzo,2448 +aioble/client.pyi,sha256=KkXt3661i_eKYozYTW1BaZkZjbpOhrhfK1SBxKpC0eo,4180 +aioble/core.pyi,sha256=juai87amOQhoM_Vum3OTUcbkNiQVhXT3mYNyGzLLhe0,520 +aioble/device.pyi,sha256=7lzYKge6yfFNWwmwyViUQgEClY6lVXWKPneR4oCtifc,2311 +aioble/l2cap.pyi,sha256=k4NiXgzbvI25TqyfPbuWfu_v0KmF2dVXGtt3FuaicAs,1509 +aioble/peripheral.pyi,sha256=Rz6k4Jpk-_h6r_BAXp6-rwfnPMRcNJ8KT1uhiujugwM,1425 +aioble/security.pyi,sha256=-POdQrFOH67f9vtr2vbrf5U4TdZzipfx_qzRWDo6wEM,1071 +aioble/server.pyi,sha256=Wd4ESEM63-A-7q3sPS3ed6Pl19j4DVh112C2WqUCaxM,3364 +array.pyi,sha256=ZPtcObYk-XaI4AknOYrfMOJPXOS2ho0p35xdCgYcuVQ,1090 
+asyncio/__init__.pyi,sha256=fa4aomSb_KMbJVCimrP6IfegajK_KSN8muiH_lbqc7k,132 +asyncio/core.pyi,sha256=xzNDXF3b6zq-OGz22ZPoPIJ_m5TVxgSg0YfUmG_CDzY,1530 +asyncio/event.pyi,sha256=fFBZxUa_PzdiiE8I14F9IZeqg9lJgIAH8s--6SBd-9E,623 +asyncio/funcs.pyi,sha256=3uEqPbVQPEqsaids5pFDkvmYUpufmkw4XuoyjwrouvI,390 +asyncio/lock.pyi,sha256=QF9HAL_ayvACAPYG9Rd2kB0-WeUFYPZazG-MFFVSvtE,414 +asyncio/stream.pyi,sha256=Uih1xMqHeZY4RwBQ4g-ut3_NauUjF10yxjGvh4Z3zeQ,1427 +binascii.pyi,sha256=kOai4wTFKZ1BQkyHe5WO2fkRDGST6eEetLS3KdtQ388,1488 +bluetooth.pyi,sha256=eKIXx2TJHUUHtKsSL4NawmGdnp02pCLbURKMQzSPvv0,30033 +cmath.pyi,sha256=Hvtu5G3iSwPeuJHZLWliHC3_g07MtMBEVErlOCRXqO0,1529 +collections.pyi,sha256=veeBTws6XDnpGS2N6sh0lKECaVflUC61SkJXbeakdeY,3966 +cryptolib.pyi,sha256=pV8vbhqweB83T5J70RwDAFpn9rNJSkHH5YZbS23EJ98,1739 +deflate.pyi,sha256=xraLamaDJ2rDZtVYjXLKlIM-F-E1xf8x4oA3qEgqeDM,3311 +dht.pyi,sha256=zamvhZo46pXwloiPKKlldLlMYmb7waNyZE1aeUnz-vA,344 +ds18x20.pyi,sha256=-BUsQj1Y155ooBHsobP6SzzNqWrw3SO8wnH3EpI9Z0A,404 +errno.pyi,sha256=fT9TQhrfWRbRou7wVcyJQWPysPh372koMw_GNZCC7SU,777 +framebuf.pyi,sha256=AlWAUXju3GDe8cD5SUFMb3iz90uLkbFet0BS64eDKUg,6474 +gc.pyi,sha256=rh6cpwqew57KbQm6aD92AY0cb0mSPM8KFrqxjQpLX6I,2524 +hashlib.pyi,sha256=9b65Uc6P92RHUGIgf00qCHOb2KSytlkRUnSNEj8q4r8,1743 +heapq.pyi,sha256=JE1S9UQ38DvuchjejRgb65S_tAYvK8CYYpQT4vEl4JA,1000 +io.pyi,sha256=ltx99WnM7_72vpSIRbTiiSh9Z5D90SbCeKaNrGISsOs,2702 +json.pyi,sha256=mP4C0XMgUt1ZcT7AQSteCY-n0rd0bREU_dnSQnjO5d0,1454 +lwip.pyi,sha256=0vIYgPwL5EnAbcoS1U2p_8e1SgPWiIYNevZDxMrO0kk,1582 +machine.pyi,sha256=HVtm4Fkv-HfqGxcE0koQVerU7acXqGkYBf8SKCKMPoM,50379 +math.pyi,sha256=KgjOuv3rpLNWcKWe7fVkB0PQ0Spv2VsBeE-pPhWNCWs,4827 +micropython.pyi,sha256=a72FE6dfhvby6MIVPeTOPcgh46wsJUyRJr4W4TJsGKA,8423 +micropython_rp2_rpi_pico_w_stubs-1.21.0.post1.dist-info/INSTALLER,sha256=zuuue4knoyJ-UwPPXg8fezS7VCrXJQrAP7zeNuwvFQg,4 +micropython_rp2_rpi_pico_w_stubs-1.21.0.post1.dist-info/LICENSE.md,sha256=EerY3Evf4ZqwZBZMGIokOHR70txtsAlS_GGHuxmxUJY,1092 +micropython_rp2_rpi_pico_w_stubs-1.21.0.post1.dist-info/METADATA,sha256=i_ZZ0I4ExOQn4KCrRnXpajKX5Rdco9aYtYIPJdqAcds,3225 +micropython_rp2_rpi_pico_w_stubs-1.21.0.post1.dist-info/RECORD,, +micropython_rp2_rpi_pico_w_stubs-1.21.0.post1.dist-info/REQUESTED,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0 +micropython_rp2_rpi_pico_w_stubs-1.21.0.post1.dist-info/WHEEL,sha256=d2fvjOD7sXsVzChCqf0Ty0JbHKBaLYwDbGQDwQTnJ50,88 +mip/__init__.pyi,sha256=Zd0De2fcvpCue3E5iPFUmmXdpf_IeciR9Pty29Id_fk,598 +neopixel.pyi,sha256=jOhE-kR8bRy0dEwqFLLY3Z7v7dp0n2XFwXTCwty0TuY,448 +network.pyi,sha256=OQjwBgmxsx5ICq3GCOL3vzqIfUBb4X_gUTEPJ9PWIQQ,7633 +ntptime.pyi,sha256=CmHJaGQFWjMig6JCTnWF0tY0KIw6jFWN362TnJWaWZQ,72 +onewire.pyi,sha256=DgLSo0kBX1ox2Qm0x76XY00m2x17GM0uh8J4GWU1Tgs,617 +os.pyi,sha256=BNYJHi5PymNeepTYUD27GoUDmXWrAreN3bIiU8-sB64,9677 +platform.pyi,sha256=3xuJleRh1wBIkS51WeiUkfQtwgYK1CLjDqXteBQIWaY,1463 +random.pyi,sha256=GDIgpkmplAsckapDEHlkkRdsRq8fCS_hBBRCS5gFmZI,2687 +requests.pyi,sha256=Mk3u-Y3RUz3YXbe8bkC5gzNE12dF8Z7SGEp0-lhckLQ,609 +requests/__init__.pyi,sha256=yNYrzq9TMRID3tAXLhHRRdzaiku2o6OfjDXjaF2bhYA,739 +rp2.pyi,sha256=ghYwOcTINmxwfGdaL3tE1z5soZvskgoBbjvSLiuOb2E,1918 +select.pyi,sha256=LKJ75d0F4BJg_7VAH3L07H1qqG3IkLuVEc82j-xMFWM,4114 +socket.pyi,sha256=YUTOaiosablCnk-corhGdTRNpQScrllJxdw9pZyWrYo,11622 +ssl.pyi,sha256=u94PkXN_NoaRnj2bBdMFq2x7JUHVmjeJcA8tvL1wMyI,3758 +struct.pyi,sha256=4Mf6SQIchLMnVNPnFG-iNgeIqqpaAYx89xvz1FAKJQA,4316 +sys.pyi,sha256=aL8EhWS78hy24Ir-y4QWB6QYdj5hYVooEiNQZ-MxMK0,1442 
+time.pyi,sha256=Df5LIT2n7WwXRXjLKBlqQ7g0ZHsQe1mnkTdyKyAtYno,13313 +uarray.pyi,sha256=ZPtcObYk-XaI4AknOYrfMOJPXOS2ho0p35xdCgYcuVQ,1090 +uasyncio.pyi,sha256=eu4a7KxASOh_jGsRn7zmGtUt2rJYtOpjVuGYB4ty4fc,28 +uasyncio/__init__.pyi,sha256=bmpai6ZIJXDZ00aZz11BZZ92VGbfdQwqUL1jmUz3ZHU,2015 +uasyncio/core.pyi,sha256=6mmEJjdYJhiYRHY4c4Hs47AYuY2zEq_ZJXitr4XSlR8,1029 +uasyncio/event.pyi,sha256=OGzLIKk8AkbwIMK5l5CPy4QZTNc0uFQJzUftEgeUOzQ,580 +uasyncio/funcs.pyi,sha256=rHBK8jMGD6X6rvpy7jYMCukHJ1SxFg7GfAxlXug_XX0,147 +uasyncio/lock.pyi,sha256=LQ6j1whw6Oe2cRGT_gsQXkd16UrnsB5p33q8X73l220,259 +uasyncio/stream.pyi,sha256=89XP2eqkvEmo3VZTNC1V0IAne8b_xEkPJQfkiN0i6Mg,1746 +ubinascii.pyi,sha256=kOai4wTFKZ1BQkyHe5WO2fkRDGST6eEetLS3KdtQ388,1488 +ubluetooth.pyi,sha256=eKIXx2TJHUUHtKsSL4NawmGdnp02pCLbURKMQzSPvv0,30033 +ucollections.pyi,sha256=veeBTws6XDnpGS2N6sh0lKECaVflUC61SkJXbeakdeY,3966 +ucryptolib.pyi,sha256=pV8vbhqweB83T5J70RwDAFpn9rNJSkHH5YZbS23EJ98,1739 +uctypes.pyi,sha256=tUGuvBHmFbcLBOZCHZ--OKBe6dZdee-8B8ALYbTuV2Q,2417 +uerrno.pyi,sha256=fT9TQhrfWRbRou7wVcyJQWPysPh372koMw_GNZCC7SU,777 +uhashlib.pyi,sha256=9b65Uc6P92RHUGIgf00qCHOb2KSytlkRUnSNEj8q4r8,1743 +uheapq.pyi,sha256=JE1S9UQ38DvuchjejRgb65S_tAYvK8CYYpQT4vEl4JA,1000 +uio.pyi,sha256=ltx99WnM7_72vpSIRbTiiSh9Z5D90SbCeKaNrGISsOs,2702 +ujson.pyi,sha256=mP4C0XMgUt1ZcT7AQSteCY-n0rd0bREU_dnSQnjO5d0,1454 +umachine.pyi,sha256=HVtm4Fkv-HfqGxcE0koQVerU7acXqGkYBf8SKCKMPoM,50379 +uos.pyi,sha256=BNYJHi5PymNeepTYUD27GoUDmXWrAreN3bIiU8-sB64,9677 +uplatform.pyi,sha256=3xuJleRh1wBIkS51WeiUkfQtwgYK1CLjDqXteBQIWaY,1463 +urandom.pyi,sha256=GDIgpkmplAsckapDEHlkkRdsRq8fCS_hBBRCS5gFmZI,2687 +ure.pyi,sha256=bLeXSxERwfWOsjH_TCaRE4bcguKddfOgSAf2Bw9Fu7o,239 +urequests.pyi,sha256=eu4a7KxASOh_jGsRn7zmGtUt2rJYtOpjVuGYB4ty4fc,28 +uselect.pyi,sha256=LKJ75d0F4BJg_7VAH3L07H1qqG3IkLuVEc82j-xMFWM,4114 +usocket.pyi,sha256=YUTOaiosablCnk-corhGdTRNpQScrllJxdw9pZyWrYo,11622 +ussl.pyi,sha256=u94PkXN_NoaRnj2bBdMFq2x7JUHVmjeJcA8tvL1wMyI,3758 +ustruct.pyi,sha256=4Mf6SQIchLMnVNPnFG-iNgeIqqpaAYx89xvz1FAKJQA,4316 +usys.pyi,sha256=aL8EhWS78hy24Ir-y4QWB6QYdj5hYVooEiNQZ-MxMK0,1442 +utime.pyi,sha256=Df5LIT2n7WwXRXjLKBlqQ7g0ZHsQe1mnkTdyKyAtYno,13313 +uwebsocket.pyi,sha256=1wiEl4cRkoZE4jwWC-38w-Aowz3W5JZuv4KXHDYhsr8,469 +webrepl.pyi,sha256=Du-Qx0WvAvNFp5E6NG7a2lJv7m5z7KEWpUNRTCZmVO4,513 +webrepl_setup.pyi,sha256=3AjgA3EbRBgj6rUkxc_isYHihM-pCGnVANegahw1jfE,232 +websocket.pyi,sha256=1wiEl4cRkoZE4jwWC-38w-Aowz3W5JZuv4KXHDYhsr8,469 diff --git a/.vscode/Pico-W-Stub/micropython_rp2_rpi_pico_w_stubs-1.21.0.post1.dist-info/REQUESTED b/.vscode/Pico-W-Stub/micropython_rp2_rpi_pico_w_stubs-1.21.0.post1.dist-info/REQUESTED new file mode 100644 index 0000000..e69de29 diff --git a/.vscode/Pico-W-Stub/micropython_rp2_rpi_pico_w_stubs-1.21.0.post1.dist-info/WHEEL b/.vscode/Pico-W-Stub/micropython_rp2_rpi_pico_w_stubs-1.21.0.post1.dist-info/WHEEL new file mode 100644 index 0000000..3695fd1 --- /dev/null +++ b/.vscode/Pico-W-Stub/micropython_rp2_rpi_pico_w_stubs-1.21.0.post1.dist-info/WHEEL @@ -0,0 +1,4 @@ +Wheel-Version: 1.0 +Generator: poetry-core 1.7.0 +Root-Is-Purelib: true +Tag: py3-none-any diff --git a/.vscode/Pico-W-Stub/micropython_stdlib_stubs-1.0.0.dist-info/INSTALLER b/.vscode/Pico-W-Stub/micropython_stdlib_stubs-1.0.0.dist-info/INSTALLER new file mode 100644 index 0000000..a1b589e --- /dev/null +++ b/.vscode/Pico-W-Stub/micropython_stdlib_stubs-1.0.0.dist-info/INSTALLER @@ -0,0 +1 @@ +pip diff --git a/.vscode/Pico-W-Stub/micropython_stdlib_stubs-1.0.0.dist-info/LICENSE.md 
b/.vscode/Pico-W-Stub/micropython_stdlib_stubs-1.0.0.dist-info/LICENSE.md new file mode 100644 index 0000000..8b19eb0 --- /dev/null +++ b/.vscode/Pico-W-Stub/micropython_stdlib_stubs-1.0.0.dist-info/LICENSE.md @@ -0,0 +1,239 @@ +MIT License + +Copyright (c) 2023 Jos Verlinde + +Permission is hereby granted, free of charge, to any person obtaining a copy +of this software and associated documentation files (the "Software"), to deal +in the Software without restriction, including without limitation the rights +to use, copy, modify, merge, publish, distribute, sublicense, and/or sell +copies of the Software, and to permit persons to whom the Software is +furnished to do so, subject to the following conditions: + +The above copyright notice and this permission notice shall be included in all +copies or substantial portions of the Software. + +THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR +IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, +FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE +AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER +LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, +OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE +SOFTWARE. + +-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=- +Parts of this package are licenced are licensed under different licenses , reproduced below. +-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=- + +The "typeshed" project is licensed under the terms of the Apache license, as +reproduced below. + += = = = = + +Apache License + Version 2.0, January 2004 + http://www.apache.org/licenses/ + + TERMS AND CONDITIONS FOR USE, REPRODUCTION, AND DISTRIBUTION + + 1. Definitions. + + "License" shall mean the terms and conditions for use, reproduction, + and distribution as defined by Sections 1 through 9 of this document. + + "Licensor" shall mean the copyright owner or entity authorized by + the copyright owner that is granting the License. + + "Legal Entity" shall mean the union of the acting entity and all + other entities that control, are controlled by, or are under common + control with that entity. For the purposes of this definition, + "control" means (i) the power, direct or indirect, to cause the + direction or management of such entity, whether by contract or + otherwise, or (ii) ownership of fifty percent (50%) or more of the + outstanding shares, or (iii) beneficial ownership of such entity. + + "You" (or "Your") shall mean an individual or Legal Entity + exercising permissions granted by this License. + + "Source" form shall mean the preferred form for making modifications, + including but not limited to software source code, documentation + source, and configuration files. + + "Object" form shall mean any form resulting from mechanical + transformation or translation of a Source form, including but + not limited to compiled object code, generated documentation, + and conversions to other media types. + + "Work" shall mean the work of authorship, whether in Source or + Object form, made available under the License, as indicated by a + copyright notice that is included in or attached to the work + (an example is provided in the Appendix below). 
+ + "Derivative Works" shall mean any work, whether in Source or Object + form, that is based on (or derived from) the Work and for which the + editorial revisions, annotations, elaborations, or other modifications + represent, as a whole, an original work of authorship. For the purposes + of this License, Derivative Works shall not include works that remain + separable from, or merely link (or bind by name) to the interfaces of, + the Work and Derivative Works thereof. + + "Contribution" shall mean any work of authorship, including + the original version of the Work and any modifications or additions + to that Work or Derivative Works thereof, that is intentionally + submitted to Licensor for inclusion in the Work by the copyright owner + or by an individual or Legal Entity authorized to submit on behalf of + the copyright owner. For the purposes of this definition, "submitted" + means any form of electronic, verbal, or written communication sent + to the Licensor or its representatives, including but not limited to + communication on electronic mailing lists, source code control systems, + and issue tracking systems that are managed by, or on behalf of, the + Licensor for the purpose of discussing and improving the Work, but + excluding communication that is conspicuously marked or otherwise + designated in writing by the copyright owner as "Not a Contribution." + + "Contributor" shall mean Licensor and any individual or Legal Entity + on behalf of whom a Contribution has been received by Licensor and + subsequently incorporated within the Work. + + 2. Grant of Copyright License. Subject to the terms and conditions of + this License, each Contributor hereby grants to You a perpetual, + worldwide, non-exclusive, no-charge, royalty-free, irrevocable + copyright license to reproduce, prepare Derivative Works of, + publicly display, publicly perform, sublicense, and distribute the + Work and such Derivative Works in Source or Object form. + + 3. Grant of Patent License. Subject to the terms and conditions of + this License, each Contributor hereby grants to You a perpetual, + worldwide, non-exclusive, no-charge, royalty-free, irrevocable + (except as stated in this section) patent license to make, have made, + use, offer to sell, sell, import, and otherwise transfer the Work, + where such license applies only to those patent claims licensable + by such Contributor that are necessarily infringed by their + Contribution(s) alone or by combination of their Contribution(s) + with the Work to which such Contribution(s) was submitted. If You + institute patent litigation against any entity (including a + cross-claim or counterclaim in a lawsuit) alleging that the Work + or a Contribution incorporated within the Work constitutes direct + or contributory patent infringement, then any patent licenses + granted to You under this License for that Work shall terminate + as of the date such litigation is filed. + + 4. Redistribution. 
You may reproduce and distribute copies of the + Work or Derivative Works thereof in any medium, with or without + modifications, and in Source or Object form, provided that You + meet the following conditions: + + (a) You must give any other recipients of the Work or + Derivative Works a copy of this License; and + + (b) You must cause any modified files to carry prominent notices + stating that You changed the files; and + + (c) You must retain, in the Source form of any Derivative Works + that You distribute, all copyright, patent, trademark, and + attribution notices from the Source form of the Work, + excluding those notices that do not pertain to any part of + the Derivative Works; and + + (d) If the Work includes a "NOTICE" text file as part of its + distribution, then any Derivative Works that You distribute must + include a readable copy of the attribution notices contained + within such NOTICE file, excluding those notices that do not + pertain to any part of the Derivative Works, in at least one + of the following places: within a NOTICE text file distributed + as part of the Derivative Works; within the Source form or + documentation, if provided along with the Derivative Works; or, + within a display generated by the Derivative Works, if and + wherever such third-party notices normally appear. The contents + of the NOTICE file are for informational purposes only and + do not modify the License. You may add Your own attribution + notices within Derivative Works that You distribute, alongside + or as an addendum to the NOTICE text from the Work, provided + that such additional attribution notices cannot be construed + as modifying the License. + + You may add Your own copyright statement to Your modifications and + may provide additional or different license terms and conditions + for use, reproduction, or distribution of Your modifications, or + for any such Derivative Works as a whole, provided Your use, + reproduction, and distribution of the Work otherwise complies with + the conditions stated in this License. + + 5. Submission of Contributions. Unless You explicitly state otherwise, + any Contribution intentionally submitted for inclusion in the Work + by You to the Licensor shall be under the terms and conditions of + this License, without any additional terms or conditions. + Notwithstanding the above, nothing herein shall supersede or modify + the terms of any separate license agreement you may have executed + with Licensor regarding such Contributions. + + 6. Trademarks. This License does not grant permission to use the trade + names, trademarks, service marks, or product names of the Licensor, + except as required for reasonable and customary use in describing the + origin of the Work and reproducing the content of the NOTICE file. + + 7. Disclaimer of Warranty. Unless required by applicable law or + agreed to in writing, Licensor provides the Work (and each + Contributor provides its Contributions) on an "AS IS" BASIS, + WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or + implied, including, without limitation, any warranties or conditions + of TITLE, NON-INFRINGEMENT, MERCHANTABILITY, or FITNESS FOR A + PARTICULAR PURPOSE. You are solely responsible for determining the + appropriateness of using or redistributing the Work and assume any + risks associated with Your exercise of permissions under this License. + + 8. Limitation of Liability. 
In no event and under no legal theory, + whether in tort (including negligence), contract, or otherwise, + unless required by applicable law (such as deliberate and grossly + negligent acts) or agreed to in writing, shall any Contributor be + liable to You for damages, including any direct, indirect, special, + incidental, or consequential damages of any character arising as a + result of this License or out of the use or inability to use the + Work (including but not limited to damages for loss of goodwill, + work stoppage, computer failure or malfunction, or any and all + other commercial damages or losses), even if such Contributor + has been advised of the possibility of such damages. + + 9. Accepting Warranty or Additional Liability. While redistributing + the Work or Derivative Works thereof, You may choose to offer, + and charge a fee for, acceptance of support, warranty, indemnity, + or other liability obligations and/or rights consistent with this + License. However, in accepting such obligations, You may act only + on Your own behalf and on Your sole responsibility, not on behalf + of any other Contributor, and only if You agree to indemnify, + defend, and hold each Contributor harmless for any liability + incurred by, or claims asserted against, such Contributor by reason + of your accepting any such warranty or additional liability. + + END OF TERMS AND CONDITIONS + + += = = = = + +Parts of typeshed are licensed under different licenses (like the MIT +license), reproduced below. + += = = = = + +The MIT License + +Copyright (c) 2015 Jukka Lehtosalo and contributors + +Permission is hereby granted, free of charge, to any person obtaining a +copy of this software and associated documentation files (the "Software"), +to deal in the Software without restriction, including without limitation +the rights to use, copy, modify, merge, publish, distribute, sublicense, +and/or sell copies of the Software, and to permit persons to whom the +Software is furnished to do so, subject to the following conditions: + +The above copyright notice and this permission notice shall be included in +all copies or substantial portions of the Software. + +THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR +IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, +FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE +AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER +LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING +FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER +DEALINGS IN THE SOFTWARE. 
+
+= = = = =
diff --git a/.vscode/Pico-W-Stub/micropython_stdlib_stubs-1.0.0.dist-info/METADATA b/.vscode/Pico-W-Stub/micropython_stdlib_stubs-1.0.0.dist-info/METADATA
new file mode 100644
index 0000000..d460fce
--- /dev/null
+++ b/.vscode/Pico-W-Stub/micropython_stdlib_stubs-1.0.0.dist-info/METADATA
@@ -0,0 +1,39 @@
+Metadata-Version: 2.1
+Name: micropython-stdlib-stubs
+Version: 1.0.0
+Summary: Micropython stdlib is a reduced and augmented copy of typeshed's stdlib for use by MicroPython stub packages
+Home-page: https://github.com/josverl/micropython-stubs#micropython-stubs
+License: MIT
+Author: josverl
+Author-email: josverl@users.noreply.github.com
+Requires-Python: >=3.8,<4.0
+Classifier: Development Status :: 4 - Beta
+Classifier: License :: OSI Approved :: MIT License
+Classifier: Operating System :: OS Independent
+Classifier: Programming Language :: Python
+Classifier: Programming Language :: Python :: 3
+Classifier: Programming Language :: Python :: 3.8
+Classifier: Programming Language :: Python :: 3.9
+Classifier: Programming Language :: Python :: 3.10
+Classifier: Programming Language :: Python :: 3.11
+Classifier: Programming Language :: Python :: 3 :: Only
+Classifier: Programming Language :: Python :: Implementation :: MicroPython
+Classifier: Topic :: Software Development :: Build Tools
+Classifier: Topic :: Text Editors :: Integrated Development Environments (IDE)
+Classifier: Typing :: Typed
+Project-URL: Documentation, https://micropython-stubs.readthedocs.io/
+Project-URL: Repository, https://github.com/josverl/micropython-stubs
+Description-Content-Type: text/markdown
+
+
+A limited-size copy of typeshed's stdlib directory.
+https://github.com/python/typeshed/tree/main/stdlib
+
+This is used as a dependency in the micropython-*-stub packages to allow overriding of some of the stdlib modules with MicroPython-specific implementations.
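+
+As an illustrative sketch (an assumed example, not part of this package), a type checker that picks up these stubs together with one of the port stub packages can validate MicroPython code such as:
+
+    # not executed here: the names below are resolved from the installed stubs
+    import os
+    import sys
+
+    print(sys.platform)          # standard library API
+    print(os.uname().machine)    # MicroPython-specific addition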
+ +If you have suggestions or find any issues with the stubs, please report them in the [MicroPython-stubs Discussions](https://github.com/Josverl/micropython-stubs/discussions) + +For an overview of Micropython Stubs please see: https://micropython-stubs.readthedocs.io/en/main/ + * List of all stubs : https://micropython-stubs.readthedocs.io/en/main/firmware_grp.html + + diff --git a/.vscode/Pico-W-Stub/micropython_stdlib_stubs-1.0.0.dist-info/RECORD b/.vscode/Pico-W-Stub/micropython_stdlib_stubs-1.0.0.dist-info/RECORD new file mode 100644 index 0000000..cf80c43 --- /dev/null +++ b/.vscode/Pico-W-Stub/micropython_stdlib_stubs-1.0.0.dist-info/RECORD @@ -0,0 +1,68 @@ +micropython_stdlib_stubs-1.0.0.dist-info/INSTALLER,sha256=zuuue4knoyJ-UwPPXg8fezS7VCrXJQrAP7zeNuwvFQg,4 +micropython_stdlib_stubs-1.0.0.dist-info/LICENSE.md,sha256=XnIlPftszZeoPSWf1jwR9a1w2zp3zOL_-oC0qRi-gbE,13067 +micropython_stdlib_stubs-1.0.0.dist-info/METADATA,sha256=KNci2h2_liQGHy_8lwV89mZaWIbUFW0pxIcrvP8mJ1Y,1908 +micropython_stdlib_stubs-1.0.0.dist-info/RECORD,, +micropython_stdlib_stubs-1.0.0.dist-info/WHEEL,sha256=d2fvjOD7sXsVzChCqf0Ty0JbHKBaLYwDbGQDwQTnJ50,88 +stdlib/__future__.pyi,sha256=s-Y8IJP5L0EswG_QSR_xENtt9dEC0ItL_aslaA1UmME,951 +stdlib/_ast.pyi,sha256=ba_Kp76gZrGxVzrJDXs48Mqx6zYCJ-TFw23Ef0hwDSs,15249 +stdlib/_codecs.pyi,sha256=wSOMpfy-2VFBVy9uqzVqMvLP1rfFv94z4GOCtONwdsc,7283 +stdlib/_collections_abc.pyi,sha256=_KGWOxzKJOU2gQu4vQ66zyoknNHWOHg5VElIrB-GFpY,2204 +stdlib/_decimal.pyi,sha256=_kl9Zl3aws7sXZH8n5Nw885SYCrYhNNThB9IJ939lGs,14057 +stdlib/_typeshed/__init__.pyi,sha256=dxOvbssi4kLrgRJ-tlhZki85eDQ4vVJ5z3DZ7-YH3KM,10964 +stdlib/_typeshed/dbapi.pyi,sha256=BJz51q_GDfs3yYJa18jemTilsBKs-m7jV-5ZfM_7owA,1675 +stdlib/_typeshed/wsgi.pyi,sha256=iExnjwtr89qEPnoCVG48EtM8OeCJ70WGXyAUJwmu0O4,1677 +stdlib/_typeshed/xml.pyi,sha256=Ffv4MfMivR0nrzVZxli93eddTSRcZR8itwNKdc0ZZnQ,499 +stdlib/abc.pyi,sha256=NCUDS1byk9ykHiqN1WRB0Ofyewm25MFOj12k-uiGugo,1997 +stdlib/asyncio/__init__.pyi,sha256=Rha03nC28gVISpLc2COjbPX--6UF7Sq4qIa-G6pkjRw,1266 +stdlib/asyncio/base_events.pyi,sha256=62qVPV4W7902jhyalPHXk1Zd2WTHVKgoHpN4r3sCW9A,19890 +stdlib/asyncio/base_futures.pyi,sha256=cHkQsNjGfzBjHZgbyPnAvlsvFYCxOCocJ_mHQVx0HZ4,768 +stdlib/asyncio/base_tasks.pyi,sha256=PYv3qwMz2WIqDs3GGdLTaJfiJBTuUwIK0PUEKHIl9Uc,413 +stdlib/asyncio/constants.pyi,sha256=6QF1yNVLZmHDjtnuXStdw-8SWnxzQX8SXn5SnAqKvDs,599 +stdlib/asyncio/coroutines.pyi,sha256=hRcymXIP0CVXqBpRoDzGtFdLHuTQXbO2y0Kap0d3ls0,1120 +stdlib/asyncio/events.pyi,sha256=vPoqrxAVQwVfzv18BMjD71LpX2riA7J1IYj9t5U3WWQ,24512 +stdlib/asyncio/exceptions.pyi,sha256=Sp-bv5S0sVEb11niAgCtX4D2-zoECgJL-EHiGxC0ke4,1039 +stdlib/asyncio/format_helpers.pyi,sha256=7bHuuROgjnZAgQxjgG1IJrB_RH_PQymZ4c7BUCNt9YY,907 +stdlib/asyncio/futures.pyi,sha256=L27XOkdyEAmEFHPimch3HWY6bzsWSvViUxmVXr1rJIE,2717 +stdlib/asyncio/locks.pyi,sha256=ZmCUTwT0Urzu0X8kP691Dva3t42fdiG62gwMDpyyQBU,4203 +stdlib/asyncio/log.pyi,sha256=--UJmDmbuqm1EdrcBW5c94k3pzoNwlZKjsqn-ckSpPA,42 +stdlib/asyncio/mixins.pyi,sha256=JUtyAosLnudv46MZaTZvigzn5eoLZnVHajWTSOJ_CQY,242 +stdlib/asyncio/proactor_events.pyi,sha256=q0tMiV-y81mJh16xcXa21hmlidXyULUn2C9FY6_uQ3k,3009 +stdlib/asyncio/protocols.pyi,sha256=3ooDCGHhxxYPcM5o20stbqOPNd-RBbiIDNk4ungvJqU,1665 +stdlib/asyncio/queues.pyi,sha256=TntiRYcbhHQ8pFYG9tmD6p9WUbE-s6kBPlYEzu_oVc8,1310 +stdlib/asyncio/runners.pyi,sha256=BNNnBWEveWcf-Qe1QoAe99LD4vmWKHdoMSXe2Rph098,1288 +stdlib/asyncio/selector_events.pyi,sha256=-40IJS-J9MsqcwKb9SkSsO-5-679O_7ocPwOZQZ44yA,231 
+stdlib/asyncio/sslproto.pyi,sha256=XT4zcTMZYTlRh8K2WSxHDz5Xw_k-otF5ShKYUDXI-30,6595 +stdlib/asyncio/staggered.pyi,sha256=Qwgygm1Wd5ydD1Q9Iwu1lCq5uHRl0q_p5wca_HD7ask,351 +stdlib/asyncio/streams.pyi,sha256=ZlYMdF5DydffhfXWrHAqky3DtiNxO31OzMRN9BhTf8I,6759 +stdlib/asyncio/taskgroups.pyi,sha256=l399dBgZNG3_fVZ6Cd3YrmrLT6BVoCFdvkqoyGnQdbQ,646 +stdlib/asyncio/tasks.pyi,sha256=6aKXS_Oh0s4buOTZkBlmIrNIR-6BNhLFjJoEZSBUOUs,14465 +stdlib/asyncio/threads.pyi,sha256=2luO0lvlHbVhlIaZqQfFeVdr8xHn5hlmhl2QBP9YLlU,274 +stdlib/asyncio/timeouts.pyi,sha256=brKulE6iRhpq7_8kdJYDdw-QyVTkrzbK_yYiPycIiyA,653 +stdlib/asyncio/transports.pyi,sha256=lZFZ2sBsXpa1TGFnIWIBUMMWpqgQMIHjPuPHjJRHCLw,2094 +stdlib/asyncio/trsock.pyi,sha256=QFrlTSefkKqkm9CvkQE-DrbJ-O0cbj3NN9MjUFBE250,5161 +stdlib/asyncio/unix_events.pyi,sha256=KtpYuURjnn9lgipPv5FW7_S0yrZUKkBpxqLv1QjoZwo,5847 +stdlib/builtins.pyi,sha256=slGhgVHGPZM_S2nDlAzR2s0pACuGWadcpohAvVdVDz4,85034 +stdlib/codecs.pyi,sha256=yvQyUT4IcMHUB_5N6CpOX5dz7IC0UasuoQtS2zpFlNo,11867 +stdlib/collections/__init__.pyi,sha256=K_CAL3sFw6sYSlV7dExVisyKVltE8wdl5_Ol-VVmXi8,21370 +stdlib/collections/abc.pyi,sha256=7E24ytmwcwkHwpzvttFRMn9nJfpNDW6YYulI0H9nxxI,81 +stdlib/contextlib.pyi,sha256=SotLFn_0pEX3aqSGKF1EzuQLUflAbNSkfG-fkGqonpY,9193 +stdlib/contextvars.pyi,sha256=e1LEavWqiBfSZOscHUnLsazlho_3a1KhMfzvWGD-F7s,2472 +stdlib/dataclasses.pyi,sha256=OU1-LN29Qf4TWDS_wJEX-cb-M2JH201gQqgWNxpLdLc,9719 +stdlib/decimal.pyi,sha256=SsrITV7HUaFdSbalRQapbaA726XRSduQCtEBU8Ev9U4,119 +stdlib/enum.pyi,sha256=qV1OqUsRtz8l81Q-Hfsc0sa6O9fbg4nD6ALmK7eRQdI,10984 +stdlib/fractions.pyi,sha256=ZDMRzxCoqgPkPGPiQR4AVOYjx1Gbo5_0IDjo61ncBuA,5754 +stdlib/functools.pyi,sha256=PB8UjNUNW8bsdOfrmReFzdrQrtzIB4r1e_5YQxCzsDY,8850 +stdlib/io.pyi,sha256=Fx1e2zuobNnVGjlnFm6swi2H3lVMly8LrOw7SbNzJvc,7631 +stdlib/numbers.pyi,sha256=0ze1DSG3UBfMDIjPFr-htK4FCF4R6FGY69KvSHxSkL8,4043 +stdlib/os/__init__.pyi,sha256=59wnHVcQ5OD22a1GMNU4l-jmI703jWH5fWL5U17cohg,38450 +stdlib/queue.pyi,sha256=k88XJePCEN3Vw-hTgj1-Vod8AzZ81E6hztLcmRnbM90,2131 +stdlib/re.pyi,sha256=ioHD_Nvlx8LpB63K0A3fmGIMzKUuDYDG8j3GmDTfaDI,10221 +stdlib/selectors.pyi,sha256=zCq3OZKYThiRl3NTC1pHXdwB2D5c4Nz3Y1lr4Ek9EwQ,3802 +stdlib/socket.pyi,sha256=PhKukvc_esHzO3xsOdrfncyS4aI0Tkt0nOFI3WNeTFs,30384 +stdlib/sre_compile.pyi,sha256=UPXp7Mfy8VRdGCUgDWu_-msiRGad2FEDtUpWwA5CIxA,343 +stdlib/sre_constants.pyi,sha256=qZqXCU0gEevo7PDRM3j8iRTL3MdyehjzuA95SANnawk,4116 +stdlib/sre_parse.pyi,sha256=PYPVZLt8485Ob9JYMKAf-ximVjZPTIlrKmvSFk-VtaI,4610 +stdlib/sys.pyi,sha256=o_0z8ab0hvt9YjWDmA78jNmES6CfG8R2L-sRQJtdRmQ,12231 +stdlib/types.pyi,sha256=LyfVBbkqqSn__17l6lkx1VFTQnYeOgt9eyZ48WuC_Ug,21369 +stdlib/typing.pyi,sha256=YfF43PG2kly39ygU84RsxYGDq4mjf8IiSgdK9IBNNVo,30253 +stdlib/typing_extensions.pyi,sha256=iSpzmXucn4MDSdhXLQRHI1smqjE9bEj_-Zx6C_tM_WY,15278 diff --git a/.vscode/Pico-W-Stub/micropython_stdlib_stubs-1.0.0.dist-info/WHEEL b/.vscode/Pico-W-Stub/micropython_stdlib_stubs-1.0.0.dist-info/WHEEL new file mode 100644 index 0000000..3695fd1 --- /dev/null +++ b/.vscode/Pico-W-Stub/micropython_stdlib_stubs-1.0.0.dist-info/WHEEL @@ -0,0 +1,4 @@ +Wheel-Version: 1.0 +Generator: poetry-core 1.7.0 +Root-Is-Purelib: true +Tag: py3-none-any diff --git a/.vscode/Pico-W-Stub/mip/__init__.pyi b/.vscode/Pico-W-Stub/mip/__init__.pyi new file mode 100644 index 0000000..c19812a --- /dev/null +++ b/.vscode/Pico-W-Stub/mip/__init__.pyi @@ -0,0 +1,15 @@ +from _typeshed import Incomplete + +_PACKAGE_INDEX: Incomplete +_CHUNK_SIZE: int + +def _ensure_path_exists(path) -> None: ... +def _chunk(src, dest) -> None: ... 
+def _check_exists(path, short_hash): ... +def _rewrite_url(url, branch: Incomplete | None = ...): ... +def _download_file(url, dest): ... +def _install_json(package_json_url, index, target, version, mpy): ... +def _install_package(package, index, target, version, mpy): ... +def install( + package, index: Incomplete | None = ..., target: Incomplete | None = ..., version: Incomplete | None = ..., mpy: bool = ... +) -> None: ... diff --git a/.vscode/Pico-W-Stub/neopixel.pyi b/.vscode/Pico-W-Stub/neopixel.pyi new file mode 100644 index 0000000..2273411 --- /dev/null +++ b/.vscode/Pico-W-Stub/neopixel.pyi @@ -0,0 +1,15 @@ +from _typeshed import Incomplete + +class NeoPixel: + ORDER: Incomplete + pin: Incomplete + n: Incomplete + bpp: Incomplete + buf: Incomplete + timing: Incomplete + def __init__(self, pin, n, bpp: int = ..., timing: int = ...) -> None: ... + def __len__(self) -> int: ... + def __setitem__(self, i, v) -> None: ... + def __getitem__(self, i): ... + def fill(self, v) -> None: ... + def write(self) -> None: ... diff --git a/.vscode/Pico-W-Stub/network.pyi b/.vscode/Pico-W-Stub/network.pyi new file mode 100644 index 0000000..deb91bd --- /dev/null +++ b/.vscode/Pico-W-Stub/network.pyi @@ -0,0 +1,184 @@ +""" +Network configuration. + +MicroPython module: https://docs.micropython.org/en/v1.21.0/library/network.html + +This module provides network drivers and routing configuration. To use this +module, a MicroPython variant/build with network capabilities must be installed. +Network drivers for specific hardware are available within this module and are +used to configure hardware network interface(s). Network services provided +by configured interfaces are then available for use via the :mod:`socket` +module. + +For example:: + + # connect/ show IP config a specific network interface + # see below for examples of specific drivers + import network + import time + nic = network.Driver(...) + if not nic.isconnected(): + nic.connect() + print("Waiting for connection...") + while not nic.isconnected(): + time.sleep(1) + print(nic.ifconfig()) + + # now use socket as usual + import socket + addr = socket.getaddrinfo('micropython.org', 80)[0][-1] + s = socket.socket() + s.connect(addr) + s.send(b'GET / HTTP/1.1\r\nHost: micropython.org\r\n\r\n') + data = s.recv(1000) + s.close() +""" +from _typeshed import Incomplete, Incomplete as Incomplete +from typing import Any, List, Optional, Tuple, Union + +STA_IF: int +STAT_IDLE: int +STAT_NO_AP_FOUND: int +STAT_WRONG_PASSWORD: int +STAT_GOT_IP: int +AP_IF: int +STAT_CONNECTING: int +STAT_CONNECT_FAIL: int + +def route(*args, **kwargs) -> Incomplete: ... +def hostname(*args, **kwargs) -> Incomplete: ... +def country(*args, **kwargs) -> Incomplete: ... + +class WLAN: + """ + Create a WLAN network interface object. Supported interfaces are + ``network.STA_IF`` (station aka client, connects to upstream WiFi access + points) and ``network.AP_IF`` (access point, allows other WiFi clients to + connect). Availability of the methods below depends on interface type. + For example, only STA interface may `WLAN.connect()` to an access point. + """ + + PM_PERFORMANCE: int + PM_POWERSAVE: int + PM_NONE: int + def isconnected(self) -> bool: + """ + In case of STA mode, returns ``True`` if connected to a WiFi access + point and has a valid IP address. In AP mode returns ``True`` when a + station is connected. Returns ``False`` otherwise. + """ + ... + def ioctl(self, *args, **kwargs) -> Incomplete: ... 
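+
+    # Illustrative usage sketch (added comment, not part of the generated stub;
+    # the SSID and key below are placeholders):
+    #
+    #   import network
+    #   ap = network.WLAN(network.AP_IF)
+    #   ap.config(ssid='pico-test', key='12345678')
+    #   ap.active(True)
+    #
+    # For station (STA_IF) usage, see the module-level example above.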
+ def ifconfig(self, configtuple: Optional[Any] = None) -> Tuple: + """ + Get/set IP-level network interface parameters: IP address, subnet mask, + gateway and DNS server. When called with no arguments, this method returns + a 4-tuple with the above information. To set the above values, pass a + 4-tuple with the required information. For example:: + + nic.ifconfig(('192.168.0.4', '255.255.255.0', '192.168.0.1', '8.8.8.8')) + """ + ... + def scan(self) -> List[Tuple]: + """ + Scan for the available wireless networks. + Hidden networks -- where the SSID is not broadcast -- will also be scanned + if the WLAN interface allows it. + + Scanning is only possible on STA interface. Returns list of tuples with + the information about WiFi access points: + + (ssid, bssid, channel, RSSI, security, hidden) + + *bssid* is hardware address of an access point, in binary form, returned as + bytes object. You can use `binascii.hexlify()` to convert it to ASCII form. + + There are five values for security: + + * 0 -- open + * 1 -- WEP + * 2 -- WPA-PSK + * 3 -- WPA2-PSK + * 4 -- WPA/WPA2-PSK + + and two for hidden: + + * 0 -- visible + * 1 -- hidden + """ + ... + def send_ethernet(self, *args, **kwargs) -> Incomplete: ... + def status(self, param: Optional[Any] = None) -> Incomplete: + """ + Return the current status of the wireless connection. + + When called with no argument the return value describes the network link status. + The possible statuses are defined as constants: + + * ``STAT_IDLE`` -- no connection and no activity, + * ``STAT_CONNECTING`` -- connecting in progress, + * ``STAT_WRONG_PASSWORD`` -- failed due to incorrect password, + * ``STAT_NO_AP_FOUND`` -- failed because no access point replied, + * ``STAT_CONNECT_FAIL`` -- failed due to other problems, + * ``STAT_GOT_IP`` -- connection successful. + + When called with one argument *param* should be a string naming the status + parameter to retrieve. Supported parameters in WiFI STA mode are: ``'rssi'``. + """ + ... + def config(self, *args, **kwargs) -> Incomplete: + """ + Get or set general network interface parameters. These methods allow to work + with additional parameters beyond standard IP configuration (as dealt with by + `WLAN.ifconfig()`). These include network-specific and hardware-specific + parameters. For setting parameters, keyword argument syntax should be used, + multiple parameters can be set at once. For querying, parameters name should + be quoted as a string, and only one parameter can be queries at time:: + + # Set WiFi access point name (formally known as SSID) and WiFi channel + ap.config(ssid='My AP', channel=11) + # Query params one by one + print(ap.config('ssid')) + print(ap.config('channel')) + + Following are commonly supported parameters (availability of a specific parameter + depends on network technology type, driver, and :term:`MicroPython port`). + + ============= =========== + Parameter Description + ============= =========== + mac MAC address (bytes) + ssid WiFi access point name (string) + channel WiFi channel (integer) + hidden Whether SSID is hidden (boolean) + security Security protocol supported (enumeration, see module constants) + key Access key (string) + hostname The hostname that will be sent to DHCP (STA interfaces) and mDNS (if supported, both STA and AP). 
(Deprecated, use :func:`network.hostname` instead) + reconnects Number of reconnect attempts to make (integer, 0=none, -1=unlimited) + txpower Maximum transmit power in dBm (integer or float) + pm WiFi Power Management setting (see below for allowed values) + ============= =========== + """ + ... + def active(self, is_active: Optional[Any] = None) -> None: + """ + Activate ("up") or deactivate ("down") network interface, if boolean + argument is passed. Otherwise, query current state if no argument is + provided. Most other methods require active interface. + """ + ... + def disconnect(self) -> None: + """ + Disconnect from the currently connected wireless network. + """ + ... + def connect(self, ssid=None, key=None, *, bssid=None) -> None: + """ + Connect to the specified wireless network, using the specified key. + If *bssid* is given then the connection will be restricted to the + access-point with that MAC address (the *ssid* must also be specified + in this case). + """ + ... + def deinit(self, *args, **kwargs) -> Incomplete: ... + def __init__(self, interface_id) -> None: ... diff --git a/.vscode/Pico-W-Stub/ntptime.pyi b/.vscode/Pico-W-Stub/ntptime.pyi new file mode 100644 index 0000000..3b374f6 --- /dev/null +++ b/.vscode/Pico-W-Stub/ntptime.pyi @@ -0,0 +1,5 @@ +host: str +timeout: int + +def time(): ... +def settime() -> None: ... diff --git a/.vscode/Pico-W-Stub/onewire.pyi b/.vscode/Pico-W-Stub/onewire.pyi new file mode 100644 index 0000000..5ca094b --- /dev/null +++ b/.vscode/Pico-W-Stub/onewire.pyi @@ -0,0 +1,21 @@ +from _typeshed import Incomplete + +class OneWireError(Exception): ... + +class OneWire: + SEARCH_ROM: int + MATCH_ROM: int + SKIP_ROM: int + pin: Incomplete + def __init__(self, pin) -> None: ... + def reset(self, required: bool = ...): ... + def readbit(self): ... + def readbyte(self): ... + def readinto(self, buf) -> None: ... + def writebit(self, value): ... + def writebyte(self, value): ... + def write(self, buf) -> None: ... + def select_rom(self, rom) -> None: ... + def scan(self): ... + def _search_rom(self, l_rom, diff): ... + def crc8(self, data): ... diff --git a/.vscode/Pico-W-Stub/os.pyi b/.vscode/Pico-W-Stub/os.pyi new file mode 100644 index 0000000..17fd6a2 --- /dev/null +++ b/.vscode/Pico-W-Stub/os.pyi @@ -0,0 +1,253 @@ +""" +Basic "operating system" services. + +MicroPython module: https://docs.micropython.org/en/v1.21.0/library/os.html + +CPython module: :mod:`python:os` https://docs.python.org/3/library/os.html . + +The ``os`` module contains functions for filesystem access and mounting, +terminal redirection and duplication, and the ``uname`` and ``urandom`` +functions. +""" +from _typeshed import Incomplete, Incomplete as Incomplete +from stdlib.os import * +from typing import Any, IO, Iterator, Optional, Tuple + +def statvfs(path) -> Tuple: + """ + Get the status of a filesystem. 
+ + Returns a tuple with the filesystem information in the following order: + + * ``f_bsize`` -- file system block size + * ``f_frsize`` -- fragment size + * ``f_blocks`` -- size of fs in f_frsize units + * ``f_bfree`` -- number of free blocks + * ``f_bavail`` -- number of free blocks for unprivileged users + * ``f_files`` -- number of inodes + * ``f_ffree`` -- number of free inodes + * ``f_favail`` -- number of free inodes for unprivileged users + * ``f_flag`` -- mount flags + * ``f_namemax`` -- maximum filename length + + Parameters related to inodes: ``f_files``, ``f_ffree``, ``f_avail`` + and the ``f_flags`` parameter may return ``0`` as they can be unavailable + in a port-specific implementation. + """ + ... + +def stat(path) -> Incomplete: + """ + Get the status of a file or directory. + """ + ... + +def rmdir(path) -> None: + """ + Remove a directory. + """ + ... + +def rename(old_path, new_path) -> None: + """ + Rename a file. + """ + ... + +def mount(fsobj, mount_point, *, readonly=False) -> Incomplete: + """ + Mount the filesystem object *fsobj* at the location in the VFS given by the + *mount_point* string. *fsobj* can be a a VFS object that has a ``mount()`` + method, or a block device. If it's a block device then the filesystem type + is automatically detected (an exception is raised if no filesystem was + recognised). *mount_point* may be ``'/'`` to mount *fsobj* at the root, + or ``'/'`` to mount it at a subdirectory under the root. + + If *readonly* is ``True`` then the filesystem is mounted read-only. + + During the mount process the method ``mount()`` is called on the filesystem + object. + + Will raise ``OSError(EPERM)`` if *mount_point* is already mounted. + """ + ... + +def sync() -> None: + """ + Sync all filesystems. + """ + ... + +def unlink(*args, **kwargs) -> Incomplete: ... +def uname() -> uname_result: + """ + Return a tuple (possibly a named tuple) containing information about the + underlying machine and/or its operating system. The tuple has five fields + in the following order, each of them being a string: + + * ``sysname`` -- the name of the underlying system + * ``nodename`` -- the network name (can be the same as ``sysname``) + * ``release`` -- the version of the underlying system + * ``version`` -- the MicroPython version and build date + * ``machine`` -- an identifier for the underlying hardware (eg board, CPU) + """ + ... + +def umount(mount_point) -> Incomplete: + """ + Unmount a filesystem. *mount_point* can be a string naming the mount location, + or a previously-mounted filesystem object. During the unmount process the + method ``umount()`` is called on the filesystem object. + + Will raise ``OSError(EINVAL)`` if *mount_point* is not found. + """ + ... + +def urandom(n) -> bytes: + """ + Return a bytes object with *n* random bytes. Whenever possible, it is + generated by the hardware random number generator. + """ + ... + +def chdir(path) -> Incomplete: + """ + Change current directory. + """ + ... + +def dupterm(stream_object, index=0, /) -> IO: + """ + Duplicate or switch the MicroPython terminal (the REPL) on the given `stream`-like + object. The *stream_object* argument must be a native stream object, or derive + from ``io.IOBase`` and implement the ``readinto()`` and + ``write()`` methods. The stream should be in non-blocking mode and + ``readinto()`` should return ``None`` if there is no data available for reading. 
+ + After calling this function all terminal output is repeated on this stream, + and any input that is available on the stream is passed on to the terminal input. + + The *index* parameter should be a non-negative integer and specifies which + duplication slot is set. A given port may implement more than one slot (slot 0 + will always be available) and in that case terminal input and output is + duplicated on all the slots that are set. + + If ``None`` is passed as the *stream_object* then duplication is cancelled on + the slot given by *index*. + + The function returns the previous stream-like object in the given slot. + """ + ... + +def remove(path) -> None: + """ + Remove a file. + """ + ... + +def mkdir(path) -> Incomplete: + """ + Create a new directory. + """ + ... + +def getcwd() -> Incomplete: + """ + Get the current directory. + """ + ... + +def listdir(dir: Optional[Any] = None) -> Incomplete: + """ + With no argument, list the current directory. Otherwise list the given directory. + """ + ... + +def ilistdir(dir: Optional[Any] = None) -> Iterator[Tuple]: + """ + This function returns an iterator which then yields tuples corresponding to + the entries in the directory that it is listing. With no argument it lists the + current directory, otherwise it lists the directory given by *dir*. + + The tuples have the form *(name, type, inode[, size])*: + + - *name* is a string (or bytes if *dir* is a bytes object) and is the name of + the entry; + - *type* is an integer that specifies the type of the entry, with 0x4000 for + directories and 0x8000 for regular files; + - *inode* is an integer corresponding to the inode of the file, and may be 0 + for filesystems that don't have such a notion. + - Some platforms may return a 4-tuple that includes the entry's *size*. For + file entries, *size* is an integer representing the size of the file + or -1 if unknown. Its meaning is currently undefined for directory + entries. + """ + ... + +class VfsLfs2: + """ + Create a filesystem object that uses the `littlefs v2 filesystem format`_. + Storage of the littlefs filesystem is provided by *block_dev*, which must + support the :ref:`extended interface `. + Objects created by this constructor can be mounted using :func:`mount`. + + The *mtime* argument enables modification timestamps for files, stored using + littlefs attributes. This option can be disabled or enabled differently each + mount time and timestamps will only be added or updated if *mtime* is enabled, + otherwise the timestamps will remain untouched. Littlefs v2 filesystems without + timestamps will work without reformatting and timestamps will be added + transparently to existing files once they are opened for writing. When *mtime* + is enabled `os.stat` on files without timestamps will return 0 for the timestamp. + + See :ref:`filesystem` for more information. + """ + + def rename(self, *args, **kwargs) -> Incomplete: ... + @staticmethod + def mkfs(block_dev, readsize=32, progsize=32, lookahead=32) -> None: + """ + Build a Lfs2 filesystem on *block_dev*. + + ``Note:`` There are reports of littlefs v2 failing in certain situations, + for details see `littlefs issue 295`_. + """ + ... + def mount(self, *args, **kwargs) -> Incomplete: ... + def statvfs(self, *args, **kwargs) -> Incomplete: ... + def rmdir(self, *args, **kwargs) -> Incomplete: ... + def stat(self, *args, **kwargs) -> Incomplete: ... + def umount(self, *args, **kwargs) -> Incomplete: ... + def remove(self, *args, **kwargs) -> Incomplete: ... 
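+    # Illustrative usage sketch (added comment, not part of the generated stub;
+    # `bdev` stands for any block device implementing the extended interface):
+    #
+    #   import os
+    #   os.VfsLfs2.mkfs(bdev)                 # format the device with littlefs v2
+    #   os.mount(os.VfsLfs2(bdev), '/flash')  # then mount it into the VFS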
+ def mkdir(self, *args, **kwargs) -> Incomplete: ... + def open(self, *args, **kwargs) -> Incomplete: ... + def ilistdir(self, *args, **kwargs) -> Incomplete: ... + def chdir(self, *args, **kwargs) -> Incomplete: ... + def getcwd(self, *args, **kwargs) -> Incomplete: ... + def __init__(self, block_dev, readsize=32, progsize=32, lookahead=32, mtime=True) -> None: ... + +class VfsFat: + """ + Create a filesystem object that uses the FAT filesystem format. Storage of + the FAT filesystem is provided by *block_dev*. + Objects created by this constructor can be mounted using :func:`mount`. + """ + + def rename(self, *args, **kwargs) -> Incomplete: ... + @staticmethod + def mkfs(block_dev) -> None: + """ + Build a FAT filesystem on *block_dev*. + """ + ... + def mount(self, *args, **kwargs) -> Incomplete: ... + def statvfs(self, *args, **kwargs) -> Incomplete: ... + def rmdir(self, *args, **kwargs) -> Incomplete: ... + def stat(self, *args, **kwargs) -> Incomplete: ... + def umount(self, *args, **kwargs) -> Incomplete: ... + def remove(self, *args, **kwargs) -> Incomplete: ... + def mkdir(self, *args, **kwargs) -> Incomplete: ... + def open(self, *args, **kwargs) -> Incomplete: ... + def ilistdir(self, *args, **kwargs) -> Incomplete: ... + def chdir(self, *args, **kwargs) -> Incomplete: ... + def getcwd(self, *args, **kwargs) -> Incomplete: ... + def __init__(self, block_dev) -> None: ... diff --git a/.vscode/Pico-W-Stub/platform.pyi b/.vscode/Pico-W-Stub/platform.pyi new file mode 100644 index 0000000..8a66667 --- /dev/null +++ b/.vscode/Pico-W-Stub/platform.pyi @@ -0,0 +1,43 @@ +""" +Access to underlying platform’s identifying data. + +MicroPython module: https://docs.micropython.org/en/v1.21.0/library/platform.html + +CPython module: :mod:`python:platform` https://docs.python.org/3/library/platform.html . + +This module tries to retrieve as much platform-identifying data as possible. It +makes this information available via function APIs. +""" +from _typeshed import Incomplete, Incomplete as Incomplete +from typing import Tuple + +def platform() -> str: + """ + Returns a string identifying the underlying platform. This string is composed + of several substrings in the following order, delimited by dashes (``-``): + + - the name of the platform system (e.g. Unix, Windows or MicroPython) + - the MicroPython version + - the architecture of the platform + - the version of the underlying platform + - the concatenation of the name of the libc that MicroPython is linked to + and its corresponding version. + + For example, this could be + ``"MicroPython-1.20.0-xtensa-IDFv4.2.4-with-newlib3.0.0"``. + """ + ... + +def python_compiler() -> str: + """ + Returns a string identifying the compiler used for compiling MicroPython. + """ + ... + +def libc_ver() -> Tuple: + """ + Returns a tuple of strings *(lib, version)*, where *lib* is the name of the + libc that MicroPython is linked to, and *version* the corresponding version + of this libc. + """ + ... diff --git a/.vscode/Pico-W-Stub/random.pyi b/.vscode/Pico-W-Stub/random.pyi new file mode 100644 index 0000000..c0123b6 --- /dev/null +++ b/.vscode/Pico-W-Stub/random.pyi @@ -0,0 +1,84 @@ +""" +Random numbers. + +MicroPython module: https://docs.micropython.org/en/v1.21.0/library/random.html + +This module implements a pseudo-random number generator (PRNG). + +CPython module: :mod:`python:random` https://docs.python.org/3/library/random.html . . + +.. 
note:: + + The following notation is used for intervals: + + - () are open interval brackets and do not include their endpoints. + For example, (0, 1) means greater than 0 and less than 1. + In set notation: (0, 1) = {x | 0 < x < 1}. + + - [] are closed interval brackets which include all their limit points. + For example, [0, 1] means greater than or equal to 0 and less than + or equal to 1. + In set notation: [0, 1] = {x | 0 <= x <= 1}. + +.. note:: + + The :func:`randrange`, :func:`randint` and :func:`choice` functions are only + available if the ``MICROPY_PY_RANDOM_EXTRA_FUNCS`` configuration option is + enabled. +""" +from _typeshed import Incomplete, Incomplete as Incomplete +from typing import Any, Optional + +def randrange(start, stop, step: Optional[Any] = None) -> int: + """ + The first form returns a random integer from the range [0, *stop*). + The second form returns a random integer from the range [*start*, *stop*). + The third form returns a random integer from the range [*start*, *stop*) in + steps of *step*. For instance, calling ``randrange(1, 10, 2)`` will + return odd numbers between 1 and 9 inclusive. + """ + ... + +def random() -> int: + """ + Return a random floating point number in the range [0.0, 1.0). + """ + ... + +def seed(n=None, /) -> None: + """ + Initialise the random number generator module with the seed *n* which should + be an integer. When no argument (or ``None``) is passed in it will (if + supported by the port) initialise the PRNG with a true random number + (usually a hardware generated random number). + + The ``None`` case only works if ``MICROPY_PY_RANDOM_SEED_INIT_FUNC`` is + enabled by the port, otherwise it raises ``ValueError``. + """ + ... + +def uniform(a, b) -> int: + """ + Return a random floating point number N such that *a* <= N <= *b* for *a* <= *b*, + and *b* <= N <= *a* for *b* < *a*. + """ + ... + +def choice(sequence) -> Incomplete: + """ + Chooses and returns one item at random from *sequence* (tuple, list or + any object that supports the subscript operation). + """ + ... + +def randint(a, b) -> int: + """ + Return a random integer in the range [*a*, *b*]. + """ + ... + +def getrandbits(n) -> int: + """ + Return an integer with *n* random bits (0 <= n <= 32). + """ + ... diff --git a/.vscode/Pico-W-Stub/requests.pyi b/.vscode/Pico-W-Stub/requests.pyi new file mode 100644 index 0000000..d037cc8 --- /dev/null +++ b/.vscode/Pico-W-Stub/requests.pyi @@ -0,0 +1,16 @@ +from _typeshed import Incomplete as Incomplete + +def request(*args, **kwargs) -> Incomplete: ... +def head(*args, **kwargs) -> Incomplete: ... +def post(*args, **kwargs) -> Incomplete: ... +def patch(*args, **kwargs) -> Incomplete: ... +def delete(*args, **kwargs) -> Incomplete: ... +def put(*args, **kwargs) -> Incomplete: ... +def get(*args, **kwargs) -> Incomplete: ... + +class Response: + def json(self, *args, **kwargs) -> Incomplete: ... + def close(self, *args, **kwargs) -> Incomplete: ... + content: Incomplete + text: Incomplete + def __init__(self, *argv, **kwargs) -> None: ... diff --git a/.vscode/Pico-W-Stub/requests/__init__.pyi b/.vscode/Pico-W-Stub/requests/__init__.pyi new file mode 100644 index 0000000..593b62c --- /dev/null +++ b/.vscode/Pico-W-Stub/requests/__init__.pyi @@ -0,0 +1,31 @@ +from _typeshed import Incomplete + +class Response: + raw: Incomplete + encoding: str + _cached: Incomplete + def __init__(self, f) -> None: ... + def close(self) -> None: ... + @property + def content(self): ... + @property + def text(self): ... + def json(self): ... 
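+
+# Illustrative usage sketch (added comment, not part of the generated stub;
+# the URL and header below are placeholders):
+#
+#   import requests
+#   r = requests.get('http://example.com/api', headers={'Accept': 'application/json'})
+#   data = r.json()   # or use r.text / r.content for the raw body
+#   r.close()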
+ +def request( + method, + url, + data: Incomplete | None = ..., + json: Incomplete | None = ..., + headers=..., + stream: Incomplete | None = ..., + auth: Incomplete | None = ..., + timeout: Incomplete | None = ..., + parse_headers: bool = ..., +): ... +def head(url, **kw): ... +def get(url, **kw): ... +def post(url, **kw): ... +def put(url, **kw): ... +def patch(url, **kw): ... +def delete(url, **kw): ... diff --git a/.vscode/Pico-W-Stub/rp2.pyi b/.vscode/Pico-W-Stub/rp2.pyi new file mode 100644 index 0000000..2ec5df9 --- /dev/null +++ b/.vscode/Pico-W-Stub/rp2.pyi @@ -0,0 +1,62 @@ +from _rp2 import * +from _typeshed import Incomplete + +_PROG_DATA: Incomplete +_PROG_OFFSET_PIO0: Incomplete +_PROG_OFFSET_PIO1: Incomplete +_PROG_EXECCTRL: Incomplete +_PROG_SHIFTCTRL: Incomplete +_PROG_OUT_PINS: Incomplete +_PROG_SET_PINS: Incomplete +_PROG_SIDESET_PINS: Incomplete +_PROG_MAX_FIELDS: Incomplete + +class PIOASMError(Exception): ... + +class PIOASMEmit: + labels: Incomplete + prog: Incomplete + wrap_used: bool + sideset_count: int + def __init__( + self, + *, + out_init: Incomplete | None = ..., + set_init: Incomplete | None = ..., + sideset_init: Incomplete | None = ..., + in_shiftdir: int = ..., + out_shiftdir: int = ..., + autopush: bool = ..., + autopull: bool = ..., + push_thresh: int = ..., + pull_thresh: int = ..., + fifo_join: int = ..., + ) -> None: ... + delay_max: int + sideset_opt: Incomplete + pass_: Incomplete + num_instr: int + num_sideset: int + def start_pass(self, pass_) -> None: ... + def __getitem__(self, key): ... + def delay(self, delay): ... + def side(self, value): ... + def wrap_target(self) -> None: ... + def wrap(self) -> None: ... + def label(self, label) -> None: ... + def word(self, instr, label: Incomplete | None = ...): ... + def nop(self): ... + def jmp(self, cond, label: Incomplete | None = ...): ... + def wait(self, polarity, src, index): ... + def in_(self, src, data): ... + def out(self, dest, data): ... + def push(self, value: int = ..., value2: int = ...): ... + def pull(self, value: int = ..., value2: int = ...): ... + def mov(self, dest, src): ... + def irq(self, mod, index: Incomplete | None = ...): ... + def set(self, dest, data): ... + +_pio_funcs: Incomplete + +def asm_pio(**kw): ... +def asm_pio_encode(instr, sideset_count, sideset_opt: bool = ...): ... diff --git a/.vscode/Pico-W-Stub/select.pyi b/.vscode/Pico-W-Stub/select.pyi new file mode 100644 index 0000000..9456f0d --- /dev/null +++ b/.vscode/Pico-W-Stub/select.pyi @@ -0,0 +1,103 @@ +""" +Wait for events on a set of streams. + +MicroPython module: https://docs.micropython.org/en/v1.21.0/library/select.html + +CPython module: :mod:`python:select` https://docs.python.org/3/library/select.html . + +This module provides functions to efficiently wait for events on multiple +`streams ` (select streams which are ready for operations). +""" +from _typeshed import Incomplete, Incomplete as Incomplete +from typing import Any, Iterator, List, Optional, Tuple + +POLLOUT: int +POLLIN: int +POLLHUP: int +POLLERR: int + +def select(rlist, wlist, xlist, timeout: Optional[Any] = None) -> None: + """ + Wait for activity on a set of objects. + + This function is provided by some MicroPython ports for compatibility + and is not efficient. Usage of :class:`Poll` is recommended instead. + """ + ... + +class poll: + """ + Create an instance of the Poll class. + """ + + def __init__(self) -> None: ... + def register(self, obj, eventmask: Optional[Any] = None) -> None: + """ + Register `stream` *obj* for polling. 
*eventmask* is logical OR of: + + * ``select.POLLIN`` - data available for reading + * ``select.POLLOUT`` - more data can be written + + Note that flags like ``select.POLLHUP`` and ``select.POLLERR`` are + *not* valid as input eventmask (these are unsolicited events which + will be returned from `poll()` regardless of whether they are asked + for). This semantics is per POSIX. + + *eventmask* defaults to ``select.POLLIN | select.POLLOUT``. + + It is OK to call this function multiple times for the same *obj*. + Successive calls will update *obj*'s eventmask to the value of + *eventmask* (i.e. will behave as `modify()`). + """ + ... + def unregister(self, obj) -> Incomplete: + """ + Unregister *obj* from polling. + """ + ... + def modify(self, obj, eventmask) -> None: + """ + Modify the *eventmask* for *obj*. If *obj* is not registered, `OSError` + is raised with error of ENOENT. + """ + ... + def poll(self, timeout=-1, /) -> List: + """ + Wait for at least one of the registered objects to become ready or have an + exceptional condition, with optional timeout in milliseconds (if *timeout* + arg is not specified or -1, there is no timeout). + + Returns list of (``obj``, ``event``, ...) tuples. There may be other elements in + tuple, depending on a platform and version, so don't assume that its size is 2. + The ``event`` element specifies which events happened with a stream and + is a combination of ``select.POLL*`` constants described above. Note that + flags ``select.POLLHUP`` and ``select.POLLERR`` can be returned at any time + (even if were not asked for), and must be acted on accordingly (the + corresponding stream unregistered from poll and likely closed), because + otherwise all further invocations of `poll()` may return immediately with + these flags set for this stream again. + + In case of timeout, an empty list is returned. + + Difference to CPython + + Tuples returned may contain more than 2 elements as described above. + """ + ... + def ipoll(self, timeout=-1, flags=0, /) -> Iterator[Tuple]: + """ + Like :meth:`poll.poll`, but instead returns an iterator which yields a + `callee-owned tuple`. This function provides an efficient, allocation-free + way to poll on streams. + + If *flags* is 1, one-shot behaviour for events is employed: streams for + which events happened will have their event masks automatically reset + (equivalent to ``poll.modify(obj, 0)``), so new events for such a stream + won't be processed until new mask is set with `poll.modify()`. This + behaviour is useful for asynchronous I/O schedulers. + + Difference to CPython + + This function is a MicroPython extension. + """ + ... diff --git a/.vscode/Pico-W-Stub/socket.pyi b/.vscode/Pico-W-Stub/socket.pyi new file mode 100644 index 0000000..86a1183 --- /dev/null +++ b/.vscode/Pico-W-Stub/socket.pyi @@ -0,0 +1,271 @@ +""" +Socket module. + +MicroPython module: https://docs.micropython.org/en/v1.21.0/library/socket.html + +CPython module: :mod:`python:socket` https://docs.python.org/3/library/socket.html . + +This module provides access to the BSD socket interface. + +Difference to CPython + + For efficiency and consistency, socket objects in MicroPython implement a `stream` + (file-like) interface directly. In CPython, you need to convert a socket to + a file-like object using `makefile()` method. This method is still supported + by MicroPython (but is a no-op), so where compatibility with CPython matters, + be sure to use it. 
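+
+As an illustrative sketch (not taken from the upstream documentation), the
+stream interface means the usual read/write methods work directly on a
+socket object::
+
+    s = socket.socket()
+    s.connect(socket.getaddrinfo('micropython.org', 80)[0][-1])
+    s.write(b'GET / HTTP/1.1\r\nHost: micropython.org\r\n\r\n')
+    print(s.read(64))   # read from the socket as from any stream
+    s.close()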
+""" +from _typeshed import Incomplete, Incomplete as Incomplete +from stdlib.socket import * +from typing import Any, IO, Optional, Tuple + +SOCK_STREAM: int +SOCK_RAW: int +SOCK_DGRAM: int +SOL_SOCKET: int +SO_BROADCAST: int +SO_REUSEADDR: int +AF_INET6: int +AF_INET: int +IP_DROP_MEMBERSHIP: int +IPPROTO_IP: int +IP_ADD_MEMBERSHIP: int + +def reset(*args, **kwargs) -> Incomplete: ... +def print_pcbs(*args, **kwargs) -> Incomplete: ... +def getaddrinfo(host, port, af=0, type=0, proto=0, flags=0, /) -> Incomplete: + """ + Translate the host/port argument into a sequence of 5-tuples that contain all the + necessary arguments for creating a socket connected to that service. Arguments + *af*, *type*, and *proto* (which have the same meaning as for the `socket()` function) + can be used to filter which kind of addresses are returned. If a parameter is not + specified or zero, all combinations of addresses can be returned (requiring + filtering on the user side). + + The resulting list of 5-tuples has the following structure:: + + (family, type, proto, canonname, sockaddr) + + The following example shows how to connect to a given url:: + + s = socket.socket() + # This assumes that if "type" is not specified, an address for + # SOCK_STREAM will be returned, which may be not true + s.connect(socket.getaddrinfo('www.micropython.org', 80)[0][-1]) + + Recommended use of filtering params:: + + s = socket.socket() + # Guaranteed to return an address which can be connect'ed to for + # stream operation. + s.connect(socket.getaddrinfo('www.micropython.org', 80, 0, SOCK_STREAM)[0][-1]) + + Difference to CPython + + CPython raises a ``socket.gaierror`` exception (`OSError` subclass) in case + of error in this function. MicroPython doesn't have ``socket.gaierror`` + and raises OSError directly. Note that error numbers of `getaddrinfo()` + form a separate namespace and may not match error numbers from + the :mod:`errno` module. To distinguish `getaddrinfo()` errors, they are + represented by negative numbers, whereas standard system errors are + positive numbers (error numbers are accessible using ``e.args[0]`` property + from an exception object). The use of negative values is a provisional + detail which may change in the future. + """ + ... + +def callback(*args, **kwargs) -> Incomplete: ... + +class socket: + """ + Create a new socket using the given address family, socket type and + protocol number. Note that specifying *proto* in most cases is not + required (and not recommended, as some MicroPython ports may omit + ``IPPROTO_*`` constants). Instead, *type* argument will select needed + protocol automatically:: + + # Create STREAM TCP socket + socket(AF_INET, SOCK_STREAM) + # Create DGRAM UDP socket + socket(AF_INET, SOCK_DGRAM) + """ + + def recvfrom(self, bufsize) -> Tuple: + """ + Receive data from the socket. The return value is a pair *(bytes, address)* where *bytes* is a + bytes object representing the data received and *address* is the address of the socket sending + the data. + """ + ... + def recv(self, bufsize) -> bytes: + """ + Receive data from the socket. The return value is a bytes object representing the data + received. The maximum amount of data to be received at once is specified by bufsize. + """ + ... + def makefile(self, mode="rb", buffering=0, /) -> IO: + """ + Return a file object associated with the socket. The exact returned type depends on the arguments + given to makefile(). The support is limited to binary modes only ('rb', 'wb', and 'rwb'). 
+ CPython's arguments: *encoding*, *errors* and *newline* are not supported. + + Difference to CPython + + As MicroPython doesn't support buffered streams, values of *buffering* + parameter is ignored and treated as if it was 0 (unbuffered). + + Difference to CPython + + Closing the file object returned by makefile() WILL close the + original socket as well. + """ + ... + def listen(self, backlog: Optional[Any] = None) -> None: + """ + Enable a server to accept connections. If *backlog* is specified, it must be at least 0 + (if it's lower, it will be set to 0); and specifies the number of unaccepted connections + that the system will allow before refusing new connections. If not specified, a default + reasonable value is chosen. + """ + ... + def settimeout(self, value) -> Incomplete: + """ + **Note**: Not every port supports this method, see below. + + Set a timeout on blocking socket operations. The value argument can be a nonnegative floating + point number expressing seconds, or None. If a non-zero value is given, subsequent socket operations + will raise an `OSError` exception if the timeout period value has elapsed before the operation has + completed. If zero is given, the socket is put in non-blocking mode. If None is given, the socket + is put in blocking mode. + + Not every :term:`MicroPython port` supports this method. A more portable and + generic solution is to use `select.poll` object. This allows to wait on + multiple objects at the same time (and not just on sockets, but on generic + `stream` objects which support polling). Example:: + + # Instead of: + s.settimeout(1.0) # time in seconds + s.read(10) # may timeout + + # Use: + poller = select.poll() + poller.register(s, select.POLLIN) + res = poller.poll(1000) # time in milliseconds + if not res: + # s is still not ready for input, i.e. operation timed out + + Difference to CPython + + CPython raises a ``socket.timeout`` exception in case of timeout, + which is an `OSError` subclass. MicroPython raises an OSError directly + instead. If you use ``except OSError:`` to catch the exception, + your code will work both in MicroPython and CPython. + """ + ... + def sendall(self, bytes) -> int: + """ + Send all data to the socket. The socket must be connected to a remote socket. + Unlike `send()`, this method will try to send all of data, by sending data + chunk by chunk consecutively. + + The behaviour of this method on non-blocking sockets is undefined. Due to this, + on MicroPython, it's recommended to use `write()` method instead, which + has the same "no short writes" policy for blocking sockets, and will return + number of bytes sent on non-blocking sockets. + """ + ... + def setsockopt(self, level, optname, value) -> None: + """ + Set the value of the given socket option. The needed symbolic constants are defined in the + socket module (SO_* etc.). The *value* can be an integer or a bytes-like object representing + a buffer. + """ + ... + def setblocking(self, flag) -> Incomplete: + """ + Set blocking or non-blocking mode of the socket: if flag is false, the socket is set to non-blocking, + else to blocking mode. + + This method is a shorthand for certain `settimeout()` calls: + + * ``sock.setblocking(True)`` is equivalent to ``sock.settimeout(None)`` + * ``sock.setblocking(False)`` is equivalent to ``sock.settimeout(0)`` + """ + ... + def sendto(self, bytes, address) -> None: + """ + Send data to the socket. The socket should not be connected to a remote socket, since the + destination socket is specified by *address*. 
+ """ + ... + def readline(self) -> Incomplete: + """ + Read a line, ending in a newline character. + + Return value: the line read. + """ + ... + def readinto(self, buf, nbytes: Optional[Any] = None) -> int: + """ + Read bytes into the *buf*. If *nbytes* is specified then read at most + that many bytes. Otherwise, read at most *len(buf)* bytes. Just as + `read()`, this method follows "no short reads" policy. + + Return value: number of bytes read and stored into *buf*. + """ + ... + def read(self, size: Optional[Any] = None) -> bytes: + """ + Read up to size bytes from the socket. Return a bytes object. If *size* is not given, it + reads all data available from the socket until EOF; as such the method will not return until + the socket is closed. This function tries to read as much data as + requested (no "short reads"). This may be not possible with + non-blocking socket though, and then less data will be returned. + """ + ... + def close(self) -> Incomplete: + """ + Mark the socket closed and release all resources. Once that happens, all future operations + on the socket object will fail. The remote end will receive EOF indication if + supported by protocol. + + Sockets are automatically closed when they are garbage-collected, but it is recommended + to `close()` them explicitly as soon you finished working with them. + """ + ... + def connect(self, address) -> None: + """ + Connect to a remote socket at *address*. + """ + ... + def send(self, bytes) -> int: + """ + Send data to the socket. The socket must be connected to a remote socket. + Returns number of bytes sent, which may be smaller than the length of data + ("short write"). + """ + ... + def bind(self, address) -> Incomplete: + """ + Bind the socket to *address*. The socket must not already be bound. + """ + ... + def accept(self) -> Tuple: + """ + Accept a connection. The socket must be bound to an address and listening for connections. + The return value is a pair (conn, address) where conn is a new socket object usable to send + and receive data on the connection, and address is the address bound to the socket on the + other end of the connection. + """ + ... + def write(self, buf) -> int: + """ + Write the buffer of bytes to the socket. This function will try to + write all data to a socket (no "short writes"). This may be not possible + with a non-blocking socket though, and returned value will be less than + the length of *buf*. + + Return value: number of bytes written. + """ + ... + def __init__(self, af=AF_INET, type=SOCK_STREAM, proto=IPPROTO_TCP, /) -> None: ... diff --git a/.vscode/Pico-W-Stub/ssl.pyi b/.vscode/Pico-W-Stub/ssl.pyi new file mode 100644 index 0000000..60fa240 --- /dev/null +++ b/.vscode/Pico-W-Stub/ssl.pyi @@ -0,0 +1,74 @@ +""" +TLS/SSL wrapper for socket objects. + +MicroPython module: https://docs.micropython.org/en/v1.21.0/library/ssl.html + +CPython module: :mod:`python:ssl` https://docs.python.org/3/library/ssl.html . + +This module provides access to Transport Layer Security (previously and +widely known as “Secure Sockets Layer”) encryption and peer authentication +facilities for network sockets, both client-side and server-side. 
+""" +from _typeshed import Incomplete, Incomplete as Incomplete +from stdlib.ssl import * +from typing import IO + +CERT_REQUIRED: int +PROTOCOL_TLS_CLIENT: int +PROTOCOL_TLS_SERVER: int +CERT_OPTIONAL: int +CERT_NONE: int + +def wrap_socket( + sock, server_side=False, keyfile=None, certfile=None, cert_reqs=None, cadata=None, server_hostname=None, do_handshake=True +) -> IO: + """ + Wrap the given *sock* and return a new wrapped-socket object. The implementation + of this function is to first create an `SSLContext` and then call the `SSLContext.wrap_socket` + method on that context object. The arguments *sock*, *server_side* and *server_hostname* are + passed through unchanged to the method call. The argument *do_handshake* is passed through as + *do_handshake_on_connect*. The remaining arguments have the following behaviour: + + - *cert_reqs* determines whether the peer (server or client) must present a valid certificate. + Note that for mbedtls based ports, ``ssl.CERT_NONE`` and ``ssl.CERT_OPTIONAL`` will not + validate any certificate, only ``ssl.CERT_REQUIRED`` will. + + - *cadata* is a bytes object containing the CA certificate chain (in DER format) that will + validate the peer's certificate. Currently only a single DER-encoded certificate is supported. + + Depending on the underlying module implementation in a particular + :term:`MicroPython port`, some or all keyword arguments above may be not supported. + """ + ... + +class SSLContext: + """ + Create a new SSLContext instance. The *protocol* argument must be one of the ``PROTOCOL_*`` + constants. + """ + + def wrap_socket(self, sock, *, server_side=False, do_handshake_on_connect=True, server_hostname=None) -> Incomplete: + """ + Takes a `stream` *sock* (usually socket.socket instance of ``SOCK_STREAM`` type), + and returns an instance of ssl.SSLSocket, wrapping the underlying stream. + The returned object has the usual `stream` interface methods like + ``read()``, ``write()``, etc. + + - *server_side* selects whether the wrapped socket is on the server or client side. + A server-side SSL socket should be created from a normal socket returned from + :meth:`~socket.socket.accept()` on a non-SSL listening server socket. + + - *do_handshake_on_connect* determines whether the handshake is done as part of the ``wrap_socket`` + or whether it is deferred to be done as part of the initial reads or writes + For blocking sockets doing the handshake immediately is standard. For non-blocking + sockets (i.e. when the *sock* passed into ``wrap_socket`` is in non-blocking mode) + the handshake should generally be deferred because otherwise ``wrap_socket`` blocks + until it completes. Note that in AXTLS the handshake can be deferred until the first + read or write but it then blocks until completion. + + - *server_hostname* is for use as a client, and sets the hostname to check against the received + server certificate. It also sets the name for Server Name Indication (SNI), allowing the server + to present the proper certificate. + """ + ... + def __init__(self, protocol, /) -> None: ... diff --git a/.vscode/Pico-W-Stub/stdlib/__future__.pyi b/.vscode/Pico-W-Stub/stdlib/__future__.pyi new file mode 100644 index 0000000..a90cf1e --- /dev/null +++ b/.vscode/Pico-W-Stub/stdlib/__future__.pyi @@ -0,0 +1,36 @@ +from typing_extensions import TypeAlias + +_VersionInfo: TypeAlias = tuple[int, int, int, str, int] + +class _Feature: + def __init__(self, optionalRelease: _VersionInfo, mandatoryRelease: _VersionInfo | None, compiler_flag: int) -> None: ... 
+ def getOptionalRelease(self) -> _VersionInfo: ... + def getMandatoryRelease(self) -> _VersionInfo | None: ... + compiler_flag: int + +absolute_import: _Feature +division: _Feature +generators: _Feature +nested_scopes: _Feature +print_function: _Feature +unicode_literals: _Feature +with_statement: _Feature +barry_as_FLUFL: _Feature +generator_stop: _Feature +annotations: _Feature + +all_feature_names: list[str] # undocumented + +__all__ = [ + "all_feature_names", + "absolute_import", + "division", + "generators", + "nested_scopes", + "print_function", + "unicode_literals", + "with_statement", + "barry_as_FLUFL", + "generator_stop", + "annotations", +] diff --git a/.vscode/Pico-W-Stub/stdlib/_ast.pyi b/.vscode/Pico-W-Stub/stdlib/_ast.pyi new file mode 100644 index 0000000..b7d081f --- /dev/null +++ b/.vscode/Pico-W-Stub/stdlib/_ast.pyi @@ -0,0 +1,573 @@ +import sys +from typing import Any, ClassVar +from typing_extensions import Literal, TypeAlias + +PyCF_ONLY_AST: Literal[1024] +if sys.version_info >= (3, 8): + PyCF_TYPE_COMMENTS: Literal[4096] + PyCF_ALLOW_TOP_LEVEL_AWAIT: Literal[8192] + +_Identifier: TypeAlias = str + +class AST: + if sys.version_info >= (3, 10): + __match_args__ = () + _attributes: ClassVar[tuple[str, ...]] + _fields: ClassVar[tuple[str, ...]] + def __init__(self, *args: Any, **kwargs: Any) -> None: ... + # TODO: Not all nodes have all of the following attributes + lineno: int + col_offset: int + if sys.version_info >= (3, 8): + end_lineno: int | None + end_col_offset: int | None + type_comment: str | None + +class mod(AST): ... + +if sys.version_info >= (3, 8): + class type_ignore(AST): ... + + class TypeIgnore(type_ignore): + if sys.version_info >= (3, 10): + __match_args__ = ("lineno", "tag") + tag: str + + class FunctionType(mod): + if sys.version_info >= (3, 10): + __match_args__ = ("argtypes", "returns") + argtypes: list[expr] + returns: expr + +class Module(mod): + if sys.version_info >= (3, 10): + __match_args__ = ("body", "type_ignores") + body: list[stmt] + if sys.version_info >= (3, 8): + type_ignores: list[TypeIgnore] + +class Interactive(mod): + if sys.version_info >= (3, 10): + __match_args__ = ("body",) + body: list[stmt] + +class Expression(mod): + if sys.version_info >= (3, 10): + __match_args__ = ("body",) + body: expr + +class stmt(AST): ... 
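A CPython-side illustration (not part of the stub) of why the 3.10+ branches in this file declare ``__match_args__``: those tuples are what let the AST node classes be destructured positionally with structural pattern matching::

    import ast

    tree = ast.parse("answer = 42")

    # Requires Python 3.10+ for the match statement itself.
    match tree.body[0]:
        case ast.Assign([ast.Name(name)], ast.Constant(value)):
            print(name, value)   # prints: answer 42
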
+ +class FunctionDef(stmt): + if sys.version_info >= (3, 10): + __match_args__ = ("name", "args", "body", "decorator_list", "returns", "type_comment") + name: _Identifier + args: arguments + body: list[stmt] + decorator_list: list[expr] + returns: expr | None + +class AsyncFunctionDef(stmt): + if sys.version_info >= (3, 10): + __match_args__ = ("name", "args", "body", "decorator_list", "returns", "type_comment") + name: _Identifier + args: arguments + body: list[stmt] + decorator_list: list[expr] + returns: expr | None + +class ClassDef(stmt): + if sys.version_info >= (3, 10): + __match_args__ = ("name", "bases", "keywords", "body", "decorator_list") + name: _Identifier + bases: list[expr] + keywords: list[keyword] + body: list[stmt] + decorator_list: list[expr] + +class Return(stmt): + if sys.version_info >= (3, 10): + __match_args__ = ("value",) + value: expr | None + +class Delete(stmt): + if sys.version_info >= (3, 10): + __match_args__ = ("targets",) + targets: list[expr] + +class Assign(stmt): + if sys.version_info >= (3, 10): + __match_args__ = ("targets", "value", "type_comment") + targets: list[expr] + value: expr + +class AugAssign(stmt): + if sys.version_info >= (3, 10): + __match_args__ = ("target", "op", "value") + target: expr + op: operator + value: expr + +class AnnAssign(stmt): + if sys.version_info >= (3, 10): + __match_args__ = ("target", "annotation", "value", "simple") + target: expr + annotation: expr + value: expr | None + simple: int + +class For(stmt): + if sys.version_info >= (3, 10): + __match_args__ = ("target", "iter", "body", "orelse", "type_comment") + target: expr + iter: expr + body: list[stmt] + orelse: list[stmt] + +class AsyncFor(stmt): + if sys.version_info >= (3, 10): + __match_args__ = ("target", "iter", "body", "orelse", "type_comment") + target: expr + iter: expr + body: list[stmt] + orelse: list[stmt] + +class While(stmt): + if sys.version_info >= (3, 10): + __match_args__ = ("test", "body", "orelse") + test: expr + body: list[stmt] + orelse: list[stmt] + +class If(stmt): + if sys.version_info >= (3, 10): + __match_args__ = ("test", "body", "orelse") + test: expr + body: list[stmt] + orelse: list[stmt] + +class With(stmt): + if sys.version_info >= (3, 10): + __match_args__ = ("items", "body", "type_comment") + items: list[withitem] + body: list[stmt] + +class AsyncWith(stmt): + if sys.version_info >= (3, 10): + __match_args__ = ("items", "body", "type_comment") + items: list[withitem] + body: list[stmt] + +class Raise(stmt): + if sys.version_info >= (3, 10): + __match_args__ = ("exc", "cause") + exc: expr | None + cause: expr | None + +class Try(stmt): + if sys.version_info >= (3, 10): + __match_args__ = ("body", "handlers", "orelse", "finalbody") + body: list[stmt] + handlers: list[ExceptHandler] + orelse: list[stmt] + finalbody: list[stmt] + +if sys.version_info >= (3, 11): + class TryStar(stmt): + __match_args__ = ("body", "handlers", "orelse", "finalbody") + body: list[stmt] + handlers: list[ExceptHandler] + orelse: list[stmt] + finalbody: list[stmt] + +class Assert(stmt): + if sys.version_info >= (3, 10): + __match_args__ = ("test", "msg") + test: expr + msg: expr | None + +class Import(stmt): + if sys.version_info >= (3, 10): + __match_args__ = ("names",) + names: list[alias] + +class ImportFrom(stmt): + if sys.version_info >= (3, 10): + __match_args__ = ("module", "names", "level") + module: str | None + names: list[alias] + level: int + +class Global(stmt): + if sys.version_info >= (3, 10): + __match_args__ = ("names",) + names: 
list[_Identifier] + +class Nonlocal(stmt): + if sys.version_info >= (3, 10): + __match_args__ = ("names",) + names: list[_Identifier] + +class Expr(stmt): + if sys.version_info >= (3, 10): + __match_args__ = ("value",) + value: expr + +class Pass(stmt): ... +class Break(stmt): ... +class Continue(stmt): ... +class expr(AST): ... + +class BoolOp(expr): + if sys.version_info >= (3, 10): + __match_args__ = ("op", "values") + op: boolop + values: list[expr] + +class BinOp(expr): + if sys.version_info >= (3, 10): + __match_args__ = ("left", "op", "right") + left: expr + op: operator + right: expr + +class UnaryOp(expr): + if sys.version_info >= (3, 10): + __match_args__ = ("op", "operand") + op: unaryop + operand: expr + +class Lambda(expr): + if sys.version_info >= (3, 10): + __match_args__ = ("args", "body") + args: arguments + body: expr + +class IfExp(expr): + if sys.version_info >= (3, 10): + __match_args__ = ("test", "body", "orelse") + test: expr + body: expr + orelse: expr + +class Dict(expr): + if sys.version_info >= (3, 10): + __match_args__ = ("keys", "values") + keys: list[expr | None] + values: list[expr] + +class Set(expr): + if sys.version_info >= (3, 10): + __match_args__ = ("elts",) + elts: list[expr] + +class ListComp(expr): + if sys.version_info >= (3, 10): + __match_args__ = ("elt", "generators") + elt: expr + generators: list[comprehension] + +class SetComp(expr): + if sys.version_info >= (3, 10): + __match_args__ = ("elt", "generators") + elt: expr + generators: list[comprehension] + +class DictComp(expr): + if sys.version_info >= (3, 10): + __match_args__ = ("key", "value", "generators") + key: expr + value: expr + generators: list[comprehension] + +class GeneratorExp(expr): + if sys.version_info >= (3, 10): + __match_args__ = ("elt", "generators") + elt: expr + generators: list[comprehension] + +class Await(expr): + if sys.version_info >= (3, 10): + __match_args__ = ("value",) + value: expr + +class Yield(expr): + if sys.version_info >= (3, 10): + __match_args__ = ("value",) + value: expr | None + +class YieldFrom(expr): + if sys.version_info >= (3, 10): + __match_args__ = ("value",) + value: expr + +class Compare(expr): + if sys.version_info >= (3, 10): + __match_args__ = ("left", "ops", "comparators") + left: expr + ops: list[cmpop] + comparators: list[expr] + +class Call(expr): + if sys.version_info >= (3, 10): + __match_args__ = ("func", "args", "keywords") + func: expr + args: list[expr] + keywords: list[keyword] + +class FormattedValue(expr): + if sys.version_info >= (3, 10): + __match_args__ = ("value", "conversion", "format_spec") + value: expr + conversion: int + format_spec: expr | None + +class JoinedStr(expr): + if sys.version_info >= (3, 10): + __match_args__ = ("values",) + values: list[expr] + +if sys.version_info < (3, 8): + class Num(expr): # Deprecated in 3.8; use Constant + n: complex + + class Str(expr): # Deprecated in 3.8; use Constant + s: str + + class Bytes(expr): # Deprecated in 3.8; use Constant + s: bytes + + class NameConstant(expr): # Deprecated in 3.8; use Constant + value: Any + + class Ellipsis(expr): ... 
# Deprecated in 3.8; use Constant + +class Constant(expr): + if sys.version_info >= (3, 10): + __match_args__ = ("value", "kind") + value: Any # None, str, bytes, bool, int, float, complex, Ellipsis + kind: str | None + # Aliases for value, for backwards compatibility + s: Any + n: complex + +if sys.version_info >= (3, 8): + class NamedExpr(expr): + if sys.version_info >= (3, 10): + __match_args__ = ("target", "value") + target: expr + value: expr + +class Attribute(expr): + if sys.version_info >= (3, 10): + __match_args__ = ("value", "attr", "ctx") + value: expr + attr: _Identifier + ctx: expr_context + +if sys.version_info >= (3, 9): + _Slice: TypeAlias = expr +else: + class slice(AST): ... + _Slice: TypeAlias = slice + +class Slice(_Slice): + if sys.version_info >= (3, 10): + __match_args__ = ("lower", "upper", "step") + lower: expr | None + upper: expr | None + step: expr | None + +if sys.version_info < (3, 9): + class ExtSlice(slice): + dims: list[slice] + + class Index(slice): + value: expr + +class Subscript(expr): + if sys.version_info >= (3, 10): + __match_args__ = ("value", "slice", "ctx") + value: expr + slice: _Slice + ctx: expr_context + +class Starred(expr): + if sys.version_info >= (3, 10): + __match_args__ = ("value", "ctx") + value: expr + ctx: expr_context + +class Name(expr): + if sys.version_info >= (3, 10): + __match_args__ = ("id", "ctx") + id: _Identifier + ctx: expr_context + +class List(expr): + if sys.version_info >= (3, 10): + __match_args__ = ("elts", "ctx") + elts: list[expr] + ctx: expr_context + +class Tuple(expr): + if sys.version_info >= (3, 10): + __match_args__ = ("elts", "ctx") + elts: list[expr] + ctx: expr_context + if sys.version_info >= (3, 9): + dims: list[expr] + +class expr_context(AST): ... + +if sys.version_info < (3, 9): + class AugLoad(expr_context): ... + class AugStore(expr_context): ... + class Param(expr_context): ... + + class Suite(mod): + body: list[stmt] + +class Del(expr_context): ... +class Load(expr_context): ... +class Store(expr_context): ... +class boolop(AST): ... +class And(boolop): ... +class Or(boolop): ... +class operator(AST): ... +class Add(operator): ... +class BitAnd(operator): ... +class BitOr(operator): ... +class BitXor(operator): ... +class Div(operator): ... +class FloorDiv(operator): ... +class LShift(operator): ... +class Mod(operator): ... +class Mult(operator): ... +class MatMult(operator): ... +class Pow(operator): ... +class RShift(operator): ... +class Sub(operator): ... +class unaryop(AST): ... +class Invert(unaryop): ... +class Not(unaryop): ... +class UAdd(unaryop): ... +class USub(unaryop): ... +class cmpop(AST): ... +class Eq(cmpop): ... +class Gt(cmpop): ... +class GtE(cmpop): ... +class In(cmpop): ... +class Is(cmpop): ... +class IsNot(cmpop): ... +class Lt(cmpop): ... +class LtE(cmpop): ... +class NotEq(cmpop): ... +class NotIn(cmpop): ... + +class comprehension(AST): + if sys.version_info >= (3, 10): + __match_args__ = ("target", "iter", "ifs", "is_async") + target: expr + iter: expr + ifs: list[expr] + is_async: int + +class excepthandler(AST): ... 
+ +class ExceptHandler(excepthandler): + if sys.version_info >= (3, 10): + __match_args__ = ("type", "name", "body") + type: expr | None + name: _Identifier | None + body: list[stmt] + +class arguments(AST): + if sys.version_info >= (3, 10): + __match_args__ = ("posonlyargs", "args", "vararg", "kwonlyargs", "kw_defaults", "kwarg", "defaults") + if sys.version_info >= (3, 8): + posonlyargs: list[arg] + args: list[arg] + vararg: arg | None + kwonlyargs: list[arg] + kw_defaults: list[expr | None] + kwarg: arg | None + defaults: list[expr] + +class arg(AST): + if sys.version_info >= (3, 10): + __match_args__ = ("arg", "annotation", "type_comment") + arg: _Identifier + annotation: expr | None + +class keyword(AST): + if sys.version_info >= (3, 10): + __match_args__ = ("arg", "value") + arg: _Identifier | None + value: expr + +class alias(AST): + if sys.version_info >= (3, 10): + __match_args__ = ("name", "asname") + name: _Identifier + asname: _Identifier | None + +class withitem(AST): + if sys.version_info >= (3, 10): + __match_args__ = ("context_expr", "optional_vars") + context_expr: expr + optional_vars: expr | None + +if sys.version_info >= (3, 10): + class Match(stmt): + __match_args__ = ("subject", "cases") + subject: expr + cases: list[match_case] + + class pattern(AST): ... + # Without the alias, Pyright complains variables named pattern are recursively defined + _Pattern: TypeAlias = pattern + + class match_case(AST): + __match_args__ = ("pattern", "guard", "body") + pattern: _Pattern + guard: expr | None + body: list[stmt] + + class MatchValue(pattern): + __match_args__ = ("value",) + value: expr + + class MatchSingleton(pattern): + __match_args__ = ("value",) + value: Literal[True, False, None] + + class MatchSequence(pattern): + __match_args__ = ("patterns",) + patterns: list[pattern] + + class MatchStar(pattern): + __match_args__ = ("name",) + name: _Identifier | None + + class MatchMapping(pattern): + __match_args__ = ("keys", "patterns", "rest") + keys: list[expr] + patterns: list[pattern] + rest: _Identifier | None + + class MatchClass(pattern): + __match_args__ = ("cls", "patterns", "kwd_attrs", "kwd_patterns") + cls: expr + patterns: list[pattern] + kwd_attrs: list[_Identifier] + kwd_patterns: list[pattern] + + class MatchAs(pattern): + __match_args__ = ("pattern", "name") + pattern: _Pattern | None + name: _Identifier | None + + class MatchOr(pattern): + __match_args__ = ("patterns",) + patterns: list[pattern] diff --git a/.vscode/Pico-W-Stub/stdlib/_codecs.pyi b/.vscode/Pico-W-Stub/stdlib/_codecs.pyi new file mode 100644 index 0000000..232256f --- /dev/null +++ b/.vscode/Pico-W-Stub/stdlib/_codecs.pyi @@ -0,0 +1,136 @@ +import codecs +import sys +from _typeshed import ReadableBuffer +from collections.abc import Callable +from typing import overload +from typing_extensions import Literal, TypeAlias + +# This type is not exposed; it is defined in unicodeobject.c +class _EncodingMap: + def size(self) -> int: ... + +_CharMap: TypeAlias = dict[int, int] | _EncodingMap +_Handler: TypeAlias = Callable[[UnicodeError], tuple[str | bytes, int]] +_SearchFunction: TypeAlias = Callable[[str], codecs.CodecInfo | None] + +def register(__search_function: _SearchFunction) -> None: ... + +if sys.version_info >= (3, 10): + def unregister(__search_function: _SearchFunction) -> None: ... + +def register_error(__errors: str, __handler: _Handler) -> None: ... +def lookup_error(__name: str) -> _Handler: ... 
+ +# The type ignore on `encode` and `decode` is to avoid issues with overlapping overloads, for more details, see #300 +# https://docs.python.org/3/library/codecs.html#binary-transforms +_BytesToBytesEncoding: TypeAlias = Literal[ + "base64", + "base_64", + "base64_codec", + "bz2", + "bz2_codec", + "hex", + "hex_codec", + "quopri", + "quotedprintable", + "quoted_printable", + "quopri_codec", + "uu", + "uu_codec", + "zip", + "zlib", + "zlib_codec", +] +# https://docs.python.org/3/library/codecs.html#text-transforms +_StrToStrEncoding: TypeAlias = Literal["rot13", "rot_13"] + +@overload +def encode(obj: ReadableBuffer, encoding: _BytesToBytesEncoding, errors: str = ...) -> bytes: ... +@overload +def encode(obj: str, encoding: _StrToStrEncoding, errors: str = ...) -> str: ... # type: ignore[misc] +@overload +def encode(obj: str, encoding: str = ..., errors: str = ...) -> bytes: ... +@overload +def decode(obj: ReadableBuffer, encoding: _BytesToBytesEncoding, errors: str = ...) -> bytes: ... # type: ignore[misc] +@overload +def decode(obj: str, encoding: _StrToStrEncoding, errors: str = ...) -> str: ... + +# these are documented as text encodings but in practice they also accept str as input +@overload +def decode( + obj: str, encoding: Literal["unicode_escape", "unicode-escape", "raw_unicode_escape", "raw-unicode-escape"], errors: str = ... +) -> str: ... + +# hex is officially documented as a bytes to bytes encoding, but it appears to also work with str +@overload +def decode(obj: str, encoding: Literal["hex", "hex_codec"], errors: str = ...) -> bytes: ... +@overload +def decode(obj: ReadableBuffer, encoding: str = ..., errors: str = ...) -> str: ... +def lookup(__encoding: str) -> codecs.CodecInfo: ... +def charmap_build(__map: str) -> _CharMap: ... +def ascii_decode(__data: ReadableBuffer, __errors: str | None = ...) -> tuple[str, int]: ... +def ascii_encode(__str: str, __errors: str | None = ...) -> tuple[bytes, int]: ... +def charmap_decode(__data: ReadableBuffer, __errors: str | None = ..., __mapping: _CharMap | None = ...) -> tuple[str, int]: ... +def charmap_encode(__str: str, __errors: str | None = ..., __mapping: _CharMap | None = ...) -> tuple[bytes, int]: ... +def escape_decode(__data: str | ReadableBuffer, __errors: str | None = ...) -> tuple[str, int]: ... +def escape_encode(__data: bytes, __errors: str | None = ...) -> tuple[bytes, int]: ... +def latin_1_decode(__data: ReadableBuffer, __errors: str | None = ...) -> tuple[str, int]: ... +def latin_1_encode(__str: str, __errors: str | None = ...) -> tuple[bytes, int]: ... + +if sys.version_info >= (3, 9): + def raw_unicode_escape_decode( + __data: str | ReadableBuffer, __errors: str | None = ..., __final: bool = ... + ) -> tuple[str, int]: ... + +else: + def raw_unicode_escape_decode(__data: str | ReadableBuffer, __errors: str | None = ...) -> tuple[str, int]: ... + +def raw_unicode_escape_encode(__str: str, __errors: str | None = ...) -> tuple[bytes, int]: ... +def readbuffer_encode(__data: str | ReadableBuffer, __errors: str | None = ...) -> tuple[bytes, int]: ... + +if sys.version_info >= (3, 9): + def unicode_escape_decode( + __data: str | ReadableBuffer, __errors: str | None = ..., __final: bool = ... + ) -> tuple[str, int]: ... + +else: + def unicode_escape_decode(__data: str | ReadableBuffer, __errors: str | None = ...) -> tuple[str, int]: ... + +def unicode_escape_encode(__str: str, __errors: str | None = ...) -> tuple[bytes, int]: ... 
+ +if sys.version_info < (3, 8): + def unicode_internal_decode(__obj: str | ReadableBuffer, __errors: str | None = ...) -> tuple[str, int]: ... + def unicode_internal_encode(__obj: str | ReadableBuffer, __errors: str | None = ...) -> tuple[bytes, int]: ... + +def utf_16_be_decode(__data: ReadableBuffer, __errors: str | None = ..., __final: int = ...) -> tuple[str, int]: ... +def utf_16_be_encode(__str: str, __errors: str | None = ...) -> tuple[bytes, int]: ... +def utf_16_decode(__data: ReadableBuffer, __errors: str | None = ..., __final: int = ...) -> tuple[str, int]: ... +def utf_16_encode(__str: str, __errors: str | None = ..., __byteorder: int = ...) -> tuple[bytes, int]: ... +def utf_16_ex_decode( + __data: ReadableBuffer, __errors: str | None = ..., __byteorder: int = ..., __final: int = ... +) -> tuple[str, int, int]: ... +def utf_16_le_decode(__data: ReadableBuffer, __errors: str | None = ..., __final: int = ...) -> tuple[str, int]: ... +def utf_16_le_encode(__str: str, __errors: str | None = ...) -> tuple[bytes, int]: ... +def utf_32_be_decode(__data: ReadableBuffer, __errors: str | None = ..., __final: int = ...) -> tuple[str, int]: ... +def utf_32_be_encode(__str: str, __errors: str | None = ...) -> tuple[bytes, int]: ... +def utf_32_decode(__data: ReadableBuffer, __errors: str | None = ..., __final: int = ...) -> tuple[str, int]: ... +def utf_32_encode(__str: str, __errors: str | None = ..., __byteorder: int = ...) -> tuple[bytes, int]: ... +def utf_32_ex_decode( + __data: ReadableBuffer, __errors: str | None = ..., __byteorder: int = ..., __final: int = ... +) -> tuple[str, int, int]: ... +def utf_32_le_decode(__data: ReadableBuffer, __errors: str | None = ..., __final: int = ...) -> tuple[str, int]: ... +def utf_32_le_encode(__str: str, __errors: str | None = ...) -> tuple[bytes, int]: ... +def utf_7_decode(__data: ReadableBuffer, __errors: str | None = ..., __final: int = ...) -> tuple[str, int]: ... +def utf_7_encode(__str: str, __errors: str | None = ...) -> tuple[bytes, int]: ... +def utf_8_decode(__data: ReadableBuffer, __errors: str | None = ..., __final: int = ...) -> tuple[str, int]: ... +def utf_8_encode(__str: str, __errors: str | None = ...) -> tuple[bytes, int]: ... + +if sys.platform == "win32": + def mbcs_decode(__data: ReadableBuffer, __errors: str | None = ..., __final: int = ...) -> tuple[str, int]: ... + def mbcs_encode(__str: str, __errors: str | None = ...) -> tuple[bytes, int]: ... + def code_page_decode( + __codepage: int, __data: ReadableBuffer, __errors: str | None = ..., __final: int = ... + ) -> tuple[str, int]: ... + def code_page_encode(__code_page: int, __str: str, __errors: str | None = ...) -> tuple[bytes, int]: ... + def oem_decode(__data: ReadableBuffer, __errors: str | None = ..., __final: int = ...) -> tuple[str, int]: ... + def oem_encode(__str: str, __errors: str | None = ...) -> tuple[bytes, int]: ... 
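The overload stack in this stub mirrors how CPython routes binary transforms, text transforms and ordinary text encodings through ``encode``/``decode``; a quick CPython-side illustration (results shown as comments, not part of the stub)::

    import codecs

    codecs.encode(b"pico", "hex")      # -> b'7069636f'  (bytes-to-bytes transform)
    codecs.decode("7069636f", "hex")   # -> b'pico'      (hex decode also accepts str)
    codecs.encode("pico", "rot13")     # -> 'cvpb'       (str-to-str transform)
    codecs.encode("pico", "utf-8")     # -> b'pico'      (plain text encoding)
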
diff --git a/.vscode/Pico-W-Stub/stdlib/_collections_abc.pyi b/.vscode/Pico-W-Stub/stdlib/_collections_abc.pyi new file mode 100644 index 0000000..8373fe8 --- /dev/null +++ b/.vscode/Pico-W-Stub/stdlib/_collections_abc.pyi @@ -0,0 +1,81 @@ +import sys +from types import MappingProxyType +from typing import ( # noqa: Y027,Y038 + AbstractSet as Set, + AsyncGenerator as AsyncGenerator, + AsyncIterable as AsyncIterable, + AsyncIterator as AsyncIterator, + Awaitable as Awaitable, + ByteString as ByteString, + Callable as Callable, + Collection as Collection, + Container as Container, + Coroutine as Coroutine, + Generator as Generator, + Generic, + Hashable as Hashable, + ItemsView as ItemsView, + Iterable as Iterable, + Iterator as Iterator, + KeysView as KeysView, + Mapping as Mapping, + MappingView as MappingView, + MutableMapping as MutableMapping, + MutableSequence as MutableSequence, + MutableSet as MutableSet, + Reversible as Reversible, + Sequence as Sequence, + Sized as Sized, + TypeVar, + ValuesView as ValuesView, +) +from typing_extensions import final + +__all__ = [ + "Awaitable", + "Coroutine", + "AsyncIterable", + "AsyncIterator", + "AsyncGenerator", + "Hashable", + "Iterable", + "Iterator", + "Generator", + "Reversible", + "Sized", + "Container", + "Callable", + "Collection", + "Set", + "MutableSet", + "Mapping", + "MutableMapping", + "MappingView", + "KeysView", + "ItemsView", + "ValuesView", + "Sequence", + "MutableSequence", + "ByteString", +] + +_KT_co = TypeVar("_KT_co", covariant=True) # Key type covariant containers. +_VT_co = TypeVar("_VT_co", covariant=True) # Value type covariant containers. + +@final +class dict_keys(KeysView[_KT_co], Generic[_KT_co, _VT_co]): # undocumented + if sys.version_info >= (3, 10): + @property + def mapping(self) -> MappingProxyType[_KT_co, _VT_co]: ... + +@final +class dict_values(ValuesView[_VT_co], Generic[_KT_co, _VT_co]): # undocumented + if sys.version_info >= (3, 10): + @property + def mapping(self) -> MappingProxyType[_KT_co, _VT_co]: ... + +@final +class dict_items(ItemsView[_KT_co, _VT_co], Generic[_KT_co, _VT_co]): # undocumented + if sys.version_info >= (3, 10): + @property + def mapping(self) -> MappingProxyType[_KT_co, _VT_co]: ... diff --git a/.vscode/Pico-W-Stub/stdlib/_decimal.pyi b/.vscode/Pico-W-Stub/stdlib/_decimal.pyi new file mode 100644 index 0000000..9a90760 --- /dev/null +++ b/.vscode/Pico-W-Stub/stdlib/_decimal.pyi @@ -0,0 +1,281 @@ +import numbers +import sys +from collections.abc import Container, Sequence +from types import TracebackType +from typing import Any, ClassVar, NamedTuple, overload +from typing_extensions import Final, Literal, Self, TypeAlias + +_Decimal: TypeAlias = Decimal | int +_DecimalNew: TypeAlias = Decimal | float | str | tuple[int, Sequence[int], int] +_ComparableNum: TypeAlias = Decimal | float | numbers.Rational + +__version__: Final[str] +__libmpdec_version__: Final[str] + +class DecimalTuple(NamedTuple): + sign: int + digits: tuple[int, ...] + exponent: int | Literal["n", "N", "F"] + +ROUND_DOWN: str +ROUND_HALF_UP: str +ROUND_HALF_EVEN: str +ROUND_CEILING: str +ROUND_FLOOR: str +ROUND_UP: str +ROUND_HALF_DOWN: str +ROUND_05UP: str +HAVE_CONTEXTVAR: bool +HAVE_THREADS: bool +MAX_EMAX: int +MAX_PREC: int +MIN_EMIN: int +MIN_ETINY: int + +class DecimalException(ArithmeticError): ... +class Clamped(DecimalException): ... +class InvalidOperation(DecimalException): ... +class ConversionSyntax(InvalidOperation): ... +class DivisionByZero(DecimalException, ZeroDivisionError): ... 
+class DivisionImpossible(InvalidOperation): ... +class DivisionUndefined(InvalidOperation, ZeroDivisionError): ... +class Inexact(DecimalException): ... +class InvalidContext(InvalidOperation): ... +class Rounded(DecimalException): ... +class Subnormal(DecimalException): ... +class Overflow(Inexact, Rounded): ... +class Underflow(Inexact, Rounded, Subnormal): ... +class FloatOperation(DecimalException, TypeError): ... + +def setcontext(__context: Context) -> None: ... +def getcontext() -> Context: ... + +if sys.version_info >= (3, 11): + def localcontext( + ctx: Context | None = None, + *, + prec: int | None = ..., + rounding: str | None = ..., + Emin: int | None = ..., + Emax: int | None = ..., + capitals: int | None = ..., + clamp: int | None = ..., + traps: dict[_TrapType, bool] | None = ..., + flags: dict[_TrapType, bool] | None = ..., + ) -> _ContextManager: ... + +else: + def localcontext(ctx: Context | None = None) -> _ContextManager: ... + +class Decimal: + def __new__(cls, value: _DecimalNew = ..., context: Context | None = ...) -> Self: ... + @classmethod + def from_float(cls, __f: float) -> Self: ... + def __bool__(self) -> bool: ... + def compare(self, other: _Decimal, context: Context | None = None) -> Decimal: ... + def __hash__(self) -> int: ... + def as_tuple(self) -> DecimalTuple: ... + def as_integer_ratio(self) -> tuple[int, int]: ... + def to_eng_string(self, context: Context | None = None) -> str: ... + def __abs__(self) -> Decimal: ... + def __add__(self, __value: _Decimal) -> Decimal: ... + def __divmod__(self, __value: _Decimal) -> tuple[Decimal, Decimal]: ... + def __eq__(self, __value: object) -> bool: ... + def __floordiv__(self, __value: _Decimal) -> Decimal: ... + def __ge__(self, __value: _ComparableNum) -> bool: ... + def __gt__(self, __value: _ComparableNum) -> bool: ... + def __le__(self, __value: _ComparableNum) -> bool: ... + def __lt__(self, __value: _ComparableNum) -> bool: ... + def __mod__(self, __value: _Decimal) -> Decimal: ... + def __mul__(self, __value: _Decimal) -> Decimal: ... + def __neg__(self) -> Decimal: ... + def __pos__(self) -> Decimal: ... + def __pow__(self, __value: _Decimal, __mod: _Decimal | None = None) -> Decimal: ... + def __radd__(self, __value: _Decimal) -> Decimal: ... + def __rdivmod__(self, __value: _Decimal) -> tuple[Decimal, Decimal]: ... + def __rfloordiv__(self, __value: _Decimal) -> Decimal: ... + def __rmod__(self, __value: _Decimal) -> Decimal: ... + def __rmul__(self, __value: _Decimal) -> Decimal: ... + def __rsub__(self, __value: _Decimal) -> Decimal: ... + def __rtruediv__(self, __value: _Decimal) -> Decimal: ... + def __sub__(self, __value: _Decimal) -> Decimal: ... + def __truediv__(self, __value: _Decimal) -> Decimal: ... + def remainder_near(self, other: _Decimal, context: Context | None = None) -> Decimal: ... + def __float__(self) -> float: ... + def __int__(self) -> int: ... + def __trunc__(self) -> int: ... + @property + def real(self) -> Decimal: ... + @property + def imag(self) -> Decimal: ... + def conjugate(self) -> Decimal: ... + def __complex__(self) -> complex: ... + @overload + def __round__(self) -> int: ... + @overload + def __round__(self, __ndigits: int) -> Decimal: ... + def __floor__(self) -> int: ... + def __ceil__(self) -> int: ... + def fma(self, other: _Decimal, third: _Decimal, context: Context | None = None) -> Decimal: ... + def __rpow__(self, __value: _Decimal, __mod: Context | None = None) -> Decimal: ... + def normalize(self, context: Context | None = None) -> Decimal: ... 
+ def quantize(self, exp: _Decimal, rounding: str | None = None, context: Context | None = None) -> Decimal: ... + def same_quantum(self, other: _Decimal, context: Context | None = None) -> bool: ... + def to_integral_exact(self, rounding: str | None = None, context: Context | None = None) -> Decimal: ... + def to_integral_value(self, rounding: str | None = None, context: Context | None = None) -> Decimal: ... + def to_integral(self, rounding: str | None = None, context: Context | None = None) -> Decimal: ... + def sqrt(self, context: Context | None = None) -> Decimal: ... + def max(self, other: _Decimal, context: Context | None = None) -> Decimal: ... + def min(self, other: _Decimal, context: Context | None = None) -> Decimal: ... + def adjusted(self) -> int: ... + def canonical(self) -> Decimal: ... + def compare_signal(self, other: _Decimal, context: Context | None = None) -> Decimal: ... + def compare_total(self, other: _Decimal, context: Context | None = None) -> Decimal: ... + def compare_total_mag(self, other: _Decimal, context: Context | None = None) -> Decimal: ... + def copy_abs(self) -> Decimal: ... + def copy_negate(self) -> Decimal: ... + def copy_sign(self, other: _Decimal, context: Context | None = None) -> Decimal: ... + def exp(self, context: Context | None = None) -> Decimal: ... + def is_canonical(self) -> bool: ... + def is_finite(self) -> bool: ... + def is_infinite(self) -> bool: ... + def is_nan(self) -> bool: ... + def is_normal(self, context: Context | None = None) -> bool: ... + def is_qnan(self) -> bool: ... + def is_signed(self) -> bool: ... + def is_snan(self) -> bool: ... + def is_subnormal(self, context: Context | None = None) -> bool: ... + def is_zero(self) -> bool: ... + def ln(self, context: Context | None = None) -> Decimal: ... + def log10(self, context: Context | None = None) -> Decimal: ... + def logb(self, context: Context | None = None) -> Decimal: ... + def logical_and(self, other: _Decimal, context: Context | None = None) -> Decimal: ... + def logical_invert(self, context: Context | None = None) -> Decimal: ... + def logical_or(self, other: _Decimal, context: Context | None = None) -> Decimal: ... + def logical_xor(self, other: _Decimal, context: Context | None = None) -> Decimal: ... + def max_mag(self, other: _Decimal, context: Context | None = None) -> Decimal: ... + def min_mag(self, other: _Decimal, context: Context | None = None) -> Decimal: ... + def next_minus(self, context: Context | None = None) -> Decimal: ... + def next_plus(self, context: Context | None = None) -> Decimal: ... + def next_toward(self, other: _Decimal, context: Context | None = None) -> Decimal: ... + def number_class(self, context: Context | None = None) -> str: ... + def radix(self) -> Decimal: ... + def rotate(self, other: _Decimal, context: Context | None = None) -> Decimal: ... + def scaleb(self, other: _Decimal, context: Context | None = None) -> Decimal: ... + def shift(self, other: _Decimal, context: Context | None = None) -> Decimal: ... + def __reduce__(self) -> tuple[type[Self], tuple[str]]: ... + def __copy__(self) -> Self: ... + def __deepcopy__(self, __memo: Any) -> Self: ... + def __format__(self, __specifier: str, __context: Context | None = ...) -> str: ... + +class _ContextManager: + new_context: Context + saved_context: Context + def __init__(self, new_context: Context) -> None: ... + def __enter__(self) -> Context: ... + def __exit__(self, t: type[BaseException] | None, v: BaseException | None, tb: TracebackType | None) -> None: ... 
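As a CPython-side illustration (not part of the stub), ``localcontext()`` hands back this context manager, so precision changes stay scoped to the ``with`` block and ``__exit__`` restores the previous context::

    from decimal import Decimal, getcontext, localcontext

    with localcontext() as ctx:
        ctx.prec = 6
        print(Decimal(1) / Decimal(7))   # 0.142857 (six significant digits)

    print(getcontext().prec)             # default precision (28) is restored here
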
+ +_TrapType: TypeAlias = type[DecimalException] + +class Context: + # TODO: Context doesn't allow you to delete *any* attributes from instances of the class at runtime, + # even settable attributes like `prec` and `rounding`, + # but that's inexpressable in the stub. + # Type checkers either ignore it or misinterpret it + # if you add a `def __delattr__(self, __name: str) -> NoReturn` method to the stub + prec: int + rounding: str + Emin: int + Emax: int + capitals: int + clamp: int + traps: dict[_TrapType, bool] + flags: dict[_TrapType, bool] + def __init__( + self, + prec: int | None = ..., + rounding: str | None = ..., + Emin: int | None = ..., + Emax: int | None = ..., + capitals: int | None = ..., + clamp: int | None = ..., + flags: None | dict[_TrapType, bool] | Container[_TrapType] = ..., + traps: None | dict[_TrapType, bool] | Container[_TrapType] = ..., + _ignored_flags: list[_TrapType] | None = ..., + ) -> None: ... + def __reduce__(self) -> tuple[type[Self], tuple[Any, ...]]: ... + def clear_flags(self) -> None: ... + def clear_traps(self) -> None: ... + def copy(self) -> Context: ... + def __copy__(self) -> Context: ... + # see https://github.com/python/cpython/issues/94107 + __hash__: ClassVar[None] # type: ignore[assignment] + def Etiny(self) -> int: ... + def Etop(self) -> int: ... + def create_decimal(self, __num: _DecimalNew = "0") -> Decimal: ... + def create_decimal_from_float(self, __f: float) -> Decimal: ... + def abs(self, __x: _Decimal) -> Decimal: ... + def add(self, __x: _Decimal, __y: _Decimal) -> Decimal: ... + def canonical(self, __x: Decimal) -> Decimal: ... + def compare(self, __x: _Decimal, __y: _Decimal) -> Decimal: ... + def compare_signal(self, __x: _Decimal, __y: _Decimal) -> Decimal: ... + def compare_total(self, __x: _Decimal, __y: _Decimal) -> Decimal: ... + def compare_total_mag(self, __x: _Decimal, __y: _Decimal) -> Decimal: ... + def copy_abs(self, __x: _Decimal) -> Decimal: ... + def copy_decimal(self, __x: _Decimal) -> Decimal: ... + def copy_negate(self, __x: _Decimal) -> Decimal: ... + def copy_sign(self, __x: _Decimal, __y: _Decimal) -> Decimal: ... + def divide(self, __x: _Decimal, __y: _Decimal) -> Decimal: ... + def divide_int(self, __x: _Decimal, __y: _Decimal) -> Decimal: ... + def divmod(self, __x: _Decimal, __y: _Decimal) -> tuple[Decimal, Decimal]: ... + def exp(self, __x: _Decimal) -> Decimal: ... + def fma(self, __x: _Decimal, __y: _Decimal, __z: _Decimal) -> Decimal: ... + def is_canonical(self, __x: _Decimal) -> bool: ... + def is_finite(self, __x: _Decimal) -> bool: ... + def is_infinite(self, __x: _Decimal) -> bool: ... + def is_nan(self, __x: _Decimal) -> bool: ... + def is_normal(self, __x: _Decimal) -> bool: ... + def is_qnan(self, __x: _Decimal) -> bool: ... + def is_signed(self, __x: _Decimal) -> bool: ... + def is_snan(self, __x: _Decimal) -> bool: ... + def is_subnormal(self, __x: _Decimal) -> bool: ... + def is_zero(self, __x: _Decimal) -> bool: ... + def ln(self, __x: _Decimal) -> Decimal: ... + def log10(self, __x: _Decimal) -> Decimal: ... + def logb(self, __x: _Decimal) -> Decimal: ... + def logical_and(self, __x: _Decimal, __y: _Decimal) -> Decimal: ... + def logical_invert(self, __x: _Decimal) -> Decimal: ... + def logical_or(self, __x: _Decimal, __y: _Decimal) -> Decimal: ... + def logical_xor(self, __x: _Decimal, __y: _Decimal) -> Decimal: ... + def max(self, __x: _Decimal, __y: _Decimal) -> Decimal: ... + def max_mag(self, __x: _Decimal, __y: _Decimal) -> Decimal: ... 
+ def min(self, __x: _Decimal, __y: _Decimal) -> Decimal: ... + def min_mag(self, __x: _Decimal, __y: _Decimal) -> Decimal: ... + def minus(self, __x: _Decimal) -> Decimal: ... + def multiply(self, __x: _Decimal, __y: _Decimal) -> Decimal: ... + def next_minus(self, __x: _Decimal) -> Decimal: ... + def next_plus(self, __x: _Decimal) -> Decimal: ... + def next_toward(self, __x: _Decimal, __y: _Decimal) -> Decimal: ... + def normalize(self, __x: _Decimal) -> Decimal: ... + def number_class(self, __x: _Decimal) -> str: ... + def plus(self, __x: _Decimal) -> Decimal: ... + def power(self, a: _Decimal, b: _Decimal, modulo: _Decimal | None = None) -> Decimal: ... + def quantize(self, __x: _Decimal, __y: _Decimal) -> Decimal: ... + def radix(self) -> Decimal: ... + def remainder(self, __x: _Decimal, __y: _Decimal) -> Decimal: ... + def remainder_near(self, __x: _Decimal, __y: _Decimal) -> Decimal: ... + def rotate(self, __x: _Decimal, __y: _Decimal) -> Decimal: ... + def same_quantum(self, __x: _Decimal, __y: _Decimal) -> bool: ... + def scaleb(self, __x: _Decimal, __y: _Decimal) -> Decimal: ... + def shift(self, __x: _Decimal, __y: _Decimal) -> Decimal: ... + def sqrt(self, __x: _Decimal) -> Decimal: ... + def subtract(self, __x: _Decimal, __y: _Decimal) -> Decimal: ... + def to_eng_string(self, __x: _Decimal) -> str: ... + def to_sci_string(self, __x: _Decimal) -> str: ... + def to_integral_exact(self, __x: _Decimal) -> Decimal: ... + def to_integral_value(self, __x: _Decimal) -> Decimal: ... + def to_integral(self, __x: _Decimal) -> Decimal: ... + +DefaultContext: Context +BasicContext: Context +ExtendedContext: Context diff --git a/.vscode/Pico-W-Stub/stdlib/_typeshed/__init__.pyi b/.vscode/Pico-W-Stub/stdlib/_typeshed/__init__.pyi new file mode 100644 index 0000000..dbcd796 --- /dev/null +++ b/.vscode/Pico-W-Stub/stdlib/_typeshed/__init__.pyi @@ -0,0 +1,318 @@ +# Utility types for typeshed +# +# See the README.md file in this directory for more information. + +import sys +from collections.abc import Awaitable, Callable, Iterable, Sequence +from collections.abc import Set as AbstractSet +from collections.abc import Sized +from dataclasses import Field +from os import PathLike +from types import FrameType, TracebackType +from typing import Any, AnyStr, ClassVar, Generic, Protocol, TypeVar, overload + +from typing_extensions import (Buffer, Final, Literal, LiteralString, + TypeAlias, final) + +_KT = TypeVar("_KT") +_KT_co = TypeVar("_KT_co", covariant=True) +_KT_contra = TypeVar("_KT_contra", contravariant=True) +_VT = TypeVar("_VT") +_VT_co = TypeVar("_VT_co", covariant=True) +_T = TypeVar("_T") +_T_co = TypeVar("_T_co", covariant=True) +_T_contra = TypeVar("_T_contra", contravariant=True) + +# Use for "self" annotations: +# def __enter__(self: Self) -> Self: ... +Self = TypeVar("Self") # noqa: Y001 + +# covariant version of typing.AnyStr, useful for protocols +AnyStr_co = TypeVar("AnyStr_co", str, bytes, covariant=True) # noqa: Y001 + +# For partially known annotations. Usually, fields where type annotations +# haven't been added are left unannotated, but in some situations this +# isn't possible or a type is already partially known. In cases like these, +# use Incomplete instead of Any as a marker. For example, use +# "Incomplete | None" instead of "Any | None". +Incomplete: TypeAlias = Any + +# To describe a function parameter that is unused and will work with anything. +Unused: TypeAlias = object + +# Used to mark arguments that default to a sentinel value. 
This prevents +# stubtest from complaining about the default value not matching. +# +# def foo(x: int | None = sentinel) -> None: ... +# +# In cases where the sentinel object is exported and can be used by user code, +# a construct like this is better: +# +# _SentinelType = NewType("_SentinelType", object) +# sentinel: _SentinelType +# def foo(x: int | None | _SentinelType = ...) -> None: ... +sentinel = Any # noqa: Y026 + +# stable +class IdentityFunction(Protocol): + def __call__(self, __x: _T) -> _T: ... + +# stable +class SupportsNext(Protocol[_T_co]): + def __next__(self) -> _T_co: ... + +# stable +class SupportsAnext(Protocol[_T_co]): + def __anext__(self) -> Awaitable[_T_co]: ... + +# Comparison protocols + +class SupportsDunderLT(Protocol[_T_contra]): + def __lt__(self, __other: _T_contra) -> bool: ... + +class SupportsDunderGT(Protocol[_T_contra]): + def __gt__(self, __other: _T_contra) -> bool: ... + +class SupportsDunderLE(Protocol[_T_contra]): + def __le__(self, __other: _T_contra) -> bool: ... + +class SupportsDunderGE(Protocol[_T_contra]): + def __ge__(self, __other: _T_contra) -> bool: ... + +class SupportsAllComparisons( + SupportsDunderLT[Any], SupportsDunderGT[Any], SupportsDunderLE[Any], SupportsDunderGE[Any], Protocol +): ... + +SupportsRichComparison: TypeAlias = SupportsDunderLT[Any] | SupportsDunderGT[Any] +SupportsRichComparisonT = TypeVar("SupportsRichComparisonT", bound=SupportsRichComparison) # noqa: Y001 + +# Dunder protocols + +class SupportsAdd(Protocol[_T_contra, _T_co]): + def __add__(self, __x: _T_contra) -> _T_co: ... + +class SupportsRAdd(Protocol[_T_contra, _T_co]): + def __radd__(self, __x: _T_contra) -> _T_co: ... + +class SupportsSub(Protocol[_T_contra, _T_co]): + def __sub__(self, __x: _T_contra) -> _T_co: ... + +class SupportsRSub(Protocol[_T_contra, _T_co]): + def __rsub__(self, __x: _T_contra) -> _T_co: ... + +class SupportsDivMod(Protocol[_T_contra, _T_co]): + def __divmod__(self, __other: _T_contra) -> _T_co: ... + +class SupportsRDivMod(Protocol[_T_contra, _T_co]): + def __rdivmod__(self, __other: _T_contra) -> _T_co: ... + +# This protocol is generic over the iterator type, while Iterable is +# generic over the type that is iterated over. +class SupportsIter(Protocol[_T_co]): + def __iter__(self) -> _T_co: ... + +# This protocol is generic over the iterator type, while AsyncIterable is +# generic over the type that is iterated over. +class SupportsAiter(Protocol[_T_co]): + def __aiter__(self) -> _T_co: ... + +class SupportsLenAndGetItem(Protocol[_T_co]): + def __len__(self) -> int: ... + def __getitem__(self, __k: int) -> _T_co: ... + +class SupportsTrunc(Protocol): + def __trunc__(self) -> int: ... + +# Mapping-like protocols + +# stable +class SupportsItems(Protocol[_KT_co, _VT_co]): + def items(self) -> AbstractSet[tuple[_KT_co, _VT_co]]: ... + +# stable +class SupportsKeysAndGetItem(Protocol[_KT, _VT_co]): + def keys(self) -> Iterable[_KT]: ... + def __getitem__(self, __key: _KT) -> _VT_co: ... + +# stable +class SupportsGetItem(Protocol[_KT_contra, _VT_co]): + def __contains__(self, __x: Any) -> bool: ... + def __getitem__(self, __key: _KT_contra) -> _VT_co: ... + +# stable +class SupportsItemAccess(SupportsGetItem[_KT_contra, _VT], Protocol[_KT_contra, _VT]): + def __setitem__(self, __key: _KT_contra, __value: _VT) -> None: ... + def __delitem__(self, __key: _KT_contra) -> None: ... 
+ +StrPath: TypeAlias = str | PathLike[str] # stable +BytesPath: TypeAlias = bytes | PathLike[bytes] # stable +GenericPath: TypeAlias = AnyStr | PathLike[AnyStr] +StrOrBytesPath: TypeAlias = str | bytes | PathLike[str] | PathLike[bytes] # stable + +OpenTextModeUpdating: TypeAlias = Literal[ + "r+", + "+r", + "rt+", + "r+t", + "+rt", + "tr+", + "t+r", + "+tr", + "w+", + "+w", + "wt+", + "w+t", + "+wt", + "tw+", + "t+w", + "+tw", + "a+", + "+a", + "at+", + "a+t", + "+at", + "ta+", + "t+a", + "+ta", + "x+", + "+x", + "xt+", + "x+t", + "+xt", + "tx+", + "t+x", + "+tx", +] +OpenTextModeWriting: TypeAlias = Literal["w", "wt", "tw", "a", "at", "ta", "x", "xt", "tx"] +OpenTextModeReading: TypeAlias = Literal["r", "rt", "tr", "U", "rU", "Ur", "rtU", "rUt", "Urt", "trU", "tUr", "Utr"] +OpenTextMode: TypeAlias = OpenTextModeUpdating | OpenTextModeWriting | OpenTextModeReading +OpenBinaryModeUpdating: TypeAlias = Literal[ + "rb+", + "r+b", + "+rb", + "br+", + "b+r", + "+br", + "wb+", + "w+b", + "+wb", + "bw+", + "b+w", + "+bw", + "ab+", + "a+b", + "+ab", + "ba+", + "b+a", + "+ba", + "xb+", + "x+b", + "+xb", + "bx+", + "b+x", + "+bx", +] +OpenBinaryModeWriting: TypeAlias = Literal["wb", "bw", "ab", "ba", "xb", "bx"] +OpenBinaryModeReading: TypeAlias = Literal["rb", "br", "rbU", "rUb", "Urb", "brU", "bUr", "Ubr"] +OpenBinaryMode: TypeAlias = OpenBinaryModeUpdating | OpenBinaryModeReading | OpenBinaryModeWriting + +# stable +class HasFileno(Protocol): + def fileno(self) -> int: ... + +FileDescriptor: TypeAlias = int # stable +FileDescriptorLike: TypeAlias = int | HasFileno # stable +FileDescriptorOrPath: TypeAlias = int | StrOrBytesPath + +# stable +class SupportsRead(Protocol[_T_co]): + def read(self, __length: int = ...) -> _T_co: ... + +# stable +class SupportsReadline(Protocol[_T_co]): + def readline(self, __length: int = ...) -> _T_co: ... + +# stable +class SupportsNoArgReadline(Protocol[_T_co]): + def readline(self) -> _T_co: ... + +# stable +class SupportsWrite(Protocol[_T_contra]): + def write(self, __s: _T_contra) -> object: ... + +# Unfortunately PEP 688 does not allow us to distinguish read-only +# from writable buffers. We use these aliases for readability for now. +# Perhaps a future extension of the buffer protocol will allow us to +# distinguish these cases in the type system. +ReadOnlyBuffer: TypeAlias = Buffer # stable +# Anything that implements the read-write buffer interface. +WriteableBuffer: TypeAlias = Buffer +# Same as WriteableBuffer, but also includes read-only buffer types (like bytes). +ReadableBuffer: TypeAlias = Buffer # stable + +class SliceableBuffer(Buffer, Protocol): + def __getitem__(self, __slice: slice) -> Sequence[int]: ... + +class IndexableBuffer(Buffer, Protocol): + def __getitem__(self, __i: int) -> int: ... + +class SupportsGetItemBuffer(SliceableBuffer, IndexableBuffer, Protocol): + def __contains__(self, __x: Any) -> bool: ... + @overload + def __getitem__(self, __slice: slice) -> Sequence[int]: ... + @overload + def __getitem__(self, __i: int) -> int: ... + +class SizedBuffer(Sized, Buffer, Protocol): ... 
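The aliases and protocols above exist only for type checkers and are never importable at runtime, so application code references them under a ``TYPE_CHECKING`` guard. A minimal sketch of the usual pattern (function names are made up for illustration)::

    from typing import TYPE_CHECKING

    if TYPE_CHECKING:
        # _typeshed is only visible to type checkers, never at runtime.
        from _typeshed import StrOrBytesPath, SupportsRead

    def read_all(stream: "SupportsRead[bytes]") -> bytes:
        # Anything with a read() method matches the protocol structurally,
        # e.g. a file opened in binary mode or a socket's makefile("rb") object.
        return stream.read()

    def read_file(path: "StrOrBytesPath") -> bytes:
        # StrOrBytesPath covers str, bytes and os.PathLike paths.
        with open(path, "rb") as f:
            return f.read()
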
+ +# for compatibility with third-party stubs that may use this +_BufferWithLen: TypeAlias = SizedBuffer # not stable # noqa: Y047 + +ExcInfo: TypeAlias = tuple[type[BaseException], BaseException, TracebackType] +OptExcInfo: TypeAlias = ExcInfo | tuple[None, None, None] + +# stable +if sys.version_info >= (3, 10): + from types import NoneType as NoneType +else: + # Used by type checkers for checks involving None (does not exist at runtime) + @final + class NoneType: + def __bool__(self) -> Literal[False]: ... + +# This is an internal CPython type that is like, but subtly different from, a NamedTuple +# Subclasses of this type are found in multiple modules. +# In typeshed, `structseq` is only ever used as a mixin in combination with a fixed-length `Tuple` +# See discussion at #6546 & #6560 +# `structseq` classes are unsubclassable, so are all decorated with `@final`. +class structseq(Generic[_T_co]): + n_fields: Final[int] + n_unnamed_fields: Final[int] + n_sequence_fields: Final[int] + # The first parameter will generally only take an iterable of a specific length. + # E.g. `os.uname_result` takes any iterable of length exactly 5. + # + # The second parameter will accept a dict of any kind without raising an exception, + # but only has any meaning if you supply it a dict where the keys are strings. + # https://github.com/python/typeshed/pull/6560#discussion_r767149830 + def __new__(cls: type[Self], sequence: Iterable[_T_co], dict: dict[str, Any] = ...) -> Self: ... + +# Superset of typing.AnyStr that also includes LiteralString +AnyOrLiteralStr = TypeVar("AnyOrLiteralStr", str, bytes, LiteralString) # noqa: Y001 + +# Represents when str or LiteralStr is acceptable. Useful for string processing +# APIs where literalness of return value depends on literalness of inputs +StrOrLiteralStr = TypeVar("StrOrLiteralStr", LiteralString, str) # noqa: Y001 + +# Objects suitable to be passed to sys.setprofile, threading.setprofile, and similar +ProfileFunction: TypeAlias = Callable[[FrameType, str, Any], object] + +# Objects suitable to be passed to sys.settrace, threading.settrace, and similar +TraceFunction: TypeAlias = Callable[[FrameType, str, Any], TraceFunction | None] + +# experimental +# Might not work as expected for pyright, see +# https://github.com/python/typeshed/pull/9362 +# https://github.com/microsoft/pyright/issues/4339 +class DataclassInstance(Protocol): + __dataclass_fields__: ClassVar[dict[str, Field[Any]]] diff --git a/.vscode/Pico-W-Stub/stdlib/_typeshed/dbapi.pyi b/.vscode/Pico-W-Stub/stdlib/_typeshed/dbapi.pyi new file mode 100644 index 0000000..022e959 --- /dev/null +++ b/.vscode/Pico-W-Stub/stdlib/_typeshed/dbapi.pyi @@ -0,0 +1,37 @@ +# PEP 249 Database API 2.0 Types +# https://www.python.org/dev/peps/pep-0249/ + +from collections.abc import Mapping, Sequence +from typing import Any, Protocol +from typing_extensions import TypeAlias + +DBAPITypeCode: TypeAlias = Any | None +# Strictly speaking, this should be a Sequence, but the type system does +# not support fixed-length sequences. +DBAPIColumnDescription: TypeAlias = tuple[str, DBAPITypeCode, int | None, int | None, int | None, int | None, bool | None] + +class DBAPIConnection(Protocol): + def close(self) -> object: ... + def commit(self) -> object: ... + # optional: + # def rollback(self) -> Any: ... + def cursor(self) -> DBAPICursor: ... + +class DBAPICursor(Protocol): + @property + def description(self) -> Sequence[DBAPIColumnDescription] | None: ... + @property + def rowcount(self) -> int: ... 
+ # optional: + # def callproc(self, __procname: str, __parameters: Sequence[Any] = ...) -> Sequence[Any]: ... + def close(self) -> object: ... + def execute(self, __operation: str, __parameters: Sequence[Any] | Mapping[str, Any] = ...) -> object: ... + def executemany(self, __operation: str, __seq_of_parameters: Sequence[Sequence[Any]]) -> object: ... + def fetchone(self) -> Sequence[Any] | None: ... + def fetchmany(self, __size: int = ...) -> Sequence[Sequence[Any]]: ... + def fetchall(self) -> Sequence[Sequence[Any]]: ... + # optional: + # def nextset(self) -> None | Literal[True]: ... + arraysize: int + def setinputsizes(self, __sizes: Sequence[DBAPITypeCode | int | None]) -> object: ... + def setoutputsize(self, __size: int, __column: int = ...) -> object: ... diff --git a/.vscode/Pico-W-Stub/stdlib/_typeshed/wsgi.pyi b/.vscode/Pico-W-Stub/stdlib/_typeshed/wsgi.pyi new file mode 100644 index 0000000..de731ae --- /dev/null +++ b/.vscode/Pico-W-Stub/stdlib/_typeshed/wsgi.pyi @@ -0,0 +1,44 @@ +# Types to support PEP 3333 (WSGI) +# +# Obsolete since Python 3.11: Use wsgiref.types instead. +# +# See the README.md file in this directory for more information. + +import sys +from _typeshed import OptExcInfo +from collections.abc import Callable, Iterable, Iterator +from typing import Any, Protocol +from typing_extensions import TypeAlias + +class _Readable(Protocol): + def read(self, size: int = ...) -> bytes: ... + # Optional: def close(self) -> object: ... + +if sys.version_info >= (3, 11): + from wsgiref.types import * +else: + # stable + class StartResponse(Protocol): + def __call__( + self, __status: str, __headers: list[tuple[str, str]], __exc_info: OptExcInfo | None = ... + ) -> Callable[[bytes], object]: ... + + WSGIEnvironment: TypeAlias = dict[str, Any] # stable + WSGIApplication: TypeAlias = Callable[[WSGIEnvironment, StartResponse], Iterable[bytes]] # stable + + # WSGI input streams per PEP 3333, stable + class InputStream(Protocol): + def read(self, __size: int = ...) -> bytes: ... + def readline(self, __size: int = ...) -> bytes: ... + def readlines(self, __hint: int = ...) -> list[bytes]: ... + def __iter__(self) -> Iterator[bytes]: ... + + # WSGI error streams per PEP 3333, stable + class ErrorStream(Protocol): + def flush(self) -> object: ... + def write(self, __s: str) -> object: ... + def writelines(self, __seq: list[str]) -> object: ... + + # Optional file wrapper in wsgi.file_wrapper + class FileWrapper(Protocol): + def __call__(self, __file: _Readable, __block_size: int = ...) -> Iterable[bytes]: ... diff --git a/.vscode/Pico-W-Stub/stdlib/_typeshed/xml.pyi b/.vscode/Pico-W-Stub/stdlib/_typeshed/xml.pyi new file mode 100644 index 0000000..231c2b8 --- /dev/null +++ b/.vscode/Pico-W-Stub/stdlib/_typeshed/xml.pyi @@ -0,0 +1,9 @@ +# See the README.md file in this directory for more information. + +from typing import Any, Protocol + +# As defined https://docs.python.org/3/library/xml.dom.html#domimplementation-objects +class DOMImplementation(Protocol): + def hasFeature(self, feature: str, version: str | None) -> bool: ... + def createDocument(self, namespaceUri: str, qualifiedName: str, doctype: Any | None) -> Any: ... + def createDocumentType(self, qualifiedName: str, publicId: str, systemId: str) -> Any: ... 
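The `_typeshed` protocols introduced above (SupportsRead, SupportsWrite, the buffer aliases, and the DB-API/WSGI types) exist only for type checkers and cannot be imported at runtime, so consuming code guards the import behind typing.TYPE_CHECKING. The sketch below is illustrative only and is not part of this patch; the helper name copy_text is an assumption, but the imports and protocol signatures follow the stubs above.

from __future__ import annotations

from typing import TYPE_CHECKING

if TYPE_CHECKING:
    # Resolved from the stub files above; this module does not exist at runtime.
    from _typeshed import SupportsRead, SupportsWrite


def copy_text(src: SupportsRead[str], dst: SupportsWrite[str]) -> int:
    # Any object exposing read() -> str and write(str) satisfies these
    # structural protocols: open text files, io.StringIO, sys.stdout, ...
    data = src.read()
    dst.write(data)
    return len(data)

Because the protocols are structural, io.StringIO, an open text file, or sys.stdout all type-check as arguments without inheriting from anything in _typeshed.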
diff --git a/.vscode/Pico-W-Stub/stdlib/abc.pyi b/.vscode/Pico-W-Stub/stdlib/abc.pyi new file mode 100644 index 0000000..e259f7e --- /dev/null +++ b/.vscode/Pico-W-Stub/stdlib/abc.pyi @@ -0,0 +1,57 @@ +import sys +from collections.abc import Callable +from typing import Any, TypeVar + +import _typeshed +from _typeshed import SupportsWrite +from typing_extensions import Concatenate, Literal, ParamSpec + +_T = TypeVar("_T") +_R_co = TypeVar("_R_co", covariant=True) +_FuncT = TypeVar("_FuncT", bound=Callable[..., Any]) +_P = ParamSpec("_P") + +# These definitions have special processing in mypy +class ABCMeta(type): + __abstractmethods__: frozenset[str] + if sys.version_info >= (3, 11): + def __new__( + __mcls: type[_typeshed.Self], + __name: str, + __bases: tuple[type, ...], + __namespace: dict[str, Any], + **kwargs: Any + ) -> _typeshed.Self: ... + else: + def __new__( + mcls: type[_typeshed.Self], + name: str, + bases: tuple[type, ...], + namespace: dict[str, Any], + **kwargs: Any + ) -> _typeshed.Self: ... + + def __instancecheck__(cls: ABCMeta, instance: Any) -> bool: ... + def __subclasscheck__(cls: ABCMeta, subclass: type) -> bool: ... + def _dump_registry(cls: ABCMeta, file: SupportsWrite[str] | None = None) -> None: ... + def register(cls: ABCMeta, subclass: type[_T]) -> type[_T]: ... + +def abstractmethod(funcobj: _FuncT) -> _FuncT: ... + +class abstractclassmethod(classmethod[_T, _P, _R_co]): # type: ignore + __isabstractmethod__: Literal[True] + def __init__(self, callable: Callable[Concatenate[type[_T], _P], _R_co]) -> None: ... # type: ignore + +class abstractstaticmethod(staticmethod[_P, _R_co]): # type: ignore + __isabstractmethod__: Literal[True] + def __init__(self, callable: Callable[_P, _R_co]) -> None: ... # type: ignore + +class abstractproperty(property): + __isabstractmethod__: Literal[True] + +class ABC(metaclass=ABCMeta): ... + +def get_cache_token() -> object: ... + +if sys.version_info >= (3, 10): + def update_abstractmethods(cls: type[_T]) -> type[_T]: ... diff --git a/.vscode/Pico-W-Stub/stdlib/asyncio/__init__.pyi b/.vscode/Pico-W-Stub/stdlib/asyncio/__init__.pyi new file mode 100644 index 0000000..5c7eb8e --- /dev/null +++ b/.vscode/Pico-W-Stub/stdlib/asyncio/__init__.pyi @@ -0,0 +1,45 @@ +import sys +from collections.abc import Awaitable, Coroutine, Generator +from typing import Any, TypeVar + +from typing_extensions import TypeAlias + +# As at runtime, this depends on all submodules defining __all__ accurately. 
+from .base_events import * +from .coroutines import * +from .events import * +from .futures import * +from .locks import * +from .protocols import * +from .queues import * +from .runners import * +from .streams import * + +# from .subprocess import * +from .tasks import * +from .transports import * + +if sys.version_info >= (3, 8): + from .exceptions import * + +if sys.version_info >= (3, 9): + from .threads import * + +if sys.version_info >= (3, 11): + from .taskgroups import * + from .timeouts import * + +if sys.platform == "win32": + from .windows_events import * +else: + from .unix_events import * + +_T = TypeVar("_T") + +# Aliases imported by multiple submodules in typeshed +if sys.version_info >= (3, 12): + _AwaitableLike: TypeAlias = Awaitable[_T] # noqa: Y047 + _CoroutineLike: TypeAlias = Coroutine[Any, Any, _T] # noqa: Y047 +else: + _AwaitableLike: TypeAlias = Generator[Any, None, _T] | Awaitable[_T] + _CoroutineLike: TypeAlias = Generator[Any, None, _T] | Coroutine[Any, Any, _T] diff --git a/.vscode/Pico-W-Stub/stdlib/asyncio/base_events.pyi b/.vscode/Pico-W-Stub/stdlib/asyncio/base_events.pyi new file mode 100644 index 0000000..576a5bc --- /dev/null +++ b/.vscode/Pico-W-Stub/stdlib/asyncio/base_events.pyi @@ -0,0 +1,521 @@ +import ssl +import sys +from asyncio import _AwaitableLike, _CoroutineLike +from asyncio.events import AbstractEventLoop, AbstractServer, Handle, TimerHandle, _TaskFactory +from asyncio.futures import Future +from asyncio.protocols import BaseProtocol +from asyncio.tasks import Task +from asyncio.transports import ( + BaseTransport, + DatagramTransport, + ReadTransport, + SubprocessTransport, + Transport, + WriteTransport, +) +from collections.abc import Callable, Iterable, Sequence +from contextvars import Context +from typing import IO, Any, TypeVar, overload + +from _typeshed import FileDescriptorLike, ReadableBuffer, WriteableBuffer +from stdlib.socket import AddressFamily, SocketKind, _Address, _RetAddress, socket +from typing_extensions import Literal, TypeAlias + +if sys.version_info >= (3, 9): + __all__ = ("BaseEventLoop", "Server") +else: + __all__ = ("BaseEventLoop",) + +_T = TypeVar("_T") +_ProtocolT = TypeVar("_ProtocolT", bound=BaseProtocol) +_Context: TypeAlias = dict[str, Any] +_ExceptionHandler: TypeAlias = Callable[[AbstractEventLoop, _Context], object] +_ProtocolFactory: TypeAlias = Callable[[], BaseProtocol] +_SSLContext: TypeAlias = bool | None | ssl.SSLContext # type: ignore[misc] + +class Server(AbstractServer): + if sys.version_info >= (3, 11): + def __init__( + self, + loop: AbstractEventLoop, + sockets: Iterable[socket], + protocol_factory: _ProtocolFactory, + ssl_context: _SSLContext, + backlog: int, + ssl_handshake_timeout: float | None, + ssl_shutdown_timeout: float | None = None, + ) -> None: ... + else: + def __init__( + self, + loop: AbstractEventLoop, + sockets: Iterable[socket], + protocol_factory: _ProtocolFactory, + ssl_context: _SSLContext, + backlog: int, + ssl_handshake_timeout: float | None, + ) -> None: ... + + def get_loop(self) -> AbstractEventLoop: ... + def is_serving(self) -> bool: ... + async def start_serving(self) -> None: ... + async def serve_forever(self) -> None: ... + if sys.version_info >= (3, 8): + @property + def sockets(self) -> tuple[socket, ...]: ... + else: + @property + def sockets(self) -> list[socket]: ... + + def close(self) -> None: ... + async def wait_closed(self) -> None: ... + +class BaseEventLoop(AbstractEventLoop): + def run_forever(self) -> None: ... 
+ def run_until_complete(self, future: _AwaitableLike[_T]) -> _T: ... + def stop(self) -> None: ... + def is_running(self) -> bool: ... + def is_closed(self) -> bool: ... + def close(self) -> None: ... + async def shutdown_asyncgens(self) -> None: ... + # Methods scheduling callbacks. All these return Handles. + def call_soon( + self, callback: Callable[..., object], *args: Any, context: Context | None = None + ) -> Handle: ... + def call_later( + self, + delay: float, + callback: Callable[..., object], + *args: Any, + context: Context | None = None, + ) -> TimerHandle: ... + def call_at( + self, + when: float, + callback: Callable[..., object], + *args: Any, + context: Context | None = None, + ) -> TimerHandle: ... + def time(self) -> float: ... + # Future methods + def create_future(self) -> Future[Any]: ... + # Tasks methods + if sys.version_info >= (3, 11): + def create_task( + self, coro: _CoroutineLike[_T], *, name: object = None, context: Context | None = None + ) -> Task[_T]: ... + elif sys.version_info >= (3, 8): + def create_task(self, coro: _CoroutineLike[_T], *, name: object = None) -> Task[_T]: ... + else: + def create_task(self, coro: _CoroutineLike[_T]) -> Task[_T]: ... + + def set_task_factory(self, factory: _TaskFactory | None) -> None: ... + def get_task_factory(self) -> _TaskFactory | None: ... + # Methods for interacting with threads + def call_soon_threadsafe( + self, callback: Callable[..., object], *args: Any, context: Context | None = None + ) -> Handle: ... + def run_in_executor( + self, executor: Any, func: Callable[..., _T], *args: Any + ) -> Future[_T]: ... + def set_default_executor(self, executor: Any) -> None: ... + # Network I/O methods returning Futures. + async def getaddrinfo( + self, + host: bytes | str | None, + port: bytes | str | int | None, + *, + family: int = 0, + type: int = 0, + proto: int = 0, + flags: int = 0, + ) -> list[ + tuple[AddressFamily, SocketKind, int, str, tuple[str, int] | tuple[str, int, int, int]] + ]: ... + async def getnameinfo( + self, sockaddr: tuple[str, int] | tuple[str, int, int, int], flags: int = 0 + ) -> tuple[str, str]: ... + if sys.version_info >= (3, 12): + @overload + async def create_connection( + self, + protocol_factory: Callable[[], _ProtocolT], + host: str = ..., + port: int = ..., + *, + ssl: _SSLContext = None, + family: int = 0, + proto: int = 0, + flags: int = 0, + sock: None = None, + local_addr: tuple[str, int] | None = None, + server_hostname: str | None = None, + ssl_handshake_timeout: float | None = None, + ssl_shutdown_timeout: float | None = None, + happy_eyeballs_delay: float | None = None, + interleave: int | None = None, + all_errors: bool = False, + ) -> tuple[Transport, _ProtocolT]: ... + @overload + async def create_connection( + self, + protocol_factory: Callable[[], _ProtocolT], + host: None = None, + port: None = None, + *, + ssl: _SSLContext = None, + family: int = 0, + proto: int = 0, + flags: int = 0, + sock: socket, + local_addr: None = None, + server_hostname: str | None = None, + ssl_handshake_timeout: float | None = None, + ssl_shutdown_timeout: float | None = None, + happy_eyeballs_delay: float | None = None, + interleave: int | None = None, + all_errors: bool = False, + ) -> tuple[Transport, _ProtocolT]: ... 
+ elif sys.version_info >= (3, 11): + @overload + async def create_connection( + self, + protocol_factory: Callable[[], _ProtocolT], + host: str = ..., + port: int = ..., + *, + ssl: _SSLContext = None, + family: int = 0, + proto: int = 0, + flags: int = 0, + sock: None = None, + local_addr: tuple[str, int] | None = None, + server_hostname: str | None = None, + ssl_handshake_timeout: float | None = None, + ssl_shutdown_timeout: float | None = None, + happy_eyeballs_delay: float | None = None, + interleave: int | None = None, + ) -> tuple[Transport, _ProtocolT]: ... + @overload + async def create_connection( + self, + protocol_factory: Callable[[], _ProtocolT], + host: None = None, + port: None = None, + *, + ssl: _SSLContext = None, + family: int = 0, + proto: int = 0, + flags: int = 0, + sock: socket, + local_addr: None = None, + server_hostname: str | None = None, + ssl_handshake_timeout: float | None = None, + ssl_shutdown_timeout: float | None = None, + happy_eyeballs_delay: float | None = None, + interleave: int | None = None, + ) -> tuple[Transport, _ProtocolT]: ... + elif sys.version_info >= (3, 8): + @overload + async def create_connection( + self, + protocol_factory: Callable[[], _ProtocolT], + host: str = ..., + port: int = ..., + *, + ssl: _SSLContext = None, + family: int = 0, + proto: int = 0, + flags: int = 0, + sock: None = None, + local_addr: tuple[str, int] | None = None, + server_hostname: str | None = None, + ssl_handshake_timeout: float | None = None, + happy_eyeballs_delay: float | None = None, + interleave: int | None = None, + ) -> tuple[Transport, _ProtocolT]: ... + @overload + async def create_connection( + self, + protocol_factory: Callable[[], _ProtocolT], + host: None = None, + port: None = None, + *, + ssl: _SSLContext = None, + family: int = 0, + proto: int = 0, + flags: int = 0, + sock: socket, + local_addr: None = None, + server_hostname: str | None = None, + ssl_handshake_timeout: float | None = None, + happy_eyeballs_delay: float | None = None, + interleave: int | None = None, + ) -> tuple[Transport, _ProtocolT]: ... + else: + @overload + async def create_connection( + self, + protocol_factory: Callable[[], _ProtocolT], + host: str = ..., + port: int = ..., + *, + ssl: _SSLContext = None, + family: int = 0, + proto: int = 0, + flags: int = 0, + sock: None = None, + local_addr: tuple[str, int] | None = None, + server_hostname: str | None = None, + ssl_handshake_timeout: float | None = None, + ) -> tuple[Transport, _ProtocolT]: ... + @overload + async def create_connection( + self, + protocol_factory: Callable[[], _ProtocolT], + host: None = None, + port: None = None, + *, + ssl: _SSLContext = None, + family: int = 0, + proto: int = 0, + flags: int = 0, + sock: socket, + local_addr: None = None, + server_hostname: str | None = None, + ssl_handshake_timeout: float | None = None, + ) -> tuple[Transport, _ProtocolT]: ... + if sys.version_info >= (3, 11): + @overload + async def create_server( + self, + protocol_factory: _ProtocolFactory, + host: str | Sequence[str] | None = None, + port: int = ..., + *, + family: int = ..., + flags: int = ..., + sock: None = None, + backlog: int = 100, + ssl: _SSLContext = None, + reuse_address: bool | None = None, + reuse_port: bool | None = None, + ssl_handshake_timeout: float | None = None, + ssl_shutdown_timeout: float | None = None, + start_serving: bool = True, + ) -> Server: ... 
+ @overload + async def create_server( + self, + protocol_factory: _ProtocolFactory, + host: None = None, + port: None = None, + *, + family: int = ..., + flags: int = ..., + sock: socket = ..., + backlog: int = 100, + ssl: _SSLContext = None, + reuse_address: bool | None = None, + reuse_port: bool | None = None, + ssl_handshake_timeout: float | None = None, + ssl_shutdown_timeout: float | None = None, + start_serving: bool = True, + ) -> Server: ... + async def start_tls( + self, + transport: BaseTransport, + protocol: BaseProtocol, + sslcontext: ssl.SSLContext, + *, + server_side: bool = False, + server_hostname: str | None = None, + ssl_handshake_timeout: float | None = None, + ssl_shutdown_timeout: float | None = None, + ) -> Transport | None: ... + async def connect_accepted_socket( + self, + protocol_factory: Callable[[], _ProtocolT], + sock: socket, + *, + ssl: _SSLContext = None, + ssl_handshake_timeout: float | None = None, + ssl_shutdown_timeout: float | None = None, + ) -> tuple[Transport, _ProtocolT]: ... + else: + @overload + async def create_server( + self, + protocol_factory: _ProtocolFactory, + host: str | Sequence[str] | None = None, + port: int = ..., + *, + family: int = ..., + flags: int = ..., + sock: None = None, + backlog: int = 100, + ssl: _SSLContext = None, + reuse_address: bool | None = None, + reuse_port: bool | None = None, + ssl_handshake_timeout: float | None = None, + start_serving: bool = True, + ) -> Server: ... + @overload + async def create_server( + self, + protocol_factory: _ProtocolFactory, + host: None = None, + port: None = None, + *, + family: int = ..., + flags: int = ..., + sock: socket = ..., + backlog: int = 100, + ssl: _SSLContext = None, + reuse_address: bool | None = None, + reuse_port: bool | None = None, + ssl_handshake_timeout: float | None = None, + start_serving: bool = True, + ) -> Server: ... + async def start_tls( + self, + transport: BaseTransport, + protocol: BaseProtocol, + sslcontext: ssl.SSLContext, # type: ignore[misc] + *, + server_side: bool = False, + server_hostname: str | None = None, + ssl_handshake_timeout: float | None = None, + ) -> Transport | None: ... + async def connect_accepted_socket( + self, + protocol_factory: Callable[[], _ProtocolT], + sock: socket, + *, + ssl: _SSLContext = None, + ssl_handshake_timeout: float | None = None, + ) -> tuple[Transport, _ProtocolT]: ... + + async def sock_sendfile( + self, + sock: socket, + file: IO[bytes], + offset: int = 0, + count: int | None = None, + *, + fallback: bool | None = True, + ) -> int: ... + async def sendfile( + self, + transport: WriteTransport, + file: IO[bytes], + offset: int = 0, + count: int | None = None, + *, + fallback: bool = True, + ) -> int: ... + if sys.version_info >= (3, 11): + async def create_datagram_endpoint( # type: ignore[override] + self, + protocol_factory: Callable[[], _ProtocolT], + local_addr: tuple[str, int] | str | None = None, + remote_addr: tuple[str, int] | str | None = None, + *, + family: int = 0, + proto: int = 0, + flags: int = 0, + reuse_port: bool | None = None, + allow_broadcast: bool | None = None, + sock: socket | None = None, + ) -> tuple[DatagramTransport, _ProtocolT]: ... 
+ else: + async def create_datagram_endpoint( + self, + protocol_factory: Callable[[], _ProtocolT], + local_addr: tuple[str, int] | str | None = None, + remote_addr: tuple[str, int] | str | None = None, + *, + family: int = 0, + proto: int = 0, + flags: int = 0, + reuse_address: bool | None = ..., + reuse_port: bool | None = None, + allow_broadcast: bool | None = None, + sock: socket | None = None, + ) -> tuple[DatagramTransport, _ProtocolT]: ... + # Pipes and subprocesses. + async def connect_read_pipe( + self, protocol_factory: Callable[[], _ProtocolT], pipe: Any + ) -> tuple[ReadTransport, _ProtocolT]: ... + async def connect_write_pipe( + self, protocol_factory: Callable[[], _ProtocolT], pipe: Any + ) -> tuple[WriteTransport, _ProtocolT]: ... + async def subprocess_shell( + self, + protocol_factory: Callable[[], _ProtocolT], + cmd: bytes | str, + *, + stdin: int | IO[Any] | None = -1, + stdout: int | IO[Any] | None = -1, + stderr: int | IO[Any] | None = -1, + universal_newlines: Literal[False] = False, + shell: Literal[True] = True, + bufsize: Literal[0] = 0, + encoding: None = None, + errors: None = None, + text: Literal[False, None] = None, + **kwargs: Any, + ) -> tuple[SubprocessTransport, _ProtocolT]: ... + async def subprocess_exec( + self, + protocol_factory: Callable[[], _ProtocolT], + program: Any, + *args: Any, + stdin: int | IO[Any] | None = -1, + stdout: int | IO[Any] | None = -1, + stderr: int | IO[Any] | None = -1, + universal_newlines: Literal[False] = False, + shell: Literal[False] = False, + bufsize: Literal[0] = 0, + encoding: None = None, + errors: None = None, + **kwargs: Any, + ) -> tuple[SubprocessTransport, _ProtocolT]: ... + def add_reader( + self, fd: FileDescriptorLike, callback: Callable[..., Any], *args: Any + ) -> None: ... + def remove_reader(self, fd: FileDescriptorLike) -> bool: ... + def add_writer( + self, fd: FileDescriptorLike, callback: Callable[..., Any], *args: Any + ) -> None: ... + def remove_writer(self, fd: FileDescriptorLike) -> bool: ... + # The sock_* methods (and probably some others) are not actually implemented on + # BaseEventLoop, only on subclasses. We list them here for now for convenience. + async def sock_recv(self, sock: socket, nbytes: int) -> bytes: ... + async def sock_recv_into(self, sock: socket, buf: WriteableBuffer) -> int: ... + async def sock_sendall(self, sock: socket, data: ReadableBuffer) -> None: ... + async def sock_connect(self, sock: socket, address: _Address) -> None: ... + async def sock_accept(self, sock: socket) -> tuple[socket, _RetAddress]: ... + if sys.version_info >= (3, 11): + async def sock_recvfrom(self, sock: socket, bufsize: int) -> tuple[bytes, _RetAddress]: ... + async def sock_recvfrom_into( + self, sock: socket, buf: WriteableBuffer, nbytes: int = 0 + ) -> tuple[int, _RetAddress]: ... + async def sock_sendto( + self, sock: socket, data: ReadableBuffer, address: _Address + ) -> int: ... + # Signal handling. + def add_signal_handler(self, sig: int, callback: Callable[..., Any], *args: Any) -> None: ... + def remove_signal_handler(self, sig: int) -> bool: ... + # Error handlers. + def set_exception_handler(self, handler: _ExceptionHandler | None) -> None: ... + def get_exception_handler(self) -> _ExceptionHandler | None: ... + def default_exception_handler(self, context: _Context) -> None: ... + def call_exception_handler(self, context: _Context) -> None: ... + # Debug flag management. + def get_debug(self) -> bool: ... + def set_debug(self, enabled: bool) -> None: ... 
+ if sys.version_info >= (3, 12): + async def shutdown_default_executor(self, timeout: float | None = None) -> None: ... + elif sys.version_info >= (3, 9): + async def shutdown_default_executor(self) -> None: ... diff --git a/.vscode/Pico-W-Stub/stdlib/asyncio/base_futures.pyi b/.vscode/Pico-W-Stub/stdlib/asyncio/base_futures.pyi new file mode 100644 index 0000000..c51174e --- /dev/null +++ b/.vscode/Pico-W-Stub/stdlib/asyncio/base_futures.pyi @@ -0,0 +1,20 @@ +from collections.abc import Callable, Sequence +from contextvars import Context +from typing import Any +from typing_extensions import Literal + +from . import futures + +__all__ = () + +# asyncio defines 'isfuture()' in base_futures.py and re-imports it in futures.py +# but it leads to circular import error in pytype tool. +# That's why the import order is reversed. +from .futures import isfuture as isfuture + +_PENDING: Literal["PENDING"] # undocumented +_CANCELLED: Literal["CANCELLED"] # undocumented +_FINISHED: Literal["FINISHED"] # undocumented + +def _format_callbacks(cb: Sequence[tuple[Callable[[futures.Future[Any]], None], Context]]) -> str: ... # undocumented +def _future_repr_info(future: futures.Future[Any]) -> list[str]: ... # undocumented diff --git a/.vscode/Pico-W-Stub/stdlib/asyncio/base_tasks.pyi b/.vscode/Pico-W-Stub/stdlib/asyncio/base_tasks.pyi new file mode 100644 index 0000000..42e952f --- /dev/null +++ b/.vscode/Pico-W-Stub/stdlib/asyncio/base_tasks.pyi @@ -0,0 +1,9 @@ +from _typeshed import StrOrBytesPath +from types import FrameType +from typing import Any + +from . import tasks + +def _task_repr_info(task: tasks.Task[Any]) -> list[str]: ... # undocumented +def _task_get_stack(task: tasks.Task[Any], limit: int | None) -> list[FrameType]: ... # undocumented +def _task_print_stack(task: tasks.Task[Any], limit: int | None, file: StrOrBytesPath) -> None: ... # undocumented diff --git a/.vscode/Pico-W-Stub/stdlib/asyncio/constants.pyi b/.vscode/Pico-W-Stub/stdlib/asyncio/constants.pyi new file mode 100644 index 0000000..60d8529 --- /dev/null +++ b/.vscode/Pico-W-Stub/stdlib/asyncio/constants.pyi @@ -0,0 +1,20 @@ +import enum +import sys +from typing_extensions import Literal + +LOG_THRESHOLD_FOR_CONNLOST_WRITES: Literal[5] +ACCEPT_RETRY_DELAY: Literal[1] +DEBUG_STACK_DEPTH: Literal[10] +SSL_HANDSHAKE_TIMEOUT: float +SENDFILE_FALLBACK_READBUFFER_SIZE: Literal[262144] +if sys.version_info >= (3, 11): + SSL_SHUTDOWN_TIMEOUT: float + FLOW_CONTROL_HIGH_WATER_SSL_READ: Literal[256] + FLOW_CONTROL_HIGH_WATER_SSL_WRITE: Literal[512] +if sys.version_info >= (3, 12): + THREAD_JOIN_TIMEOUT: Literal[300] + +class _SendfileMode(enum.Enum): + UNSUPPORTED: int + TRY_NATIVE: int + FALLBACK: int diff --git a/.vscode/Pico-W-Stub/stdlib/asyncio/coroutines.pyi b/.vscode/Pico-W-Stub/stdlib/asyncio/coroutines.pyi new file mode 100644 index 0000000..14fb627 --- /dev/null +++ b/.vscode/Pico-W-Stub/stdlib/asyncio/coroutines.pyi @@ -0,0 +1,28 @@ +import sys +from collections.abc import Awaitable, Callable, Coroutine +from typing import Any, TypeVar, overload +from typing_extensions import ParamSpec, TypeGuard + +if sys.version_info >= (3, 11): + __all__ = ("iscoroutinefunction", "iscoroutine") +else: + __all__ = ("coroutine", "iscoroutinefunction", "iscoroutine") + +_T = TypeVar("_T") +_FunctionT = TypeVar("_FunctionT", bound=Callable[..., Any]) +_P = ParamSpec("_P") + +if sys.version_info < (3, 11): + def coroutine(func: _FunctionT) -> _FunctionT: ... 
+ +@overload +def iscoroutinefunction(func: Callable[..., Coroutine[Any, Any, Any]]) -> bool: ... +@overload +def iscoroutinefunction(func: Callable[_P, Awaitable[_T]]) -> TypeGuard[Callable[_P, Coroutine[Any, Any, _T]]]: ... +@overload +def iscoroutinefunction(func: Callable[_P, object]) -> TypeGuard[Callable[_P, Coroutine[Any, Any, Any]]]: ... +@overload +def iscoroutinefunction(func: object) -> TypeGuard[Callable[..., Coroutine[Any, Any, Any]]]: ... + +# Can actually be a generator-style coroutine on Python 3.7 +def iscoroutine(obj: object) -> TypeGuard[Coroutine[Any, Any, Any]]: ... diff --git a/.vscode/Pico-W-Stub/stdlib/asyncio/events.pyi b/.vscode/Pico-W-Stub/stdlib/asyncio/events.pyi new file mode 100644 index 0000000..49541d0 --- /dev/null +++ b/.vscode/Pico-W-Stub/stdlib/asyncio/events.pyi @@ -0,0 +1,687 @@ +import ssl +import sys +from abc import ABCMeta, abstractmethod +from collections.abc import Callable, Coroutine, Generator, Sequence +from contextvars import Context +from typing import IO, Any, Protocol, TypeVar, overload + +from _typeshed import FileDescriptorLike, ReadableBuffer, StrPath, Unused, WriteableBuffer +from stdlib.socket import AddressFamily, SocketKind, _Address, _RetAddress, socket +from typing_extensions import Literal, Self, TypeAlias + +from . import _AwaitableLike, _CoroutineLike +from .base_events import Server +from .futures import Future +from .protocols import BaseProtocol +from .tasks import Task +from .transports import ( + BaseTransport, + DatagramTransport, + ReadTransport, + SubprocessTransport, + Transport, + WriteTransport, +) +from .unix_events import AbstractChildWatcher + +if sys.version_info >= (3, 8): + __all__ = ( + "AbstractEventLoopPolicy", + "AbstractEventLoop", + "AbstractServer", + "Handle", + "TimerHandle", + "get_event_loop_policy", + "set_event_loop_policy", + "get_event_loop", + "set_event_loop", + "new_event_loop", + "get_child_watcher", + "set_child_watcher", + "_set_running_loop", + "get_running_loop", + "_get_running_loop", + ) + +else: + __all__ = ( + "AbstractEventLoopPolicy", + "AbstractEventLoop", + "AbstractServer", + "Handle", + "TimerHandle", + "SendfileNotAvailableError", + "get_event_loop_policy", + "set_event_loop_policy", + "get_event_loop", + "set_event_loop", + "new_event_loop", + "get_child_watcher", + "set_child_watcher", + "_set_running_loop", + "get_running_loop", + "_get_running_loop", + ) + +_T = TypeVar("_T") +_ProtocolT = TypeVar("_ProtocolT", bound=BaseProtocol) +_Context: TypeAlias = dict[str, Any] +_ExceptionHandler: TypeAlias = Callable[[AbstractEventLoop, _Context], object] +_ProtocolFactory: TypeAlias = Callable[[], BaseProtocol] +_SSLContext: TypeAlias = bool | None | ssl.SSLContext # type: ignore + +class _TaskFactory(Protocol): + def __call__( + self, + __loop: AbstractEventLoop, + __factory: Coroutine[Any, Any, _T] | Generator[Any, None, _T], + ) -> Future[_T]: ... + +class Handle: + _cancelled: bool + _args: Sequence[Any] + def __init__( + self, + callback: Callable[..., object], + args: Sequence[Any], + loop: AbstractEventLoop, + context: Context | None = None, + ) -> None: ... + def cancel(self) -> None: ... + def _run(self) -> None: ... + def cancelled(self) -> bool: ... + if sys.version_info >= (3, 12): + def get_context(self) -> Context: ... + +class TimerHandle(Handle): + def __init__( + self, + when: float, + callback: Callable[..., object], + args: Sequence[Any], + loop: AbstractEventLoop, + context: Context | None = None, + ) -> None: ... + def __hash__(self) -> int: ... 
+ def when(self) -> float: ... + def __lt__(self, other: TimerHandle) -> bool: ... + def __le__(self, other: TimerHandle) -> bool: ... + def __gt__(self, other: TimerHandle) -> bool: ... + def __ge__(self, other: TimerHandle) -> bool: ... + def __eq__(self, other: object) -> bool: ... + +class AbstractServer: + @abstractmethod + def close(self) -> None: ... + async def __aenter__(self) -> Self: ... + async def __aexit__(self, *exc: Unused) -> None: ... + @abstractmethod + def get_loop(self) -> AbstractEventLoop: ... + @abstractmethod + def is_serving(self) -> bool: ... + @abstractmethod + async def start_serving(self) -> None: ... + @abstractmethod + async def serve_forever(self) -> None: ... + @abstractmethod + async def wait_closed(self) -> None: ... + +class AbstractEventLoop: + slow_callback_duration: float + @abstractmethod + def run_forever(self) -> None: ... + @abstractmethod + def run_until_complete(self, future: _AwaitableLike[_T]) -> _T: ... + @abstractmethod + def stop(self) -> None: ... + @abstractmethod + def is_running(self) -> bool: ... + @abstractmethod + def is_closed(self) -> bool: ... + @abstractmethod + def close(self) -> None: ... + @abstractmethod + async def shutdown_asyncgens(self) -> None: ... + # Methods scheduling callbacks. All these return Handles. + if sys.version_info >= (3, 9): # "context" added in 3.9.10/3.10.2 + @abstractmethod + def call_soon( + self, callback: Callable[..., object], *args: Any, context: Context | None = None + ) -> Handle: ... + @abstractmethod + def call_later( + self, + delay: float, + callback: Callable[..., object], + *args: Any, + context: Context | None = None, + ) -> TimerHandle: ... + @abstractmethod + def call_at( + self, + when: float, + callback: Callable[..., object], + *args: Any, + context: Context | None = None, + ) -> TimerHandle: ... + else: + @abstractmethod + def call_soon(self, callback: Callable[..., object], *args: Any) -> Handle: ... + @abstractmethod + def call_later( + self, delay: float, callback: Callable[..., object], *args: Any + ) -> TimerHandle: ... + @abstractmethod + def call_at( + self, when: float, callback: Callable[..., object], *args: Any + ) -> TimerHandle: ... + + @abstractmethod + def time(self) -> float: ... + # Future methods + @abstractmethod + def create_future(self) -> Future[Any]: ... + # Tasks methods + if sys.version_info >= (3, 11): + @abstractmethod + def create_task( + self, + coro: _CoroutineLike[_T], + *, + name: str | None = None, + context: Context | None = None, + ) -> Task[_T]: ... + elif sys.version_info >= (3, 8): + @abstractmethod + def create_task( + self, coro: _CoroutineLike[_T], *, name: str | None = None + ) -> Task[_T]: ... + else: + @abstractmethod + def create_task(self, coro: _CoroutineLike[_T]) -> Task[_T]: ... + + @abstractmethod + def set_task_factory(self, factory: _TaskFactory | None) -> None: ... + @abstractmethod + def get_task_factory(self) -> _TaskFactory | None: ... + # Methods for interacting with threads + if sys.version_info >= (3, 9): # "context" added in 3.9.10/3.10.2 + @abstractmethod + def call_soon_threadsafe( + self, callback: Callable[..., object], *args: Any, context: Context | None = None + ) -> Handle: ... + else: + @abstractmethod + def call_soon_threadsafe(self, callback: Callable[..., object], *args: Any) -> Handle: ... + + @abstractmethod + def run_in_executor( + self, executor: Any, func: Callable[..., _T], *args: Any + ) -> Future[_T]: ... + @abstractmethod + def set_default_executor(self, executor: Any) -> None: ... 
+ # Network I/O methods returning Futures. + @abstractmethod + async def getaddrinfo( + self, + host: bytes | str | None, + port: bytes | str | int | None, + *, + family: int = 0, + type: int = 0, + proto: int = 0, + flags: int = 0, + ) -> list[ + tuple[AddressFamily, SocketKind, int, str, tuple[str, int] | tuple[str, int, int, int]] + ]: ... + @abstractmethod + async def getnameinfo( + self, sockaddr: tuple[str, int] | tuple[str, int, int, int], flags: int = 0 + ) -> tuple[str, str]: ... + if sys.version_info >= (3, 11): + @overload + @abstractmethod + async def create_connection( + self, + protocol_factory: Callable[[], _ProtocolT], + host: str = ..., + port: int = ..., + *, + ssl: _SSLContext = None, + family: int = 0, + proto: int = 0, + flags: int = 0, + sock: None = None, + local_addr: tuple[str, int] | None = None, + server_hostname: str | None = None, + ssl_handshake_timeout: float | None = None, + ssl_shutdown_timeout: float | None = None, + happy_eyeballs_delay: float | None = None, + interleave: int | None = None, + ) -> tuple[Transport, _ProtocolT]: ... + @overload + @abstractmethod + async def create_connection( + self, + protocol_factory: Callable[[], _ProtocolT], + host: None = None, + port: None = None, + *, + ssl: _SSLContext = None, + family: int = 0, + proto: int = 0, + flags: int = 0, + sock: socket, + local_addr: None = None, + server_hostname: str | None = None, + ssl_handshake_timeout: float | None = None, + ssl_shutdown_timeout: float | None = None, + happy_eyeballs_delay: float | None = None, + interleave: int | None = None, + ) -> tuple[Transport, _ProtocolT]: ... + elif sys.version_info >= (3, 8): + @overload + @abstractmethod + async def create_connection( + self, + protocol_factory: Callable[[], _ProtocolT], + host: str = ..., + port: int = ..., + *, + ssl: _SSLContext = None, + family: int = 0, + proto: int = 0, + flags: int = 0, + sock: None = None, + local_addr: tuple[str, int] | None = None, + server_hostname: str | None = None, + ssl_handshake_timeout: float | None = None, + happy_eyeballs_delay: float | None = None, + interleave: int | None = None, + ) -> tuple[Transport, _ProtocolT]: ... + @overload + @abstractmethod + async def create_connection( + self, + protocol_factory: Callable[[], _ProtocolT], + host: None = None, + port: None = None, + *, + ssl: _SSLContext = None, + family: int = 0, + proto: int = 0, + flags: int = 0, + sock: socket, + local_addr: None = None, + server_hostname: str | None = None, + ssl_handshake_timeout: float | None = None, + happy_eyeballs_delay: float | None = None, + interleave: int | None = None, + ) -> tuple[Transport, _ProtocolT]: ... + else: + @overload + @abstractmethod + async def create_connection( + self, + protocol_factory: Callable[[], _ProtocolT], + host: str = ..., + port: int = ..., + *, + ssl: _SSLContext = None, + family: int = 0, + proto: int = 0, + flags: int = 0, + sock: None = None, + local_addr: tuple[str, int] | None = None, + server_hostname: str | None = None, + ssl_handshake_timeout: float | None = None, + ) -> tuple[Transport, _ProtocolT]: ... + @overload + @abstractmethod + async def create_connection( + self, + protocol_factory: Callable[[], _ProtocolT], + host: None = None, + port: None = None, + *, + ssl: _SSLContext = None, + family: int = 0, + proto: int = 0, + flags: int = 0, + sock: socket, + local_addr: None = None, + server_hostname: str | None = None, + ssl_handshake_timeout: float | None = None, + ) -> tuple[Transport, _ProtocolT]: ... 
+ if sys.version_info >= (3, 11): + @overload + @abstractmethod + async def create_server( + self, + protocol_factory: _ProtocolFactory, + host: str | Sequence[str] | None = None, + port: int = ..., + *, + family: int = ..., + flags: int = ..., + sock: None = None, + backlog: int = 100, + ssl: _SSLContext = None, + reuse_address: bool | None = None, + reuse_port: bool | None = None, + ssl_handshake_timeout: float | None = None, + ssl_shutdown_timeout: float | None = None, + start_serving: bool = True, + ) -> Server: ... + @overload + @abstractmethod + async def create_server( + self, + protocol_factory: _ProtocolFactory, + host: None = None, + port: None = None, + *, + family: int = ..., + flags: int = ..., + sock: socket = ..., + backlog: int = 100, + ssl: _SSLContext = None, + reuse_address: bool | None = None, + reuse_port: bool | None = None, + ssl_handshake_timeout: float | None = None, + ssl_shutdown_timeout: float | None = None, + start_serving: bool = True, + ) -> Server: ... + @abstractmethod + async def start_tls( + self, + transport: WriteTransport, + protocol: BaseProtocol, + sslcontext: ssl.SSLContext, + *, + server_side: bool = False, + server_hostname: str | None = None, + ssl_handshake_timeout: float | None = None, + ssl_shutdown_timeout: float | None = None, + ) -> Transport | None: ... + async def create_unix_server( + self, + protocol_factory: _ProtocolFactory, + path: StrPath | None = None, + *, + sock: socket | None = None, + backlog: int = 100, + ssl: _SSLContext = None, + ssl_handshake_timeout: float | None = None, + ssl_shutdown_timeout: float | None = None, + start_serving: bool = True, + ) -> Server: ... + else: + @overload + @abstractmethod + async def create_server( + self, + protocol_factory: _ProtocolFactory, + host: str | Sequence[str] | None = None, + port: int = ..., + *, + family: int = ..., + flags: int = ..., + sock: None = None, + backlog: int = 100, + ssl: _SSLContext = None, + reuse_address: bool | None = None, + reuse_port: bool | None = None, + ssl_handshake_timeout: float | None = None, + start_serving: bool = True, + ) -> Server: ... + @overload + @abstractmethod + async def create_server( + self, + protocol_factory: _ProtocolFactory, + host: None = None, + port: None = None, + *, + family: int = ..., + flags: int = ..., + sock: socket = ..., + backlog: int = 100, + ssl: _SSLContext = None, + reuse_address: bool | None = None, + reuse_port: bool | None = None, + ssl_handshake_timeout: float | None = None, + start_serving: bool = True, + ) -> Server: ... + @abstractmethod + async def start_tls( + self, + transport: BaseTransport, + protocol: BaseProtocol, + sslcontext: ssl.SSLContext, # type: ignore + *, + server_side: bool = False, + server_hostname: str | None = None, + ssl_handshake_timeout: float | None = None, + ) -> Transport | None: ... + async def create_unix_server( + self, + protocol_factory: _ProtocolFactory, + path: StrPath | None = None, + *, + sock: socket | None = None, + backlog: int = 100, + ssl: _SSLContext = None, + ssl_handshake_timeout: float | None = None, + start_serving: bool = True, + ) -> Server: ... + if sys.version_info >= (3, 11): + async def connect_accepted_socket( + self, + protocol_factory: Callable[[], _ProtocolT], + sock: socket, + *, + ssl: _SSLContext = None, + ssl_handshake_timeout: float | None = None, + ssl_shutdown_timeout: float | None = None, + ) -> tuple[Transport, _ProtocolT]: ... 
+ elif sys.version_info >= (3, 10): + async def connect_accepted_socket( + self, + protocol_factory: Callable[[], _ProtocolT], + sock: socket, + *, + ssl: _SSLContext = None, + ssl_handshake_timeout: float | None = None, + ) -> tuple[Transport, _ProtocolT]: ... + if sys.version_info >= (3, 11): + async def create_unix_connection( + self, + protocol_factory: Callable[[], _ProtocolT], + path: str | None = None, + *, + ssl: _SSLContext = None, + sock: socket | None = None, + server_hostname: str | None = None, + ssl_handshake_timeout: float | None = None, + ssl_shutdown_timeout: float | None = None, + ) -> tuple[Transport, _ProtocolT]: ... + else: + async def create_unix_connection( + self, + protocol_factory: Callable[[], _ProtocolT], + path: str | None = None, + *, + ssl: _SSLContext = None, + sock: socket | None = None, + server_hostname: str | None = None, + ssl_handshake_timeout: float | None = None, + ) -> tuple[Transport, _ProtocolT]: ... + + @abstractmethod + async def sock_sendfile( + self, + sock: socket, + file: IO[bytes], + offset: int = 0, + count: int | None = None, + *, + fallback: bool | None = None, + ) -> int: ... + @abstractmethod + async def sendfile( + self, + transport: WriteTransport, + file: IO[bytes], + offset: int = 0, + count: int | None = None, + *, + fallback: bool = True, + ) -> int: ... + @abstractmethod + async def create_datagram_endpoint( + self, + protocol_factory: Callable[[], _ProtocolT], + local_addr: tuple[str, int] | str | None = None, + remote_addr: tuple[str, int] | str | None = None, + *, + family: int = 0, + proto: int = 0, + flags: int = 0, + reuse_address: bool | None = None, + reuse_port: bool | None = None, + allow_broadcast: bool | None = None, + sock: socket | None = None, + ) -> tuple[DatagramTransport, _ProtocolT]: ... + # Pipes and subprocesses. + @abstractmethod + async def connect_read_pipe( + self, protocol_factory: Callable[[], _ProtocolT], pipe: Any + ) -> tuple[ReadTransport, _ProtocolT]: ... + @abstractmethod + async def connect_write_pipe( + self, protocol_factory: Callable[[], _ProtocolT], pipe: Any + ) -> tuple[WriteTransport, _ProtocolT]: ... + @abstractmethod + async def subprocess_shell( + self, + protocol_factory: Callable[[], _ProtocolT], + cmd: bytes | str, + *, + stdin: int | IO[Any] | None = -1, + stdout: int | IO[Any] | None = -1, + stderr: int | IO[Any] | None = -1, + universal_newlines: Literal[False] = False, + shell: Literal[True] = True, + bufsize: Literal[0] = 0, + encoding: None = None, + errors: None = None, + text: Literal[False, None] = ..., + **kwargs: Any, + ) -> tuple[SubprocessTransport, _ProtocolT]: ... + @abstractmethod + async def subprocess_exec( + self, + protocol_factory: Callable[[], _ProtocolT], + program: Any, + *args: Any, + stdin: int | IO[Any] | None = -1, + stdout: int | IO[Any] | None = -1, + stderr: int | IO[Any] | None = -1, + universal_newlines: Literal[False] = False, + shell: Literal[False] = False, + bufsize: Literal[0] = 0, + encoding: None = None, + errors: None = None, + **kwargs: Any, + ) -> tuple[SubprocessTransport, _ProtocolT]: ... + @abstractmethod + def add_reader( + self, fd: FileDescriptorLike, callback: Callable[..., Any], *args: Any + ) -> None: ... + @abstractmethod + def remove_reader(self, fd: FileDescriptorLike) -> bool: ... + @abstractmethod + def add_writer( + self, fd: FileDescriptorLike, callback: Callable[..., Any], *args: Any + ) -> None: ... + @abstractmethod + def remove_writer(self, fd: FileDescriptorLike) -> bool: ... 
+ # Completion based I/O methods returning Futures prior to 3.7 + @abstractmethod + async def sock_recv(self, sock: socket, nbytes: int) -> bytes: ... + @abstractmethod + async def sock_recv_into(self, sock: socket, buf: WriteableBuffer) -> int: ... + @abstractmethod + async def sock_sendall(self, sock: socket, data: ReadableBuffer) -> None: ... + @abstractmethod + async def sock_connect(self, sock: socket, address: _Address) -> None: ... + @abstractmethod + async def sock_accept(self, sock: socket) -> tuple[socket, _RetAddress]: ... + if sys.version_info >= (3, 11): + @abstractmethod + async def sock_recvfrom(self, sock: socket, bufsize: int) -> tuple[bytes, _RetAddress]: ... + @abstractmethod + async def sock_recvfrom_into( + self, sock: socket, buf: WriteableBuffer, nbytes: int = 0 + ) -> tuple[int, _RetAddress]: ... + @abstractmethod + async def sock_sendto( + self, sock: socket, data: ReadableBuffer, address: _Address + ) -> int: ... + # Signal handling. + @abstractmethod + def add_signal_handler( + self, sig: int, callback: Callable[..., object], *args: Any + ) -> None: ... + @abstractmethod + def remove_signal_handler(self, sig: int) -> bool: ... + # Error handlers. + @abstractmethod + def set_exception_handler(self, handler: _ExceptionHandler | None) -> None: ... + @abstractmethod + def get_exception_handler(self) -> _ExceptionHandler | None: ... + @abstractmethod + def default_exception_handler(self, context: _Context) -> None: ... + @abstractmethod + def call_exception_handler(self, context: _Context) -> None: ... + # Debug flag management. + @abstractmethod + def get_debug(self) -> bool: ... + @abstractmethod + def set_debug(self, enabled: bool) -> None: ... + if sys.version_info >= (3, 9): + @abstractmethod + async def shutdown_default_executor(self) -> None: ... + +class AbstractEventLoopPolicy: + @abstractmethod + def get_event_loop(self) -> AbstractEventLoop: ... + @abstractmethod + def set_event_loop(self, loop: AbstractEventLoop | None) -> None: ... + @abstractmethod + def new_event_loop(self) -> AbstractEventLoop: ... + # Child processes handling (Unix only). + @abstractmethod + def get_child_watcher(self) -> AbstractChildWatcher: ... + @abstractmethod + def set_child_watcher(self, watcher: AbstractChildWatcher) -> None: ... + +class BaseDefaultEventLoopPolicy(AbstractEventLoopPolicy, metaclass=ABCMeta): + def get_event_loop(self) -> AbstractEventLoop: ... + def set_event_loop(self, loop: AbstractEventLoop | None) -> None: ... + def new_event_loop(self) -> AbstractEventLoop: ... + +def get_event_loop_policy() -> AbstractEventLoopPolicy: ... +def set_event_loop_policy(policy: AbstractEventLoopPolicy | None) -> None: ... +def get_event_loop() -> AbstractEventLoop: ... +def set_event_loop(loop: AbstractEventLoop | None) -> None: ... +def new_event_loop() -> AbstractEventLoop: ... +def get_child_watcher() -> AbstractChildWatcher: ... +def set_child_watcher(watcher: AbstractChildWatcher) -> None: ... +def _set_running_loop(__loop: AbstractEventLoop | None) -> None: ... +def _get_running_loop() -> AbstractEventLoop: ... +def get_running_loop() -> AbstractEventLoop: ... + +if sys.version_info < (3, 8): + class SendfileNotAvailableError(RuntimeError): ... 
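events.pyi above pins down the AbstractEventLoop surface that the rest of these stubs reference: call_later returning a TimerHandle, create_task, get_running_loop, and so on. The following is a minimal, hedged sketch of ordinary CPython usage exercising those signatures; it is illustrative only and does not ship with the patch.

import asyncio


async def main() -> None:
    loop = asyncio.get_running_loop()   # typed above as AbstractEventLoop
    done = asyncio.Event()

    # call_later(delay, callback, *args) schedules a plain callable and
    # returns a TimerHandle that could be cancelled before it fires.
    handle = loop.call_later(0.1, done.set)

    # create_task wraps a coroutine in a Task bound to this loop.
    waiter = loop.create_task(done.wait())
    await waiter
    handle.cancel()                     # a no-op once the callback has already run


asyncio.run(main())

Event.set is a plain callable, which makes it a convenient target for call_later; once it fires, the awaited Task completes and the TimerHandle cancel is harmless.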
diff --git a/.vscode/Pico-W-Stub/stdlib/asyncio/exceptions.pyi b/.vscode/Pico-W-Stub/stdlib/asyncio/exceptions.pyi new file mode 100644 index 0000000..075fbb8 --- /dev/null +++ b/.vscode/Pico-W-Stub/stdlib/asyncio/exceptions.pyi @@ -0,0 +1,38 @@ +import sys + +if sys.version_info >= (3, 11): + __all__ = ( + "BrokenBarrierError", + "CancelledError", + "InvalidStateError", + "TimeoutError", + "IncompleteReadError", + "LimitOverrunError", + "SendfileNotAvailableError", + ) +else: + __all__ = ( + "CancelledError", + "InvalidStateError", + "TimeoutError", + "IncompleteReadError", + "LimitOverrunError", + "SendfileNotAvailableError", + ) + +class CancelledError(BaseException): ... +class TimeoutError(Exception): ... +class InvalidStateError(Exception): ... +class SendfileNotAvailableError(RuntimeError): ... + +class IncompleteReadError(EOFError): + expected: int | None + partial: bytes + def __init__(self, partial: bytes, expected: int | None) -> None: ... + +class LimitOverrunError(Exception): + consumed: int + def __init__(self, message: str, consumed: int) -> None: ... + +if sys.version_info >= (3, 11): + class BrokenBarrierError(RuntimeError): ... diff --git a/.vscode/Pico-W-Stub/stdlib/asyncio/format_helpers.pyi b/.vscode/Pico-W-Stub/stdlib/asyncio/format_helpers.pyi new file mode 100644 index 0000000..1c78dff --- /dev/null +++ b/.vscode/Pico-W-Stub/stdlib/asyncio/format_helpers.pyi @@ -0,0 +1,20 @@ +import functools +import traceback +from collections.abc import Iterable +from types import FrameType, FunctionType +from typing import Any, overload +from typing_extensions import TypeAlias + +class _HasWrapper: + __wrapper__: _HasWrapper | FunctionType + +_FuncType: TypeAlias = FunctionType | _HasWrapper | functools.partial[Any] | functools.partialmethod[Any] + +@overload +def _get_function_source(func: _FuncType) -> tuple[str, int]: ... +@overload +def _get_function_source(func: object) -> tuple[str, int] | None: ... +def _format_callback_source(func: object, args: Iterable[Any]) -> str: ... +def _format_args_and_kwargs(args: Iterable[Any], kwargs: dict[str, Any]) -> str: ... +def _format_callback(func: object, args: Iterable[Any], kwargs: dict[str, Any], suffix: str = "") -> str: ... +def extract_stack(f: FrameType | None = None, limit: int | None = None) -> traceback.StackSummary: ... diff --git a/.vscode/Pico-W-Stub/stdlib/asyncio/futures.pyi b/.vscode/Pico-W-Stub/stdlib/asyncio/futures.pyi new file mode 100644 index 0000000..af05425 --- /dev/null +++ b/.vscode/Pico-W-Stub/stdlib/asyncio/futures.pyi @@ -0,0 +1,66 @@ +import sys +from collections.abc import Awaitable, Callable, Generator, Iterable +from concurrent.futures._base import Error, Future as _ConcurrentFuture +from typing import Any, TypeVar +from typing_extensions import Literal, Self, TypeGuard + +from .events import AbstractEventLoop + +if sys.version_info < (3, 8): + from concurrent.futures import CancelledError as CancelledError, TimeoutError as TimeoutError + + class InvalidStateError(Error): ... + +from contextvars import Context + +if sys.version_info >= (3, 9): + from types import GenericAlias + +if sys.version_info >= (3, 8): + __all__ = ("Future", "wrap_future", "isfuture") +else: + __all__ = ("CancelledError", "TimeoutError", "InvalidStateError", "Future", "wrap_future", "isfuture") + +_T = TypeVar("_T") + +# asyncio defines 'isfuture()' in base_futures.py and re-imports it in futures.py +# but it leads to circular import error in pytype tool. +# That's why the import order is reversed. 
+def isfuture(obj: object) -> TypeGuard[Future[Any]]: ... + +class Future(Awaitable[_T], Iterable[_T]): + _state: str + @property + def _exception(self) -> BaseException | None: ... + _blocking: bool + @property + def _log_traceback(self) -> bool: ... + @_log_traceback.setter + def _log_traceback(self, val: Literal[False]) -> None: ... + _asyncio_future_blocking: bool # is a part of duck-typing contract for `Future` + def __init__(self, *, loop: AbstractEventLoop | None = ...) -> None: ... + def __del__(self) -> None: ... + def get_loop(self) -> AbstractEventLoop: ... + @property + def _callbacks(self) -> list[tuple[Callable[[Self], Any], Context]]: ... + def add_done_callback(self, __fn: Callable[[Self], object], *, context: Context | None = None) -> None: ... + if sys.version_info >= (3, 9): + def cancel(self, msg: Any | None = None) -> bool: ... + else: + def cancel(self) -> bool: ... + + def cancelled(self) -> bool: ... + def done(self) -> bool: ... + def result(self) -> _T: ... + def exception(self) -> BaseException | None: ... + def remove_done_callback(self, __fn: Callable[[Self], object]) -> int: ... + def set_result(self, __result: _T) -> None: ... + def set_exception(self, __exception: type | BaseException) -> None: ... + def __iter__(self) -> Generator[Any, None, _T]: ... + def __await__(self) -> Generator[Any, None, _T]: ... + @property + def _loop(self) -> AbstractEventLoop: ... + if sys.version_info >= (3, 9): + def __class_getitem__(cls, item: Any) -> GenericAlias: ... + +def wrap_future(future: _ConcurrentFuture[_T] | Future[_T], *, loop: AbstractEventLoop | None = None) -> Future[_T]: ... diff --git a/.vscode/Pico-W-Stub/stdlib/asyncio/locks.pyi b/.vscode/Pico-W-Stub/stdlib/asyncio/locks.pyi new file mode 100644 index 0000000..21f98ab --- /dev/null +++ b/.vscode/Pico-W-Stub/stdlib/asyncio/locks.pyi @@ -0,0 +1,116 @@ +import enum +import sys +from _typeshed import Unused +from collections import deque +from collections.abc import Callable, Generator +from types import TracebackType +from typing import Any, TypeVar +from typing_extensions import Literal, Self + +from .events import AbstractEventLoop +from .futures import Future + +if sys.version_info >= (3, 11): + from .mixins import _LoopBoundMixin + +if sys.version_info >= (3, 11): + __all__ = ("Lock", "Event", "Condition", "Semaphore", "BoundedSemaphore", "Barrier") +else: + __all__ = ("Lock", "Event", "Condition", "Semaphore", "BoundedSemaphore") + +_T = TypeVar("_T") + +if sys.version_info >= (3, 9): + class _ContextManagerMixin: + async def __aenter__(self) -> None: ... + async def __aexit__( + self, exc_type: type[BaseException] | None, exc: BaseException | None, tb: TracebackType | None + ) -> None: ... + +else: + class _ContextManager: + def __init__(self, lock: Lock | Semaphore) -> None: ... + def __enter__(self) -> None: ... + def __exit__(self, *args: Unused) -> None: ... + + class _ContextManagerMixin: + # Apparently this exists to *prohibit* use as a context manager. + # def __enter__(self) -> NoReturn: ... see: https://github.com/python/typing/issues/1043 + # def __exit__(self, *args: Any) -> None: ... + def __iter__(self) -> Generator[Any, None, _ContextManager]: ... + def __await__(self) -> Generator[Any, None, _ContextManager]: ... + async def __aenter__(self) -> None: ... + async def __aexit__( + self, exc_type: type[BaseException] | None, exc: BaseException | None, tb: TracebackType | None + ) -> None: ... 
+ +class Lock(_ContextManagerMixin): + if sys.version_info >= (3, 10): + def __init__(self) -> None: ... + else: + def __init__(self, *, loop: AbstractEventLoop | None = None) -> None: ... + + def locked(self) -> bool: ... + async def acquire(self) -> Literal[True]: ... + def release(self) -> None: ... + +class Event: + if sys.version_info >= (3, 10): + def __init__(self) -> None: ... + else: + def __init__(self, *, loop: AbstractEventLoop | None = None) -> None: ... + + def is_set(self) -> bool: ... + def set(self) -> None: ... + def clear(self) -> None: ... + async def wait(self) -> Literal[True]: ... + +class Condition(_ContextManagerMixin): + if sys.version_info >= (3, 10): + def __init__(self, lock: Lock | None = None) -> None: ... + else: + def __init__(self, lock: Lock | None = None, *, loop: AbstractEventLoop | None = None) -> None: ... + + def locked(self) -> bool: ... + async def acquire(self) -> Literal[True]: ... + def release(self) -> None: ... + async def wait(self) -> Literal[True]: ... + async def wait_for(self, predicate: Callable[[], _T]) -> _T: ... + def notify(self, n: int = 1) -> None: ... + def notify_all(self) -> None: ... + +class Semaphore(_ContextManagerMixin): + _value: int + _waiters: deque[Future[Any]] # type: ignore + if sys.version_info >= (3, 10): + def __init__(self, value: int = 1) -> None: ... + else: + def __init__(self, value: int = 1, *, loop: AbstractEventLoop | None = None) -> None: ... + + def locked(self) -> bool: ... + async def acquire(self) -> Literal[True]: ... + def release(self) -> None: ... + def _wake_up_next(self) -> None: ... + +class BoundedSemaphore(Semaphore): ... + +if sys.version_info >= (3, 11): + class _BarrierState(enum.Enum): # undocumented + FILLING: str + DRAINING: str + RESETTING: str + BROKEN: str + + class Barrier(_LoopBoundMixin): + def __init__(self, parties: int) -> None: ... + async def __aenter__(self) -> Self: ... + async def __aexit__(self, *args: Unused) -> None: ... + async def wait(self) -> int: ... + async def abort(self) -> None: ... + async def reset(self) -> None: ... + @property + def parties(self) -> int: ... + @property + def n_waiting(self) -> int: ... + @property + def broken(self) -> bool: ... diff --git a/.vscode/Pico-W-Stub/stdlib/asyncio/log.pyi b/.vscode/Pico-W-Stub/stdlib/asyncio/log.pyi new file mode 100644 index 0000000..e1de0b3 --- /dev/null +++ b/.vscode/Pico-W-Stub/stdlib/asyncio/log.pyi @@ -0,0 +1,3 @@ +import logging + +logger: logging.Logger diff --git a/.vscode/Pico-W-Stub/stdlib/asyncio/mixins.pyi b/.vscode/Pico-W-Stub/stdlib/asyncio/mixins.pyi new file mode 100644 index 0000000..d34b282 --- /dev/null +++ b/.vscode/Pico-W-Stub/stdlib/asyncio/mixins.pyi @@ -0,0 +1,10 @@ +import sys +import threading + +from typing_extensions import Never + +_global_lock: threading.Lock # type: ignore + +class _LoopBoundMixin: + if sys.version_info < (3, 11): + def __init__(self, *, loop: Never = ...) -> None: ... diff --git a/.vscode/Pico-W-Stub/stdlib/asyncio/proactor_events.pyi b/.vscode/Pico-W-Stub/stdlib/asyncio/proactor_events.pyi new file mode 100644 index 0000000..33fdf84 --- /dev/null +++ b/.vscode/Pico-W-Stub/stdlib/asyncio/proactor_events.pyi @@ -0,0 +1,74 @@ +import sys +from collections.abc import Mapping +from socket import socket +from typing import Any, ClassVar, Protocol +from typing_extensions import Literal + +from . 
import base_events, constants, events, futures, streams, transports + +__all__ = ("BaseProactorEventLoop",) + +if sys.version_info >= (3, 8): + class _WarnCallbackProtocol(Protocol): + def __call__( + self, message: str, category: type[Warning] | None = ..., stacklevel: int = ..., source: Any | None = ... + ) -> object: ... + +class _ProactorBasePipeTransport(transports._FlowControlMixin, transports.BaseTransport): + def __init__( + self, + loop: events.AbstractEventLoop, + sock: socket, + protocol: streams.StreamReaderProtocol, + waiter: futures.Future[Any] | None = None, + extra: Mapping[Any, Any] | None = None, + server: events.AbstractServer | None = None, + ) -> None: ... + if sys.version_info >= (3, 8): + def __del__(self, _warn: _WarnCallbackProtocol = ...) -> None: ... + else: + def __del__(self) -> None: ... + +class _ProactorReadPipeTransport(_ProactorBasePipeTransport, transports.ReadTransport): + if sys.version_info >= (3, 10): + def __init__( + self, + loop: events.AbstractEventLoop, + sock: socket, + protocol: streams.StreamReaderProtocol, + waiter: futures.Future[Any] | None = None, + extra: Mapping[Any, Any] | None = None, + server: events.AbstractServer | None = None, + buffer_size: int = 65536, + ) -> None: ... + else: + def __init__( + self, + loop: events.AbstractEventLoop, + sock: socket, + protocol: streams.StreamReaderProtocol, + waiter: futures.Future[Any] | None = None, + extra: Mapping[Any, Any] | None = None, + server: events.AbstractServer | None = None, + ) -> None: ... + +class _ProactorBaseWritePipeTransport(_ProactorBasePipeTransport, transports.WriteTransport): ... +class _ProactorWritePipeTransport(_ProactorBaseWritePipeTransport): ... +class _ProactorDuplexPipeTransport(_ProactorReadPipeTransport, _ProactorBaseWritePipeTransport, transports.Transport): ... + +class _ProactorSocketTransport(_ProactorReadPipeTransport, _ProactorBaseWritePipeTransport, transports.Transport): + _sendfile_compatible: ClassVar[constants._SendfileMode] + def __init__( + self, + loop: events.AbstractEventLoop, + sock: socket, + protocol: streams.StreamReaderProtocol, + waiter: futures.Future[Any] | None = None, + extra: Mapping[Any, Any] | None = None, + server: events.AbstractServer | None = None, + ) -> None: ... + def _set_extra(self, sock: socket) -> None: ... + def can_write_eof(self) -> Literal[True]: ... + +class BaseProactorEventLoop(base_events.BaseEventLoop): + def __init__(self, proactor: Any) -> None: ... diff --git a/.vscode/Pico-W-Stub/stdlib/asyncio/protocols.pyi b/.vscode/Pico-W-Stub/stdlib/asyncio/protocols.pyi new file mode 100644 index 0000000..5173b74 --- /dev/null +++ b/.vscode/Pico-W-Stub/stdlib/asyncio/protocols.pyi @@ -0,0 +1,34 @@ +from _typeshed import ReadableBuffer +from asyncio import transports +from typing import Any + +__all__ = ("BaseProtocol", "Protocol", "DatagramProtocol", "SubprocessProtocol", "BufferedProtocol") + +class BaseProtocol: + def connection_made(self, transport: transports.BaseTransport) -> None: ... + def connection_lost(self, exc: Exception | None) -> None: ... + def pause_writing(self) -> None: ... + def resume_writing(self) -> None: ... + +class Protocol(BaseProtocol): + def data_received(self, data: bytes) -> None: ... + def eof_received(self) -> bool | None: ... + +class BufferedProtocol(BaseProtocol): + def get_buffer(self, sizehint: int) -> ReadableBuffer: ... + def buffer_updated(self, nbytes: int) -> None: ... + def eof_received(self) -> bool | None: ... 
+ +class DatagramProtocol(BaseProtocol): + def connection_made(self, transport: transports.DatagramTransport) -> None: ... # type: ignore[override] + # addr can be a tuple[int, int] for some unusual protocols like socket.AF_NETLINK. + # Use tuple[str | Any, int] to not cause typechecking issues on most usual cases. + # This could be improved by using tuple[AnyOf[str, int], int] if the AnyOf feature is accepted. + # See https://github.com/python/typing/issues/566 + def datagram_received(self, data: bytes, addr: tuple[str | Any, int]) -> None: ... + def error_received(self, exc: Exception) -> None: ... + +class SubprocessProtocol(BaseProtocol): + def pipe_data_received(self, fd: int, data: bytes) -> None: ... + def pipe_connection_lost(self, fd: int, exc: Exception | None) -> None: ... + def process_exited(self) -> None: ... diff --git a/.vscode/Pico-W-Stub/stdlib/asyncio/queues.pyi b/.vscode/Pico-W-Stub/stdlib/asyncio/queues.pyi new file mode 100644 index 0000000..f56a095 --- /dev/null +++ b/.vscode/Pico-W-Stub/stdlib/asyncio/queues.pyi @@ -0,0 +1,40 @@ +import sys +from asyncio.events import AbstractEventLoop +from typing import Any, Generic, TypeVar + +if sys.version_info >= (3, 9): + from types import GenericAlias + +__all__ = ("Queue", "PriorityQueue", "LifoQueue", "QueueFull", "QueueEmpty") + +class QueueEmpty(Exception): ... +class QueueFull(Exception): ... + +_T = TypeVar("_T") + +class Queue(Generic[_T]): + if sys.version_info >= (3, 10): + def __init__(self, maxsize: int = 0) -> None: ... + else: + def __init__(self, maxsize: int = 0, *, loop: AbstractEventLoop | None = None) -> None: ... + + def _init(self, maxsize: int) -> None: ... + def _get(self) -> _T: ... + def _put(self, item: _T) -> None: ... + def _format(self) -> str: ... + def qsize(self) -> int: ... + @property + def maxsize(self) -> int: ... + def empty(self) -> bool: ... + def full(self) -> bool: ... + async def put(self, item: _T) -> None: ... + def put_nowait(self, item: _T) -> None: ... + async def get(self) -> _T: ... + def get_nowait(self) -> _T: ... + async def join(self) -> None: ... + def task_done(self) -> None: ... + if sys.version_info >= (3, 9): + def __class_getitem__(cls, type: Any) -> GenericAlias: ... + +class PriorityQueue(Queue[_T]): ... +class LifoQueue(Queue[_T]): ... diff --git a/.vscode/Pico-W-Stub/stdlib/asyncio/runners.pyi b/.vscode/Pico-W-Stub/stdlib/asyncio/runners.pyi new file mode 100644 index 0000000..847072b --- /dev/null +++ b/.vscode/Pico-W-Stub/stdlib/asyncio/runners.pyi @@ -0,0 +1,35 @@ +import sys +from _typeshed import Unused +from collections.abc import Callable, Coroutine +from contextvars import Context +from typing import Any, TypeVar +from typing_extensions import Self, final + +from .events import AbstractEventLoop + +if sys.version_info >= (3, 11): + __all__ = ("Runner", "run") +else: + __all__ = ("run",) +_T = TypeVar("_T") + +if sys.version_info >= (3, 11): + @final + class Runner: + def __init__(self, *, debug: bool | None = None, loop_factory: Callable[[], AbstractEventLoop] | None = None) -> None: ... + def __enter__(self) -> Self: ... + def __exit__(self, exc_type: Unused, exc_val: Unused, exc_tb: Unused) -> None: ... + def close(self) -> None: ... + def get_loop(self) -> AbstractEventLoop: ... + def run(self, coro: Coroutine[Any, Any, _T], *, context: Context | None = None) -> _T: ... + +if sys.version_info >= (3, 12): + def run( + main: Coroutine[Any, Any, _T], *, debug: bool | None = ..., loop_factory: Callable[[], AbstractEventLoop] | None = ... + ) -> _T: ... 
+ +elif sys.version_info >= (3, 8): + def run(main: Coroutine[Any, Any, _T], *, debug: bool | None = None) -> _T: ... + +else: + def run(main: Coroutine[Any, Any, _T], *, debug: bool = False) -> _T: ... diff --git a/.vscode/Pico-W-Stub/stdlib/asyncio/selector_events.pyi b/.vscode/Pico-W-Stub/stdlib/asyncio/selector_events.pyi new file mode 100644 index 0000000..430f2dd --- /dev/null +++ b/.vscode/Pico-W-Stub/stdlib/asyncio/selector_events.pyi @@ -0,0 +1,8 @@ +import selectors + +from . import base_events + +__all__ = ("BaseSelectorEventLoop",) + +class BaseSelectorEventLoop(base_events.BaseEventLoop): + def __init__(self, selector: selectors.BaseSelector | None = None) -> None: ... diff --git a/.vscode/Pico-W-Stub/stdlib/asyncio/sslproto.pyi b/.vscode/Pico-W-Stub/stdlib/asyncio/sslproto.pyi new file mode 100644 index 0000000..ca1113c --- /dev/null +++ b/.vscode/Pico-W-Stub/stdlib/asyncio/sslproto.pyi @@ -0,0 +1,176 @@ +import sys +from collections import deque +from collections.abc import Callable +from enum import Enum +from typing import Any, ClassVar + +import stdlib.ssl as ssl # type: ignore +from typing_extensions import Literal, TypeAlias + +from . import constants, events, futures, protocols, transports + +def _create_transport_context( + server_side: bool, server_hostname: str | None +) -> ssl.SSLContext: ... + +if sys.version_info >= (3, 11): + SSLAgainErrors: tuple[type[ssl.SSLWantReadError], type[ssl.SSLSyscallError]] + + class SSLProtocolState(Enum): + UNWRAPPED: str + DO_HANDSHAKE: str + WRAPPED: str + FLUSHING: str + SHUTDOWN: str + + class AppProtocolState(Enum): + STATE_INIT: str + STATE_CON_MADE: str + STATE_EOF: str + STATE_CON_LOST: str + def add_flowcontrol_defaults( + high: int | None, low: int | None, kb: int + ) -> tuple[int, int]: ... + +else: + _UNWRAPPED: Literal["UNWRAPPED"] + _DO_HANDSHAKE: Literal["DO_HANDSHAKE"] + _WRAPPED: Literal["WRAPPED"] + _SHUTDOWN: Literal["SHUTDOWN"] + +if sys.version_info < (3, 11): + class _SSLPipe: + max_size: ClassVar[int] + + _context: ssl.SSLContext + _server_side: bool + _server_hostname: str | None + _state: str + _incoming: ssl.MemoryBIO + _outgoing: ssl.MemoryBIO + _sslobj: ssl.SSLObject | None + _need_ssldata: bool + _handshake_cb: Callable[[BaseException | None], None] | None + _shutdown_cb: Callable[[], None] | None + def __init__( + self, context: ssl.SSLContext, server_side: bool, server_hostname: str | None = None + ) -> None: ... + @property + def context(self) -> ssl.SSLContext: ... + @property + def ssl_object(self) -> ssl.SSLObject | None: ... + @property + def need_ssldata(self) -> bool: ... + @property + def wrapped(self) -> bool: ... + def do_handshake( + self, callback: Callable[[BaseException | None], object] | None = None + ) -> list[bytes]: ... + def shutdown(self, callback: Callable[[], object] | None = None) -> list[bytes]: ... + def feed_eof(self) -> None: ... + def feed_ssldata( + self, data: bytes, only_handshake: bool = False + ) -> tuple[list[bytes], list[bytes]]: ... + def feed_appdata(self, data: bytes, offset: int = 0) -> tuple[list[bytes], int]: ... + +class _SSLProtocolTransport(transports._FlowControlMixin, transports.Transport): + _sendfile_compatible: ClassVar[constants._SendfileMode] + + _loop: events.AbstractEventLoop + if sys.version_info >= (3, 11): + _ssl_protocol: SSLProtocol | None + else: + _ssl_protocol: SSLProtocol + _closed: bool + def __init__(self, loop: events.AbstractEventLoop, ssl_protocol: SSLProtocol) -> None: ... 
+ def get_extra_info(self, name: str, default: Any | None = None) -> dict[str, Any]: ... + @property + def _protocol_paused(self) -> bool: ... + def write(self, data: bytes | bytearray | memoryview) -> None: ... + def can_write_eof(self) -> Literal[False]: ... + if sys.version_info >= (3, 11): + def get_write_buffer_limits(self) -> tuple[int, int]: ... + def get_read_buffer_limits(self) -> tuple[int, int]: ... + def set_read_buffer_limits( + self, high: int | None = None, low: int | None = None + ) -> None: ... + def get_read_buffer_size(self) -> int: ... + +if sys.version_info >= (3, 11): + _SSLProtocolBase: TypeAlias = protocols.BufferedProtocol +else: + _SSLProtocolBase: TypeAlias = protocols.Protocol + +class SSLProtocol(_SSLProtocolBase): + _server_side: bool + _server_hostname: str | None + _sslcontext: ssl.SSLContext + _extra: dict[str, Any] + _write_backlog: deque[tuple[bytes, int]] # type: ignore + _write_buffer_size: int + _waiter: futures.Future[Any] + _loop: events.AbstractEventLoop + _app_transport: _SSLProtocolTransport + _transport: transports.BaseTransport | None + _ssl_handshake_timeout: int | None + _app_protocol: protocols.BaseProtocol + _app_protocol_is_buffer: bool + + if sys.version_info >= (3, 11): + max_size: ClassVar[int] + else: + _sslpipe: _SSLPipe | None + _session_established: bool + _call_connection_made: bool + _in_handshake: bool + _in_shutdown: bool + + if sys.version_info >= (3, 11): + def __init__( + self, + loop: events.AbstractEventLoop, + app_protocol: protocols.BaseProtocol, + sslcontext: ssl.SSLContext, + waiter: futures.Future[Any], + server_side: bool = False, + server_hostname: str | None = None, + call_connection_made: bool = True, + ssl_handshake_timeout: int | None = None, + ssl_shutdown_timeout: float | None = None, + ) -> None: ... + else: + def __init__( + self, + loop: events.AbstractEventLoop, + app_protocol: protocols.BaseProtocol, + sslcontext: ssl.SSLContext, + waiter: futures.Future[Any], + server_side: bool = False, + server_hostname: str | None = None, + call_connection_made: bool = True, + ssl_handshake_timeout: int | None = None, + ) -> None: ... + + def _set_app_protocol(self, app_protocol: protocols.BaseProtocol) -> None: ... + def _wakeup_waiter(self, exc: BaseException | None = None) -> None: ... + def connection_lost(self, exc: BaseException | None) -> None: ... + def eof_received(self) -> None: ... + def _get_extra_info(self, name: str, default: Any | None = None) -> Any: ... + def _start_shutdown(self) -> None: ... + if sys.version_info >= (3, 11): + def _write_appdata(self, list_of_data: list[bytes]) -> None: ... + else: + def _write_appdata(self, data: bytes) -> None: ... + + def _start_handshake(self) -> None: ... + def _check_handshake_timeout(self) -> None: ... + def _on_handshake_complete(self, handshake_exc: BaseException | None) -> None: ... + def _fatal_error( + self, exc: BaseException, message: str = "Fatal error on transport" + ) -> None: ... + def _abort(self) -> None: ... + if sys.version_info >= (3, 11): + def get_buffer(self, n: int) -> memoryview: ... + else: + def _finalize(self) -> None: ... + def _process_write_backlog(self) -> None: ... diff --git a/.vscode/Pico-W-Stub/stdlib/asyncio/staggered.pyi b/.vscode/Pico-W-Stub/stdlib/asyncio/staggered.pyi new file mode 100644 index 0000000..3324777 --- /dev/null +++ b/.vscode/Pico-W-Stub/stdlib/asyncio/staggered.pyi @@ -0,0 +1,10 @@ +from collections.abc import Awaitable, Callable, Iterable +from typing import Any + +from . 
import events + +__all__ = ("staggered_race",) + +async def staggered_race( + coro_fns: Iterable[Callable[[], Awaitable[Any]]], delay: float | None, *, loop: events.AbstractEventLoop | None = None +) -> tuple[Any, int | None, list[Exception | None]]: ... diff --git a/.vscode/Pico-W-Stub/stdlib/asyncio/streams.pyi b/.vscode/Pico-W-Stub/stdlib/asyncio/streams.pyi new file mode 100644 index 0000000..804be1c --- /dev/null +++ b/.vscode/Pico-W-Stub/stdlib/asyncio/streams.pyi @@ -0,0 +1,179 @@ +import ssl +import sys +from _typeshed import StrPath +from collections.abc import AsyncIterator, Awaitable, Callable, Iterable, Sequence +from typing import Any +from typing_extensions import Self, SupportsIndex, TypeAlias + +from . import events, protocols, transports +from .base_events import Server + +if sys.platform == "win32": + if sys.version_info >= (3, 8): + __all__ = ("StreamReader", "StreamWriter", "StreamReaderProtocol", "open_connection", "start_server") + else: + __all__ = ( + "StreamReader", + "StreamWriter", + "StreamReaderProtocol", + "open_connection", + "start_server", + "IncompleteReadError", + "LimitOverrunError", + ) +else: + if sys.version_info >= (3, 8): + __all__ = ( + "StreamReader", + "StreamWriter", + "StreamReaderProtocol", + "open_connection", + "start_server", + "open_unix_connection", + "start_unix_server", + ) + else: + __all__ = ( + "StreamReader", + "StreamWriter", + "StreamReaderProtocol", + "open_connection", + "start_server", + "IncompleteReadError", + "LimitOverrunError", + "open_unix_connection", + "start_unix_server", + ) + +_ClientConnectedCallback: TypeAlias = Callable[[StreamReader, StreamWriter], Awaitable[None] | None] + +if sys.version_info < (3, 8): + class IncompleteReadError(EOFError): + expected: int | None + partial: bytes + def __init__(self, partial: bytes, expected: int | None) -> None: ... + + class LimitOverrunError(Exception): + consumed: int + def __init__(self, message: str, consumed: int) -> None: ... + +if sys.version_info >= (3, 10): + async def open_connection( + host: str | None = None, + port: int | str | None = None, + *, + limit: int = 65536, + ssl_handshake_timeout: float | None = ..., + **kwds: Any, + ) -> tuple[StreamReader, StreamWriter]: ... + async def start_server( + client_connected_cb: _ClientConnectedCallback, + host: str | Sequence[str] | None = None, + port: int | str | None = None, + *, + limit: int = 65536, + ssl_handshake_timeout: float | None = ..., + **kwds: Any, + ) -> Server: ... + +else: + async def open_connection( + host: str | None = None, + port: int | str | None = None, + *, + loop: events.AbstractEventLoop | None = None, + limit: int = 65536, + ssl_handshake_timeout: float | None = ..., + **kwds: Any, + ) -> tuple[StreamReader, StreamWriter]: ... + async def start_server( + client_connected_cb: _ClientConnectedCallback, + host: str | None = None, + port: int | str | None = None, + *, + loop: events.AbstractEventLoop | None = None, + limit: int = 65536, + ssl_handshake_timeout: float | None = ..., + **kwds: Any, + ) -> Server: ... + +if sys.platform != "win32": + if sys.version_info >= (3, 10): + async def open_unix_connection( + path: StrPath | None = None, *, limit: int = 65536, **kwds: Any + ) -> tuple[StreamReader, StreamWriter]: ... + async def start_unix_server( + client_connected_cb: _ClientConnectedCallback, path: StrPath | None = None, *, limit: int = 65536, **kwds: Any + ) -> Server: ... 
+ else: + async def open_unix_connection( + path: StrPath | None = None, *, loop: events.AbstractEventLoop | None = None, limit: int = 65536, **kwds: Any + ) -> tuple[StreamReader, StreamWriter]: ... + async def start_unix_server( + client_connected_cb: _ClientConnectedCallback, + path: StrPath | None = None, + *, + loop: events.AbstractEventLoop | None = None, + limit: int = 65536, + **kwds: Any, + ) -> Server: ... + +class FlowControlMixin(protocols.Protocol): + def __init__(self, loop: events.AbstractEventLoop | None = None) -> None: ... + +class StreamReaderProtocol(FlowControlMixin, protocols.Protocol): + def __init__( + self, + stream_reader: StreamReader, + client_connected_cb: _ClientConnectedCallback | None = None, + loop: events.AbstractEventLoop | None = None, + ) -> None: ... + +class StreamWriter: + def __init__( + self, + transport: transports.WriteTransport, + protocol: protocols.BaseProtocol, + reader: StreamReader | None, + loop: events.AbstractEventLoop, + ) -> None: ... + @property + def transport(self) -> transports.WriteTransport: ... + def write(self, data: bytes | bytearray | memoryview) -> None: ... + def writelines(self, data: Iterable[bytes | bytearray | memoryview]) -> None: ... + def write_eof(self) -> None: ... + def can_write_eof(self) -> bool: ... + def close(self) -> None: ... + def is_closing(self) -> bool: ... + async def wait_closed(self) -> None: ... + def get_extra_info(self, name: str, default: Any = None) -> Any: ... + async def drain(self) -> None: ... + if sys.version_info >= (3, 12): + async def start_tls( + self, + sslcontext: ssl.SSLContext, + *, + server_hostname: str | None = None, + ssl_handshake_timeout: float | None = None, + ssl_shutdown_timeout: float | None = None, + ) -> None: ... + elif sys.version_info >= (3, 11): + async def start_tls( + self, sslcontext: ssl.SSLContext, *, server_hostname: str | None = None, ssl_handshake_timeout: float | None = None + ) -> None: ... + +class StreamReader(AsyncIterator[bytes]): + def __init__(self, limit: int = 65536, loop: events.AbstractEventLoop | None = None) -> None: ... + def exception(self) -> Exception: ... + def set_exception(self, exc: Exception) -> None: ... + def set_transport(self, transport: transports.BaseTransport) -> None: ... + def feed_eof(self) -> None: ... + def at_eof(self) -> bool: ... + def feed_data(self, data: Iterable[SupportsIndex]) -> None: ... + async def readline(self) -> bytes: ... + # Can be any buffer that supports len(); consider changing to a Protocol if PEP 688 is accepted + async def readuntil(self, separator: bytes | bytearray | memoryview = b"\n") -> bytes: ... + async def read(self, n: int = -1) -> bytes: ... + async def readexactly(self, n: int) -> bytes: ... + def __aiter__(self) -> Self: ... + async def __anext__(self) -> bytes: ... diff --git a/.vscode/Pico-W-Stub/stdlib/asyncio/taskgroups.pyi b/.vscode/Pico-W-Stub/stdlib/asyncio/taskgroups.pyi new file mode 100644 index 0000000..47d9bb2 --- /dev/null +++ b/.vscode/Pico-W-Stub/stdlib/asyncio/taskgroups.pyi @@ -0,0 +1,20 @@ +import sys +from contextvars import Context +from types import TracebackType +from typing import TypeVar +from typing_extensions import Self + +from . import _CoroutineLike +from .tasks import Task + +if sys.version_info >= (3, 12): + __all__ = ("TaskGroup",) +else: + __all__ = ["TaskGroup"] + +_T = TypeVar("_T") + +class TaskGroup: + async def __aenter__(self) -> Self: ... 
+ async def __aexit__(self, et: type[BaseException] | None, exc: BaseException | None, tb: TracebackType | None) -> None: ... + def create_task(self, coro: _CoroutineLike[_T], *, name: str | None = None, context: Context | None = None) -> Task[_T]: ... diff --git a/.vscode/Pico-W-Stub/stdlib/asyncio/tasks.pyi b/.vscode/Pico-W-Stub/stdlib/asyncio/tasks.pyi new file mode 100644 index 0000000..f7b6700 --- /dev/null +++ b/.vscode/Pico-W-Stub/stdlib/asyncio/tasks.pyi @@ -0,0 +1,348 @@ +import concurrent.futures +import sys +from collections.abc import Awaitable, Coroutine, Generator, Iterable, Iterator +from types import FrameType +from typing import Any, Generic, TextIO, TypeVar, overload +from typing_extensions import Literal, TypeAlias + +from . import _CoroutineLike +from .events import AbstractEventLoop +from .futures import Future + +if sys.version_info >= (3, 9): + from types import GenericAlias +if sys.version_info >= (3, 11): + from contextvars import Context + +__all__ = ( + "Task", + "create_task", + "FIRST_COMPLETED", + "FIRST_EXCEPTION", + "ALL_COMPLETED", + "wait", + "wait_for", + "as_completed", + "sleep", + "gather", + "shield", + "ensure_future", + "run_coroutine_threadsafe", + "current_task", + "all_tasks", + "_register_task", + "_unregister_task", + "_enter_task", + "_leave_task", +) + +_T = TypeVar("_T") +_T_co = TypeVar("_T_co", covariant=True) +_T1 = TypeVar("_T1") +_T2 = TypeVar("_T2") +_T3 = TypeVar("_T3") +_T4 = TypeVar("_T4") +_T5 = TypeVar("_T5") +_FT = TypeVar("_FT", bound=Future[Any]) +_FutureLike: TypeAlias = Future[_T] | Generator[Any, None, _T] | Awaitable[_T] +_TaskYieldType: TypeAlias = Future[object] | None + +FIRST_COMPLETED = concurrent.futures.FIRST_COMPLETED +FIRST_EXCEPTION = concurrent.futures.FIRST_EXCEPTION +ALL_COMPLETED = concurrent.futures.ALL_COMPLETED + +if sys.version_info >= (3, 10): + def as_completed(fs: Iterable[_FutureLike[_T]], *, timeout: float | None = None) -> Iterator[Future[_T]]: ... + +else: + def as_completed( + fs: Iterable[_FutureLike[_T]], *, loop: AbstractEventLoop | None = None, timeout: float | None = None + ) -> Iterator[Future[_T]]: ... + +@overload +def ensure_future(coro_or_future: _FT, *, loop: AbstractEventLoop | None = None) -> _FT: ... # type: ignore[misc] +@overload +def ensure_future(coro_or_future: Awaitable[_T], *, loop: AbstractEventLoop | None = None) -> Task[_T]: ... + +# `gather()` actually returns a list with length equal to the number +# of tasks passed; however, Tuple is used similar to the annotation for +# zip() because typing does not support variadic type variables. See +# typing PR #1550 for discussion. +# +# The many type: ignores here are because the overloads overlap, +# but having overlapping overloads is the only way to get acceptable type inference in all edge cases. +if sys.version_info >= (3, 10): + @overload + def gather(__coro_or_future1: _FutureLike[_T1], *, return_exceptions: Literal[False] = False) -> Future[tuple[_T1]]: ... # type: ignore[misc] + @overload + def gather( # type: ignore[misc] + __coro_or_future1: _FutureLike[_T1], __coro_or_future2: _FutureLike[_T2], *, return_exceptions: Literal[False] = False + ) -> Future[tuple[_T1, _T2]]: ... + @overload + def gather( # type: ignore[misc] + __coro_or_future1: _FutureLike[_T1], + __coro_or_future2: _FutureLike[_T2], + __coro_or_future3: _FutureLike[_T3], + *, + return_exceptions: Literal[False] = False, + ) -> Future[tuple[_T1, _T2, _T3]]: ... 
+ @overload + def gather( # type: ignore[misc] + __coro_or_future1: _FutureLike[_T1], + __coro_or_future2: _FutureLike[_T2], + __coro_or_future3: _FutureLike[_T3], + __coro_or_future4: _FutureLike[_T4], + *, + return_exceptions: Literal[False] = False, + ) -> Future[tuple[_T1, _T2, _T3, _T4]]: ... + @overload + def gather( # type: ignore[misc] + __coro_or_future1: _FutureLike[_T1], + __coro_or_future2: _FutureLike[_T2], + __coro_or_future3: _FutureLike[_T3], + __coro_or_future4: _FutureLike[_T4], + __coro_or_future5: _FutureLike[_T5], + *, + return_exceptions: Literal[False] = False, + ) -> Future[tuple[_T1, _T2, _T3, _T4, _T5]]: ... + @overload + def gather(__coro_or_future1: _FutureLike[_T1], *, return_exceptions: bool) -> Future[tuple[_T1 | BaseException]]: ... # type: ignore[misc] + @overload + def gather( # type: ignore[misc] + __coro_or_future1: _FutureLike[_T1], __coro_or_future2: _FutureLike[_T2], *, return_exceptions: bool + ) -> Future[tuple[_T1 | BaseException, _T2 | BaseException]]: ... + @overload + def gather( # type: ignore[misc] + __coro_or_future1: _FutureLike[_T1], + __coro_or_future2: _FutureLike[_T2], + __coro_or_future3: _FutureLike[_T3], + *, + return_exceptions: bool, + ) -> Future[tuple[_T1 | BaseException, _T2 | BaseException, _T3 | BaseException]]: ... + @overload + def gather( # type: ignore[misc] + __coro_or_future1: _FutureLike[_T1], + __coro_or_future2: _FutureLike[_T2], + __coro_or_future3: _FutureLike[_T3], + __coro_or_future4: _FutureLike[_T4], + *, + return_exceptions: bool, + ) -> Future[tuple[_T1 | BaseException, _T2 | BaseException, _T3 | BaseException, _T4 | BaseException]]: ... + @overload + def gather( # type: ignore[misc] + __coro_or_future1: _FutureLike[_T1], + __coro_or_future2: _FutureLike[_T2], + __coro_or_future3: _FutureLike[_T3], + __coro_or_future4: _FutureLike[_T4], + __coro_or_future5: _FutureLike[_T5], + *, + return_exceptions: bool, + ) -> Future[ + tuple[_T1 | BaseException, _T2 | BaseException, _T3 | BaseException, _T4 | BaseException, _T5 | BaseException] + ]: ... + @overload + def gather(*coros_or_futures: _FutureLike[Any], return_exceptions: bool = False) -> Future[list[Any]]: ... + +else: + @overload + def gather( # type: ignore[misc] + __coro_or_future1: _FutureLike[_T1], *, loop: AbstractEventLoop | None = None, return_exceptions: Literal[False] = False + ) -> Future[tuple[_T1]]: ... + @overload + def gather( # type: ignore[misc] + __coro_or_future1: _FutureLike[_T1], + __coro_or_future2: _FutureLike[_T2], + *, + loop: AbstractEventLoop | None = None, + return_exceptions: Literal[False] = False, + ) -> Future[tuple[_T1, _T2]]: ... + @overload + def gather( # type: ignore[misc] + __coro_or_future1: _FutureLike[_T1], + __coro_or_future2: _FutureLike[_T2], + __coro_or_future3: _FutureLike[_T3], + *, + loop: AbstractEventLoop | None = None, + return_exceptions: Literal[False] = False, + ) -> Future[tuple[_T1, _T2, _T3]]: ... + @overload + def gather( # type: ignore[misc] + __coro_or_future1: _FutureLike[_T1], + __coro_or_future2: _FutureLike[_T2], + __coro_or_future3: _FutureLike[_T3], + __coro_or_future4: _FutureLike[_T4], + *, + loop: AbstractEventLoop | None = None, + return_exceptions: Literal[False] = False, + ) -> Future[tuple[_T1, _T2, _T3, _T4]]: ... 
+ @overload + def gather( # type: ignore[misc] + __coro_or_future1: _FutureLike[_T1], + __coro_or_future2: _FutureLike[_T2], + __coro_or_future3: _FutureLike[_T3], + __coro_or_future4: _FutureLike[_T4], + __coro_or_future5: _FutureLike[_T5], + *, + loop: AbstractEventLoop | None = None, + return_exceptions: Literal[False] = False, + ) -> Future[tuple[_T1, _T2, _T3, _T4, _T5]]: ... + @overload + def gather( # type: ignore[misc] + __coro_or_future1: _FutureLike[_T1], *, loop: AbstractEventLoop | None = None, return_exceptions: bool + ) -> Future[tuple[_T1 | BaseException]]: ... + @overload + def gather( # type: ignore[misc] + __coro_or_future1: _FutureLike[_T1], + __coro_or_future2: _FutureLike[_T2], + *, + loop: AbstractEventLoop | None = None, + return_exceptions: bool, + ) -> Future[tuple[_T1 | BaseException, _T2 | BaseException]]: ... + @overload + def gather( # type: ignore[misc] + __coro_or_future1: _FutureLike[_T1], + __coro_or_future2: _FutureLike[_T2], + __coro_or_future3: _FutureLike[_T3], + *, + loop: AbstractEventLoop | None = None, + return_exceptions: bool, + ) -> Future[tuple[_T1 | BaseException, _T2 | BaseException, _T3 | BaseException]]: ... + @overload + def gather( # type: ignore[misc] + __coro_or_future1: _FutureLike[_T1], + __coro_or_future2: _FutureLike[_T2], + __coro_or_future3: _FutureLike[_T3], + __coro_or_future4: _FutureLike[_T4], + *, + loop: AbstractEventLoop | None = None, + return_exceptions: bool, + ) -> Future[tuple[_T1 | BaseException, _T2 | BaseException, _T3 | BaseException, _T4 | BaseException]]: ... + @overload + def gather( # type: ignore[misc] + __coro_or_future1: _FutureLike[_T1], + __coro_or_future2: _FutureLike[_T2], + __coro_or_future3: _FutureLike[_T3], + __coro_or_future4: _FutureLike[_T4], + __coro_or_future5: _FutureLike[_T5], + *, + loop: AbstractEventLoop | None = None, + return_exceptions: bool, + ) -> Future[ + tuple[_T1 | BaseException, _T2 | BaseException, _T3 | BaseException, _T4 | BaseException, _T5 | BaseException] + ]: ... + @overload + def gather( + *coros_or_futures: _FutureLike[Any], loop: AbstractEventLoop | None = None, return_exceptions: bool = False + ) -> Future[list[Any]]: ... + +def run_coroutine_threadsafe(coro: _FutureLike[_T], loop: AbstractEventLoop) -> concurrent.futures.Future[_T]: ... + +if sys.version_info >= (3, 10): + def shield(arg: _FutureLike[_T]) -> Future[_T]: ... + @overload + async def sleep(delay: float) -> None: ... + @overload + async def sleep(delay: float, result: _T) -> _T: ... + @overload + async def wait(fs: Iterable[_FT], *, timeout: float | None = None, return_when: str = "ALL_COMPLETED") -> tuple[set[_FT], set[_FT]]: ... # type: ignore[misc] + @overload + async def wait( + fs: Iterable[Awaitable[_T]], *, timeout: float | None = None, return_when: str = "ALL_COMPLETED" + ) -> tuple[set[Task[_T]], set[Task[_T]]]: ... + async def wait_for(fut: _FutureLike[_T], timeout: float | None) -> _T: ... + +else: + def shield(arg: _FutureLike[_T], *, loop: AbstractEventLoop | None = None) -> Future[_T]: ... + @overload + async def sleep(delay: float, *, loop: AbstractEventLoop | None = None) -> None: ... + @overload + async def sleep(delay: float, result: _T, *, loop: AbstractEventLoop | None = None) -> _T: ... + @overload + async def wait( # type: ignore[misc] + fs: Iterable[_FT], + *, + loop: AbstractEventLoop | None = None, + timeout: float | None = None, + return_when: str = "ALL_COMPLETED", + ) -> tuple[set[_FT], set[_FT]]: ... 
+ @overload + async def wait( + fs: Iterable[Awaitable[_T]], + *, + loop: AbstractEventLoop | None = None, + timeout: float | None = None, + return_when: str = "ALL_COMPLETED", + ) -> tuple[set[Task[_T]], set[Task[_T]]]: ... + async def wait_for(fut: _FutureLike[_T], timeout: float | None, *, loop: AbstractEventLoop | None = None) -> _T: ... + +if sys.version_info >= (3, 12): + _TaskCompatibleCoro: TypeAlias = Coroutine[Any, Any, _T_co] +else: + _TaskCompatibleCoro: TypeAlias = Generator[_TaskYieldType, None, _T_co] | Awaitable[_T_co] + +# mypy and pyright complain that a subclass of an invariant class shouldn't be covariant. +# While this is true in general, here it's sort-of okay to have a covariant subclass, +# since the only reason why `asyncio.Future` is invariant is the `set_result()` method, +# and `asyncio.Task.set_result()` always raises. +class Task(Future[_T_co], Generic[_T_co]): # type: ignore[type-var] # pyright: ignore[reportGeneralTypeIssues] + if sys.version_info >= (3, 12): + def __init__( + self, + coro: _TaskCompatibleCoro[_T_co], + *, + loop: AbstractEventLoop = ..., + name: str | None = ..., + context: Context | None = None, + eager_start: bool = False, + ) -> None: ... + elif sys.version_info >= (3, 11): + def __init__( + self, + coro: _TaskCompatibleCoro[_T_co], + *, + loop: AbstractEventLoop = ..., + name: str | None = ..., + context: Context | None = None, + ) -> None: ... + elif sys.version_info >= (3, 8): + def __init__( + self, coro: _TaskCompatibleCoro[_T_co], *, loop: AbstractEventLoop = ..., name: str | None = ... + ) -> None: ... + else: + def __init__(self, coro: _TaskCompatibleCoro[_T_co], *, loop: AbstractEventLoop = ...) -> None: ... + if sys.version_info >= (3, 8): + def get_coro(self) -> _TaskCompatibleCoro[_T_co]: ... + def get_name(self) -> str: ... + def set_name(self, __value: object) -> None: ... + if sys.version_info >= (3, 12): + def get_context(self) -> Context: ... + + def get_stack(self, *, limit: int | None = None) -> list[FrameType]: ... + def print_stack(self, *, limit: int | None = None, file: TextIO | None = None) -> None: ... + if sys.version_info >= (3, 11): + def cancelling(self) -> int: ... + def uncancel(self) -> int: ... + if sys.version_info < (3, 9): + @classmethod + def current_task(cls, loop: AbstractEventLoop | None = None) -> Task[Any] | None: ... + @classmethod + def all_tasks(cls, loop: AbstractEventLoop | None = None) -> set[Task[Any]]: ... + if sys.version_info >= (3, 9): + def __class_getitem__(cls, item: Any) -> GenericAlias: ... + +def all_tasks(loop: AbstractEventLoop | None = None) -> set[Task[Any]]: ... + +if sys.version_info >= (3, 11): + def create_task(coro: _CoroutineLike[_T], *, name: str | None = None, context: Context | None = None) -> Task[_T]: ... + +elif sys.version_info >= (3, 8): + def create_task(coro: _CoroutineLike[_T], *, name: str | None = None) -> Task[_T]: ... + +else: + def create_task(coro: _CoroutineLike[_T]) -> Task[_T]: ... + +def current_task(loop: AbstractEventLoop | None = None) -> Task[Any] | None: ... +def _enter_task(loop: AbstractEventLoop, task: Task[Any]) -> None: ... +def _leave_task(loop: AbstractEventLoop, task: Task[Any]) -> None: ... +def _register_task(task: Task[Any]) -> None: ... +def _unregister_task(task: Task[Any]) -> None: ... 
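Note (illustrative only, not part of the patch): the tasks.pyi stub above is what drives editor inference for create_task(), gather(), sleep() and wait(). The snippet below is a minimal sketch of host-side code these stubs would type-check; the name fetch() and the values used are made up for the example.

import asyncio

async def fetch(n: int) -> int:
    # sleep() resolves against the overloads declared in tasks.pyi
    await asyncio.sleep(0.01)
    return n * 2

async def main() -> None:
    # create_task() is inferred as Task[int] via _CoroutineLike[_T]
    t = asyncio.create_task(fetch(1))
    # gather() is annotated by the stub overloads as Future[tuple[int, int]],
    # even though at runtime it returns a list of results
    a, b = await asyncio.gather(fetch(2), fetch(3))
    print(await t, a, b)

asyncio.run(main())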
diff --git a/.vscode/Pico-W-Stub/stdlib/asyncio/threads.pyi b/.vscode/Pico-W-Stub/stdlib/asyncio/threads.pyi new file mode 100644 index 0000000..88c4fdd --- /dev/null +++ b/.vscode/Pico-W-Stub/stdlib/asyncio/threads.pyi @@ -0,0 +1,9 @@ +from collections.abc import Callable +from typing import TypeVar +from typing_extensions import ParamSpec + +__all__ = ("to_thread",) +_P = ParamSpec("_P") +_R = TypeVar("_R") + +async def to_thread(__func: Callable[_P, _R], *args: _P.args, **kwargs: _P.kwargs) -> _R: ... diff --git a/.vscode/Pico-W-Stub/stdlib/asyncio/timeouts.pyi b/.vscode/Pico-W-Stub/stdlib/asyncio/timeouts.pyi new file mode 100644 index 0000000..2d31b77 --- /dev/null +++ b/.vscode/Pico-W-Stub/stdlib/asyncio/timeouts.pyi @@ -0,0 +1,18 @@ +from types import TracebackType +from typing_extensions import Self, final + +__all__ = ("Timeout", "timeout", "timeout_at") + +@final +class Timeout: + def __init__(self, when: float | None) -> None: ... + def when(self) -> float | None: ... + def reschedule(self, when: float | None) -> None: ... + def expired(self) -> bool: ... + async def __aenter__(self) -> Self: ... + async def __aexit__( + self, exc_type: type[BaseException] | None, exc_val: BaseException | None, exc_tb: TracebackType | None + ) -> None: ... + +def timeout(delay: float | None) -> Timeout: ... +def timeout_at(when: float | None) -> Timeout: ... diff --git a/.vscode/Pico-W-Stub/stdlib/asyncio/transports.pyi b/.vscode/Pico-W-Stub/stdlib/asyncio/transports.pyi new file mode 100644 index 0000000..9ffbc8b --- /dev/null +++ b/.vscode/Pico-W-Stub/stdlib/asyncio/transports.pyi @@ -0,0 +1,47 @@ +from asyncio.events import AbstractEventLoop +from asyncio.protocols import BaseProtocol +from collections.abc import Iterable, Mapping +from stdlib.socket import _Address +from typing import Any + +__all__ = ("BaseTransport", "ReadTransport", "WriteTransport", "Transport", "DatagramTransport", "SubprocessTransport") + +class BaseTransport: + def __init__(self, extra: Mapping[str, Any] | None = None) -> None: ... + def get_extra_info(self, name: str, default: Any = None) -> Any: ... + def is_closing(self) -> bool: ... + def close(self) -> None: ... + def set_protocol(self, protocol: BaseProtocol) -> None: ... + def get_protocol(self) -> BaseProtocol: ... + +class ReadTransport(BaseTransport): + def is_reading(self) -> bool: ... + def pause_reading(self) -> None: ... + def resume_reading(self) -> None: ... + +class WriteTransport(BaseTransport): + def set_write_buffer_limits(self, high: int | None = None, low: int | None = None) -> None: ... + def get_write_buffer_size(self) -> int: ... + def get_write_buffer_limits(self) -> tuple[int, int]: ... + def write(self, data: bytes | bytearray | memoryview) -> None: ... + def writelines(self, list_of_data: Iterable[bytes | bytearray | memoryview]) -> None: ... + def write_eof(self) -> None: ... + def can_write_eof(self) -> bool: ... + def abort(self) -> None: ... + +class Transport(ReadTransport, WriteTransport): ... + +class DatagramTransport(BaseTransport): + def sendto(self, data: bytes | bytearray | memoryview, addr: _Address | None = None) -> None: ... + def abort(self) -> None: ... + +class SubprocessTransport(BaseTransport): + def get_pid(self) -> int: ... + def get_returncode(self) -> int | None: ... + def get_pipe_transport(self, fd: int) -> BaseTransport | None: ... + def send_signal(self, signal: int) -> None: ... + def terminate(self) -> None: ... + def kill(self) -> None: ... 
+ +class _FlowControlMixin(Transport): + def __init__(self, extra: Mapping[str, Any] | None = None, loop: AbstractEventLoop | None = None) -> None: ... diff --git a/.vscode/Pico-W-Stub/stdlib/asyncio/trsock.pyi b/.vscode/Pico-W-Stub/stdlib/asyncio/trsock.pyi new file mode 100644 index 0000000..563d369 --- /dev/null +++ b/.vscode/Pico-W-Stub/stdlib/asyncio/trsock.pyi @@ -0,0 +1,122 @@ +import sys +from builtins import type as Type # alias to avoid name clashes with property named "type" +from collections.abc import Iterable +from types import TracebackType +from typing import Any, BinaryIO, NoReturn, overload + +import stdlib.socket as socket +from _typeshed import ReadableBuffer +from typing_extensions import TypeAlias + +# These are based in socket, maybe move them out into _typeshed.pyi or such +_Address: TypeAlias = socket._Address +_RetAddress: TypeAlias = Any +_WriteBuffer: TypeAlias = bytearray | memoryview +_CMSG: TypeAlias = tuple[int, int, bytes] + +class TransportSocket: + def __init__(self, sock: socket.socket) -> None: ... + @property + def family(self) -> int: ... + @property + def type(self) -> int: ... + @property + def proto(self) -> int: ... + def __getstate__(self) -> NoReturn: ... + def fileno(self) -> int: ... + def dup(self) -> socket.socket: ... + def get_inheritable(self) -> bool: ... + def shutdown(self, how: int) -> None: ... + @overload + def getsockopt(self, level: int, optname: int) -> int: ... + @overload + def getsockopt(self, level: int, optname: int, buflen: int) -> bytes: ... + @overload + def setsockopt(self, level: int, optname: int, value: int | ReadableBuffer) -> None: ... + @overload + def setsockopt(self, level: int, optname: int, value: None, optlen: int) -> None: ... + def getpeername(self) -> _RetAddress: ... + def getsockname(self) -> _RetAddress: ... + def getsockbyname( + self, + ) -> NoReturn: ... # This method doesn't exist on socket, yet is passed through? + def settimeout(self, value: float | None) -> None: ... + def gettimeout(self) -> float | None: ... + def setblocking(self, flag: bool) -> None: ... + if sys.version_info < (3, 11): + def _na(self, what: str) -> None: ... + def accept(self) -> tuple[socket.socket, _RetAddress]: ... + def connect(self, address: _Address) -> None: ... + def connect_ex(self, address: _Address) -> int: ... + def bind(self, address: _Address) -> None: ... + if sys.platform == "win32": + def ioctl(self, control: int, option: int | tuple[int, int, int] | bool) -> None: ... + else: + def ioctl( + self, control: int, option: int | tuple[int, int, int] | bool + ) -> NoReturn: ... + + def listen(self, __backlog: int = ...) -> None: ... + def makefile(self) -> BinaryIO: ... + def sendfile(self, file: BinaryIO, offset: int = ..., count: int | None = ...) -> int: ... + def close(self) -> None: ... + def detach(self) -> int: ... + if sys.platform == "linux": + def sendmsg_afalg( + self, + msg: Iterable[ReadableBuffer] = ..., + *, + op: int, + iv: Any = ..., + assoclen: int = ..., + flags: int = ... + ) -> int: ... + else: + def sendmsg_afalg( + self, + msg: Iterable[ReadableBuffer] = ..., + *, + op: int, + iv: Any = ..., + assoclen: int = ..., + flags: int = ... + ) -> NoReturn: ... + + def sendmsg( + self, + __buffers: Iterable[ReadableBuffer], + __ancdata: Iterable[_CMSG] = ..., + __flags: int = ..., + __address: _Address = ..., + ) -> int: ... + @overload + def sendto(self, data: ReadableBuffer, address: _Address) -> int: ... 
+ @overload + def sendto(self, data: ReadableBuffer, flags: int, address: _Address) -> int: ... + def send(self, data: ReadableBuffer, flags: int = ...) -> int: ... + def sendall(self, data: ReadableBuffer, flags: int = ...) -> None: ... + def set_inheritable(self, inheritable: bool) -> None: ... + if sys.platform == "win32": + def share(self, process_id: int) -> bytes: ... + else: + def share(self, process_id: int) -> NoReturn: ... + + def recv_into(self, buffer: _WriteBuffer, nbytes: int = ..., flags: int = ...) -> int: ... + def recvfrom_into( + self, buffer: _WriteBuffer, nbytes: int = ..., flags: int = ... + ) -> tuple[int, _RetAddress]: ... + def recvmsg_into( + self, __buffers: Iterable[_WriteBuffer], __ancbufsize: int = ..., __flags: int = ... + ) -> tuple[int, list[_CMSG], int, Any]: ... + def recvmsg( + self, __bufsize: int, __ancbufsize: int = ..., __flags: int = ... + ) -> tuple[bytes, list[_CMSG], int, Any]: ... + def recvfrom(self, bufsize: int, flags: int = ...) -> tuple[bytes, _RetAddress]: ... + def recv(self, bufsize: int, flags: int = ...) -> bytes: ... + def __enter__(self) -> socket.socket: ... + def __exit__( + self, + exc_type: Type[BaseException] | None, + exc_val: BaseException | None, + exc_tb: TracebackType | None, + ) -> None: ... diff --git a/.vscode/Pico-W-Stub/stdlib/asyncio/unix_events.pyi b/.vscode/Pico-W-Stub/stdlib/asyncio/unix_events.pyi new file mode 100644 index 0000000..e28d64b --- /dev/null +++ b/.vscode/Pico-W-Stub/stdlib/asyncio/unix_events.pyi @@ -0,0 +1,127 @@ +import sys +import types +from abc import ABCMeta, abstractmethod +from collections.abc import Callable +from typing import Any +from typing_extensions import Literal, Self + +from .events import AbstractEventLoop, BaseDefaultEventLoopPolicy +from .selector_events import BaseSelectorEventLoop + +# This is also technically not available on Win, +# but other parts of typeshed need this definition. +# So, it is special cased. +class AbstractChildWatcher: + @abstractmethod + def add_child_handler(self, pid: int, callback: Callable[..., object], *args: Any) -> None: ... + @abstractmethod + def remove_child_handler(self, pid: int) -> bool: ... + @abstractmethod + def attach_loop(self, loop: AbstractEventLoop | None) -> None: ... + @abstractmethod + def close(self) -> None: ... + @abstractmethod + def __enter__(self) -> Self: ... + @abstractmethod + def __exit__(self, typ: type[BaseException] | None, exc: BaseException | None, tb: types.TracebackType | None) -> None: ... + if sys.version_info >= (3, 8): + @abstractmethod + def is_active(self) -> bool: ... + +if sys.platform != "win32": + if sys.version_info >= (3, 9): + __all__ = ( + "SelectorEventLoop", + "AbstractChildWatcher", + "SafeChildWatcher", + "FastChildWatcher", + "PidfdChildWatcher", + "MultiLoopChildWatcher", + "ThreadedChildWatcher", + "DefaultEventLoopPolicy", + ) + elif sys.version_info >= (3, 8): + __all__ = ( + "SelectorEventLoop", + "AbstractChildWatcher", + "SafeChildWatcher", + "FastChildWatcher", + "MultiLoopChildWatcher", + "ThreadedChildWatcher", + "DefaultEventLoopPolicy", + ) + else: + __all__ = ("SelectorEventLoop", "AbstractChildWatcher", "SafeChildWatcher", "FastChildWatcher", "DefaultEventLoopPolicy") + + # Doesn't actually have ABCMeta metaclass at runtime, but mypy complains if we don't have it in the stub. + # See discussion in #7412 + class BaseChildWatcher(AbstractChildWatcher, metaclass=ABCMeta): + def close(self) -> None: ... + if sys.version_info >= (3, 8): + def is_active(self) -> bool: ... 
+ + def attach_loop(self, loop: AbstractEventLoop | None) -> None: ... + + class SafeChildWatcher(BaseChildWatcher): + def __enter__(self) -> Self: ... + def __exit__(self, a: type[BaseException] | None, b: BaseException | None, c: types.TracebackType | None) -> None: ... + def add_child_handler(self, pid: int, callback: Callable[..., object], *args: Any) -> None: ... + def remove_child_handler(self, pid: int) -> bool: ... + + class FastChildWatcher(BaseChildWatcher): + def __enter__(self) -> Self: ... + def __exit__(self, a: type[BaseException] | None, b: BaseException | None, c: types.TracebackType | None) -> None: ... + def add_child_handler(self, pid: int, callback: Callable[..., object], *args: Any) -> None: ... + def remove_child_handler(self, pid: int) -> bool: ... + + class _UnixSelectorEventLoop(BaseSelectorEventLoop): ... + + class _UnixDefaultEventLoopPolicy(BaseDefaultEventLoopPolicy): + def get_child_watcher(self) -> AbstractChildWatcher: ... + def set_child_watcher(self, watcher: AbstractChildWatcher | None) -> None: ... + SelectorEventLoop = _UnixSelectorEventLoop + + DefaultEventLoopPolicy = _UnixDefaultEventLoopPolicy + + if sys.version_info >= (3, 8): + from typing import Protocol + + class _Warn(Protocol): + def __call__( + self, message: str, category: type[Warning] | None = ..., stacklevel: int = ..., source: Any | None = ... + ) -> object: ... + + class MultiLoopChildWatcher(AbstractChildWatcher): + def is_active(self) -> bool: ... + def close(self) -> None: ... + def __enter__(self) -> Self: ... + def __exit__( + self, exc_type: type[BaseException] | None, exc_val: BaseException | None, exc_tb: types.TracebackType | None + ) -> None: ... + def add_child_handler(self, pid: int, callback: Callable[..., object], *args: Any) -> None: ... + def remove_child_handler(self, pid: int) -> bool: ... + def attach_loop(self, loop: AbstractEventLoop | None) -> None: ... + + class ThreadedChildWatcher(AbstractChildWatcher): + def is_active(self) -> Literal[True]: ... + def close(self) -> None: ... + def __enter__(self) -> Self: ... + def __exit__( + self, exc_type: type[BaseException] | None, exc_val: BaseException | None, exc_tb: types.TracebackType | None + ) -> None: ... + def __del__(self, _warn: _Warn = ...) -> None: ... + def add_child_handler(self, pid: int, callback: Callable[..., object], *args: Any) -> None: ... + def remove_child_handler(self, pid: int) -> bool: ... + def attach_loop(self, loop: AbstractEventLoop | None) -> None: ... + + if sys.version_info >= (3, 9): + class PidfdChildWatcher(AbstractChildWatcher): + def __enter__(self) -> Self: ... + def __exit__( + self, exc_type: type[BaseException] | None, exc_val: BaseException | None, exc_tb: types.TracebackType | None + ) -> None: ... + def is_active(self) -> bool: ... + def close(self) -> None: ... + def attach_loop(self, loop: AbstractEventLoop | None) -> None: ... + def add_child_handler(self, pid: int, callback: Callable[..., object], *args: Any) -> None: ... + def remove_child_handler(self, pid: int) -> bool: ... 
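Note (illustrative only, not part of the patch): with the asyncio stubs above in place, protocol/transport code also type-checks in the editor; the stubs only serve the IDE and type checker, not the board itself. The class below is a minimal sketch of how protocols.pyi and transports.pyi are consumed; the name EchoProtocol is made up for the example.

import asyncio

class EchoProtocol(asyncio.Protocol):
    def connection_made(self, transport: asyncio.BaseTransport) -> None:
        # connection_made() receives a BaseTransport per protocols.pyi; narrow it
        # to Transport so write() from transports.pyi is available
        assert isinstance(transport, asyncio.Transport)
        self.transport = transport

    def data_received(self, data: bytes) -> None:
        # echo the payload back; write() accepts bytes/bytearray/memoryview
        self.transport.write(data)

Such a protocol would typically be passed to loop.create_server(EchoProtocol, ...), whose signature these stubs annotate on the event-loop classes.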
diff --git a/.vscode/Pico-W-Stub/stdlib/builtins.pyi b/.vscode/Pico-W-Stub/stdlib/builtins.pyi new file mode 100644 index 0000000..8de0f92 --- /dev/null +++ b/.vscode/Pico-W-Stub/stdlib/builtins.pyi @@ -0,0 +1,1958 @@ +# JV-Patch +import sys +# end path +import types +from _ast import AST +from collections.abc import (Awaitable, Callable, Iterable, Iterator, + MutableSet, Reversible) +from collections.abc import Set as AbstractSet +from collections.abc import Sized +# end patch +from types import CodeType, TracebackType, _Cell +# mypy crashes if any of {ByteString, Sequence, MutableSequence, Mapping, MutableMapping} are imported from collections.abc in builtins.pyi +from typing import (IO, Any, BinaryIO, ByteString, ClassVar, # noqa: Y027 + Generic, Mapping, MutableMapping, MutableSequence, + NoReturn, Protocol, Sequence, SupportsAbs, SupportsBytes, + SupportsComplex, SupportsFloat, SupportsInt, TypeVar, + overload, type_check_only) + +import stdlib.sys +from _collections_abc import dict_items, dict_keys, dict_values +from _typeshed import (AnyStr_co, OpenBinaryMode, OpenBinaryModeReading, + OpenBinaryModeUpdating, OpenBinaryModeWriting, + OpenTextMode, ReadableBuffer, Self, StrOrBytesPath, + SupportsAdd, SupportsAiter, SupportsAnext, + SupportsDivMod, SupportsIter, SupportsKeysAndGetItem, + SupportsLenAndGetItem, SupportsNext, SupportsRAdd, + SupportsRDivMod, SupportsRichComparison, + SupportsRichComparisonT, SupportsTrunc, SupportsWrite) +# JV-Patch +from stdlib.io import (BufferedRandom, BufferedReader, BufferedWriter, FileIO, + TextIOWrapper) +from typing_extensions import (Literal, LiteralString, SupportsIndex, + TypeAlias, TypeGuard, final) + +if sys.version_info >= (3, 9): + from types import GenericAlias + +_T = TypeVar("_T") +_T_co = TypeVar("_T_co", covariant=True) +_T_contra = TypeVar("_T_contra", contravariant=True) +_R_co = TypeVar("_R_co", covariant=True) +_KT = TypeVar("_KT") +_VT = TypeVar("_VT") +_S = TypeVar("_S") +_T1 = TypeVar("_T1") +_T2 = TypeVar("_T2") +_T3 = TypeVar("_T3") +_T4 = TypeVar("_T4") +_T5 = TypeVar("_T5") +_SupportsNextT = TypeVar("_SupportsNextT", bound=SupportsNext[Any], covariant=True) +_SupportsAnextT = TypeVar("_SupportsAnextT", bound=SupportsAnext[Any], covariant=True) +_AwaitableT = TypeVar("_AwaitableT", bound=Awaitable[Any]) +_AwaitableT_co = TypeVar("_AwaitableT_co", bound=Awaitable[Any], covariant=True) + +class object: + __doc__: str | None + __dict__: dict[str, Any] + __module__: str + __annotations__: dict[str, Any] + @property + def __class__(self: Self) -> type[Self]: ... + # Ignore errors about type mismatch between property getter and setter + @__class__.setter + def __class__(self, __type: type[object]) -> None: ... # noqa: F811 + def __init__(self) -> None: ... + def __new__(cls: type[Self]) -> Self: ... + # N.B. `object.__setattr__` and `object.__delattr__` are heavily special-cased by type checkers. + # Overriding them in subclasses has different semantics, even if the override has an identical signature. + def __setattr__(self, __name: str, __value: Any) -> None: ... + def __delattr__(self, __name: str) -> None: ... + def __eq__(self, __o: object) -> bool: ... + def __ne__(self, __o: object) -> bool: ... + def __str__(self) -> str: ... # noqa: Y029 + def __repr__(self) -> str: ... # noqa: Y029 + def __hash__(self) -> int: ... + def __format__(self, __format_spec: str) -> str: ... + def __getattribute__(self, __name: str) -> Any: ... + def __sizeof__(self) -> int: ... 
+ # return type of pickle methods is rather hard to express in the current type system + # see #6661 and https://docs.python.org/3/library/pickle.html#object.__reduce__ + def __reduce__(self) -> str | tuple[Any, ...]: ... + if sys.version_info >= (3, 8): + def __reduce_ex__(self, __protocol: SupportsIndex) -> str | tuple[Any, ...]: ... + else: + def __reduce_ex__(self, __protocol: int) -> str | tuple[Any, ...]: ... + + def __dir__(self) -> Iterable[str]: ... + def __init_subclass__(cls) -> None: ... + +class staticmethod(Generic[_R_co]): + @property + def __func__(self) -> Callable[..., _R_co]: ... + @property + def __isabstractmethod__(self) -> bool: ... + def __init__(self: staticmethod[_R_co], __f: Callable[..., _R_co]) -> None: ... + def __get__(self, __obj: _T, __type: type[_T] | None = ...) -> Callable[..., _R_co]: ... + if sys.version_info >= (3, 10): + __name__: str + __qualname__: str + @property + def __wrapped__(self) -> Callable[..., _R_co]: ... + def __call__(self, *args: Any, **kwargs: Any) -> _R_co: ... + +class classmethod(Generic[_R_co]): + @property + def __func__(self) -> Callable[..., _R_co]: ... + @property + def __isabstractmethod__(self) -> bool: ... + def __init__(self: classmethod[_R_co], __f: Callable[..., _R_co]) -> None: ... + def __get__(self, __obj: _T, __type: type[_T] | None = ...) -> Callable[..., _R_co]: ... + if sys.version_info >= (3, 10): + __name__: str + __qualname__: str + @property + def __wrapped__(self) -> Callable[..., _R_co]: ... + +class type: + @property + def __base__(self) -> type: ... + __bases__: tuple[type, ...] + @property + def __basicsize__(self) -> int: ... + @property + def __dict__(self) -> types.MappingProxyType[str, Any]: ... # type: ignore[override] + @property + def __dictoffset__(self) -> int: ... + @property + def __flags__(self) -> int: ... + @property + def __itemsize__(self) -> int: ... + __module__: str + @property + def __mro__(self) -> tuple[type, ...]: ... + __name__: str + __qualname__: str + @property + def __text_signature__(self) -> str | None: ... + @property + def __weakrefoffset__(self) -> int: ... + @overload + def __init__(self, __o: object) -> None: ... + @overload + def __init__(self, __name: str, __bases: tuple[type, ...], __dict: dict[str, Any], **kwds: Any) -> None: ... + @overload + def __new__(cls, __o: object) -> type: ... + @overload + def __new__(cls: type[Self], __name: str, __bases: tuple[type, ...], __namespace: dict[str, Any], **kwds: Any) -> Self: ... + def __call__(self, *args: Any, **kwds: Any) -> Any: ... + def __subclasses__(self: Self) -> list[Self]: ... + # Note: the documentation doesn't specify what the return type is, the standard + # implementation seems to be returning a list. + def mro(self) -> list[type]: ... + def __instancecheck__(self, __instance: Any) -> bool: ... + def __subclasscheck__(self, __subclass: type) -> bool: ... + @classmethod + def __prepare__(metacls, __name: str, __bases: tuple[type, ...], **kwds: Any) -> Mapping[str, object]: ... + if sys.version_info >= (3, 10): + def __or__(self, __t: Any) -> types.UnionType: ... + def __ror__(self, __t: Any) -> types.UnionType: ... + +class super: + @overload + def __init__(self, __t: Any, __obj: Any) -> None: ... + @overload + def __init__(self, __t: Any) -> None: ... + @overload + def __init__(self) -> None: ... 
+ +_PositiveInteger: TypeAlias = Literal[1, 2, 3, 4, 5, 6, 7, 8, 9, 10, 11, 12, 13, 14, 15, 16, 17, 18, 19, 20, 21, 22, 23, 24, 25] +_NegativeInteger: TypeAlias = Literal[-1, -2, -3, -4, -5, -6, -7, -8, -9, -10, -11, -12, -13, -14, -15, -16, -17, -18, -19, -20] +_LiteralInteger = _PositiveInteger | _NegativeInteger | Literal[0] # noqa: Y026 # TODO: Use TypeAlias once mypy bugs are fixed + +class int: + @overload + def __new__(cls: type[Self], __x: str | ReadableBuffer | SupportsInt | SupportsIndex | SupportsTrunc = ...) -> Self: ... + @overload + def __new__(cls: type[Self], __x: str | bytes | bytearray, base: SupportsIndex) -> Self: ... + if sys.version_info >= (3, 8): + def as_integer_ratio(self) -> tuple[int, Literal[1]]: ... + + @property + def real(self) -> int: ... + @property + def imag(self) -> Literal[0]: ... + @property + def numerator(self) -> int: ... + @property + def denominator(self) -> Literal[1]: ... + def conjugate(self) -> int: ... + def bit_length(self) -> int: ... + if sys.version_info >= (3, 10): + def bit_count(self) -> int: ... + + if sys.version_info >= (3, 11): + def to_bytes( + self, length: SupportsIndex = ..., byteorder: Literal["little", "big"] = ..., *, signed: bool = ... + ) -> bytes: ... + @classmethod + def from_bytes( + cls: type[Self], + bytes: Iterable[SupportsIndex] | SupportsBytes | ReadableBuffer, + byteorder: Literal["little", "big"] = ..., + *, + signed: bool = ..., + ) -> Self: ... + else: + def to_bytes(self, length: SupportsIndex, byteorder: Literal["little", "big"], *, signed: bool = ...) -> bytes: ... + @classmethod + def from_bytes( + cls: type[Self], + bytes: Iterable[SupportsIndex] | SupportsBytes | ReadableBuffer, + byteorder: Literal["little", "big"], + *, + signed: bool = ..., + ) -> Self: ... + + def __add__(self, __x: int) -> int: ... + def __sub__(self, __x: int) -> int: ... + def __mul__(self, __x: int) -> int: ... + def __floordiv__(self, __x: int) -> int: ... + def __truediv__(self, __x: int) -> float: ... + def __mod__(self, __x: int) -> int: ... + def __divmod__(self, __x: int) -> tuple[int, int]: ... + def __radd__(self, __x: int) -> int: ... + def __rsub__(self, __x: int) -> int: ... + def __rmul__(self, __x: int) -> int: ... + def __rfloordiv__(self, __x: int) -> int: ... + def __rtruediv__(self, __x: int) -> float: ... + def __rmod__(self, __x: int) -> int: ... + def __rdivmod__(self, __x: int) -> tuple[int, int]: ... + @overload + def __pow__(self, __x: Literal[0]) -> Literal[1]: ... + @overload + def __pow__(self, __x: Literal[0], __modulo: None) -> Literal[1]: ... + @overload + def __pow__(self, __x: _PositiveInteger, __modulo: None = ...) -> int: ... + @overload + def __pow__(self, __x: _NegativeInteger, __modulo: None = ...) -> float: ... + # positive x -> int; negative x -> float + # return type must be Any as `int | float` causes too many false-positive errors + @overload + def __pow__(self, __x: int, __modulo: None = ...) -> Any: ... + @overload + def __pow__(self, __x: int, __modulo: int) -> int: ... + def __rpow__(self, __x: int, __mod: int | None = ...) -> Any: ... + def __and__(self, __n: int) -> int: ... + def __or__(self, __n: int) -> int: ... + def __xor__(self, __n: int) -> int: ... + def __lshift__(self, __n: int) -> int: ... + def __rshift__(self, __n: int) -> int: ... + def __rand__(self, __n: int) -> int: ... + def __ror__(self, __n: int) -> int: ... + def __rxor__(self, __n: int) -> int: ... + def __rlshift__(self, __n: int) -> int: ... + def __rrshift__(self, __n: int) -> int: ... 
+ def __neg__(self) -> int: ... + def __pos__(self) -> int: ... + def __invert__(self) -> int: ... + def __trunc__(self) -> int: ... + def __ceil__(self) -> int: ... + def __floor__(self) -> int: ... + def __round__(self, __ndigits: SupportsIndex = ...) -> int: ... + def __getnewargs__(self) -> tuple[int]: ... + def __eq__(self, __x: object) -> bool: ... + def __ne__(self, __x: object) -> bool: ... + def __lt__(self, __x: int) -> bool: ... + def __le__(self, __x: int) -> bool: ... + def __gt__(self, __x: int) -> bool: ... + def __ge__(self, __x: int) -> bool: ... + def __float__(self) -> float: ... + def __int__(self) -> int: ... + def __abs__(self) -> int: ... + def __bool__(self) -> bool: ... + def __index__(self) -> int: ... + def __hash__(self) -> int: ... + +class float: + def __new__(cls: type[Self], __x: SupportsFloat | SupportsIndex | str | ReadableBuffer = ...) -> Self: ... + def as_integer_ratio(self) -> tuple[int, int]: ... + def hex(self) -> str: ... + def is_integer(self) -> bool: ... + @classmethod + def fromhex(cls: type[Self], __s: str) -> Self: ... + @property + def real(self) -> float: ... + @property + def imag(self) -> float: ... + def conjugate(self) -> float: ... + def __add__(self, __x: float) -> float: ... + def __sub__(self, __x: float) -> float: ... + def __mul__(self, __x: float) -> float: ... + def __floordiv__(self, __x: float) -> float: ... + def __truediv__(self, __x: float) -> float: ... + def __mod__(self, __x: float) -> float: ... + def __divmod__(self, __x: float) -> tuple[float, float]: ... + @overload + def __pow__(self, __x: int, __mod: None = ...) -> float: ... + # positive x -> float; negative x -> complex + # return type must be Any as `float | complex` causes too many false-positive errors + @overload + def __pow__(self, __x: float, __mod: None = ...) -> Any: ... + def __radd__(self, __x: float) -> float: ... + def __rsub__(self, __x: float) -> float: ... + def __rmul__(self, __x: float) -> float: ... + def __rfloordiv__(self, __x: float) -> float: ... + def __rtruediv__(self, __x: float) -> float: ... + def __rmod__(self, __x: float) -> float: ... + def __rdivmod__(self, __x: float) -> tuple[float, float]: ... + @overload + def __rpow__(self, __x: _PositiveInteger, __modulo: None = ...) -> float: ... + @overload + def __rpow__(self, __x: _NegativeInteger, __mod: None = ...) -> complex: ... + # Returning `complex` for the general case gives too many false-positive errors. + @overload + def __rpow__(self, __x: float, __mod: None = ...) -> Any: ... + def __getnewargs__(self) -> tuple[float]: ... + def __trunc__(self) -> int: ... + if sys.version_info >= (3, 9): + def __ceil__(self) -> int: ... + def __floor__(self) -> int: ... + + @overload + def __round__(self, __ndigits: None = ...) -> int: ... + @overload + def __round__(self, __ndigits: SupportsIndex) -> float: ... + def __eq__(self, __x: object) -> bool: ... + def __ne__(self, __x: object) -> bool: ... + def __lt__(self, __x: float) -> bool: ... + def __le__(self, __x: float) -> bool: ... + def __gt__(self, __x: float) -> bool: ... + def __ge__(self, __x: float) -> bool: ... + def __neg__(self) -> float: ... + def __pos__(self) -> float: ... + def __int__(self) -> int: ... + def __float__(self) -> float: ... + def __abs__(self) -> float: ... + def __bool__(self) -> bool: ... 
+ +class complex: + if sys.version_info >= (3, 8): + # Python doesn't currently accept SupportsComplex for the second argument + @overload + def __new__( + cls: type[Self], + real: complex | SupportsComplex | SupportsFloat | SupportsIndex = ..., + imag: complex | SupportsFloat | SupportsIndex = ..., + ) -> Self: ... + @overload + def __new__(cls: type[Self], real: str | SupportsComplex | SupportsFloat | SupportsIndex | complex) -> Self: ... + else: + @overload + def __new__( + cls: type[Self], real: complex | SupportsComplex | SupportsFloat = ..., imag: complex | SupportsFloat = ... + ) -> Self: ... + @overload + def __new__(cls: type[Self], real: str | SupportsComplex | SupportsFloat | complex) -> Self: ... + + @property + def real(self) -> float: ... + @property + def imag(self) -> float: ... + def conjugate(self) -> complex: ... + def __add__(self, __x: complex) -> complex: ... + def __sub__(self, __x: complex) -> complex: ... + def __mul__(self, __x: complex) -> complex: ... + def __pow__(self, __x: complex, __mod: None = ...) -> complex: ... + def __truediv__(self, __x: complex) -> complex: ... + def __radd__(self, __x: complex) -> complex: ... + def __rsub__(self, __x: complex) -> complex: ... + def __rmul__(self, __x: complex) -> complex: ... + def __rpow__(self, __x: complex, __mod: None = ...) -> complex: ... + def __rtruediv__(self, __x: complex) -> complex: ... + def __eq__(self, __x: object) -> bool: ... + def __ne__(self, __x: object) -> bool: ... + def __neg__(self) -> complex: ... + def __pos__(self) -> complex: ... + def __abs__(self) -> float: ... + def __bool__(self) -> bool: ... + if sys.version_info >= (3, 11): + def __complex__(self) -> complex: ... + +class _FormatMapMapping(Protocol): + def __getitem__(self, __key: str) -> Any: ... + +class _TranslateTable(Protocol): + def __getitem__(self, __key: int) -> str | int | None: ... + +class str(Sequence[str]): + @overload + def __new__(cls: type[Self], object: object = ...) -> Self: ... + @overload + def __new__(cls: type[Self], object: ReadableBuffer, encoding: str = ..., errors: str = ...) -> Self: ... + @overload + def capitalize(self: LiteralString) -> LiteralString: ... + @overload + def capitalize(self) -> str: ... # type: ignore[misc] + @overload + def casefold(self: LiteralString) -> LiteralString: ... + @overload + def casefold(self) -> str: ... # type: ignore[misc] + @overload + def center(self: LiteralString, __width: SupportsIndex, __fillchar: LiteralString = ...) -> LiteralString: ... + @overload + def center(self, __width: SupportsIndex, __fillchar: str = ...) -> str: ... # type: ignore[misc] + def count(self, x: str, __start: SupportsIndex | None = ..., __end: SupportsIndex | None = ...) -> int: ... + def encode(self, encoding: str = ..., errors: str = ...) -> bytes: ... + def endswith( + self, __suffix: str | tuple[str, ...], __start: SupportsIndex | None = ..., __end: SupportsIndex | None = ... + ) -> bool: ... + if sys.version_info >= (3, 8): + @overload + def expandtabs(self: LiteralString, tabsize: SupportsIndex = ...) -> LiteralString: ... + @overload + def expandtabs(self, tabsize: SupportsIndex = ...) -> str: ... # type: ignore[misc] + else: + @overload + def expandtabs(self: LiteralString, tabsize: int = ...) -> LiteralString: ... + @overload + def expandtabs(self, tabsize: int = ...) -> str: ... # type: ignore[misc] + + def find(self, __sub: str, __start: SupportsIndex | None = ..., __end: SupportsIndex | None = ...) -> int: ... 
+ @overload + def format(self: LiteralString, *args: LiteralString, **kwargs: LiteralString) -> LiteralString: ... + @overload + def format(self, *args: object, **kwargs: object) -> str: ... # type: ignore[misc] + def format_map(self, map: _FormatMapMapping) -> str: ... + def index(self, __sub: str, __start: SupportsIndex | None = ..., __end: SupportsIndex | None = ...) -> int: ... + def isalnum(self) -> bool: ... + def isalpha(self) -> bool: ... + def isascii(self) -> bool: ... + def isdecimal(self) -> bool: ... + def isdigit(self) -> bool: ... + def isidentifier(self) -> bool: ... + def islower(self) -> bool: ... + def isnumeric(self) -> bool: ... + def isprintable(self) -> bool: ... + def isspace(self) -> bool: ... + def istitle(self) -> bool: ... + def isupper(self) -> bool: ... + @overload + def join(self: LiteralString, __iterable: Iterable[LiteralString]) -> LiteralString: ... + @overload + def join(self, __iterable: Iterable[str]) -> str: ... # type: ignore[misc] + @overload + def ljust(self: LiteralString, __width: SupportsIndex, __fillchar: LiteralString = ...) -> LiteralString: ... + @overload + def ljust(self, __width: SupportsIndex, __fillchar: str = ...) -> str: ... # type: ignore[misc] + @overload + def lower(self: LiteralString) -> LiteralString: ... + @overload + def lower(self) -> str: ... # type: ignore[misc] + @overload + def lstrip(self: LiteralString, __chars: LiteralString | None = ...) -> LiteralString: ... + @overload + def lstrip(self, __chars: str | None = ...) -> str: ... # type: ignore[misc] + @overload + def partition(self: LiteralString, __sep: LiteralString) -> tuple[LiteralString, LiteralString, LiteralString]: ... + @overload + def partition(self, __sep: str) -> tuple[str, str, str]: ... # type: ignore[misc] + @overload + def replace( + self: LiteralString, __old: LiteralString, __new: LiteralString, __count: SupportsIndex = ... + ) -> LiteralString: ... + @overload + def replace(self, __old: str, __new: str, __count: SupportsIndex = ...) -> str: ... # type: ignore[misc] + if sys.version_info >= (3, 9): + @overload + def removeprefix(self: LiteralString, __prefix: LiteralString) -> LiteralString: ... + @overload + def removeprefix(self, __prefix: str) -> str: ... # type: ignore[misc] + @overload + def removesuffix(self: LiteralString, __suffix: LiteralString) -> LiteralString: ... + @overload + def removesuffix(self, __suffix: str) -> str: ... # type: ignore[misc] + + def rfind(self, __sub: str, __start: SupportsIndex | None = ..., __end: SupportsIndex | None = ...) -> int: ... + def rindex(self, __sub: str, __start: SupportsIndex | None = ..., __end: SupportsIndex | None = ...) -> int: ... + @overload + def rjust(self: LiteralString, __width: SupportsIndex, __fillchar: LiteralString = ...) -> LiteralString: ... + @overload + def rjust(self, __width: SupportsIndex, __fillchar: str = ...) -> str: ... # type: ignore[misc] + @overload + def rpartition(self: LiteralString, __sep: LiteralString) -> tuple[LiteralString, LiteralString, LiteralString]: ... + @overload + def rpartition(self, __sep: str) -> tuple[str, str, str]: ... # type: ignore[misc] + @overload + def rsplit(self: LiteralString, sep: LiteralString | None = ..., maxsplit: SupportsIndex = ...) -> list[LiteralString]: ... + @overload + def rsplit(self, sep: str | None = ..., maxsplit: SupportsIndex = ...) -> list[str]: ... # type: ignore[misc] + @overload + def rstrip(self: LiteralString, __chars: LiteralString | None = ...) -> LiteralString: ... 
+ @overload + def rstrip(self, __chars: str | None = ...) -> str: ... # type: ignore[misc] + @overload + def split(self: LiteralString, sep: LiteralString | None = ..., maxsplit: SupportsIndex = ...) -> list[LiteralString]: ... + @overload + def split(self, sep: str | None = ..., maxsplit: SupportsIndex = ...) -> list[str]: ... # type: ignore[misc] + @overload + def splitlines(self: LiteralString, keepends: bool = ...) -> list[LiteralString]: ... + @overload + def splitlines(self, keepends: bool = ...) -> list[str]: ... # type: ignore[misc] + def startswith( + self, __prefix: str | tuple[str, ...], __start: SupportsIndex | None = ..., __end: SupportsIndex | None = ... + ) -> bool: ... + @overload + def strip(self: LiteralString, __chars: LiteralString | None = ...) -> LiteralString: ... + @overload + def strip(self, __chars: str | None = ...) -> str: ... # type: ignore[misc] + @overload + def swapcase(self: LiteralString) -> LiteralString: ... + @overload + def swapcase(self) -> str: ... # type: ignore[misc] + @overload + def title(self: LiteralString) -> LiteralString: ... + @overload + def title(self) -> str: ... # type: ignore[misc] + def translate(self, __table: _TranslateTable) -> str: ... + @overload + def upper(self: LiteralString) -> LiteralString: ... + @overload + def upper(self) -> str: ... # type: ignore[misc] + @overload + def zfill(self: LiteralString, __width: SupportsIndex) -> LiteralString: ... + @overload + def zfill(self, __width: SupportsIndex) -> str: ... # type: ignore[misc] + @staticmethod + @overload + def maketrans(__x: dict[int, _T] | dict[str, _T] | dict[str | int, _T]) -> dict[int, _T]: ... + @staticmethod + @overload + def maketrans(__x: str, __y: str, __z: str | None = ...) -> dict[int, int | None]: ... + @overload + def __add__(self: LiteralString, __s: LiteralString) -> LiteralString: ... + @overload + def __add__(self, __s: str) -> str: ... # type: ignore[misc] + # Incompatible with Sequence.__contains__ + def __contains__(self, __o: str) -> bool: ... # type: ignore[override] + def __eq__(self, __x: object) -> bool: ... + def __ge__(self, __x: str) -> bool: ... + def __getitem__(self, __i: SupportsIndex | slice) -> str: ... + def __gt__(self, __x: str) -> bool: ... + @overload + def __iter__(self: LiteralString) -> Iterator[LiteralString]: ... + @overload + def __iter__(self) -> Iterator[str]: ... # type: ignore[misc] + def __le__(self, __x: str) -> bool: ... + def __len__(self) -> int: ... + def __lt__(self, __x: str) -> bool: ... + @overload + def __mod__(self: LiteralString, __x: LiteralString | tuple[LiteralString, ...]) -> LiteralString: ... + @overload + def __mod__(self, __x: Any) -> str: ... # type: ignore[misc] + @overload + def __mul__(self: LiteralString, __n: SupportsIndex) -> LiteralString: ... + @overload + def __mul__(self, __n: SupportsIndex) -> str: ... # type: ignore[misc] + def __ne__(self, __x: object) -> bool: ... + @overload + def __rmul__(self: LiteralString, __n: SupportsIndex) -> LiteralString: ... + @overload + def __rmul__(self, __n: SupportsIndex) -> str: ... # type: ignore[misc] + def __getnewargs__(self) -> tuple[str]: ... + def __hash__(self) -> int: ... + +class bytes(ByteString): + @overload + def __new__(cls: type[Self], __o: Iterable[SupportsIndex] | SupportsIndex | SupportsBytes | ReadableBuffer) -> Self: ... + @overload + def __new__(cls: type[Self], __string: str, encoding: str, errors: str = ...) -> Self: ... + @overload + def __new__(cls: type[Self]) -> Self: ... + def capitalize(self) -> bytes: ... 
+ def center(self, __width: SupportsIndex, __fillchar: bytes = ...) -> bytes: ... + def count( + self, __sub: ReadableBuffer | SupportsIndex, __start: SupportsIndex | None = ..., __end: SupportsIndex | None = ... + ) -> int: ... + def decode(self, encoding: str = ..., errors: str = ...) -> str: ... + def endswith( + self, + __suffix: ReadableBuffer | tuple[ReadableBuffer, ...], + __start: SupportsIndex | None = ..., + __end: SupportsIndex | None = ..., + ) -> bool: ... + if sys.version_info >= (3, 8): + def expandtabs(self, tabsize: SupportsIndex = ...) -> bytes: ... + else: + def expandtabs(self, tabsize: int = ...) -> bytes: ... + + def find( + self, __sub: ReadableBuffer | SupportsIndex, __start: SupportsIndex | None = ..., __end: SupportsIndex | None = ... + ) -> int: ... + if sys.version_info >= (3, 8): + def hex(self, sep: str | bytes = ..., bytes_per_sep: SupportsIndex = ...) -> str: ... + else: + def hex(self) -> str: ... + + def index( + self, __sub: ReadableBuffer | SupportsIndex, __start: SupportsIndex | None = ..., __end: SupportsIndex | None = ... + ) -> int: ... + def isalnum(self) -> bool: ... + def isalpha(self) -> bool: ... + def isascii(self) -> bool: ... + def isdigit(self) -> bool: ... + def islower(self) -> bool: ... + def isspace(self) -> bool: ... + def istitle(self) -> bool: ... + def isupper(self) -> bool: ... + def join(self, __iterable_of_bytes: Iterable[ReadableBuffer]) -> bytes: ... + def ljust(self, __width: SupportsIndex, __fillchar: bytes | bytearray = ...) -> bytes: ... + def lower(self) -> bytes: ... + def lstrip(self, __bytes: ReadableBuffer | None = ...) -> bytes: ... + def partition(self, __sep: ReadableBuffer) -> tuple[bytes, bytes, bytes]: ... + def replace(self, __old: ReadableBuffer, __new: ReadableBuffer, __count: SupportsIndex = ...) -> bytes: ... + if sys.version_info >= (3, 9): + def removeprefix(self, __prefix: ReadableBuffer) -> bytes: ... + def removesuffix(self, __suffix: ReadableBuffer) -> bytes: ... + + def rfind( + self, __sub: ReadableBuffer | SupportsIndex, __start: SupportsIndex | None = ..., __end: SupportsIndex | None = ... + ) -> int: ... + def rindex( + self, __sub: ReadableBuffer | SupportsIndex, __start: SupportsIndex | None = ..., __end: SupportsIndex | None = ... + ) -> int: ... + def rjust(self, __width: SupportsIndex, __fillchar: bytes | bytearray = ...) -> bytes: ... + def rpartition(self, __sep: ReadableBuffer) -> tuple[bytes, bytes, bytes]: ... + def rsplit(self, sep: ReadableBuffer | None = ..., maxsplit: SupportsIndex = ...) -> list[bytes]: ... + def rstrip(self, __bytes: ReadableBuffer | None = ...) -> bytes: ... + def split(self, sep: ReadableBuffer | None = ..., maxsplit: SupportsIndex = ...) -> list[bytes]: ... + def splitlines(self, keepends: bool = ...) -> list[bytes]: ... + def startswith( + self, + __prefix: ReadableBuffer | tuple[ReadableBuffer, ...], + __start: SupportsIndex | None = ..., + __end: SupportsIndex | None = ..., + ) -> bool: ... + def strip(self, __bytes: ReadableBuffer | None = ...) -> bytes: ... + def swapcase(self) -> bytes: ... + def title(self) -> bytes: ... + def translate(self, __table: ReadableBuffer | None, delete: bytes = ...) -> bytes: ... + def upper(self) -> bytes: ... + def zfill(self, __width: SupportsIndex) -> bytes: ... + @classmethod + def fromhex(cls: type[Self], __s: str) -> Self: ... + @staticmethod + def maketrans(__frm: ReadableBuffer, __to: ReadableBuffer) -> bytes: ... + def __len__(self) -> int: ... + def __iter__(self) -> Iterator[int]: ... 
+ @overload + def __getitem__(self, __i: SupportsIndex) -> int: ... + @overload + def __getitem__(self, __s: slice) -> bytes: ... + def __add__(self, __s: ReadableBuffer) -> bytes: ... + def __mul__(self, __n: SupportsIndex) -> bytes: ... + def __rmul__(self, __n: SupportsIndex) -> bytes: ... + def __mod__(self, __value: Any) -> bytes: ... + # Incompatible with Sequence.__contains__ + def __contains__(self, __o: SupportsIndex | ReadableBuffer) -> bool: ... # type: ignore[override] + def __eq__(self, __x: object) -> bool: ... + def __ne__(self, __x: object) -> bool: ... + def __lt__(self, __x: bytes) -> bool: ... + def __le__(self, __x: bytes) -> bool: ... + def __gt__(self, __x: bytes) -> bool: ... + def __ge__(self, __x: bytes) -> bool: ... + def __getnewargs__(self) -> tuple[bytes]: ... + if sys.version_info >= (3, 11): + def __bytes__(self) -> bytes: ... + +class bytearray(MutableSequence[int], ByteString): + @overload + def __init__(self) -> None: ... + @overload + def __init__(self, __ints: Iterable[SupportsIndex] | SupportsIndex | ReadableBuffer) -> None: ... + @overload + def __init__(self, __string: str, encoding: str, errors: str = ...) -> None: ... + def append(self, __item: SupportsIndex) -> None: ... + def capitalize(self) -> bytearray: ... + def center(self, __width: SupportsIndex, __fillchar: bytes = ...) -> bytearray: ... + def count( + self, __sub: ReadableBuffer | SupportsIndex, __start: SupportsIndex | None = ..., __end: SupportsIndex | None = ... + ) -> int: ... + def copy(self) -> bytearray: ... + def decode(self, encoding: str = ..., errors: str = ...) -> str: ... + def endswith( + self, + __suffix: ReadableBuffer | tuple[ReadableBuffer, ...], + __start: SupportsIndex | None = ..., + __end: SupportsIndex | None = ..., + ) -> bool: ... + if sys.version_info >= (3, 8): + def expandtabs(self, tabsize: SupportsIndex = ...) -> bytearray: ... + else: + def expandtabs(self, tabsize: int = ...) -> bytearray: ... + + def extend(self, __iterable_of_ints: Iterable[SupportsIndex]) -> None: ... + def find( + self, __sub: ReadableBuffer | SupportsIndex, __start: SupportsIndex | None = ..., __end: SupportsIndex | None = ... + ) -> int: ... + if sys.version_info >= (3, 8): + def hex(self, sep: str | bytes = ..., bytes_per_sep: SupportsIndex = ...) -> str: ... + else: + def hex(self) -> str: ... + + def index( + self, __sub: ReadableBuffer | SupportsIndex, __start: SupportsIndex | None = ..., __end: SupportsIndex | None = ... + ) -> int: ... + def insert(self, __index: SupportsIndex, __item: SupportsIndex) -> None: ... + def isalnum(self) -> bool: ... + def isalpha(self) -> bool: ... + def isascii(self) -> bool: ... + def isdigit(self) -> bool: ... + def islower(self) -> bool: ... + def isspace(self) -> bool: ... + def istitle(self) -> bool: ... + def isupper(self) -> bool: ... + def join(self, __iterable_of_bytes: Iterable[ReadableBuffer]) -> bytearray: ... + def ljust(self, __width: SupportsIndex, __fillchar: bytes | bytearray = ...) -> bytearray: ... + def lower(self) -> bytearray: ... + def lstrip(self, __bytes: ReadableBuffer | None = ...) -> bytearray: ... + def partition(self, __sep: ReadableBuffer) -> tuple[bytearray, bytearray, bytearray]: ... + def pop(self, __index: int = ...) -> int: ... + def remove(self, __value: int) -> None: ... + if sys.version_info >= (3, 9): + def removeprefix(self, __prefix: ReadableBuffer) -> bytearray: ... + def removesuffix(self, __suffix: ReadableBuffer) -> bytearray: ... 
+ + def replace(self, __old: ReadableBuffer, __new: ReadableBuffer, __count: SupportsIndex = ...) -> bytearray: ... + def rfind( + self, __sub: ReadableBuffer | SupportsIndex, __start: SupportsIndex | None = ..., __end: SupportsIndex | None = ... + ) -> int: ... + def rindex( + self, __sub: ReadableBuffer | SupportsIndex, __start: SupportsIndex | None = ..., __end: SupportsIndex | None = ... + ) -> int: ... + def rjust(self, __width: SupportsIndex, __fillchar: bytes | bytearray = ...) -> bytearray: ... + def rpartition(self, __sep: ReadableBuffer) -> tuple[bytearray, bytearray, bytearray]: ... + def rsplit(self, sep: ReadableBuffer | None = ..., maxsplit: SupportsIndex = ...) -> list[bytearray]: ... + def rstrip(self, __bytes: ReadableBuffer | None = ...) -> bytearray: ... + def split(self, sep: ReadableBuffer | None = ..., maxsplit: SupportsIndex = ...) -> list[bytearray]: ... + def splitlines(self, keepends: bool = ...) -> list[bytearray]: ... + def startswith( + self, + __prefix: ReadableBuffer | tuple[ReadableBuffer, ...], + __start: SupportsIndex | None = ..., + __end: SupportsIndex | None = ..., + ) -> bool: ... + def strip(self, __bytes: ReadableBuffer | None = ...) -> bytearray: ... + def swapcase(self) -> bytearray: ... + def title(self) -> bytearray: ... + def translate(self, __table: ReadableBuffer | None, delete: bytes = ...) -> bytearray: ... + def upper(self) -> bytearray: ... + def zfill(self, __width: SupportsIndex) -> bytearray: ... + @classmethod + def fromhex(cls: type[Self], __string: str) -> Self: ... + @staticmethod + def maketrans(__frm: ReadableBuffer, __to: ReadableBuffer) -> bytes: ... + def __len__(self) -> int: ... + def __iter__(self) -> Iterator[int]: ... + __hash__: ClassVar[None] # type: ignore[assignment] + @overload + def __getitem__(self, __i: SupportsIndex) -> int: ... + @overload + def __getitem__(self, __s: slice) -> bytearray: ... + @overload + def __setitem__(self, __i: SupportsIndex, __x: SupportsIndex) -> None: ... + @overload + def __setitem__(self, __s: slice, __x: Iterable[SupportsIndex] | bytes) -> None: ... + def __delitem__(self, __i: SupportsIndex | slice) -> None: ... + def __add__(self, __s: ReadableBuffer) -> bytearray: ... + # The superclass wants us to accept Iterable[int], but that fails at runtime. + def __iadd__(self: Self, __s: ReadableBuffer) -> Self: ... # type: ignore[override] + def __mul__(self, __n: SupportsIndex) -> bytearray: ... + def __rmul__(self, __n: SupportsIndex) -> bytearray: ... + def __imul__(self: Self, __n: SupportsIndex) -> Self: ... + def __mod__(self, __value: Any) -> bytes: ... + # Incompatible with Sequence.__contains__ + def __contains__(self, __o: SupportsIndex | ReadableBuffer) -> bool: ... # type: ignore[override] + def __eq__(self, __x: object) -> bool: ... + def __ne__(self, __x: object) -> bool: ... + def __lt__(self, __x: ReadableBuffer) -> bool: ... + def __le__(self, __x: ReadableBuffer) -> bool: ... + def __gt__(self, __x: ReadableBuffer) -> bool: ... + def __ge__(self, __x: ReadableBuffer) -> bool: ... + def __alloc__(self) -> int: ... + +@final +class memoryview(Sequence[int]): + @property + def format(self) -> str: ... + @property + def itemsize(self) -> int: ... + @property + def shape(self) -> tuple[int, ...] | None: ... + @property + def strides(self) -> tuple[int, ...] | None: ... + @property + def suboffsets(self) -> tuple[int, ...] | None: ... + @property + def readonly(self) -> bool: ... + @property + def ndim(self) -> int: ... + @property + def obj(self) -> ReadableBuffer: ... 
+ @property + def c_contiguous(self) -> bool: ... + @property + def f_contiguous(self) -> bool: ... + @property + def contiguous(self) -> bool: ... + @property + def nbytes(self) -> int: ... + def __init__(self, obj: ReadableBuffer) -> None: ... + def __enter__(self: Self) -> Self: ... + def __exit__( + self, __exc_type: type[BaseException] | None, __exc_val: BaseException | None, __exc_tb: TracebackType | None + ) -> None: ... + def cast(self, format: str, shape: list[int] | tuple[int, ...] = ...) -> memoryview: ... + @overload + def __getitem__(self, __i: SupportsIndex) -> int: ... + @overload + def __getitem__(self, __s: slice) -> memoryview: ... + def __contains__(self, __x: object) -> bool: ... + def __iter__(self) -> Iterator[int]: ... + def __len__(self) -> int: ... + @overload + def __setitem__(self, __s: slice, __o: ReadableBuffer) -> None: ... + @overload + def __setitem__(self, __i: SupportsIndex, __o: SupportsIndex) -> None: ... + if sys.version_info >= (3, 8): + def tobytes(self, order: Literal["C", "F", "A"] | None = ...) -> bytes: ... + else: + def tobytes(self) -> bytes: ... + + def tolist(self) -> list[int]: ... + if sys.version_info >= (3, 8): + def toreadonly(self) -> memoryview: ... + + def release(self) -> None: ... + if sys.version_info >= (3, 8): + def hex(self, sep: str | bytes = ..., bytes_per_sep: SupportsIndex = ...) -> str: ... + else: + def hex(self) -> str: ... + +@final +class bool(int): + def __new__(cls: type[Self], __o: object = ...) -> Self: ... + # The following overloads could be represented more elegantly with a TypeVar("_B", bool, int), + # however mypy has a bug regarding TypeVar constraints (https://github.com/python/mypy/issues/11880). + @overload + def __and__(self, __x: bool) -> bool: ... + @overload + def __and__(self, __x: int) -> int: ... + @overload + def __or__(self, __x: bool) -> bool: ... + @overload + def __or__(self, __x: int) -> int: ... + @overload + def __xor__(self, __x: bool) -> bool: ... + @overload + def __xor__(self, __x: int) -> int: ... + @overload + def __rand__(self, __x: bool) -> bool: ... + @overload + def __rand__(self, __x: int) -> int: ... + @overload + def __ror__(self, __x: bool) -> bool: ... + @overload + def __ror__(self, __x: int) -> int: ... + @overload + def __rxor__(self, __x: bool) -> bool: ... + @overload + def __rxor__(self, __x: int) -> int: ... + def __getnewargs__(self) -> tuple[int]: ... + +@final +class slice: + @property + def start(self) -> Any: ... + @property + def step(self) -> Any: ... + @property + def stop(self) -> Any: ... + @overload + def __init__(self, __stop: Any) -> None: ... + @overload + def __init__(self, __start: Any, __stop: Any, __step: Any = ...) -> None: ... + __hash__: ClassVar[None] # type: ignore[assignment] + def indices(self, __len: SupportsIndex) -> tuple[int, int, int]: ... + +class tuple(Sequence[_T_co], Generic[_T_co]): + def __new__(cls: type[Self], __iterable: Iterable[_T_co] = ...) -> Self: ... + def __len__(self) -> int: ... + def __contains__(self, __x: object) -> bool: ... + @overload + def __getitem__(self, __x: SupportsIndex) -> _T_co: ... + @overload + def __getitem__(self, __x: slice) -> tuple[_T_co, ...]: ... + def __iter__(self) -> Iterator[_T_co]: ... + def __lt__(self, __x: tuple[_T_co, ...]) -> bool: ... + def __le__(self, __x: tuple[_T_co, ...]) -> bool: ... + def __gt__(self, __x: tuple[_T_co, ...]) -> bool: ... + def __ge__(self, __x: tuple[_T_co, ...]) -> bool: ... + @overload + def __add__(self, __x: tuple[_T_co, ...]) -> tuple[_T_co, ...]: ... 
+ @overload + def __add__(self, __x: tuple[_T, ...]) -> tuple[_T_co | _T, ...]: ... + def __mul__(self, __n: SupportsIndex) -> tuple[_T_co, ...]: ... + def __rmul__(self, __n: SupportsIndex) -> tuple[_T_co, ...]: ... + def count(self, __value: Any) -> int: ... + def index(self, __value: Any, __start: SupportsIndex = ..., __stop: SupportsIndex = ...) -> int: ... + if sys.version_info >= (3, 9): + def __class_getitem__(cls, __item: Any) -> GenericAlias: ... + +# Doesn't exist at runtime, but deleting this breaks mypy. See #2999 +@final +@type_check_only +class function: + # Make sure this class definition stays roughly in line with `types.FunctionType` + @property + def __closure__(self) -> tuple[_Cell, ...] | None: ... + __code__: CodeType + __defaults__: tuple[Any, ...] | None + __dict__: dict[str, Any] + @property + def __globals__(self) -> dict[str, Any]: ... + __name__: str + __qualname__: str + __annotations__: dict[str, Any] + __kwdefaults__: dict[str, Any] + if sys.version_info >= (3, 10): + @property + def __builtins__(self) -> dict[str, Any]: ... + + __module__: str + # mypy uses `builtins.function.__get__` to represent methods, properties, and getset_descriptors so we type the return as Any. + def __get__(self, obj: object | None, type: type | None = ...) -> Any: ... + +class list(MutableSequence[_T], Generic[_T]): + @overload + def __init__(self) -> None: ... + @overload + def __init__(self, __iterable: Iterable[_T]) -> None: ... + def copy(self) -> list[_T]: ... + def append(self, __object: _T) -> None: ... + def extend(self, __iterable: Iterable[_T]) -> None: ... + def pop(self, __index: SupportsIndex = ...) -> _T: ... + # Signature of `list.index` should be kept in line with `collections.UserList.index()` + # and multiprocessing.managers.ListProxy.index() + def index(self, __value: _T, __start: SupportsIndex = ..., __stop: SupportsIndex = ...) -> int: ... + def count(self, __value: _T) -> int: ... + def insert(self, __index: SupportsIndex, __object: _T) -> None: ... + def remove(self, __value: _T) -> None: ... + # Signature of `list.sort` should be kept inline with `collections.UserList.sort()` + # and multiprocessing.managers.ListProxy.sort() + # + # Use list[SupportsRichComparisonT] for the first overload rather than [SupportsRichComparison] + # to work around invariance + @overload + def sort(self: list[SupportsRichComparisonT], *, key: None = ..., reverse: bool = ...) -> None: ... + @overload + def sort(self, *, key: Callable[[_T], SupportsRichComparison], reverse: bool = ...) -> None: ... + def __len__(self) -> int: ... + def __iter__(self) -> Iterator[_T]: ... + __hash__: ClassVar[None] # type: ignore[assignment] + @overload + def __getitem__(self, __i: SupportsIndex) -> _T: ... + @overload + def __getitem__(self, __s: slice) -> list[_T]: ... + @overload + def __setitem__(self, __i: SupportsIndex, __o: _T) -> None: ... + @overload + def __setitem__(self, __s: slice, __o: Iterable[_T]) -> None: ... + def __delitem__(self, __i: SupportsIndex | slice) -> None: ... + # Overloading looks unnecessary, but is needed to work around complex mypy problems + @overload + def __add__(self, __x: list[_T]) -> list[_T]: ... + @overload + def __add__(self, __x: list[_S]) -> list[_S | _T]: ... + def __iadd__(self: Self, __x: Iterable[_T]) -> Self: ... # type: ignore[misc] + def __mul__(self, __n: SupportsIndex) -> list[_T]: ... + def __rmul__(self, __n: SupportsIndex) -> list[_T]: ... + def __imul__(self: Self, __n: SupportsIndex) -> Self: ... 
+ def __contains__(self, __o: object) -> bool: ... + def __reversed__(self) -> Iterator[_T]: ... + def __gt__(self, __x: list[_T]) -> bool: ... + def __ge__(self, __x: list[_T]) -> bool: ... + def __lt__(self, __x: list[_T]) -> bool: ... + def __le__(self, __x: list[_T]) -> bool: ... + if sys.version_info >= (3, 9): + def __class_getitem__(cls, __item: Any) -> GenericAlias: ... + +class dict(MutableMapping[_KT, _VT], Generic[_KT, _VT]): + # __init__ should be kept roughly in line with `collections.UserDict.__init__`, which has similar semantics + # Also multiprocessing.managers.SyncManager.dict() + @overload + def __init__(self) -> None: ... + @overload + def __init__(self: dict[str, _VT], **kwargs: _VT) -> None: ... + @overload + def __init__(self, __map: SupportsKeysAndGetItem[_KT, _VT]) -> None: ... + @overload + def __init__(self: dict[str, _VT], __map: SupportsKeysAndGetItem[str, _VT], **kwargs: _VT) -> None: ... + @overload + def __init__(self, __iterable: Iterable[tuple[_KT, _VT]]) -> None: ... + @overload + def __init__(self: dict[str, _VT], __iterable: Iterable[tuple[str, _VT]], **kwargs: _VT) -> None: ... + # Next overload is for dict(string.split(sep) for string in iterable) + # Cannot be Iterable[Sequence[_T]] or otherwise dict(["foo", "bar", "baz"]) is not an error + @overload + def __init__(self: dict[str, str], __iterable: Iterable[list[str]]) -> None: ... + def __new__(cls: type[Self], *args: Any, **kwargs: Any) -> Self: ... + def copy(self) -> dict[_KT, _VT]: ... + def keys(self) -> dict_keys[_KT, _VT]: ... + def values(self) -> dict_values[_KT, _VT]: ... + def items(self) -> dict_items[_KT, _VT]: ... + # Signature of `dict.fromkeys` should be kept identical to `fromkeys` methods of `OrderedDict`/`ChainMap`/`UserDict` in `collections` + # TODO: the true signature of `dict.fromkeys` is not expressible in the current type system. + # See #3800 & https://github.com/python/typing/issues/548#issuecomment-683336963. + @classmethod + @overload + def fromkeys(cls, __iterable: Iterable[_T], __value: None = ...) -> dict[_T, Any | None]: ... + @classmethod + @overload + def fromkeys(cls, __iterable: Iterable[_T], __value: _S) -> dict[_T, _S]: ... + # Positional-only in dict, but not in MutableMapping + @overload + def get(self, __key: _KT) -> _VT | None: ... + @overload + def get(self, __key: _KT, __default: _VT | _T) -> _VT | _T: ... + @overload + def pop(self, __key: _KT) -> _VT: ... + @overload + def pop(self, __key: _KT, __default: _VT | _T) -> _VT | _T: ... + def __len__(self) -> int: ... + def __getitem__(self, __key: _KT) -> _VT: ... + def __setitem__(self, __key: _KT, __value: _VT) -> None: ... + def __delitem__(self, __key: _KT) -> None: ... + def __iter__(self) -> Iterator[_KT]: ... + if sys.version_info >= (3, 8): + def __reversed__(self) -> Iterator[_KT]: ... + __hash__: ClassVar[None] # type: ignore[assignment] + if sys.version_info >= (3, 9): + def __class_getitem__(cls, __item: Any) -> GenericAlias: ... + def __or__(self, __value: Mapping[_T1, _T2]) -> dict[_KT | _T1, _VT | _T2]: ... + def __ror__(self, __value: Mapping[_T1, _T2]) -> dict[_KT | _T1, _VT | _T2]: ... + # dict.__ior__ should be kept roughly in line with MutableMapping.update() + @overload # type: ignore[misc] + def __ior__(self: Self, __value: SupportsKeysAndGetItem[_KT, _VT]) -> Self: ... + @overload + def __ior__(self: Self, __value: Iterable[tuple[_KT, _VT]]) -> Self: ... + +class set(MutableSet[_T], Generic[_T]): + @overload + def __init__(self) -> None: ... 
+ @overload + def __init__(self, __iterable: Iterable[_T]) -> None: ... + def add(self, __element: _T) -> None: ... + def copy(self) -> set[_T]: ... + def difference(self, *s: Iterable[Any]) -> set[_T]: ... + def difference_update(self, *s: Iterable[Any]) -> None: ... + def discard(self, __element: _T) -> None: ... + def intersection(self, *s: Iterable[Any]) -> set[_T]: ... + def intersection_update(self, *s: Iterable[Any]) -> None: ... + def isdisjoint(self, __s: Iterable[Any]) -> bool: ... + def issubset(self, __s: Iterable[Any]) -> bool: ... + def issuperset(self, __s: Iterable[Any]) -> bool: ... + def remove(self, __element: _T) -> None: ... + def symmetric_difference(self, __s: Iterable[_T]) -> set[_T]: ... + def symmetric_difference_update(self, __s: Iterable[_T]) -> None: ... + def union(self, *s: Iterable[_S]) -> set[_T | _S]: ... + def update(self, *s: Iterable[_T]) -> None: ... + def __len__(self) -> int: ... + def __contains__(self, __o: object) -> bool: ... + def __iter__(self) -> Iterator[_T]: ... + def __and__(self, __s: AbstractSet[object]) -> set[_T]: ... + def __iand__(self: Self, __s: AbstractSet[object]) -> Self: ... + def __or__(self, __s: AbstractSet[_S]) -> set[_T | _S]: ... + def __ior__(self: Self, __s: AbstractSet[_T]) -> Self: ... # type: ignore[override,misc] + def __sub__(self, __s: AbstractSet[_T | None]) -> set[_T]: ... + def __isub__(self: Self, __s: AbstractSet[object]) -> Self: ... + def __xor__(self, __s: AbstractSet[_S]) -> set[_T | _S]: ... + def __ixor__(self: Self, __s: AbstractSet[_T]) -> Self: ... # type: ignore[override,misc] + def __le__(self, __s: AbstractSet[object]) -> bool: ... + def __lt__(self, __s: AbstractSet[object]) -> bool: ... + def __ge__(self, __s: AbstractSet[object]) -> bool: ... + def __gt__(self, __s: AbstractSet[object]) -> bool: ... + __hash__: ClassVar[None] # type: ignore[assignment] + if sys.version_info >= (3, 9): + def __class_getitem__(cls, __item: Any) -> GenericAlias: ... + +class frozenset(AbstractSet[_T_co], Generic[_T_co]): + @overload + def __new__(cls: type[Self]) -> Self: ... + @overload + def __new__(cls: type[Self], __iterable: Iterable[_T_co]) -> Self: ... + def copy(self) -> frozenset[_T_co]: ... + def difference(self, *s: Iterable[object]) -> frozenset[_T_co]: ... + def intersection(self, *s: Iterable[object]) -> frozenset[_T_co]: ... + def isdisjoint(self, __s: Iterable[_T_co]) -> bool: ... + def issubset(self, __s: Iterable[object]) -> bool: ... + def issuperset(self, __s: Iterable[object]) -> bool: ... + def symmetric_difference(self, __s: Iterable[_T_co]) -> frozenset[_T_co]: ... + def union(self, *s: Iterable[_S]) -> frozenset[_T_co | _S]: ... + def __len__(self) -> int: ... + def __contains__(self, __o: object) -> bool: ... + def __iter__(self) -> Iterator[_T_co]: ... + def __and__(self, __s: AbstractSet[_T_co]) -> frozenset[_T_co]: ... + def __or__(self, __s: AbstractSet[_S]) -> frozenset[_T_co | _S]: ... + def __sub__(self, __s: AbstractSet[_T_co]) -> frozenset[_T_co]: ... + def __xor__(self, __s: AbstractSet[_S]) -> frozenset[_T_co | _S]: ... + def __le__(self, __s: AbstractSet[object]) -> bool: ... + def __lt__(self, __s: AbstractSet[object]) -> bool: ... + def __ge__(self, __s: AbstractSet[object]) -> bool: ... + def __gt__(self, __s: AbstractSet[object]) -> bool: ... + if sys.version_info >= (3, 9): + def __class_getitem__(cls, __item: Any) -> GenericAlias: ... + +class enumerate(Iterator[tuple[int, _T]], Generic[_T]): + def __init__(self, iterable: Iterable[_T], start: int = ...) -> None: ... 
+ def __iter__(self: Self) -> Self: ... + def __next__(self) -> tuple[int, _T]: ... + if sys.version_info >= (3, 9): + def __class_getitem__(cls, __item: Any) -> GenericAlias: ... + +@final +class range(Sequence[int]): + @property + def start(self) -> int: ... + @property + def stop(self) -> int: ... + @property + def step(self) -> int: ... + @overload + def __init__(self, __stop: SupportsIndex) -> None: ... + @overload + def __init__(self, __start: SupportsIndex, __stop: SupportsIndex, __step: SupportsIndex = ...) -> None: ... + def count(self, __value: int) -> int: ... + def index(self, __value: int) -> int: ... # type: ignore[override] + def __len__(self) -> int: ... + def __contains__(self, __o: object) -> bool: ... + def __iter__(self) -> Iterator[int]: ... + @overload + def __getitem__(self, __i: SupportsIndex) -> int: ... + @overload + def __getitem__(self, __s: slice) -> range: ... + def __reversed__(self) -> Iterator[int]: ... + +class property: + fget: Callable[[Any], Any] | None + fset: Callable[[Any, Any], None] | None + fdel: Callable[[Any], None] | None + __isabstractmethod__: bool + def __init__( + self, + fget: Callable[[Any], Any] | None = ..., + fset: Callable[[Any, Any], None] | None = ..., + fdel: Callable[[Any], None] | None = ..., + doc: str | None = ..., + ) -> None: ... + def getter(self, __fget: Callable[[Any], Any]) -> property: ... + def setter(self, __fset: Callable[[Any, Any], None]) -> property: ... + def deleter(self, __fdel: Callable[[Any], None]) -> property: ... + def __get__(self, __obj: Any, __type: type | None = ...) -> Any: ... + def __set__(self, __obj: Any, __value: Any) -> None: ... + def __delete__(self, __obj: Any) -> None: ... + +@final +class _NotImplementedType(Any): # type: ignore[misc] + # A little weird, but typing the __call__ as NotImplemented makes the error message + # for NotImplemented() much better + __call__: NotImplemented # type: ignore[valid-type] + +NotImplemented: _NotImplementedType + +def abs(__x: SupportsAbs[_T]) -> _T: ... +def all(__iterable: Iterable[object]) -> bool: ... +def any(__iterable: Iterable[object]) -> bool: ... +def ascii(__obj: object) -> str: ... +def bin(__number: int | SupportsIndex) -> str: ... +def breakpoint(*args: Any, **kws: Any) -> None: ... +def callable(__obj: object) -> TypeGuard[Callable[..., object]]: ... +def chr(__i: int) -> str: ... + +# We define this here instead of using os.PathLike to avoid import cycle issues. +# See https://github.com/python/typeshed/pull/991#issuecomment-288160993 +class _PathLike(Protocol[AnyStr_co]): + def __fspath__(self) -> AnyStr_co: ... + +if sys.version_info >= (3, 10): + def aiter(__async_iterable: SupportsAiter[_SupportsAnextT]) -> _SupportsAnextT: ... + + class _SupportsSynchronousAnext(Protocol[_AwaitableT_co]): + def __anext__(self) -> _AwaitableT_co: ... + + @overload + # `anext` is not, in fact, an async function. When default is not provided + # `anext` is just a passthrough for `obj.__anext__` + # See discussion in #7491 and pure-Python implementation of `anext` at https://github.com/python/cpython/blob/ea786a882b9ed4261eafabad6011bc7ef3b5bf94/Lib/test/test_asyncgen.py#L52-L80 + def anext(__i: _SupportsSynchronousAnext[_AwaitableT]) -> _AwaitableT: ... + @overload + async def anext(__i: SupportsAnext[_T], default: _VT) -> _T | _VT: ... + +# TODO: `compile` has a more precise return type in reality; work on a way of expressing that? 
+if sys.version_info >= (3, 8): + def compile( + source: str | ReadableBuffer | AST, + filename: str | ReadableBuffer | _PathLike[Any], + mode: str, + flags: int = ..., + dont_inherit: int = ..., + optimize: int = ..., + *, + _feature_version: int = ..., + ) -> Any: ... + +else: + def compile( + source: str | ReadableBuffer | AST, + filename: str | ReadableBuffer | _PathLike[Any], + mode: str, + flags: int = ..., + dont_inherit: int = ..., + optimize: int = ..., + ) -> Any: ... + +def copyright() -> None: ... +def credits() -> None: ... +def delattr(__obj: object, __name: str) -> None: ... +def dir(__o: object = ...) -> list[str]: ... +@overload +def divmod(__x: SupportsDivMod[_T_contra, _T_co], __y: _T_contra) -> _T_co: ... +@overload +def divmod(__x: _T_contra, __y: SupportsRDivMod[_T_contra, _T_co]) -> _T_co: ... + +# The `globals` argument to `eval` has to be `dict[str, Any]` rather than `dict[str, object]` due to invariance. +# (The `globals` argument has to be a "real dict", rather than any old mapping, unlike the `locals` argument.) +def eval( + __source: str | ReadableBuffer | CodeType, __globals: dict[str, Any] | None = ..., __locals: Mapping[str, object] | None = ... +) -> Any: ... + +# Comment above regarding `eval` applies to `exec` as well +if sys.version_info >= (3, 11): + def exec( + __source: str | ReadableBuffer | CodeType, + __globals: dict[str, Any] | None = ..., + __locals: Mapping[str, object] | None = ..., + *, + closure: tuple[_Cell, ...] | None = ..., + ) -> None: ... + +else: + def exec( + __source: str | ReadableBuffer | CodeType, + __globals: dict[str, Any] | None = ..., + __locals: Mapping[str, object] | None = ..., + ) -> None: ... +# JV Patch +def exit(code: stdlib.sys._ExitCode = ...) -> NoReturn: ... +# end JV Patch +class filter(Iterator[_T], Generic[_T]): + @overload + def __init__(self, __function: None, __iterable: Iterable[_T | None]) -> None: ... + @overload + def __init__(self, __function: Callable[[_S], TypeGuard[_T]], __iterable: Iterable[_S]) -> None: ... + @overload + def __init__(self, __function: Callable[[_T], Any], __iterable: Iterable[_T]) -> None: ... + def __iter__(self: Self) -> Self: ... + def __next__(self) -> _T: ... + +def format(__value: object, __format_spec: str = ...) -> str: ... +@overload +def getattr(__o: object, __name: str) -> Any: ... + +# While technically covered by the last overload, spelling out the types for None, bool +# and basic containers help mypy out in some tricky situations involving type context +# (aka bidirectional inference) +@overload +def getattr(__o: object, __name: str, __default: None) -> Any | None: ... +@overload +def getattr(__o: object, __name: str, __default: bool) -> Any | bool: ... +@overload +def getattr(__o: object, name: str, __default: list[Any]) -> Any | list[Any]: ... +@overload +def getattr(__o: object, name: str, __default: dict[Any, Any]) -> Any | dict[Any, Any]: ... +@overload +def getattr(__o: object, __name: str, __default: _T) -> Any | _T: ... +def globals() -> dict[str, Any]: ... +def hasattr(__obj: object, __name: str) -> bool: ... +def hash(__obj: object) -> int: ... +def help(request: object = ...) -> None: ... +def hex(__number: int | SupportsIndex) -> str: ... +def id(__obj: object) -> int: ... +def input(__prompt: object = ...) -> str: ... + +class _GetItemIterable(Protocol[_T_co]): + def __getitem__(self, __i: int) -> _T_co: ... + +@overload +def iter(__iterable: SupportsIter[_SupportsNextT]) -> _SupportsNextT: ...
+@overload +def iter(__iterable: _GetItemIterable[_T]) -> Iterator[_T]: ... +@overload +def iter(__function: Callable[[], _T | None], __sentinel: None) -> Iterator[_T]: ... +@overload +def iter(__function: Callable[[], _T], __sentinel: object) -> Iterator[_T]: ... + +if sys.version_info >= (3, 10): + _ClassInfo: TypeAlias = type | types.UnionType | tuple[_ClassInfo, ...] +else: + _ClassInfo: TypeAlias = type | tuple[_ClassInfo, ...] + +def isinstance(__obj: object, __class_or_tuple: _ClassInfo) -> bool: ... +def issubclass(__cls: type, __class_or_tuple: _ClassInfo) -> bool: ... +def len(__obj: Sized) -> int: ... +def license() -> None: ... +def locals() -> dict[str, Any]: ... + +class map(Iterator[_S], Generic[_S]): + @overload + def __init__(self, __func: Callable[[_T1], _S], __iter1: Iterable[_T1]) -> None: ... + @overload + def __init__(self, __func: Callable[[_T1, _T2], _S], __iter1: Iterable[_T1], __iter2: Iterable[_T2]) -> None: ... + @overload + def __init__( + self, __func: Callable[[_T1, _T2, _T3], _S], __iter1: Iterable[_T1], __iter2: Iterable[_T2], __iter3: Iterable[_T3] + ) -> None: ... + @overload + def __init__( + self, + __func: Callable[[_T1, _T2, _T3, _T4], _S], + __iter1: Iterable[_T1], + __iter2: Iterable[_T2], + __iter3: Iterable[_T3], + __iter4: Iterable[_T4], + ) -> None: ... + @overload + def __init__( + self, + __func: Callable[[_T1, _T2, _T3, _T4, _T5], _S], + __iter1: Iterable[_T1], + __iter2: Iterable[_T2], + __iter3: Iterable[_T3], + __iter4: Iterable[_T4], + __iter5: Iterable[_T5], + ) -> None: ... + @overload + def __init__( + self, + __func: Callable[..., _S], + __iter1: Iterable[Any], + __iter2: Iterable[Any], + __iter3: Iterable[Any], + __iter4: Iterable[Any], + __iter5: Iterable[Any], + __iter6: Iterable[Any], + *iterables: Iterable[Any], + ) -> None: ... + def __iter__(self: Self) -> Self: ... + def __next__(self) -> _S: ... + +@overload +def max( + __arg1: SupportsRichComparisonT, __arg2: SupportsRichComparisonT, *_args: SupportsRichComparisonT, key: None = ... +) -> SupportsRichComparisonT: ... +@overload +def max(__arg1: _T, __arg2: _T, *_args: _T, key: Callable[[_T], SupportsRichComparison]) -> _T: ... +@overload +def max(__iterable: Iterable[SupportsRichComparisonT], *, key: None = ...) -> SupportsRichComparisonT: ... +@overload +def max(__iterable: Iterable[_T], *, key: Callable[[_T], SupportsRichComparison]) -> _T: ... +@overload +def max(__iterable: Iterable[SupportsRichComparisonT], *, key: None = ..., default: _T) -> SupportsRichComparisonT | _T: ... +@overload +def max(__iterable: Iterable[_T1], *, key: Callable[[_T1], SupportsRichComparison], default: _T2) -> _T1 | _T2: ... +@overload +def min( + __arg1: SupportsRichComparisonT, __arg2: SupportsRichComparisonT, *_args: SupportsRichComparisonT, key: None = ... +) -> SupportsRichComparisonT: ... +@overload +def min(__arg1: _T, __arg2: _T, *_args: _T, key: Callable[[_T], SupportsRichComparison]) -> _T: ... +@overload +def min(__iterable: Iterable[SupportsRichComparisonT], *, key: None = ...) -> SupportsRichComparisonT: ... +@overload +def min(__iterable: Iterable[_T], *, key: Callable[[_T], SupportsRichComparison]) -> _T: ... +@overload +def min(__iterable: Iterable[SupportsRichComparisonT], *, key: None = ..., default: _T) -> SupportsRichComparisonT | _T: ... +@overload +def min(__iterable: Iterable[_T1], *, key: Callable[[_T1], SupportsRichComparison], default: _T2) -> _T1 | _T2: ... +@overload +def next(__i: SupportsNext[_T]) -> _T: ... 
+@overload +def next(__i: SupportsNext[_T], __default: _VT) -> _T | _VT: ... +def oct(__number: int | SupportsIndex) -> str: ... + +_OpenFile = StrOrBytesPath | int # noqa: Y026 # TODO: Use TypeAlias once mypy bugs are fixed +_Opener: TypeAlias = Callable[[str, int], int] + +# Text mode: always returns a TextIOWrapper +@overload +def open( + file: _OpenFile, + mode: OpenTextMode = ..., + buffering: int = ..., + encoding: str | None = ..., + errors: str | None = ..., + newline: str | None = ..., + closefd: bool = ..., + opener: _Opener | None = ..., +) -> TextIOWrapper: ... + +# Unbuffered binary mode: returns a FileIO +@overload +def open( + file: _OpenFile, + mode: OpenBinaryMode, + buffering: Literal[0], + encoding: None = ..., + errors: None = ..., + newline: None = ..., + closefd: bool = ..., + opener: _Opener | None = ..., +) -> FileIO: ... + +# Buffering is on: return BufferedRandom, BufferedReader, or BufferedWriter +@overload +def open( + file: _OpenFile, + mode: OpenBinaryModeUpdating, + buffering: Literal[-1, 1] = ..., + encoding: None = ..., + errors: None = ..., + newline: None = ..., + closefd: bool = ..., + opener: _Opener | None = ..., +) -> BufferedRandom: ... +@overload +def open( + file: _OpenFile, + mode: OpenBinaryModeWriting, + buffering: Literal[-1, 1] = ..., + encoding: None = ..., + errors: None = ..., + newline: None = ..., + closefd: bool = ..., + opener: _Opener | None = ..., +) -> BufferedWriter: ... +@overload +def open( + file: _OpenFile, + mode: OpenBinaryModeReading, + buffering: Literal[-1, 1] = ..., + encoding: None = ..., + errors: None = ..., + newline: None = ..., + closefd: bool = ..., + opener: _Opener | None = ..., +) -> BufferedReader: ... + +# Buffering cannot be determined: fall back to BinaryIO +@overload +def open( + file: _OpenFile, + mode: OpenBinaryMode, + buffering: int = ..., + encoding: None = ..., + errors: None = ..., + newline: None = ..., + closefd: bool = ..., + opener: _Opener | None = ..., +) -> BinaryIO: ... + +# Fallback if mode is not specified +@overload +def open( + file: _OpenFile, + mode: str, + buffering: int = ..., + encoding: str | None = ..., + errors: str | None = ..., + newline: str | None = ..., + closefd: bool = ..., + opener: _Opener | None = ..., +) -> IO[Any]: ... +def ord(__c: str | bytes | bytearray) -> int: ... + +class _SupportsWriteAndFlush(SupportsWrite[_T_contra], Protocol[_T_contra]): + def flush(self) -> None: ... + +@overload +def print( + *values: object, + sep: str | None = ..., + end: str | None = ..., + file: SupportsWrite[str] | None = ..., + flush: Literal[False] = ..., +) -> None: ... +@overload +def print( + *values: object, sep: str | None = ..., end: str | None = ..., file: _SupportsWriteAndFlush[str] | None = ..., flush: bool +) -> None: ... + +_E = TypeVar("_E", contravariant=True) +_M = TypeVar("_M", contravariant=True) + +class _SupportsPow2(Protocol[_E, _T_co]): + def __pow__(self, __other: _E) -> _T_co: ... + +class _SupportsPow3NoneOnly(Protocol[_E, _T_co]): + def __pow__(self, __other: _E, __modulo: None = ...) -> _T_co: ... + +class _SupportsPow3(Protocol[_E, _M, _T_co]): + def __pow__(self, __other: _E, __modulo: _M) -> _T_co: ... 
+ +_SupportsSomeKindOfPow = ( # noqa: Y026 # TODO: Use TypeAlias once mypy bugs are fixed + _SupportsPow2[Any, Any] | _SupportsPow3NoneOnly[Any, Any] | _SupportsPow3[Any, Any, Any] +) + +if sys.version_info >= (3, 8): + # TODO: `pow(int, int, Literal[0])` fails at runtime, + # but adding a `NoReturn` overload isn't a good solution for expressing that (see #8566). + @overload + def pow(base: int, exp: int, mod: int) -> int: ... + @overload + def pow(base: int, exp: Literal[0], mod: None = ...) -> Literal[1]: ... # type: ignore[misc] + @overload + def pow(base: int, exp: _PositiveInteger, mod: None = ...) -> int: ... # type: ignore[misc] + @overload + def pow(base: int, exp: _NegativeInteger, mod: None = ...) -> float: ... # type: ignore[misc] + # int base & positive-int exp -> int; int base & negative-int exp -> float + # return type must be Any as `int | float` causes too many false-positive errors + @overload + def pow(base: int, exp: int, mod: None = ...) -> Any: ... + @overload + def pow(base: _PositiveInteger, exp: float, mod: None = ...) -> float: ... + @overload + def pow(base: _NegativeInteger, exp: float, mod: None = ...) -> complex: ... + @overload + def pow(base: float, exp: int, mod: None = ...) -> float: ... + # float base & float exp could return float or complex + # return type must be Any (same as complex base, complex exp), + # as `float | complex` causes too many false-positive errors + @overload + def pow(base: float, exp: complex | _SupportsSomeKindOfPow, mod: None = ...) -> Any: ... + @overload + def pow(base: complex, exp: complex | _SupportsSomeKindOfPow, mod: None = ...) -> complex: ... + @overload + def pow(base: _SupportsPow2[_E, _T_co], exp: _E, mod: None = ...) -> _T_co: ... + @overload + def pow(base: _SupportsPow3NoneOnly[_E, _T_co], exp: _E, mod: None = ...) -> _T_co: ... + @overload + def pow(base: _SupportsPow3[_E, _M, _T_co], exp: _E, mod: _M = ...) -> _T_co: ... + @overload + def pow(base: _SupportsSomeKindOfPow, exp: float, mod: None = ...) -> Any: ... + @overload + def pow(base: _SupportsSomeKindOfPow, exp: complex, mod: None = ...) -> complex: ... + +else: + @overload + def pow(__base: int, __exp: int, __mod: int) -> int: ... + @overload + def pow(__base: int, __exp: Literal[0], __mod: None = ...) -> Literal[1]: ... # type: ignore[misc] + @overload + def pow(__base: int, __exp: _PositiveInteger, __mod: None = ...) -> int: ... # type: ignore[misc] + @overload + def pow(__base: int, __exp: _NegativeInteger, __mod: None = ...) -> float: ... # type: ignore[misc] + @overload + def pow(__base: int, __exp: int, __mod: None = ...) -> Any: ... + @overload + def pow(__base: _PositiveInteger, __exp: float, __mod: None = ...) -> float: ... + @overload + def pow(__base: _NegativeInteger, __exp: float, __mod: None = ...) -> complex: ... + @overload + def pow(__base: float, __exp: int, __mod: None = ...) -> float: ... + @overload + def pow(__base: float, __exp: complex | _SupportsSomeKindOfPow, __mod: None = ...) -> Any: ... + @overload + def pow(__base: complex, __exp: complex | _SupportsSomeKindOfPow, __mod: None = ...) -> complex: ... + @overload + def pow(__base: _SupportsPow2[_E, _T_co], __exp: _E, __mod: None = ...) -> _T_co: ... + @overload + def pow(__base: _SupportsPow3NoneOnly[_E, _T_co], __exp: _E, __mod: None = ...) -> _T_co: ... + @overload + def pow(__base: _SupportsPow3[_E, _M, _T_co], __exp: _E, __mod: _M = ...) -> _T_co: ... + @overload + def pow(__base: _SupportsSomeKindOfPow, __exp: float, __mod: None = ...) -> Any: ... 
+ @overload + def pow(__base: _SupportsSomeKindOfPow, __exp: complex, __mod: None = ...) -> complex: ... +# JV Patch +def quit(code: stdlib.sys._ExitCode = ...) -> NoReturn: ... +# JV Patch +class reversed(Iterator[_T], Generic[_T]): + @overload + def __init__(self, __sequence: Reversible[_T]) -> None: ... + @overload + def __init__(self, __sequence: SupportsLenAndGetItem[_T]) -> None: ... + def __iter__(self: Self) -> Self: ... + def __next__(self) -> _T: ... + def __length_hint__(self) -> int: ... + +def repr(__obj: object) -> str: ... + +# See https://github.com/python/typeshed/pull/9141 +# and https://github.com/python/typeshed/pull/9151 +# on why we don't use `SupportsRound` from `typing.pyi` + +class _SupportsRound1(Protocol[_T_co]): + def __round__(self) -> _T_co: ... + +class _SupportsRound2(Protocol[_T_co]): + def __round__(self, __ndigits: int) -> _T_co: ... + +@overload +def round(number: _SupportsRound1[_T], ndigits: None = ...) -> _T: ... +@overload +def round(number: _SupportsRound2[_T], ndigits: SupportsIndex) -> _T: ... + +# See https://github.com/python/typeshed/pull/6292#discussion_r748875189 +# for why arg 3 of `setattr` should be annotated with `Any` and not `object` +def setattr(__obj: object, __name: str, __value: Any) -> None: ... +@overload +def sorted( + __iterable: Iterable[SupportsRichComparisonT], *, key: None = ..., reverse: bool = ... +) -> list[SupportsRichComparisonT]: ... +@overload +def sorted(__iterable: Iterable[_T], *, key: Callable[[_T], SupportsRichComparison], reverse: bool = ...) -> list[_T]: ... + +_AddableT1 = TypeVar("_AddableT1", bound=SupportsAdd[Any, Any]) +_AddableT2 = TypeVar("_AddableT2", bound=SupportsAdd[Any, Any]) + +class _SupportsSumWithNoDefaultGiven(SupportsAdd[Any, Any], SupportsRAdd[int, Any], Protocol): ... + +_SupportsSumNoDefaultT = TypeVar("_SupportsSumNoDefaultT", bound=_SupportsSumWithNoDefaultGiven) + +# In general, the return type of `x + x` is *not* guaranteed to be the same type as x. +# However, we can't express that in the stub for `sum()` +# without creating many false-positive errors (see #7578). +# Instead, we special-case the most common examples of this: bool and literal integers. +if sys.version_info >= (3, 8): + @overload + def sum(__iterable: Iterable[bool | _LiteralInteger], start: int = ...) -> int: ... # type: ignore[misc] + +else: + @overload + def sum(__iterable: Iterable[bool | _LiteralInteger], __start: int = ...) -> int: ... # type: ignore[misc] + +@overload +def sum(__iterable: Iterable[_SupportsSumNoDefaultT]) -> _SupportsSumNoDefaultT | Literal[0]: ... + +if sys.version_info >= (3, 8): + @overload + def sum(__iterable: Iterable[_AddableT1], start: _AddableT2) -> _AddableT1 | _AddableT2: ... + +else: + @overload + def sum(__iterable: Iterable[_AddableT1], __start: _AddableT2) -> _AddableT1 | _AddableT2: ... + +# The argument to `vars()` has to have a `__dict__` attribute, so the second overload can't be annotated with `object` +# (A "SupportsDunderDict" protocol doesn't work) +# Use a type: ignore to make complaints about overlapping overloads go away +@overload +def vars(__object: type) -> types.MappingProxyType[str, Any]: ... # type: ignore[misc] +@overload +def vars(__object: Any = ...) -> dict[str, Any]: ... + +class zip(Iterator[_T_co], Generic[_T_co]): + if sys.version_info >= (3, 10): + @overload + def __new__(cls, __iter1: Iterable[_T1], *, strict: bool = ...) -> zip[tuple[_T1]]: ... + @overload + def __new__(cls, __iter1: Iterable[_T1], __iter2: Iterable[_T2], *, strict: bool = ...) 
-> zip[tuple[_T1, _T2]]: ... + @overload + def __new__( + cls, __iter1: Iterable[_T1], __iter2: Iterable[_T2], __iter3: Iterable[_T3], *, strict: bool = ... + ) -> zip[tuple[_T1, _T2, _T3]]: ... + @overload + def __new__( + cls, + __iter1: Iterable[_T1], + __iter2: Iterable[_T2], + __iter3: Iterable[_T3], + __iter4: Iterable[_T4], + *, + strict: bool = ..., + ) -> zip[tuple[_T1, _T2, _T3, _T4]]: ... + @overload + def __new__( + cls, + __iter1: Iterable[_T1], + __iter2: Iterable[_T2], + __iter3: Iterable[_T3], + __iter4: Iterable[_T4], + __iter5: Iterable[_T5], + *, + strict: bool = ..., + ) -> zip[tuple[_T1, _T2, _T3, _T4, _T5]]: ... + @overload + def __new__( + cls, + __iter1: Iterable[Any], + __iter2: Iterable[Any], + __iter3: Iterable[Any], + __iter4: Iterable[Any], + __iter5: Iterable[Any], + __iter6: Iterable[Any], + *iterables: Iterable[Any], + strict: bool = ..., + ) -> zip[tuple[Any, ...]]: ... + else: + @overload + def __new__(cls, __iter1: Iterable[_T1]) -> zip[tuple[_T1]]: ... + @overload + def __new__(cls, __iter1: Iterable[_T1], __iter2: Iterable[_T2]) -> zip[tuple[_T1, _T2]]: ... + @overload + def __new__(cls, __iter1: Iterable[_T1], __iter2: Iterable[_T2], __iter3: Iterable[_T3]) -> zip[tuple[_T1, _T2, _T3]]: ... + @overload + def __new__( + cls, __iter1: Iterable[_T1], __iter2: Iterable[_T2], __iter3: Iterable[_T3], __iter4: Iterable[_T4] + ) -> zip[tuple[_T1, _T2, _T3, _T4]]: ... + @overload + def __new__( + cls, + __iter1: Iterable[_T1], + __iter2: Iterable[_T2], + __iter3: Iterable[_T3], + __iter4: Iterable[_T4], + __iter5: Iterable[_T5], + ) -> zip[tuple[_T1, _T2, _T3, _T4, _T5]]: ... + @overload + def __new__( + cls, + __iter1: Iterable[Any], + __iter2: Iterable[Any], + __iter3: Iterable[Any], + __iter4: Iterable[Any], + __iter5: Iterable[Any], + __iter6: Iterable[Any], + *iterables: Iterable[Any], + ) -> zip[tuple[Any, ...]]: ... + + def __iter__(self: Self) -> Self: ... + def __next__(self) -> _T_co: ... + +# Signature of `builtins.__import__` should be kept identical to `importlib.__import__` +# Return type of `__import__` should be kept the same as return type of `importlib.import_module` +def __import__( + name: str, + globals: Mapping[str, object] | None = ..., + locals: Mapping[str, object] | None = ..., + fromlist: Sequence[str] = ..., + level: int = ..., +) -> types.ModuleType: ... +def __build_class__(__func: Callable[[], _Cell | Any], __name: str, *bases: Any, metaclass: Any = ..., **kwds: Any) -> Any: ... + +# Actually the type of Ellipsis is , but since it's +# not exposed anywhere under that name, we make it private here. +@final +@type_check_only +class ellipsis: ... + +Ellipsis: ellipsis + +class BaseException: + args: tuple[Any, ...] + __cause__: BaseException | None + __context__: BaseException | None + __suppress_context__: bool + __traceback__: TracebackType | None + def __init__(self, *args: object) -> None: ... + def __setstate__(self, __state: dict[str, Any] | None) -> None: ... + def with_traceback(self: Self, __tb: TracebackType | None) -> Self: ... + if sys.version_info >= (3, 11): + # only present after add_note() is called + __notes__: list[str] + def add_note(self, __note: str) -> None: ... + +class GeneratorExit(BaseException): ... +class KeyboardInterrupt(BaseException): ... + +class SystemExit(BaseException): +# JV Patch + code: stdlib.sys._ExitCode +# JV Patch end + +class Exception(BaseException): ... 
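+# Illustrative (not part of the stub): `add_note`/`__notes__` (Python 3.11+), declared
+# on BaseException above, attach extra context that tracebacks print after the message.
+# The note text below is just an example string.
+#     try:
+#         int("nope")
+#     except ValueError as e:
+#         e.add_note("while parsing the device config")
+#         raise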
+ +class StopIteration(Exception): + value: Any + +class OSError(Exception): + errno: int + strerror: str + # filename, filename2 are actually str | bytes | None + filename: Any + filename2: Any + if sys.platform == "win32": + winerror: int + +EnvironmentError = OSError +IOError = OSError +if sys.platform == "win32": + WindowsError = OSError + +class ArithmeticError(Exception): ... +class AssertionError(Exception): ... + +class AttributeError(Exception): + if sys.version_info >= (3, 10): + def __init__(self, *args: object, name: str | None = ..., obj: object = ...) -> None: ... + name: str + obj: object + +class BufferError(Exception): ... +class EOFError(Exception): ... + +class ImportError(Exception): + def __init__(self, *args: object, name: str | None = ..., path: str | None = ...) -> None: ... + name: str | None + path: str | None + msg: str # undocumented + +class LookupError(Exception): ... +class MemoryError(Exception): ... + +class NameError(Exception): + if sys.version_info >= (3, 10): + name: str + +class ReferenceError(Exception): ... +class RuntimeError(Exception): ... + +class StopAsyncIteration(Exception): + value: Any + +class SyntaxError(Exception): + msg: str + lineno: int | None + offset: int | None + text: str | None + filename: str | None + if sys.version_info >= (3, 10): + end_lineno: int | None + end_offset: int | None + +class SystemError(Exception): ... +class TypeError(Exception): ... +class ValueError(Exception): ... +class FloatingPointError(ArithmeticError): ... +class OverflowError(ArithmeticError): ... +class ZeroDivisionError(ArithmeticError): ... +class ModuleNotFoundError(ImportError): ... +class IndexError(LookupError): ... +class KeyError(LookupError): ... +class UnboundLocalError(NameError): ... + +class BlockingIOError(OSError): + characters_written: int + +class ChildProcessError(OSError): ... +class ConnectionError(OSError): ... +class BrokenPipeError(ConnectionError): ... +class ConnectionAbortedError(ConnectionError): ... +class ConnectionRefusedError(ConnectionError): ... +class ConnectionResetError(ConnectionError): ... +class FileExistsError(OSError): ... +class FileNotFoundError(OSError): ... +class InterruptedError(OSError): ... +class IsADirectoryError(OSError): ... +class NotADirectoryError(OSError): ... +class PermissionError(OSError): ... +class ProcessLookupError(OSError): ... +class TimeoutError(OSError): ... +class NotImplementedError(RuntimeError): ... +class RecursionError(RuntimeError): ... +class IndentationError(SyntaxError): ... +class TabError(IndentationError): ... +class UnicodeError(ValueError): ... + +class UnicodeDecodeError(UnicodeError): + encoding: str + object: bytes + start: int + end: int + reason: str + def __init__(self, __encoding: str, __object: ReadableBuffer, __start: int, __end: int, __reason: str) -> None: ... + +class UnicodeEncodeError(UnicodeError): + encoding: str + object: str + start: int + end: int + reason: str + def __init__(self, __encoding: str, __object: str, __start: int, __end: int, __reason: str) -> None: ... + +class UnicodeTranslateError(UnicodeError): + encoding: None + object: str + start: int + end: int + reason: str + def __init__(self, __object: str, __start: int, __end: int, __reason: str) -> None: ... + +class Warning(Exception): ... +class UserWarning(Warning): ... +class DeprecationWarning(Warning): ... +class SyntaxWarning(Warning): ... +class RuntimeWarning(Warning): ... +class FutureWarning(Warning): ... +class PendingDeprecationWarning(Warning): ... 
+class ImportWarning(Warning): ... +class UnicodeWarning(Warning): ... +class BytesWarning(Warning): ... +class ResourceWarning(Warning): ... + +if sys.version_info >= (3, 10): + class EncodingWarning(Warning): ... + +if sys.version_info >= (3, 11): + _BaseExceptionT_co = TypeVar("_BaseExceptionT_co", bound=BaseException, covariant=True) + _BaseExceptionT = TypeVar("_BaseExceptionT", bound=BaseException) + _ExceptionT_co = TypeVar("_ExceptionT_co", bound=Exception, covariant=True) + _ExceptionT = TypeVar("_ExceptionT", bound=Exception) + + class BaseExceptionGroup(BaseException, Generic[_BaseExceptionT_co]): + def __new__(cls: type[Self], __message: str, __exceptions: Sequence[_BaseExceptionT_co]) -> Self: ... + @property + def message(self) -> str: ... + @property + def exceptions(self) -> tuple[_BaseExceptionT_co | BaseExceptionGroup[_BaseExceptionT_co], ...]: ... + @overload + def subgroup( + self, __condition: type[_BaseExceptionT] | tuple[type[_BaseExceptionT], ...] + ) -> BaseExceptionGroup[_BaseExceptionT] | None: ... + @overload + def subgroup(self: Self, __condition: Callable[[_BaseExceptionT_co], bool]) -> Self | None: ... + @overload + def split( + self: Self, __condition: type[_BaseExceptionT] | tuple[type[_BaseExceptionT], ...] + ) -> tuple[BaseExceptionGroup[_BaseExceptionT] | None, Self | None]: ... + @overload + def split(self: Self, __condition: Callable[[_BaseExceptionT_co], bool]) -> tuple[Self | None, Self | None]: ... + def derive(self: Self, __excs: Sequence[_BaseExceptionT_co]) -> Self: ... + def __class_getitem__(cls, __item: Any) -> GenericAlias: ... + + class ExceptionGroup(BaseExceptionGroup[_ExceptionT_co], Exception): + def __new__(cls: type[Self], __message: str, __exceptions: Sequence[_ExceptionT_co]) -> Self: ... + @property + def exceptions(self) -> tuple[_ExceptionT_co | ExceptionGroup[_ExceptionT_co], ...]: ... + # We accept a narrower type, but that's OK. + @overload # type: ignore[override] + def subgroup( + self, __condition: type[_ExceptionT] | tuple[type[_ExceptionT], ...] + ) -> ExceptionGroup[_ExceptionT] | None: ... + @overload + def subgroup(self: Self, __condition: Callable[[_ExceptionT_co], bool]) -> Self | None: ... + @overload # type: ignore[override] + def split( + self: Self, __condition: type[_ExceptionT] | tuple[type[_ExceptionT], ...] + ) -> tuple[ExceptionGroup[_ExceptionT] | None, Self | None]: ... + @overload + def split(self: Self, __condition: Callable[[_ExceptionT_co], bool]) -> tuple[Self | None, Self | None]: ... 
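+# Illustrative (not part of the stub): how the `subgroup`/`split` overloads above
+# behave at runtime on Python 3.11+.
+#     eg = ExceptionGroup("batch failed", [ValueError("v"), TypeError("t")])
+#     eg.subgroup(ValueError)            # ExceptionGroup containing only ValueError("v")
+#     match, rest = eg.split(TypeError)  # match wraps TypeError("t"), rest wraps
+#                                        # ValueError("v"); either side may be None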
diff --git a/.vscode/Pico-W-Stub/stdlib/codecs.pyi b/.vscode/Pico-W-Stub/stdlib/codecs.pyi new file mode 100644 index 0000000..cd6ac00 --- /dev/null +++ b/.vscode/Pico-W-Stub/stdlib/codecs.pyi @@ -0,0 +1,277 @@ +import types +from _codecs import * +from _typeshed import ReadableBuffer, Self +from abc import abstractmethod +from collections.abc import Callable, Generator, Iterable +from typing import Any, BinaryIO, Protocol, TextIO +from typing_extensions import Literal + +__all__ = [ + "register", + "lookup", + "open", + "EncodedFile", + "BOM", + "BOM_BE", + "BOM_LE", + "BOM32_BE", + "BOM32_LE", + "BOM64_BE", + "BOM64_LE", + "BOM_UTF8", + "BOM_UTF16", + "BOM_UTF16_LE", + "BOM_UTF16_BE", + "BOM_UTF32", + "BOM_UTF32_LE", + "BOM_UTF32_BE", + "CodecInfo", + "Codec", + "IncrementalEncoder", + "IncrementalDecoder", + "StreamReader", + "StreamWriter", + "StreamReaderWriter", + "StreamRecoder", + "getencoder", + "getdecoder", + "getincrementalencoder", + "getincrementaldecoder", + "getreader", + "getwriter", + "encode", + "decode", + "iterencode", + "iterdecode", + "strict_errors", + "ignore_errors", + "replace_errors", + "xmlcharrefreplace_errors", + "backslashreplace_errors", + "namereplace_errors", + "register_error", + "lookup_error", +] + +BOM32_BE: Literal[b"\xfe\xff"] +BOM32_LE: Literal[b"\xff\xfe"] +BOM64_BE: Literal[b"\x00\x00\xfe\xff"] +BOM64_LE: Literal[b"\xff\xfe\x00\x00"] + +class _WritableStream(Protocol): + def write(self, __data: bytes) -> object: ... + def seek(self, __offset: int, __whence: int) -> object: ... + def close(self) -> object: ... + +class _ReadableStream(Protocol): + def read(self, __size: int = ...) -> bytes: ... + def seek(self, __offset: int, __whence: int) -> object: ... + def close(self) -> object: ... + +class _Stream(_WritableStream, _ReadableStream, Protocol): ... + +# TODO: this only satisfies the most common interface, where +# bytes is the raw form and str is the cooked form. +# In the long run, both should become template parameters maybe? +# There *are* bytes->bytes and str->str encodings in the standard library. +# They were much more common in Python 2 than in Python 3. + +class _Encoder(Protocol): + def __call__(self, input: str, errors: str = ...) -> tuple[bytes, int]: ... # signature of Codec().encode + +class _Decoder(Protocol): + def __call__(self, input: bytes, errors: str = ...) -> tuple[str, int]: ... # signature of Codec().decode + +class _StreamReader(Protocol): + def __call__(self, stream: _ReadableStream, errors: str = ...) -> StreamReader: ... + +class _StreamWriter(Protocol): + def __call__(self, stream: _WritableStream, errors: str = ...) -> StreamWriter: ... + +class _IncrementalEncoder(Protocol): + def __call__(self, errors: str = ...) -> IncrementalEncoder: ... + +class _IncrementalDecoder(Protocol): + def __call__(self, errors: str = ...) -> IncrementalDecoder: ... + +class CodecInfo(tuple[_Encoder, _Decoder, _StreamReader, _StreamWriter]): + @property + def encode(self) -> _Encoder: ... + @property + def decode(self) -> _Decoder: ... + @property + def streamreader(self) -> _StreamReader: ... + @property + def streamwriter(self) -> _StreamWriter: ... + @property + def incrementalencoder(self) -> _IncrementalEncoder: ... + @property + def incrementaldecoder(self) -> _IncrementalDecoder: ... 
+ name: str + def __new__( + cls: type[Self], + encode: _Encoder, + decode: _Decoder, + streamreader: _StreamReader | None = ..., + streamwriter: _StreamWriter | None = ..., + incrementalencoder: _IncrementalEncoder | None = ..., + incrementaldecoder: _IncrementalDecoder | None = ..., + name: str | None = ..., + *, + _is_text_encoding: bool | None = ..., + ) -> Self: ... + +def getencoder(encoding: str) -> _Encoder: ... +def getdecoder(encoding: str) -> _Decoder: ... +def getincrementalencoder(encoding: str) -> _IncrementalEncoder: ... +def getincrementaldecoder(encoding: str) -> _IncrementalDecoder: ... +def getreader(encoding: str) -> _StreamReader: ... +def getwriter(encoding: str) -> _StreamWriter: ... +def open( + filename: str, mode: str = ..., encoding: str | None = ..., errors: str = ..., buffering: int = ... +) -> StreamReaderWriter: ... +def EncodedFile(file: _Stream, data_encoding: str, file_encoding: str | None = ..., errors: str = ...) -> StreamRecoder: ... +def iterencode(iterator: Iterable[str], encoding: str, errors: str = ...) -> Generator[bytes, None, None]: ... +def iterdecode(iterator: Iterable[bytes], encoding: str, errors: str = ...) -> Generator[str, None, None]: ... + +BOM: Literal[b"\xff\xfe", b"\xfe\xff"] # depends on `sys.byteorder` +BOM_BE: Literal[b"\xfe\xff"] +BOM_LE: Literal[b"\xff\xfe"] +BOM_UTF8: Literal[b"\xef\xbb\xbf"] +BOM_UTF16: Literal[b"\xff\xfe", b"\xfe\xff"] # depends on `sys.byteorder` +BOM_UTF16_BE: Literal[b"\xfe\xff"] +BOM_UTF16_LE: Literal[b"\xff\xfe"] +BOM_UTF32: Literal[b"\xff\xfe\x00\x00", b"\x00\x00\xfe\xff"] # depends on `sys.byteorder` +BOM_UTF32_BE: Literal[b"\x00\x00\xfe\xff"] +BOM_UTF32_LE: Literal[b"\xff\xfe\x00\x00"] + +def strict_errors(exception: UnicodeError) -> tuple[str | bytes, int]: ... +def replace_errors(exception: UnicodeError) -> tuple[str | bytes, int]: ... +def ignore_errors(exception: UnicodeError) -> tuple[str | bytes, int]: ... +def xmlcharrefreplace_errors(exception: UnicodeError) -> tuple[str | bytes, int]: ... +def backslashreplace_errors(exception: UnicodeError) -> tuple[str | bytes, int]: ... +def namereplace_errors(exception: UnicodeError) -> tuple[str | bytes, int]: ... + +class Codec: + # These are sort of @abstractmethod but sort of not. + # The StreamReader and StreamWriter subclasses only implement one. + def encode(self, input: str, errors: str = ...) -> tuple[bytes, int]: ... + def decode(self, input: bytes, errors: str = ...) -> tuple[str, int]: ... + +class IncrementalEncoder: + errors: str + def __init__(self, errors: str = ...) -> None: ... + @abstractmethod + def encode(self, input: str, final: bool = ...) -> bytes: ... + def reset(self) -> None: ... + # documentation says int but str is needed for the subclass. + def getstate(self) -> int | str: ... + def setstate(self, state: int | str) -> None: ... + +class IncrementalDecoder: + errors: str + def __init__(self, errors: str = ...) -> None: ... + @abstractmethod + def decode(self, input: ReadableBuffer, final: bool = ...) -> str: ... + def reset(self) -> None: ... + def getstate(self) -> tuple[bytes, int]: ... + def setstate(self, state: tuple[bytes, int]) -> None: ... + +# These are not documented but used in encodings/*.py implementations. +class BufferedIncrementalEncoder(IncrementalEncoder): + buffer: str + def __init__(self, errors: str = ...) -> None: ... + @abstractmethod + def _buffer_encode(self, input: str, errors: str, final: bool) -> bytes: ... + def encode(self, input: str, final: bool = ...) -> bytes: ... 
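+# Illustrative (not part of the stub): typical use of the incremental-decoder API
+# declared above, assuming the CPython "utf-8" codec is registered.
+#     import codecs
+#     dec = codecs.getincrementaldecoder("utf-8")()
+#     chunks = [b"\xf0\x9f\x90", b"\x8d"]          # one code point split across reads
+#     text = "".join(dec.decode(c) for c in chunks) + dec.decode(b"", final=True)
+#     # text == "\U0001f40d"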
+ +class BufferedIncrementalDecoder(IncrementalDecoder): + buffer: bytes + def __init__(self, errors: str = ...) -> None: ... + @abstractmethod + def _buffer_decode(self, input: ReadableBuffer, errors: str, final: bool) -> tuple[str, int]: ... + def decode(self, input: ReadableBuffer, final: bool = ...) -> str: ... + +# TODO: it is not possible to specify the requirement that all other +# attributes and methods are passed-through from the stream. +class StreamWriter(Codec): + stream: _WritableStream + errors: str + def __init__(self, stream: _WritableStream, errors: str = ...) -> None: ... + def write(self, object: str) -> None: ... + def writelines(self, list: Iterable[str]) -> None: ... + def reset(self) -> None: ... + def __enter__(self: Self) -> Self: ... + def __exit__(self, type: type[BaseException] | None, value: BaseException | None, tb: types.TracebackType | None) -> None: ... + def __getattr__(self, name: str, getattr: Callable[[str], Any] = ...) -> Any: ... + +class StreamReader(Codec): + stream: _ReadableStream + errors: str + def __init__(self, stream: _ReadableStream, errors: str = ...) -> None: ... + def read(self, size: int = ..., chars: int = ..., firstline: bool = ...) -> str: ... + def readline(self, size: int | None = ..., keepends: bool = ...) -> str: ... + def readlines(self, sizehint: int | None = ..., keepends: bool = ...) -> list[str]: ... + def reset(self) -> None: ... + def __enter__(self: Self) -> Self: ... + def __exit__(self, type: type[BaseException] | None, value: BaseException | None, tb: types.TracebackType | None) -> None: ... + def __iter__(self: Self) -> Self: ... + def __next__(self) -> str: ... + def __getattr__(self, name: str, getattr: Callable[[str], Any] = ...) -> Any: ... + +# Doesn't actually inherit from TextIO, but wraps a BinaryIO to provide text reading and writing +# and delegates attributes to the underlying binary stream with __getattr__. +class StreamReaderWriter(TextIO): + stream: _Stream + def __init__(self, stream: _Stream, Reader: _StreamReader, Writer: _StreamWriter, errors: str = ...) -> None: ... + def read(self, size: int = ...) -> str: ... + def readline(self, size: int | None = ...) -> str: ... + def readlines(self, sizehint: int | None = ...) -> list[str]: ... + def __next__(self) -> str: ... + def __iter__(self: Self) -> Self: ... + def write(self, data: str) -> None: ... # type: ignore[override] + def writelines(self, list: Iterable[str]) -> None: ... + def reset(self) -> None: ... + def seek(self, offset: int, whence: int = ...) -> None: ... # type: ignore[override] + def __enter__(self: Self) -> Self: ... + def __exit__(self, type: type[BaseException] | None, value: BaseException | None, tb: types.TracebackType | None) -> None: ... + def __getattr__(self, name: str) -> Any: ... + # These methods don't actually exist directly, but they are needed to satisfy the TextIO + # interface. At runtime, they are delegated through __getattr__. + def close(self) -> None: ... + def fileno(self) -> int: ... + def flush(self) -> None: ... + def isatty(self) -> bool: ... + def readable(self) -> bool: ... + def truncate(self, size: int | None = ...) -> int: ... + def seekable(self) -> bool: ... + def tell(self) -> int: ... + def writable(self) -> bool: ... + +class StreamRecoder(BinaryIO): + def __init__( + self, stream: _Stream, encode: _Encoder, decode: _Decoder, Reader: _StreamReader, Writer: _StreamWriter, errors: str = ... + ) -> None: ... + def read(self, size: int = ...) -> bytes: ... + def readline(self, size: int | None = ...) 
-> bytes: ... + def readlines(self, sizehint: int | None = ...) -> list[bytes]: ... + def __next__(self) -> bytes: ... + def __iter__(self: Self) -> Self: ... + def write(self, data: bytes) -> None: ... # type: ignore[override] + def writelines(self, list: Iterable[bytes]) -> None: ... + def reset(self) -> None: ... + def __getattr__(self, name: str) -> Any: ... + def __enter__(self: Self) -> Self: ... + def __exit__(self, type: type[BaseException] | None, value: BaseException | None, tb: types.TracebackType | None) -> None: ... + def seek(self, offset: int, whence: int = ...) -> None: ... # type: ignore[override] + # These methods don't actually exist directly, but they are needed to satisfy the BinaryIO + # interface. At runtime, they are delegated through __getattr__. + def close(self) -> None: ... + def fileno(self) -> int: ... + def flush(self) -> None: ... + def isatty(self) -> bool: ... + def readable(self) -> bool: ... + def truncate(self, size: int | None = ...) -> int: ... + def seekable(self) -> bool: ... + def tell(self) -> int: ... + def writable(self) -> bool: ... diff --git a/.vscode/Pico-W-Stub/stdlib/collections/__init__.pyi b/.vscode/Pico-W-Stub/stdlib/collections/__init__.pyi new file mode 100644 index 0000000..37505c2 --- /dev/null +++ b/.vscode/Pico-W-Stub/stdlib/collections/__init__.pyi @@ -0,0 +1,432 @@ +import sys +from _collections_abc import dict_items, dict_keys, dict_values +from _typeshed import Self, SupportsKeysAndGetItem, SupportsRichComparison, SupportsRichComparisonT +from typing import Any, Generic, NoReturn, TypeVar, overload +from typing_extensions import SupportsIndex, final + +if sys.version_info >= (3, 9): + from types import GenericAlias + +if sys.version_info >= (3, 10): + from collections.abc import ( + Callable, + ItemsView, + Iterable, + Iterator, + KeysView, + Mapping, + MutableMapping, + MutableSequence, + Reversible, + Sequence, + ValuesView, + ) +else: + from _collections_abc import * + +__all__ = ["ChainMap", "Counter", "OrderedDict", "UserDict", "UserList", "UserString", "defaultdict", "deque", "namedtuple"] + +_S = TypeVar("_S") +_T = TypeVar("_T") +_T1 = TypeVar("_T1") +_T2 = TypeVar("_T2") +_KT = TypeVar("_KT") +_VT = TypeVar("_VT") +_KT_co = TypeVar("_KT_co", covariant=True) +_VT_co = TypeVar("_VT_co", covariant=True) + +# namedtuple is special-cased in the type checker; the initializer is ignored. +def namedtuple( + typename: str, + field_names: str | Iterable[str], + *, + rename: bool = ..., + module: str | None = ..., + defaults: Iterable[Any] | None = ..., +) -> type[tuple[Any, ...]]: ... + +class UserDict(MutableMapping[_KT, _VT], Generic[_KT, _VT]): + data: dict[_KT, _VT] + # __init__ should be kept roughly in line with `dict.__init__`, which has the same semantics + @overload + def __init__(self, __dict: None = ...) -> None: ... + @overload + def __init__(self: UserDict[str, _VT], __dict: None = ..., **kwargs: _VT) -> None: ... + @overload + def __init__(self, __dict: SupportsKeysAndGetItem[_KT, _VT]) -> None: ... + @overload + def __init__(self: UserDict[str, _VT], __dict: SupportsKeysAndGetItem[str, _VT], **kwargs: _VT) -> None: ... + @overload + def __init__(self, __iterable: Iterable[tuple[_KT, _VT]]) -> None: ... + @overload + def __init__(self: UserDict[str, _VT], __iterable: Iterable[tuple[str, _VT]], **kwargs: _VT) -> None: ... + @overload + def __init__(self: UserDict[str, str], __iterable: Iterable[list[str]]) -> None: ... + def __len__(self) -> int: ... + def __getitem__(self, key: _KT) -> _VT: ... 
+ def __setitem__(self, key: _KT, item: _VT) -> None: ... + def __delitem__(self, key: _KT) -> None: ... + def __iter__(self) -> Iterator[_KT]: ... + def __contains__(self, key: object) -> bool: ... + def copy(self: Self) -> Self: ... + def __copy__(self: Self) -> Self: ... + + # `UserDict.fromkeys` has the same semantics as `dict.fromkeys`, so should be kept in line with `dict.fromkeys`. + # TODO: Much like `dict.fromkeys`, the true signature of `UserDict.fromkeys` is inexpressible in the current type system. + # See #3800 & https://github.com/python/typing/issues/548#issuecomment-683336963. + @classmethod + @overload + def fromkeys(cls, iterable: Iterable[_T], value: None = ...) -> UserDict[_T, Any | None]: ... + @classmethod + @overload + def fromkeys(cls, iterable: Iterable[_T], value: _S) -> UserDict[_T, _S]: ... + if sys.version_info >= (3, 9): + def __or__(self, other: UserDict[_T1, _T2] | dict[_T1, _T2]) -> UserDict[_KT | _T1, _VT | _T2]: ... + def __ror__(self, other: UserDict[_T1, _T2] | dict[_T1, _T2]) -> UserDict[_KT | _T1, _VT | _T2]: ... # type: ignore[misc] + # UserDict.__ior__ should be kept roughly in line with MutableMapping.update() + @overload # type: ignore[misc] + def __ior__(self: Self, other: SupportsKeysAndGetItem[_KT, _VT]) -> Self: ... + @overload + def __ior__(self: Self, other: Iterable[tuple[_KT, _VT]]) -> Self: ... + +class UserList(MutableSequence[_T]): + data: list[_T] + @overload + def __init__(self, initlist: None = ...) -> None: ... + @overload + def __init__(self, initlist: Iterable[_T]) -> None: ... + def __lt__(self, other: list[_T] | UserList[_T]) -> bool: ... + def __le__(self, other: list[_T] | UserList[_T]) -> bool: ... + def __gt__(self, other: list[_T] | UserList[_T]) -> bool: ... + def __ge__(self, other: list[_T] | UserList[_T]) -> bool: ... + def __eq__(self, other: object) -> bool: ... + def __contains__(self, item: object) -> bool: ... + def __len__(self) -> int: ... + @overload + def __getitem__(self, i: SupportsIndex) -> _T: ... + @overload + def __getitem__(self: Self, i: slice) -> Self: ... + @overload + def __setitem__(self, i: SupportsIndex, item: _T) -> None: ... + @overload + def __setitem__(self, i: slice, item: Iterable[_T]) -> None: ... + def __delitem__(self, i: SupportsIndex | slice) -> None: ... + def __add__(self: Self, other: Iterable[_T]) -> Self: ... + def __radd__(self: Self, other: Iterable[_T]) -> Self: ... + def __iadd__(self: Self, other: Iterable[_T]) -> Self: ... + def __mul__(self: Self, n: int) -> Self: ... + def __rmul__(self: Self, n: int) -> Self: ... + def __imul__(self: Self, n: int) -> Self: ... + def append(self, item: _T) -> None: ... + def insert(self, i: int, item: _T) -> None: ... + def pop(self, i: int = ...) -> _T: ... + def remove(self, item: _T) -> None: ... + def copy(self: Self) -> Self: ... + def __copy__(self: Self) -> Self: ... + def count(self, item: _T) -> int: ... + # All arguments are passed to `list.index` at runtime, so the signature should be kept in line with `list.index`. + def index(self, item: _T, __start: SupportsIndex = ..., __stop: SupportsIndex = ...) -> int: ... + # All arguments are passed to `list.sort` at runtime, so the signature should be kept in line with `list.sort`. + @overload + def sort(self: UserList[SupportsRichComparisonT], *, key: None = ..., reverse: bool = ...) -> None: ... + @overload + def sort(self, *, key: Callable[[_T], SupportsRichComparison], reverse: bool = ...) -> None: ... + def extend(self, other: Iterable[_T]) -> None: ... 
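+# Illustrative (not part of the stub): the point of UserDict (defined above) is that
+# subclass hooks such as __setitem__ are honoured by every update path, unlike a plain
+# dict subclass; `LowerDict` is a made-up example class.
+#     class LowerDict(UserDict):
+#         def __setitem__(self, key, value):
+#             super().__setitem__(key.lower(), value)
+#     d = LowerDict(FOO=1)
+#     d["Bar"] = 2
+#     # list(d) == ["foo", "bar"]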
+ +class UserString(Sequence[UserString]): + data: str + def __init__(self, seq: object) -> None: ... + def __int__(self) -> int: ... + def __float__(self) -> float: ... + def __complex__(self) -> complex: ... + def __getnewargs__(self) -> tuple[str]: ... + def __lt__(self, string: str | UserString) -> bool: ... + def __le__(self, string: str | UserString) -> bool: ... + def __gt__(self, string: str | UserString) -> bool: ... + def __ge__(self, string: str | UserString) -> bool: ... + def __eq__(self, string: object) -> bool: ... + def __contains__(self, char: object) -> bool: ... + def __len__(self) -> int: ... + def __getitem__(self: Self, index: SupportsIndex | slice) -> Self: ... + def __iter__(self: Self) -> Iterator[Self]: ... + def __reversed__(self: Self) -> Iterator[Self]: ... + def __add__(self: Self, other: object) -> Self: ... + def __radd__(self: Self, other: object) -> Self: ... + def __mul__(self: Self, n: int) -> Self: ... + def __rmul__(self: Self, n: int) -> Self: ... + def __mod__(self: Self, args: Any) -> Self: ... + if sys.version_info >= (3, 8): + def __rmod__(self: Self, template: object) -> Self: ... + else: + def __rmod__(self: Self, format: Any) -> Self: ... + + def capitalize(self: Self) -> Self: ... + def casefold(self: Self) -> Self: ... + def center(self: Self, width: int, *args: Any) -> Self: ... + def count(self, sub: str | UserString, start: int = ..., end: int = ...) -> int: ... + if sys.version_info >= (3, 8): + def encode(self: UserString, encoding: str | None = ..., errors: str | None = ...) -> bytes: ... + else: + def encode(self: Self, encoding: str | None = ..., errors: str | None = ...) -> Self: ... + + def endswith(self, suffix: str | tuple[str, ...], start: int | None = ..., end: int | None = ...) -> bool: ... + def expandtabs(self: Self, tabsize: int = ...) -> Self: ... + def find(self, sub: str | UserString, start: int = ..., end: int = ...) -> int: ... + def format(self, *args: Any, **kwds: Any) -> str: ... + def format_map(self, mapping: Mapping[str, Any]) -> str: ... + def index(self, sub: str, start: int = ..., end: int = ...) -> int: ... + def isalpha(self) -> bool: ... + def isalnum(self) -> bool: ... + def isdecimal(self) -> bool: ... + def isdigit(self) -> bool: ... + def isidentifier(self) -> bool: ... + def islower(self) -> bool: ... + def isnumeric(self) -> bool: ... + def isprintable(self) -> bool: ... + def isspace(self) -> bool: ... + def istitle(self) -> bool: ... + def isupper(self) -> bool: ... + def isascii(self) -> bool: ... + def join(self, seq: Iterable[str]) -> str: ... + def ljust(self: Self, width: int, *args: Any) -> Self: ... + def lower(self: Self) -> Self: ... + def lstrip(self: Self, chars: str | None = ...) -> Self: ... + @staticmethod + @overload + def maketrans(x: dict[int, _T] | dict[str, _T] | dict[str | int, _T]) -> dict[int, _T]: ... + @staticmethod + @overload + def maketrans(x: str, y: str, z: str = ...) -> dict[int, int | None]: ... + def partition(self, sep: str) -> tuple[str, str, str]: ... + if sys.version_info >= (3, 9): + def removeprefix(self: Self, __prefix: str | UserString) -> Self: ... + def removesuffix(self: Self, __suffix: str | UserString) -> Self: ... + + def replace(self: Self, old: str | UserString, new: str | UserString, maxsplit: int = ...) -> Self: ... + def rfind(self, sub: str | UserString, start: int = ..., end: int = ...) -> int: ... + def rindex(self, sub: str | UserString, start: int = ..., end: int = ...) -> int: ... + def rjust(self: Self, width: int, *args: Any) -> Self: ... 
+ def rpartition(self, sep: str) -> tuple[str, str, str]: ... + def rstrip(self: Self, chars: str | None = ...) -> Self: ... + def split(self, sep: str | None = ..., maxsplit: int = ...) -> list[str]: ... + def rsplit(self, sep: str | None = ..., maxsplit: int = ...) -> list[str]: ... + def splitlines(self, keepends: bool = ...) -> list[str]: ... + def startswith(self, prefix: str | tuple[str, ...], start: int | None = ..., end: int | None = ...) -> bool: ... + def strip(self: Self, chars: str | None = ...) -> Self: ... + def swapcase(self: Self) -> Self: ... + def title(self: Self) -> Self: ... + def translate(self: Self, *args: Any) -> Self: ... + def upper(self: Self) -> Self: ... + def zfill(self: Self, width: int) -> Self: ... + +class deque(MutableSequence[_T], Generic[_T]): + @property + def maxlen(self) -> int | None: ... + @overload + def __init__(self, *, maxlen: int | None = ...) -> None: ... + @overload + def __init__(self, iterable: Iterable[_T], maxlen: int | None = ...) -> None: ... + def append(self, __x: _T) -> None: ... + def appendleft(self, __x: _T) -> None: ... + def copy(self: Self) -> Self: ... + def count(self, __x: _T) -> int: ... + def extend(self, __iterable: Iterable[_T]) -> None: ... + def extendleft(self, __iterable: Iterable[_T]) -> None: ... + def insert(self, __i: int, __x: _T) -> None: ... + def index(self, __x: _T, __start: int = ..., __stop: int = ...) -> int: ... + def pop(self) -> _T: ... # type: ignore[override] + def popleft(self) -> _T: ... + def remove(self, __value: _T) -> None: ... + def rotate(self, __n: int = ...) -> None: ... + def __copy__(self: Self) -> Self: ... + def __len__(self) -> int: ... + # These methods of deque don't take slices, unlike MutableSequence, hence the type: ignores + def __getitem__(self, __index: SupportsIndex) -> _T: ... # type: ignore[override] + def __setitem__(self, __i: SupportsIndex, __x: _T) -> None: ... # type: ignore[override] + def __delitem__(self, __i: SupportsIndex) -> None: ... # type: ignore[override] + def __contains__(self, __o: object) -> bool: ... + def __reduce__(self: Self) -> tuple[type[Self], tuple[()], None, Iterator[_T]]: ... + def __iadd__(self: Self, __iterable: Iterable[_T]) -> Self: ... + def __add__(self: Self, __other: Self) -> Self: ... + def __mul__(self: Self, __other: int) -> Self: ... + def __imul__(self: Self, __other: int) -> Self: ... + def __lt__(self, __other: deque[_T]) -> bool: ... + def __le__(self, __other: deque[_T]) -> bool: ... + def __gt__(self, __other: deque[_T]) -> bool: ... + def __ge__(self, __other: deque[_T]) -> bool: ... + if sys.version_info >= (3, 9): + def __class_getitem__(cls, __item: Any) -> GenericAlias: ... + +class Counter(dict[_T, int], Generic[_T]): + @overload + def __init__(self, __iterable: None = ...) -> None: ... + @overload + def __init__(self: Counter[str], __iterable: None = ..., **kwargs: int) -> None: ... + @overload + def __init__(self, __mapping: SupportsKeysAndGetItem[_T, int]) -> None: ... + @overload + def __init__(self, __iterable: Iterable[_T]) -> None: ... + def copy(self: Self) -> Self: ... + def elements(self) -> Iterator[_T]: ... + def most_common(self, n: int | None = ...) -> list[tuple[_T, int]]: ... + @classmethod + def fromkeys(cls, iterable: Any, v: int | None = ...) -> NoReturn: ... # type: ignore[override] + @overload + def subtract(self, __iterable: None = ...) -> None: ... + @overload + def subtract(self, __mapping: Mapping[_T, int]) -> None: ... + @overload + def subtract(self, __iterable: Iterable[_T]) -> None: ... 
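+    # Illustrative (not part of the stub): runtime behaviour behind the Counter
+    # annotations declared in this class.
+    #     c = Counter("abracadabra")
+    #     c.most_common(1)            # [('a', 5)]
+    #     c.subtract({"a": 3})
+    #     c["a"]                      # 2
+    #     Counter.fromkeys("abc")     # raises NotImplementedError (hence NoReturn above)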
+ # Unlike dict.update(), use Mapping instead of SupportsKeysAndGetItem for the first overload + # (source code does an `isinstance(other, Mapping)` check) + # + # The second overload is also deliberately different to dict.update() + # (if it were `Iterable[_T] | Iterable[tuple[_T, int]]`, + # the tuples would be added as keys, breaking type safety) + @overload # type: ignore[override] + def update(self, __m: Mapping[_T, int], **kwargs: int) -> None: ... + @overload + def update(self, __m: Iterable[_T], **kwargs: int) -> None: ... + @overload + def update(self, __m: None = ..., **kwargs: int) -> None: ... + def __missing__(self, key: _T) -> int: ... + def __delitem__(self, elem: object) -> None: ... + if sys.version_info >= (3, 10): + def __eq__(self, other: object) -> bool: ... + def __ne__(self, other: object) -> bool: ... + + def __add__(self, other: Counter[_S]) -> Counter[_T | _S]: ... + def __sub__(self, other: Counter[_T]) -> Counter[_T]: ... + def __and__(self, other: Counter[_T]) -> Counter[_T]: ... + def __or__(self, other: Counter[_S]) -> Counter[_T | _S]: ... # type: ignore[override] + def __pos__(self) -> Counter[_T]: ... + def __neg__(self) -> Counter[_T]: ... + # several type: ignores because __iadd__ is supposedly incompatible with __add__, etc. + def __iadd__(self: Self, other: Counter[_T]) -> Self: ... # type: ignore[misc] + def __isub__(self: Self, other: Counter[_T]) -> Self: ... + def __iand__(self: Self, other: Counter[_T]) -> Self: ... + def __ior__(self: Self, other: Counter[_T]) -> Self: ... # type: ignore[override,misc] + if sys.version_info >= (3, 10): + def total(self) -> int: ... + def __le__(self, other: Counter[Any]) -> bool: ... + def __lt__(self, other: Counter[Any]) -> bool: ... + def __ge__(self, other: Counter[Any]) -> bool: ... + def __gt__(self, other: Counter[Any]) -> bool: ... + +# The pure-Python implementations of the "views" classes +# These are exposed at runtime in `collections/__init__.py` +class _OrderedDictKeysView(KeysView[_KT_co], Reversible[_KT_co]): + def __reversed__(self) -> Iterator[_KT_co]: ... + +class _OrderedDictItemsView(ItemsView[_KT_co, _VT_co], Reversible[tuple[_KT_co, _VT_co]]): + def __reversed__(self) -> Iterator[tuple[_KT_co, _VT_co]]: ... + +class _OrderedDictValuesView(ValuesView[_VT_co], Reversible[_VT_co]): + def __reversed__(self) -> Iterator[_VT_co]: ... + +# The C implementations of the "views" classes +# (At runtime, these are called `odict_keys`, `odict_items` and `odict_values`, +# but they are not exposed anywhere) +@final +class _odict_keys(dict_keys[_KT_co, _VT_co], Reversible[_KT_co]): # type: ignore[misc] + def __reversed__(self) -> Iterator[_KT_co]: ... + +@final +class _odict_items(dict_items[_KT_co, _VT_co], Reversible[tuple[_KT_co, _VT_co]]): # type: ignore[misc] + def __reversed__(self) -> Iterator[tuple[_KT_co, _VT_co]]: ... + +@final +class _odict_values(dict_values[_KT_co, _VT_co], Reversible[_VT_co], Generic[_KT_co, _VT_co]): # type: ignore[misc] + def __reversed__(self) -> Iterator[_VT_co]: ... + +class OrderedDict(dict[_KT, _VT], Reversible[_KT], Generic[_KT, _VT]): + def popitem(self, last: bool = ...) -> tuple[_KT, _VT]: ... + def move_to_end(self, key: _KT, last: bool = ...) -> None: ... + def copy(self: Self) -> Self: ... + def __reversed__(self) -> Iterator[_KT]: ... + def keys(self) -> _odict_keys[_KT, _VT]: ... + def items(self) -> _odict_items[_KT, _VT]: ... + def values(self) -> _odict_values[_KT, _VT]: ... 
+ # The signature of OrderedDict.fromkeys should be kept in line with `dict.fromkeys`, modulo positional-only differences. + # Like dict.fromkeys, its true signature is not expressible in the current type system. + # See #3800 & https://github.com/python/typing/issues/548#issuecomment-683336963. + @classmethod + @overload + def fromkeys(cls, iterable: Iterable[_T], value: None = ...) -> OrderedDict[_T, Any | None]: ... + @classmethod + @overload + def fromkeys(cls, iterable: Iterable[_T], value: _S) -> OrderedDict[_T, _S]: ... + # Keep OrderedDict.setdefault in line with MutableMapping.setdefault, modulo positional-only differences. + @overload + def setdefault(self: OrderedDict[_KT, _T | None], key: _KT) -> _T | None: ... + @overload + def setdefault(self, key: _KT, default: _VT) -> _VT: ... + +class defaultdict(dict[_KT, _VT], Generic[_KT, _VT]): + default_factory: Callable[[], _VT] | None + @overload + def __init__(self) -> None: ... + @overload + def __init__(self: defaultdict[str, _VT], **kwargs: _VT) -> None: ... + @overload + def __init__(self, __default_factory: Callable[[], _VT] | None) -> None: ... + @overload + def __init__(self: defaultdict[str, _VT], __default_factory: Callable[[], _VT] | None, **kwargs: _VT) -> None: ... + @overload + def __init__(self, __default_factory: Callable[[], _VT] | None, __map: SupportsKeysAndGetItem[_KT, _VT]) -> None: ... + @overload + def __init__( + self: defaultdict[str, _VT], + __default_factory: Callable[[], _VT] | None, + __map: SupportsKeysAndGetItem[str, _VT], + **kwargs: _VT, + ) -> None: ... + @overload + def __init__(self, __default_factory: Callable[[], _VT] | None, __iterable: Iterable[tuple[_KT, _VT]]) -> None: ... + @overload + def __init__( + self: defaultdict[str, _VT], + __default_factory: Callable[[], _VT] | None, + __iterable: Iterable[tuple[str, _VT]], + **kwargs: _VT, + ) -> None: ... + def __missing__(self, __key: _KT) -> _VT: ... + def __copy__(self: Self) -> Self: ... + def copy(self: Self) -> Self: ... + +class ChainMap(MutableMapping[_KT, _VT], Generic[_KT, _VT]): + maps: list[MutableMapping[_KT, _VT]] + def __init__(self, *maps: MutableMapping[_KT, _VT]) -> None: ... + def new_child(self: Self, m: MutableMapping[_KT, _VT] | None = ...) -> Self: ... + @property + def parents(self: Self) -> Self: ... + def __setitem__(self, key: _KT, value: _VT) -> None: ... + def __delitem__(self, key: _KT) -> None: ... + def __getitem__(self, key: _KT) -> _VT: ... + def __iter__(self) -> Iterator[_KT]: ... + def __len__(self) -> int: ... + def __contains__(self, key: object) -> bool: ... + def __missing__(self, key: _KT) -> _VT: ... # undocumented + def __bool__(self) -> bool: ... + def setdefault(self, key: _KT, default: _VT = ...) -> _VT: ... + @overload + def pop(self, key: _KT) -> _VT: ... + @overload + def pop(self, key: _KT, default: _VT | _T = ...) -> _VT | _T: ... + def copy(self: Self) -> Self: ... + __copy__ = copy + # All arguments to `fromkeys` are passed to `dict.fromkeys` at runtime, so the signature should be kept in line with `dict.fromkeys`. + @classmethod + @overload + def fromkeys(cls, iterable: Iterable[_T], __value: None = ...) -> ChainMap[_T, Any | None]: ... + @classmethod + @overload + def fromkeys(cls, __iterable: Iterable[_T], __value: _S) -> ChainMap[_T, _S]: ... + if sys.version_info >= (3, 9): + def __or__(self, other: Mapping[_T1, _T2]) -> ChainMap[_KT | _T1, _VT | _T2]: ... + def __ror__(self, other: Mapping[_T1, _T2]) -> ChainMap[_KT | _T1, _VT | _T2]: ... 
+ # ChainMap.__ior__ should be kept roughly in line with MutableMapping.update() + @overload # type: ignore[misc] + def __ior__(self: Self, other: SupportsKeysAndGetItem[_KT, _VT]) -> Self: ... + @overload + def __ior__(self: Self, other: Iterable[tuple[_KT, _VT]]) -> Self: ... diff --git a/.vscode/Pico-W-Stub/stdlib/collections/abc.pyi b/.vscode/Pico-W-Stub/stdlib/collections/abc.pyi new file mode 100644 index 0000000..3df2a1d --- /dev/null +++ b/.vscode/Pico-W-Stub/stdlib/collections/abc.pyi @@ -0,0 +1,2 @@ +from _collections_abc import * +from _collections_abc import __all__ as __all__ diff --git a/.vscode/Pico-W-Stub/stdlib/contextlib.pyi b/.vscode/Pico-W-Stub/stdlib/contextlib.pyi new file mode 100644 index 0000000..dc2101d --- /dev/null +++ b/.vscode/Pico-W-Stub/stdlib/contextlib.pyi @@ -0,0 +1,207 @@ +import abc +import sys +from _typeshed import FileDescriptorOrPath, Unused +from abc import abstractmethod +from collections.abc import AsyncGenerator, AsyncIterator, Awaitable, Callable, Generator, Iterator +from types import TracebackType +from typing import IO, Any, Generic, Protocol, TypeVar, overload, runtime_checkable +from typing_extensions import ParamSpec, Self, TypeAlias + +__all__ = [ + "contextmanager", + "closing", + "AbstractContextManager", + "ContextDecorator", + "ExitStack", + "redirect_stdout", + "redirect_stderr", + "suppress", + "AbstractAsyncContextManager", + "AsyncExitStack", + "asynccontextmanager", + "nullcontext", +] + +if sys.version_info >= (3, 10): + __all__ += ["aclosing"] + +if sys.version_info >= (3, 11): + __all__ += ["chdir"] + +_T = TypeVar("_T") +_T_co = TypeVar("_T_co", covariant=True) +_T_io = TypeVar("_T_io", bound=IO[str] | None) +_F = TypeVar("_F", bound=Callable[..., Any]) +_P = ParamSpec("_P") + +_ExitFunc: TypeAlias = Callable[[type[BaseException] | None, BaseException | None, TracebackType | None], bool | None] +_CM_EF = TypeVar("_CM_EF", bound=AbstractContextManager[Any] | _ExitFunc) + +@runtime_checkable +class AbstractContextManager(Protocol[_T_co]): + def __enter__(self) -> _T_co: ... + @abstractmethod + def __exit__( + self, __exc_type: type[BaseException] | None, __exc_value: BaseException | None, __traceback: TracebackType | None + ) -> bool | None: ... + +@runtime_checkable +class AbstractAsyncContextManager(Protocol[_T_co]): + async def __aenter__(self) -> _T_co: ... + @abstractmethod + async def __aexit__( + self, __exc_type: type[BaseException] | None, __exc_value: BaseException | None, __traceback: TracebackType | None + ) -> bool | None: ... + +class ContextDecorator: + def __call__(self, func: _F) -> _F: ... + +class _GeneratorContextManager(AbstractContextManager[_T_co], ContextDecorator, Generic[_T_co]): + # __init__ and all instance attributes are actually inherited from _GeneratorContextManagerBase + # _GeneratorContextManagerBase is more trouble than it's worth to include in the stub; see #6676 + def __init__(self, func: Callable[..., Iterator[_T_co]], args: tuple[Any, ...], kwds: dict[str, Any]) -> None: ... + gen: Generator[_T_co, Any, Any] + func: Callable[..., Generator[_T_co, Any, Any]] + args: tuple[Any, ...] + kwds: dict[str, Any] + if sys.version_info >= (3, 9): + def __exit__( + self, typ: type[BaseException] | None, value: BaseException | None, traceback: TracebackType | None + ) -> bool | None: ... + else: + def __exit__( + self, type: type[BaseException] | None, value: BaseException | None, traceback: TracebackType | None + ) -> bool | None: ... 
+ +def contextmanager(func: Callable[_P, Iterator[_T_co]]) -> Callable[_P, _GeneratorContextManager[_T_co]]: ... + +if sys.version_info >= (3, 10): + _AF = TypeVar("_AF", bound=Callable[..., Awaitable[Any]]) + + class AsyncContextDecorator: + def __call__(self, func: _AF) -> _AF: ... + + class _AsyncGeneratorContextManager(AbstractAsyncContextManager[_T_co], AsyncContextDecorator, Generic[_T_co]): + # __init__ and these attributes are actually defined in the base class _GeneratorContextManagerBase, + # which is more trouble than it's worth to include in the stub (see #6676) + def __init__(self, func: Callable[..., AsyncIterator[_T_co]], args: tuple[Any, ...], kwds: dict[str, Any]) -> None: ... + gen: AsyncGenerator[_T_co, Any] + func: Callable[..., AsyncGenerator[_T_co, Any]] + args: tuple[Any, ...] + kwds: dict[str, Any] + async def __aexit__( + self, typ: type[BaseException] | None, value: BaseException | None, traceback: TracebackType | None + ) -> bool | None: ... + +else: + class _AsyncGeneratorContextManager(AbstractAsyncContextManager[_T_co], Generic[_T_co]): + def __init__(self, func: Callable[..., AsyncIterator[_T_co]], args: tuple[Any, ...], kwds: dict[str, Any]) -> None: ... + gen: AsyncGenerator[_T_co, Any] + func: Callable[..., AsyncGenerator[_T_co, Any]] + args: tuple[Any, ...] + kwds: dict[str, Any] + async def __aexit__( + self, typ: type[BaseException] | None, value: BaseException | None, traceback: TracebackType | None + ) -> bool | None: ... + +def asynccontextmanager(func: Callable[_P, AsyncIterator[_T_co]]) -> Callable[_P, _AsyncGeneratorContextManager[_T_co]]: ... + +class _SupportsClose(Protocol): + def close(self) -> object: ... + +_SupportsCloseT = TypeVar("_SupportsCloseT", bound=_SupportsClose) + +class closing(AbstractContextManager[_SupportsCloseT]): + def __init__(self, thing: _SupportsCloseT) -> None: ... + def __exit__(self, *exc_info: Unused) -> None: ... + +if sys.version_info >= (3, 10): + class _SupportsAclose(Protocol): + def aclose(self) -> Awaitable[object]: ... + _SupportsAcloseT = TypeVar("_SupportsAcloseT", bound=_SupportsAclose) + + class aclosing(AbstractAsyncContextManager[_SupportsAcloseT]): + def __init__(self, thing: _SupportsAcloseT) -> None: ... + async def __aexit__(self, *exc_info: Unused) -> None: ... + +class suppress(AbstractContextManager[None]): + def __init__(self, *exceptions: type[BaseException]) -> None: ... + def __exit__( + self, exctype: type[BaseException] | None, excinst: BaseException | None, exctb: TracebackType | None + ) -> bool: ... + +class _RedirectStream(AbstractContextManager[_T_io]): + def __init__(self, new_target: _T_io) -> None: ... + def __exit__( + self, exctype: type[BaseException] | None, excinst: BaseException | None, exctb: TracebackType | None + ) -> None: ... + +class redirect_stdout(_RedirectStream[_T_io]): ... +class redirect_stderr(_RedirectStream[_T_io]): ... + +# In reality this is a subclass of `AbstractContextManager`; +# see #7961 for why we don't do that in the stub +class ExitStack(metaclass=abc.ABCMeta): + def enter_context(self, cm: AbstractContextManager[_T]) -> _T: ... + def push(self, exit: _CM_EF) -> _CM_EF: ... + def callback(self, __callback: Callable[_P, _T], *args: _P.args, **kwds: _P.kwargs) -> Callable[_P, _T]: ... + def pop_all(self) -> Self: ... + def close(self) -> None: ... + def __enter__(self) -> Self: ... + def __exit__( + self, __exc_type: type[BaseException] | None, __exc_value: BaseException | None, __traceback: TracebackType | None + ) -> bool: ... 
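+# Illustrative (not part of the stub): ExitStack (declared above) lets a variable
+# number of context managers share one with-block and unwinds them in reverse order,
+# even if an error occurs part-way through. `paths` is a placeholder iterable of filenames.
+#     from contextlib import ExitStack
+#     with ExitStack() as stack:
+#         files = [stack.enter_context(open(p)) for p in paths]
+#         ...  # every opened file is closed when the block exits, even on partial failure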
+ +_ExitCoroFunc: TypeAlias = Callable[ + [type[BaseException] | None, BaseException | None, TracebackType | None], Awaitable[bool | None] +] +_ACM_EF = TypeVar("_ACM_EF", bound=AbstractAsyncContextManager[Any] | _ExitCoroFunc) + +# In reality this is a subclass of `AbstractAsyncContextManager`; +# see #7961 for why we don't do that in the stub +class AsyncExitStack(metaclass=abc.ABCMeta): + def enter_context(self, cm: AbstractContextManager[_T]) -> _T: ... + async def enter_async_context(self, cm: AbstractAsyncContextManager[_T]) -> _T: ... + def push(self, exit: _CM_EF) -> _CM_EF: ... + def push_async_exit(self, exit: _ACM_EF) -> _ACM_EF: ... + def callback(self, __callback: Callable[_P, _T], *args: _P.args, **kwds: _P.kwargs) -> Callable[_P, _T]: ... + def push_async_callback( + self, __callback: Callable[_P, Awaitable[_T]], *args: _P.args, **kwds: _P.kwargs + ) -> Callable[_P, Awaitable[_T]]: ... + def pop_all(self) -> Self: ... + async def aclose(self) -> None: ... + async def __aenter__(self) -> Self: ... + async def __aexit__( + self, __exc_type: type[BaseException] | None, __exc_value: BaseException | None, __traceback: TracebackType | None + ) -> bool: ... + +if sys.version_info >= (3, 10): + class nullcontext(AbstractContextManager[_T], AbstractAsyncContextManager[_T]): + enter_result: _T + @overload + def __init__(self: nullcontext[None], enter_result: None = None) -> None: ... + @overload + def __init__(self: nullcontext[_T], enter_result: _T) -> None: ... + def __enter__(self) -> _T: ... + def __exit__(self, *exctype: Unused) -> None: ... + async def __aenter__(self) -> _T: ... + async def __aexit__(self, *exctype: Unused) -> None: ... + +else: + class nullcontext(AbstractContextManager[_T]): + enter_result: _T + @overload + def __init__(self: nullcontext[None], enter_result: None = None) -> None: ... + @overload + def __init__(self: nullcontext[_T], enter_result: _T) -> None: ... + def __enter__(self) -> _T: ... + def __exit__(self, *exctype: Unused) -> None: ... + +if sys.version_info >= (3, 11): + _T_fd_or_any_path = TypeVar("_T_fd_or_any_path", bound=FileDescriptorOrPath) + + class chdir(AbstractContextManager[None], Generic[_T_fd_or_any_path]): + path: _T_fd_or_any_path + def __init__(self, path: _T_fd_or_any_path) -> None: ... + def __enter__(self) -> None: ... + def __exit__(self, *excinfo: Unused) -> None: ... diff --git a/.vscode/Pico-W-Stub/stdlib/contextvars.pyi b/.vscode/Pico-W-Stub/stdlib/contextvars.pyi new file mode 100644 index 0000000..63b5f80 --- /dev/null +++ b/.vscode/Pico-W-Stub/stdlib/contextvars.pyi @@ -0,0 +1,70 @@ +import sys +from collections.abc import Callable, Iterator, Mapping +from typing import Any, ClassVar, Generic, TypeVar, overload +from typing_extensions import ParamSpec, final + +if sys.version_info >= (3, 9): + from types import GenericAlias + +__all__ = ("Context", "ContextVar", "Token", "copy_context") + +_T = TypeVar("_T") +_D = TypeVar("_D") +_P = ParamSpec("_P") + +@final +class ContextVar(Generic[_T]): + @overload + def __init__(self, name: str) -> None: ... + @overload + def __init__(self, name: str, *, default: _T) -> None: ... + def __hash__(self) -> int: ... + @property + def name(self) -> str: ... + @overload + def get(self) -> _T: ... + if sys.version_info >= (3, 8): + @overload + def get(self, default: _T) -> _T: ... + @overload + def get(self, default: _D) -> _D | _T: ... + else: + @overload + def get(self, __default: _T) -> _T: ... + @overload + def get(self, __default: _D) -> _D | _T: ... 
+ + def set(self, __value: _T) -> Token[_T]: ... + def reset(self, __token: Token[_T]) -> None: ... + if sys.version_info >= (3, 9): + def __class_getitem__(cls, item: Any) -> GenericAlias: ... + +@final +class Token(Generic[_T]): + @property + def var(self) -> ContextVar[_T]: ... + @property + def old_value(self) -> Any: ... # returns either _T or MISSING, but that's hard to express + MISSING: ClassVar[object] + if sys.version_info >= (3, 9): + def __class_getitem__(cls, item: Any) -> GenericAlias: ... + +def copy_context() -> Context: ... + +# It doesn't make sense to make this generic, because for most Contexts each ContextVar will have +# a different value. +@final +class Context(Mapping[ContextVar[Any], Any]): + def __init__(self) -> None: ... + @overload + def get(self, __key: ContextVar[_T], __default: None = None) -> _T | None: ... # type: ignore[misc] # overlapping overloads + @overload + def get(self, __key: ContextVar[_T], __default: _T) -> _T: ... + @overload + def get(self, __key: ContextVar[_T], __default: _D) -> _T | _D: ... + def run(self, callable: Callable[_P, _T], *args: _P.args, **kwargs: _P.kwargs) -> _T: ... + def copy(self) -> Context: ... + def __getitem__(self, __key: ContextVar[_T]) -> _T: ... + def __iter__(self) -> Iterator[ContextVar[Any]]: ... + def __len__(self) -> int: ... + def __eq__(self, __value: object) -> bool: ... diff --git a/.vscode/Pico-W-Stub/stdlib/dataclasses.pyi b/.vscode/Pico-W-Stub/stdlib/dataclasses.pyi new file mode 100644 index 0000000..13cffcd --- /dev/null +++ b/.vscode/Pico-W-Stub/stdlib/dataclasses.pyi @@ -0,0 +1,322 @@ +import enum +import sys +import types +from _typeshed import DataclassInstance +from builtins import type as Type # alias to avoid name clashes with fields named "type" +from collections.abc import Callable, Iterable, Mapping +from typing import Any, Generic, Protocol, TypeVar, overload +from typing_extensions import Literal, TypeAlias, TypeGuard + +if sys.version_info >= (3, 9): + from types import GenericAlias + +_T = TypeVar("_T") +_T_co = TypeVar("_T_co", covariant=True) + +__all__ = [ + "dataclass", + "field", + "Field", + "FrozenInstanceError", + "InitVar", + "MISSING", + "fields", + "asdict", + "astuple", + "make_dataclass", + "replace", + "is_dataclass", +] + +if sys.version_info >= (3, 10): + __all__ += ["KW_ONLY"] + +_DataclassT = TypeVar("_DataclassT", bound=DataclassInstance) + +# define _MISSING_TYPE as an enum within the type stubs, +# even though that is not really its type at runtime +# this allows us to use Literal[_MISSING_TYPE.MISSING] +# for background, see: +# https://github.com/python/typeshed/pull/5900#issuecomment-895513797 +class _MISSING_TYPE(enum.Enum): + MISSING = enum.auto() + +MISSING = _MISSING_TYPE.MISSING + +if sys.version_info >= (3, 10): + class KW_ONLY: ... + +@overload +def asdict(obj: DataclassInstance) -> dict[str, Any]: ... +@overload +def asdict(obj: DataclassInstance, *, dict_factory: Callable[[list[tuple[str, Any]]], _T]) -> _T: ... +@overload +def astuple(obj: DataclassInstance) -> tuple[Any, ...]: ... +@overload +def astuple(obj: DataclassInstance, *, tuple_factory: Callable[[list[Any]], _T]) -> _T: ... + +if sys.version_info >= (3, 8): + # cls argument is now positional-only + @overload + def dataclass(__cls: None) -> Callable[[type[_T]], type[_T]]: ... + @overload + def dataclass(__cls: type[_T]) -> type[_T]: ... + +else: + @overload + def dataclass(_cls: None) -> Callable[[type[_T]], type[_T]]: ... + @overload + def dataclass(_cls: type[_T]) -> type[_T]: ... 
+ +if sys.version_info >= (3, 11): + @overload + def dataclass( + *, + init: bool = True, + repr: bool = True, + eq: bool = True, + order: bool = False, + unsafe_hash: bool = False, + frozen: bool = False, + match_args: bool = True, + kw_only: bool = False, + slots: bool = False, + weakref_slot: bool = False, + ) -> Callable[[type[_T]], type[_T]]: ... + +elif sys.version_info >= (3, 10): + @overload + def dataclass( + *, + init: bool = True, + repr: bool = True, + eq: bool = True, + order: bool = False, + unsafe_hash: bool = False, + frozen: bool = False, + match_args: bool = True, + kw_only: bool = False, + slots: bool = False, + ) -> Callable[[type[_T]], type[_T]]: ... + +else: + @overload + def dataclass( + *, + init: bool = True, + repr: bool = True, + eq: bool = True, + order: bool = False, + unsafe_hash: bool = False, + frozen: bool = False, + ) -> Callable[[type[_T]], type[_T]]: ... + +# See https://github.com/python/mypy/issues/10750 +class _DefaultFactory(Protocol[_T_co]): + def __call__(self) -> _T_co: ... + +class Field(Generic[_T]): + name: str + type: Type[_T] + default: _T | Literal[_MISSING_TYPE.MISSING] + default_factory: _DefaultFactory[_T] | Literal[_MISSING_TYPE.MISSING] + repr: bool + hash: bool | None + init: bool + compare: bool + metadata: types.MappingProxyType[Any, Any] + if sys.version_info >= (3, 10): + kw_only: bool | Literal[_MISSING_TYPE.MISSING] + def __init__( + self, + default: _T, + default_factory: Callable[[], _T], + init: bool, + repr: bool, + hash: bool | None, + compare: bool, + metadata: Mapping[Any, Any], + kw_only: bool, + ) -> None: ... + else: + def __init__( + self, + default: _T, + default_factory: Callable[[], _T], + init: bool, + repr: bool, + hash: bool | None, + compare: bool, + metadata: Mapping[Any, Any], + ) -> None: ... + + def __set_name__(self, owner: Type[Any], name: str) -> None: ... + if sys.version_info >= (3, 9): + def __class_getitem__(cls, item: Any) -> GenericAlias: ... + +# NOTE: Actual return type is 'Field[_T]', but we want to help type checkers +# to understand the magic that happens at runtime. +if sys.version_info >= (3, 10): + @overload # `default` and `default_factory` are optional and mutually exclusive. + def field( + *, + default: _T, + init: bool = True, + repr: bool = True, + hash: bool | None = None, + compare: bool = True, + metadata: Mapping[Any, Any] | None = None, + kw_only: bool = ..., + ) -> _T: ... + @overload + def field( + *, + default_factory: Callable[[], _T], + init: bool = True, + repr: bool = True, + hash: bool | None = None, + compare: bool = True, + metadata: Mapping[Any, Any] | None = None, + kw_only: bool = ..., + ) -> _T: ... + @overload + def field( + *, + init: bool = True, + repr: bool = True, + hash: bool | None = None, + compare: bool = True, + metadata: Mapping[Any, Any] | None = None, + kw_only: bool = ..., + ) -> Any: ... + +else: + @overload # `default` and `default_factory` are optional and mutually exclusive. + def field( + *, + default: _T, + init: bool = True, + repr: bool = True, + hash: bool | None = None, + compare: bool = True, + metadata: Mapping[Any, Any] | None = None, + ) -> _T: ... + @overload + def field( + *, + default_factory: Callable[[], _T], + init: bool = True, + repr: bool = True, + hash: bool | None = None, + compare: bool = True, + metadata: Mapping[Any, Any] | None = None, + ) -> _T: ... 
+ @overload + def field( + *, + init: bool = True, + repr: bool = True, + hash: bool | None = None, + compare: bool = True, + metadata: Mapping[Any, Any] | None = None, + ) -> Any: ... + +def fields(class_or_instance: DataclassInstance | type[DataclassInstance]) -> tuple[Field[Any], ...]: ... +@overload +def is_dataclass(obj: DataclassInstance) -> Literal[True]: ... +@overload +def is_dataclass(obj: type) -> TypeGuard[type[DataclassInstance]]: ... +@overload +def is_dataclass(obj: object) -> TypeGuard[DataclassInstance | type[DataclassInstance]]: ... + +class FrozenInstanceError(AttributeError): ... + +if sys.version_info >= (3, 9): + _InitVarMeta: TypeAlias = type +else: + class _InitVarMeta(type): + # Not used, instead `InitVar.__class_getitem__` is called. + def __getitem__(self, params: Any) -> InitVar[Any]: ... + +class InitVar(Generic[_T], metaclass=_InitVarMeta): + type: Type[_T] + def __init__(self, type: Type[_T]) -> None: ... + if sys.version_info >= (3, 9): + @overload + def __class_getitem__(cls, type: Type[_T]) -> InitVar[_T]: ... + @overload + def __class_getitem__(cls, type: Any) -> InitVar[Any]: ... + +if sys.version_info >= (3, 12): + def make_dataclass( + cls_name: str, + fields: Iterable[str | tuple[str, type] | tuple[str, type, Any]], + *, + bases: tuple[type, ...] = (), + namespace: dict[str, Any] | None = None, + init: bool = True, + repr: bool = True, + eq: bool = True, + order: bool = False, + unsafe_hash: bool = False, + frozen: bool = False, + match_args: bool = True, + kw_only: bool = False, + slots: bool = False, + weakref_slot: bool = False, + module: str | None = None, + ) -> type: ... + +elif sys.version_info >= (3, 11): + def make_dataclass( + cls_name: str, + fields: Iterable[str | tuple[str, type] | tuple[str, type, Any]], + *, + bases: tuple[type, ...] = (), + namespace: dict[str, Any] | None = None, + init: bool = True, + repr: bool = True, + eq: bool = True, + order: bool = False, + unsafe_hash: bool = False, + frozen: bool = False, + match_args: bool = True, + kw_only: bool = False, + slots: bool = False, + weakref_slot: bool = False, + ) -> type: ... + +elif sys.version_info >= (3, 10): + def make_dataclass( + cls_name: str, + fields: Iterable[str | tuple[str, type] | tuple[str, type, Any]], + *, + bases: tuple[type, ...] = (), + namespace: dict[str, Any] | None = None, + init: bool = True, + repr: bool = True, + eq: bool = True, + order: bool = False, + unsafe_hash: bool = False, + frozen: bool = False, + match_args: bool = True, + kw_only: bool = False, + slots: bool = False, + ) -> type: ... + +else: + def make_dataclass( + cls_name: str, + fields: Iterable[str | tuple[str, type] | tuple[str, type, Any]], + *, + bases: tuple[type, ...] = (), + namespace: dict[str, Any] | None = None, + init: bool = True, + repr: bool = True, + eq: bool = True, + order: bool = False, + unsafe_hash: bool = False, + frozen: bool = False, + ) -> type: ... + +def replace(__obj: _DataclassT, **changes: Any) -> _DataclassT: ... 
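+
+# Usage sketch for the field/asdict/replace API stubbed above; the `Reading`
+# class and its fields are illustrative only.
+# >>> from dataclasses import dataclass, field, asdict, replace
+# >>> @dataclass
+# ... class Reading:
+# ...     pin: int
+# ...     samples: list = field(default_factory=list)
+# >>> r = Reading(pin=26)
+# >>> asdict(r)
+# {'pin': 26, 'samples': []}
+# >>> replace(r, pin=27)
+# Reading(pin=27, samples=[])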
diff --git a/.vscode/Pico-W-Stub/stdlib/decimal.pyi b/.vscode/Pico-W-Stub/stdlib/decimal.pyi new file mode 100644 index 0000000..35fc440 --- /dev/null +++ b/.vscode/Pico-W-Stub/stdlib/decimal.pyi @@ -0,0 +1,2 @@ +from _decimal import * +from _decimal import __libmpdec_version__ as __libmpdec_version__, __version__ as __version__ diff --git a/.vscode/Pico-W-Stub/stdlib/enum.pyi b/.vscode/Pico-W-Stub/stdlib/enum.pyi new file mode 100644 index 0000000..a8ba7bf --- /dev/null +++ b/.vscode/Pico-W-Stub/stdlib/enum.pyi @@ -0,0 +1,299 @@ +import _typeshed +import sys +import types +from _typeshed import SupportsKeysAndGetItem, Unused +from builtins import property as _builtins_property +from collections.abc import Callable, Iterable, Iterator, Mapping +from typing import Any, Generic, TypeVar, overload +from typing_extensions import Literal, Self, TypeAlias + +__all__ = ["EnumMeta", "Enum", "IntEnum", "Flag", "IntFlag", "auto", "unique"] + +if sys.version_info >= (3, 11): + __all__ += [ + "CONFORM", + "CONTINUOUS", + "EJECT", + "EnumCheck", + "EnumType", + "FlagBoundary", + "KEEP", + "NAMED_FLAGS", + "ReprEnum", + "STRICT", + "StrEnum", + "UNIQUE", + "global_enum", + "global_enum_repr", + "global_flag_repr", + "global_str", + "member", + "nonmember", + "property", + "verify", + ] + +if sys.version_info >= (3, 12): + __all__ += ["pickle_by_enum_name", "pickle_by_global_name"] + +_EnumMemberT = TypeVar("_EnumMemberT") +_EnumerationT = TypeVar("_EnumerationT", bound=type[Enum]) + +# The following all work: +# >>> from enum import Enum +# >>> from string import ascii_lowercase +# >>> Enum('Foo', names='RED YELLOW GREEN') +# +# >>> Enum('Foo', names=[('RED', 1), ('YELLOW, 2)]) +# +# >>> Enum('Foo', names=((x for x in (ascii_lowercase[i], i)) for i in range(5))) +# +# >>> Enum('Foo', names={'RED': 1, 'YELLOW': 2}) +# +_EnumNames: TypeAlias = str | Iterable[str] | Iterable[Iterable[str | Any]] | Mapping[str, Any] + +if sys.version_info >= (3, 11): + class nonmember(Generic[_EnumMemberT]): + value: _EnumMemberT + def __init__(self, value: _EnumMemberT) -> None: ... + + class member(Generic[_EnumMemberT]): + value: _EnumMemberT + def __init__(self, value: _EnumMemberT) -> None: ... + +class _EnumDict(dict[str, Any]): + def __init__(self) -> None: ... + def __setitem__(self, key: str, value: Any) -> None: ... + if sys.version_info >= (3, 11): + # See comment above `typing.MutableMapping.update` + # for why overloads are preferable to a Union here + # + # Unlike with MutableMapping.update(), the first argument is required, + # hence the type: ignore + @overload # type: ignore[override] + def update(self, members: SupportsKeysAndGetItem[str, Any], **more_members: Any) -> None: ... + @overload + def update(self, members: Iterable[tuple[str, Any]], **more_members: Any) -> None: ... + +# Structurally: Iterable[T], Reversible[T], Container[T] where T is the enum itself +class EnumMeta(type): + if sys.version_info >= (3, 11): + def __new__( + metacls: type[_typeshed.Self], + cls: str, + bases: tuple[type, ...], + classdict: _EnumDict, + *, + boundary: FlagBoundary | None = None, + _simple: bool = False, + **kwds: Any, + ) -> _typeshed.Self: ... + elif sys.version_info >= (3, 9): + def __new__( + metacls: type[_typeshed.Self], cls: str, bases: tuple[type, ...], classdict: _EnumDict, **kwds: Any + ) -> _typeshed.Self: ... + else: + def __new__(metacls: type[_typeshed.Self], cls: str, bases: tuple[type, ...], classdict: _EnumDict) -> _typeshed.Self: ... 
+ + if sys.version_info >= (3, 9): + @classmethod + def __prepare__(metacls, cls: str, bases: tuple[type, ...], **kwds: Any) -> _EnumDict: ... # type: ignore[override] + else: + @classmethod + def __prepare__(metacls, cls: str, bases: tuple[type, ...]) -> _EnumDict: ... # type: ignore[override] + + def __iter__(self: type[_EnumMemberT]) -> Iterator[_EnumMemberT]: ... + def __reversed__(self: type[_EnumMemberT]) -> Iterator[_EnumMemberT]: ... + if sys.version_info >= (3, 12): + def __contains__(self: type[Any], value: object) -> bool: ... + elif sys.version_info >= (3, 11): + def __contains__(self: type[Any], member: object) -> bool: ... + elif sys.version_info >= (3, 10): + def __contains__(self: type[Any], obj: object) -> bool: ... + else: + def __contains__(self: type[Any], member: object) -> bool: ... + + def __getitem__(self: type[_EnumMemberT], name: str) -> _EnumMemberT: ... + @_builtins_property + def __members__(self: type[_EnumMemberT]) -> types.MappingProxyType[str, _EnumMemberT]: ... + def __len__(self) -> int: ... + def __bool__(self) -> Literal[True]: ... + def __dir__(self) -> list[str]: ... + # Simple value lookup + @overload + def __call__(cls: type[_EnumMemberT], value: Any, names: None = None) -> _EnumMemberT: ... + # Functional Enum API + if sys.version_info >= (3, 11): + @overload + def __call__( + cls, + value: str, + names: _EnumNames, + *, + module: str | None = None, + qualname: str | None = None, + type: type | None = None, + start: int = 1, + boundary: FlagBoundary | None = None, + ) -> type[Enum]: ... + else: + @overload + def __call__( + cls, + value: str, + names: _EnumNames, + *, + module: str | None = None, + qualname: str | None = None, + type: type | None = None, + start: int = 1, + ) -> type[Enum]: ... + _member_names_: list[str] # undocumented + _member_map_: dict[str, Enum] # undocumented + _value2member_map_: dict[Any, Enum] # undocumented + +if sys.version_info >= (3, 11): + # In 3.11 `EnumMeta` metaclass is renamed to `EnumType`, but old name also exists. + EnumType = EnumMeta + + class property(types.DynamicClassAttribute): + def __set_name__(self, ownerclass: type[Enum], name: str) -> None: ... + name: str + clsname: str + _magic_enum_attr = property +else: + _magic_enum_attr = types.DynamicClassAttribute + +class Enum(metaclass=EnumMeta): + @_magic_enum_attr + def name(self) -> str: ... + @_magic_enum_attr + def value(self) -> Any: ... + _name_: str + _value_: Any + _ignore_: str | list[str] + _order_: str + __order__: str + @classmethod + def _missing_(cls, value: object) -> Any: ... + @staticmethod + def _generate_next_value_(name: str, start: int, count: int, last_values: list[Any]) -> Any: ... + # It's not true that `__new__` will accept any argument type, + # so ideally we'd use `Any` to indicate that the argument type is inexpressible. + # However, using `Any` causes too many false-positives for those using mypy's `--disallow-any-expr` + # (see #7752, #2539, mypy/#5788), + # and in practice using `object` here has the same effect as using `Any`. + def __new__(cls, value: object) -> Self: ... + def __dir__(self) -> list[str]: ... + def __hash__(self) -> int: ... + def __format__(self, format_spec: str) -> str: ... + def __reduce_ex__(self, proto: Unused) -> tuple[Any, ...]: ... + if sys.version_info >= (3, 12): + def __copy__(self) -> Self: ... + def __deepcopy__(self, memo: Any) -> Self: ... + +if sys.version_info >= (3, 11): + class ReprEnum(Enum): ... 
+ +if sys.version_info >= (3, 11): + _IntEnumBase = ReprEnum +else: + _IntEnumBase = Enum + +class IntEnum(int, _IntEnumBase): + _value_: int + @_magic_enum_attr + def value(self) -> int: ... + def __new__(cls, value: int) -> Self: ... + +def unique(enumeration: _EnumerationT) -> _EnumerationT: ... + +_auto_null: Any + +class Flag(Enum): + _name_: str | None # type: ignore[assignment] + _value_: int + @_magic_enum_attr + def name(self) -> str | None: ... # type: ignore[override] + @_magic_enum_attr + def value(self) -> int: ... + def __contains__(self, other: Self) -> bool: ... + def __bool__(self) -> bool: ... + def __or__(self, other: Self) -> Self: ... + def __and__(self, other: Self) -> Self: ... + def __xor__(self, other: Self) -> Self: ... + def __invert__(self) -> Self: ... + if sys.version_info >= (3, 11): + def __iter__(self) -> Iterator[Self]: ... + def __len__(self) -> int: ... + __ror__ = __or__ + __rand__ = __and__ + __rxor__ = __xor__ + +if sys.version_info >= (3, 11): + class StrEnum(str, ReprEnum): + def __new__(cls, value: str) -> Self: ... + _value_: str + @_magic_enum_attr + def value(self) -> str: ... + @staticmethod + def _generate_next_value_(name: str, start: int, count: int, last_values: list[str]) -> str: ... + + class EnumCheck(StrEnum): + CONTINUOUS: str + NAMED_FLAGS: str + UNIQUE: str + CONTINUOUS = EnumCheck.CONTINUOUS + NAMED_FLAGS = EnumCheck.NAMED_FLAGS + UNIQUE = EnumCheck.UNIQUE + + class verify: + def __init__(self, *checks: EnumCheck) -> None: ... + def __call__(self, enumeration: _EnumerationT) -> _EnumerationT: ... + + class FlagBoundary(StrEnum): + STRICT: str + CONFORM: str + EJECT: str + KEEP: str + STRICT = FlagBoundary.STRICT + CONFORM = FlagBoundary.CONFORM + EJECT = FlagBoundary.EJECT + KEEP = FlagBoundary.KEEP + + def global_str(self: Enum) -> str: ... + def global_enum(cls: _EnumerationT, update_str: bool = False) -> _EnumerationT: ... + def global_enum_repr(self: Enum) -> str: ... + def global_flag_repr(self: Flag) -> str: ... + +if sys.version_info >= (3, 11): + # The body of the class is the same, but the base classes are different. + class IntFlag(int, ReprEnum, Flag, boundary=KEEP): # type: ignore[misc] # complaints about incompatible bases + def __new__(cls, value: int) -> Self: ... + def __or__(self, other: int) -> Self: ... + def __and__(self, other: int) -> Self: ... + def __xor__(self, other: int) -> Self: ... + __ror__ = __or__ + __rand__ = __and__ + __rxor__ = __xor__ + +else: + class IntFlag(int, Flag): # type: ignore[misc] # complaints about incompatible bases + def __new__(cls, value: int) -> Self: ... + def __or__(self, other: int) -> Self: ... + def __and__(self, other: int) -> Self: ... + def __xor__(self, other: int) -> Self: ... + __ror__ = __or__ + __rand__ = __and__ + __rxor__ = __xor__ + +# subclassing IntFlag so it picks up all implemented base functions, best modeling behavior of enum.auto() +class auto(IntFlag): + _value_: Any + @_magic_enum_attr + def value(self) -> Any: ... + def __new__(cls) -> Self: ... + +if sys.version_info >= (3, 12): + def pickle_by_global_name(self: Enum, proto: int) -> str: ... + def pickle_by_enum_name(self: _EnumMemberT, proto: int) -> tuple[Callable[..., Any], tuple[type[_EnumMemberT], str]]: ... 
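+
+# Usage sketch for the functional Enum API accepted by EnumMeta.__call__ above;
+# the `Color` enumeration is illustrative only.
+# >>> from enum import Enum
+# >>> Color = Enum('Color', 'RED GREEN BLUE')
+# >>> Color.RED.value
+# 1
+# >>> list(Color.__members__)
+# ['RED', 'GREEN', 'BLUE']
+# >>> Color(2) is Color.GREEN
+# True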
diff --git a/.vscode/Pico-W-Stub/stdlib/fractions.pyi b/.vscode/Pico-W-Stub/stdlib/fractions.pyi new file mode 100644 index 0000000..7ec8add --- /dev/null +++ b/.vscode/Pico-W-Stub/stdlib/fractions.pyi @@ -0,0 +1,162 @@ +import sys +from collections.abc import Callable +from decimal import Decimal +from numbers import Integral, Rational, Real +from typing import Any, overload +from typing_extensions import Literal, Self, SupportsIndex, TypeAlias + +_ComparableNum: TypeAlias = int | float | Decimal | Real + +if sys.version_info >= (3, 9): + __all__ = ["Fraction"] +else: + __all__ = ["Fraction", "gcd"] + @overload + def gcd(a: int, b: int) -> int: ... + @overload + def gcd(a: Integral, b: int) -> Integral: ... + @overload + def gcd(a: int, b: Integral) -> Integral: ... + @overload + def gcd(a: Integral, b: Integral) -> Integral: ... + +class Fraction(Rational): + @overload + def __new__(cls, numerator: int | Rational = 0, denominator: int | Rational | None = None) -> Self: ... + @overload + def __new__(cls, __value: float | Decimal | str) -> Self: ... + @classmethod + def from_float(cls, f: float) -> Self: ... + @classmethod + def from_decimal(cls, dec: Decimal) -> Self: ... + def limit_denominator(self, max_denominator: int = 1000000) -> Fraction: ... + if sys.version_info >= (3, 8): + def as_integer_ratio(self) -> tuple[int, int]: ... + if sys.version_info >= (3, 12): + def is_integer(self) -> bool: ... + + @property + def numerator(a) -> int: ... + @property + def denominator(a) -> int: ... + @overload + def __add__(a, b: int | Fraction) -> Fraction: ... + @overload + def __add__(a, b: float) -> float: ... + @overload + def __add__(a, b: complex) -> complex: ... + @overload + def __radd__(b, a: int | Fraction) -> Fraction: ... + @overload + def __radd__(b, a: float) -> float: ... + @overload + def __radd__(b, a: complex) -> complex: ... + @overload + def __sub__(a, b: int | Fraction) -> Fraction: ... + @overload + def __sub__(a, b: float) -> float: ... + @overload + def __sub__(a, b: complex) -> complex: ... + @overload + def __rsub__(b, a: int | Fraction) -> Fraction: ... + @overload + def __rsub__(b, a: float) -> float: ... + @overload + def __rsub__(b, a: complex) -> complex: ... + @overload + def __mul__(a, b: int | Fraction) -> Fraction: ... + @overload + def __mul__(a, b: float) -> float: ... + @overload + def __mul__(a, b: complex) -> complex: ... + @overload + def __rmul__(b, a: int | Fraction) -> Fraction: ... + @overload + def __rmul__(b, a: float) -> float: ... + @overload + def __rmul__(b, a: complex) -> complex: ... + @overload + def __truediv__(a, b: int | Fraction) -> Fraction: ... + @overload + def __truediv__(a, b: float) -> float: ... + @overload + def __truediv__(a, b: complex) -> complex: ... + @overload + def __rtruediv__(b, a: int | Fraction) -> Fraction: ... + @overload + def __rtruediv__(b, a: float) -> float: ... + @overload + def __rtruediv__(b, a: complex) -> complex: ... + @overload + def __floordiv__(a, b: int | Fraction) -> int: ... + @overload + def __floordiv__(a, b: float) -> float: ... + @overload + def __rfloordiv__(b, a: int | Fraction) -> int: ... + @overload + def __rfloordiv__(b, a: float) -> float: ... + @overload + def __mod__(a, b: int | Fraction) -> Fraction: ... + @overload + def __mod__(a, b: float) -> float: ... + @overload + def __rmod__(b, a: int | Fraction) -> Fraction: ... + @overload + def __rmod__(b, a: float) -> float: ... + if sys.version_info >= (3, 8): + @overload + def __divmod__(a, b: int | Fraction) -> tuple[int, Fraction]: ... 
+ @overload + def __divmod__(a, b: float) -> tuple[float, Fraction]: ... + @overload + def __rdivmod__(a, b: int | Fraction) -> tuple[int, Fraction]: ... + @overload + def __rdivmod__(a, b: float) -> tuple[float, Fraction]: ... + else: + @overload + def __divmod__(self, other: int | Fraction) -> tuple[int, Fraction]: ... + @overload + def __divmod__(self, other: float) -> tuple[float, Fraction]: ... + @overload + def __rdivmod__(self, other: int | Fraction) -> tuple[int, Fraction]: ... + @overload + def __rdivmod__(self, other: float) -> tuple[float, Fraction]: ... + + @overload + def __pow__(a, b: int) -> Fraction: ... + @overload + def __pow__(a, b: float | Fraction) -> float: ... + @overload + def __pow__(a, b: complex) -> complex: ... + @overload + def __rpow__(b, a: float | Fraction) -> float: ... + @overload + def __rpow__(b, a: complex) -> complex: ... + def __pos__(a) -> Fraction: ... + def __neg__(a) -> Fraction: ... + def __abs__(a) -> Fraction: ... + def __trunc__(a) -> int: ... + def __floor__(a) -> int: ... + def __ceil__(a) -> int: ... + @overload + def __round__(self, ndigits: None = None) -> int: ... + @overload + def __round__(self, ndigits: int) -> Fraction: ... + def __hash__(self) -> int: ... + def __eq__(a, b: object) -> bool: ... + def __lt__(a, b: _ComparableNum) -> bool: ... + def __gt__(a, b: _ComparableNum) -> bool: ... + def __le__(a, b: _ComparableNum) -> bool: ... + def __ge__(a, b: _ComparableNum) -> bool: ... + def __bool__(a) -> bool: ... + def __copy__(self) -> Self: ... + def __deepcopy__(self, memo: Any) -> Self: ... + if sys.version_info >= (3, 11): + def __int__(a, _index: Callable[[SupportsIndex], int] = ...) -> int: ... + # Not actually defined within fractions.py, but provides more useful + # overrides + @property + def real(self) -> Fraction: ... + @property + def imag(self) -> Literal[0]: ... + def conjugate(self) -> Fraction: ... diff --git a/.vscode/Pico-W-Stub/stdlib/functools.pyi b/.vscode/Pico-W-Stub/stdlib/functools.pyi new file mode 100644 index 0000000..8adc3d8 --- /dev/null +++ b/.vscode/Pico-W-Stub/stdlib/functools.pyi @@ -0,0 +1,222 @@ +import sys +import types +from _typeshed import SupportsAllComparisons, SupportsItems +from collections.abc import Callable, Hashable, Iterable, Sequence, Sized +from typing import Any, Generic, NamedTuple, TypeVar, overload +from typing_extensions import Literal, ParamSpec, Self, TypeAlias, TypedDict, final + +if sys.version_info >= (3, 9): + from types import GenericAlias + +__all__ = [ + "update_wrapper", + "wraps", + "WRAPPER_ASSIGNMENTS", + "WRAPPER_UPDATES", + "total_ordering", + "cmp_to_key", + "lru_cache", + "reduce", + "partial", + "partialmethod", + "singledispatch", +] + +if sys.version_info >= (3, 8): + __all__ += ["cached_property", "singledispatchmethod"] + +if sys.version_info >= (3, 9): + __all__ += ["cache"] + +_T = TypeVar("_T") +_S = TypeVar("_S") +_PWrapped = ParamSpec("_PWrapped") +_RWrapped = TypeVar("_RWrapped") +_PWrapper = ParamSpec("_PWrapper") +_RWapper = TypeVar("_RWapper") + +@overload +def reduce(function: Callable[[_T, _S], _T], sequence: Iterable[_S], initial: _T) -> _T: ... +@overload +def reduce(function: Callable[[_T, _T], _T], sequence: Iterable[_T]) -> _T: ... 
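+
+# Usage sketch for the two reduce() overloads above: with an explicit initial
+# value (first overload) and without one (second overload); values are illustrative.
+# >>> from functools import reduce
+# >>> reduce(lambda acc, x: acc + x, [1, 2, 3, 4])
+# 10
+# >>> reduce(lambda acc, x: acc + x, [], 0)
+# 0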
+ +class _CacheInfo(NamedTuple): + hits: int + misses: int + maxsize: int | None + currsize: int + +if sys.version_info >= (3, 9): + class _CacheParameters(TypedDict): + maxsize: int + typed: bool + +@final +class _lru_cache_wrapper(Generic[_T]): + __wrapped__: Callable[..., _T] + def __call__(self, *args: Hashable, **kwargs: Hashable) -> _T: ... + def cache_info(self) -> _CacheInfo: ... + def cache_clear(self) -> None: ... + if sys.version_info >= (3, 9): + def cache_parameters(self) -> _CacheParameters: ... + + def __copy__(self) -> _lru_cache_wrapper[_T]: ... + def __deepcopy__(self, __memo: Any) -> _lru_cache_wrapper[_T]: ... + +if sys.version_info >= (3, 8): + @overload + def lru_cache(maxsize: int | None = 128, typed: bool = False) -> Callable[[Callable[..., _T]], _lru_cache_wrapper[_T]]: ... + @overload + def lru_cache(maxsize: Callable[..., _T], typed: bool = False) -> _lru_cache_wrapper[_T]: ... + +else: + def lru_cache(maxsize: int | None = 128, typed: bool = False) -> Callable[[Callable[..., _T]], _lru_cache_wrapper[_T]]: ... + +if sys.version_info >= (3, 12): + WRAPPER_ASSIGNMENTS: tuple[ + Literal["__module__"], + Literal["__name__"], + Literal["__qualname__"], + Literal["__doc__"], + Literal["__annotations__"], + Literal["__type_params__"], + ] +else: + WRAPPER_ASSIGNMENTS: tuple[ + Literal["__module__"], Literal["__name__"], Literal["__qualname__"], Literal["__doc__"], Literal["__annotations__"] + ] +WRAPPER_UPDATES: tuple[Literal["__dict__"]] + +class _Wrapped(Generic[_PWrapped, _RWrapped, _PWrapper, _RWapper]): + __wrapped__: Callable[_PWrapped, _RWrapped] + def __call__(self, *args: _PWrapper.args, **kwargs: _PWrapper.kwargs) -> _RWapper: ... + # as with ``Callable``, we'll assume that these attributes exist + __name__: str + __qualname__: str + +class _Wrapper(Generic[_PWrapped, _RWrapped]): + def __call__(self, f: Callable[_PWrapper, _RWapper]) -> _Wrapped[_PWrapped, _RWrapped, _PWrapper, _RWapper]: ... + +if sys.version_info >= (3, 12): + def update_wrapper( + wrapper: Callable[_PWrapper, _RWapper], + wrapped: Callable[_PWrapped, _RWrapped], + assigned: Sequence[str] = ("__module__", "__name__", "__qualname__", "__doc__", "__annotations__", "__type_params__"), + updated: Sequence[str] = ("__dict__",), + ) -> _Wrapped[_PWrapped, _RWrapped, _PWrapper, _RWapper]: ... + def wraps( + wrapped: Callable[_PWrapped, _RWrapped], + assigned: Sequence[str] = ("__module__", "__name__", "__qualname__", "__doc__", "__annotations__", "__type_params__"), + updated: Sequence[str] = ("__dict__",), + ) -> _Wrapper[_PWrapped, _RWrapped]: ... + +else: + def update_wrapper( + wrapper: Callable[_PWrapper, _RWapper], + wrapped: Callable[_PWrapped, _RWrapped], + assigned: Sequence[str] = ("__module__", "__name__", "__qualname__", "__doc__", "__annotations__"), + updated: Sequence[str] = ("__dict__",), + ) -> _Wrapped[_PWrapped, _RWrapped, _PWrapper, _RWapper]: ... + def wraps( + wrapped: Callable[_PWrapped, _RWrapped], + assigned: Sequence[str] = ("__module__", "__name__", "__qualname__", "__doc__", "__annotations__"), + updated: Sequence[str] = ("__dict__",), + ) -> _Wrapper[_PWrapped, _RWrapped]: ... + +def total_ordering(cls: type[_T]) -> type[_T]: ... +def cmp_to_key(mycmp: Callable[[_T, _T], int]) -> Callable[[_T], SupportsAllComparisons]: ... + +class partial(Generic[_T]): + @property + def func(self) -> Callable[..., _T]: ... + @property + def args(self) -> tuple[Any, ...]: ... + @property + def keywords(self) -> dict[str, Any]: ... 
+ def __new__(cls, __func: Callable[..., _T], *args: Any, **kwargs: Any) -> Self: ... + def __call__(__self, *args: Any, **kwargs: Any) -> _T: ... + if sys.version_info >= (3, 9): + def __class_getitem__(cls, item: Any) -> GenericAlias: ... + +# With protocols, this could change into a generic protocol that defines __get__ and returns _T +_Descriptor: TypeAlias = Any + +class partialmethod(Generic[_T]): + func: Callable[..., _T] | _Descriptor + args: tuple[Any, ...] + keywords: dict[str, Any] + @overload + def __init__(self, __func: Callable[..., _T], *args: Any, **keywords: Any) -> None: ... + @overload + def __init__(self, __func: _Descriptor, *args: Any, **keywords: Any) -> None: ... + if sys.version_info >= (3, 8): + def __get__(self, obj: Any, cls: type[Any] | None = None) -> Callable[..., _T]: ... + else: + def __get__(self, obj: Any, cls: type[Any] | None) -> Callable[..., _T]: ... + + @property + def __isabstractmethod__(self) -> bool: ... + if sys.version_info >= (3, 9): + def __class_getitem__(cls, item: Any) -> GenericAlias: ... + +class _SingleDispatchCallable(Generic[_T]): + registry: types.MappingProxyType[Any, Callable[..., _T]] + def dispatch(self, cls: Any) -> Callable[..., _T]: ... + # @fun.register(complex) + # def _(arg, verbose=False): ... + @overload + def register(self, cls: type[Any], func: None = None) -> Callable[[Callable[..., _T]], Callable[..., _T]]: ... + # @fun.register + # def _(arg: int, verbose=False): + @overload + def register(self, cls: Callable[..., _T], func: None = None) -> Callable[..., _T]: ... + # fun.register(int, lambda x: x) + @overload + def register(self, cls: type[Any], func: Callable[..., _T]) -> Callable[..., _T]: ... + def _clear_cache(self) -> None: ... + def __call__(__self, *args: Any, **kwargs: Any) -> _T: ... + +def singledispatch(func: Callable[..., _T]) -> _SingleDispatchCallable[_T]: ... + +if sys.version_info >= (3, 8): + class singledispatchmethod(Generic[_T]): + dispatcher: _SingleDispatchCallable[_T] + func: Callable[..., _T] + def __init__(self, func: Callable[..., _T]) -> None: ... + @property + def __isabstractmethod__(self) -> bool: ... + @overload + def register(self, cls: type[Any], method: None = None) -> Callable[[Callable[..., _T]], Callable[..., _T]]: ... + @overload + def register(self, cls: Callable[..., _T], method: None = None) -> Callable[..., _T]: ... + @overload + def register(self, cls: type[Any], method: Callable[..., _T]) -> Callable[..., _T]: ... + def __get__(self, obj: _S, cls: type[_S] | None = None) -> Callable[..., _T]: ... + + class cached_property(Generic[_T]): + func: Callable[[Any], _T] + attrname: str | None + def __init__(self, func: Callable[[Any], _T]) -> None: ... + @overload + def __get__(self, instance: None, owner: type[Any] | None = None) -> Self: ... + @overload + def __get__(self, instance: object, owner: type[Any] | None = None) -> _T: ... + def __set_name__(self, owner: type[Any], name: str) -> None: ... + # __set__ is not defined at runtime, but @cached_property is designed to be settable + def __set__(self, instance: object, value: _T) -> None: ... + if sys.version_info >= (3, 9): + def __class_getitem__(cls, item: Any) -> GenericAlias: ... + +if sys.version_info >= (3, 9): + def cache(__user_function: Callable[..., _T]) -> _lru_cache_wrapper[_T]: ... + +def _make_key( + args: tuple[Hashable, ...], + kwds: SupportsItems[Any, Any], + typed: bool, + kwd_mark: tuple[object, ...] 
= ..., + fasttypes: set[type] = ..., + tuple: type = ..., + type: Any = ..., + len: Callable[[Sized], int] = ..., +) -> Hashable: ... diff --git a/.vscode/Pico-W-Stub/stdlib/io.pyi b/.vscode/Pico-W-Stub/stdlib/io.pyi new file mode 100644 index 0000000..8a99217 --- /dev/null +++ b/.vscode/Pico-W-Stub/stdlib/io.pyi @@ -0,0 +1,196 @@ +import abc +import builtins +import codecs +import sys +from _typeshed import ReadableBuffer, Self, StrOrBytesPath, WriteableBuffer +from collections.abc import Callable, Iterable, Iterator +from stdlib.os import _Opener +from types import TracebackType +from typing import IO, Any, BinaryIO, TextIO +from typing_extensions import Literal + +__all__ = [ + "BlockingIOError", + "open", + "IOBase", + "RawIOBase", + "FileIO", + "BytesIO", + "StringIO", + "BufferedIOBase", + "BufferedReader", + "BufferedWriter", + "BufferedRWPair", + "BufferedRandom", + "TextIOBase", + "TextIOWrapper", + "UnsupportedOperation", + "SEEK_SET", + "SEEK_CUR", + "SEEK_END", +] + +if sys.version_info >= (3, 8): + __all__ += ["open_code"] + +DEFAULT_BUFFER_SIZE: Literal[8192] + +SEEK_SET: Literal[0] +SEEK_CUR: Literal[1] +SEEK_END: Literal[2] + +open = builtins.open + +if sys.version_info >= (3, 8): + def open_code(path: str) -> IO[bytes]: ... + +BlockingIOError = builtins.BlockingIOError + +class UnsupportedOperation(OSError, ValueError): ... + +class IOBase(metaclass=abc.ABCMeta): + def __iter__(self) -> Iterator[bytes]: ... + def __next__(self) -> bytes: ... + def __enter__(self: Self) -> Self: ... + def __exit__( + self, exc_type: type[BaseException] | None, exc_val: BaseException | None, exc_tb: TracebackType | None + ) -> None: ... + def close(self) -> None: ... + def fileno(self) -> int: ... + def flush(self) -> None: ... + def isatty(self) -> bool: ... + def readable(self) -> bool: ... + read: Callable[..., Any] + def readlines(self, __hint: int = ...) -> list[bytes]: ... + def seek(self, __offset: int, __whence: int = ...) -> int: ... + def seekable(self) -> bool: ... + def tell(self) -> int: ... + def truncate(self, __size: int | None = ...) -> int: ... + def writable(self) -> bool: ... + write: Callable[..., Any] + def writelines(self, __lines: Iterable[ReadableBuffer]) -> None: ... + def readline(self, __size: int | None = ...) -> bytes: ... + def __del__(self) -> None: ... + @property + def closed(self) -> bool: ... + def _checkClosed(self, msg: str | None = ...) -> None: ... # undocumented + +class RawIOBase(IOBase): + def readall(self) -> bytes: ... + def readinto(self, __buffer: WriteableBuffer) -> int | None: ... + def write(self, __b: ReadableBuffer) -> int | None: ... + def read(self, __size: int = ...) -> bytes | None: ... + +class BufferedIOBase(IOBase): + raw: RawIOBase # This is not part of the BufferedIOBase API and may not exist on some implementations. + def detach(self) -> RawIOBase: ... + def readinto(self, __buffer: WriteableBuffer) -> int: ... + def write(self, __buffer: ReadableBuffer) -> int: ... + def readinto1(self, __buffer: WriteableBuffer) -> int: ... + def read(self, __size: int | None = ...) -> bytes: ... + def read1(self, __size: int = ...) -> bytes: ... + +class FileIO(RawIOBase, BinaryIO): + mode: str + name: StrOrBytesPath | int # type: ignore[assignment] + def __init__( + self, file: StrOrBytesPath | int, mode: str = ..., closefd: bool = ..., opener: _Opener | None = ... + ) -> None: ... + @property + def closefd(self) -> bool: ... + def write(self, __b: ReadableBuffer) -> int: ... + def read(self, __size: int = ...) -> bytes: ... 
+ def __enter__(self: Self) -> Self: ... + +class BytesIO(BufferedIOBase, BinaryIO): + def __init__(self, initial_bytes: ReadableBuffer = ...) -> None: ... + # BytesIO does not contain a "name" field. This workaround is necessary + # to allow BytesIO sub-classes to add this field, as it is defined + # as a read-only property on IO[]. + name: Any + def __enter__(self: Self) -> Self: ... + def getvalue(self) -> bytes: ... + def getbuffer(self) -> memoryview: ... + def read1(self, __size: int | None = ...) -> bytes: ... + +class BufferedReader(BufferedIOBase, BinaryIO): + def __enter__(self: Self) -> Self: ... + def __init__(self, raw: RawIOBase, buffer_size: int = ...) -> None: ... + def peek(self, __size: int = ...) -> bytes: ... + +class BufferedWriter(BufferedIOBase, BinaryIO): + def __enter__(self: Self) -> Self: ... + def __init__(self, raw: RawIOBase, buffer_size: int = ...) -> None: ... + def write(self, __buffer: ReadableBuffer) -> int: ... + +class BufferedRandom(BufferedReader, BufferedWriter): + def __enter__(self: Self) -> Self: ... + def seek(self, __target: int, __whence: int = ...) -> int: ... # stubtest needs this + +class BufferedRWPair(BufferedIOBase): + def __init__(self, reader: RawIOBase, writer: RawIOBase, buffer_size: int = ...) -> None: ... + def peek(self, __size: int = ...) -> bytes: ... + +class TextIOBase(IOBase): + encoding: str + errors: str | None + newlines: str | tuple[str, ...] | None + def __iter__(self) -> Iterator[str]: ... # type: ignore[override] + def __next__(self) -> str: ... # type: ignore[override] + def detach(self) -> BinaryIO: ... + def write(self, __s: str) -> int: ... + def writelines(self, __lines: Iterable[str]) -> None: ... # type: ignore[override] + def readline(self, __size: int = ...) -> str: ... # type: ignore[override] + def readlines(self, __hint: int = ...) -> list[str]: ... # type: ignore[override] + def read(self, __size: int | None = ...) -> str: ... + +class TextIOWrapper(TextIOBase, TextIO): + def __init__( + self, + buffer: IO[bytes], + encoding: str | None = ..., + errors: str | None = ..., + newline: str | None = ..., + line_buffering: bool = ..., + write_through: bool = ..., + ) -> None: ... + @property + def buffer(self) -> BinaryIO: ... + @property + def closed(self) -> bool: ... + @property + def line_buffering(self) -> bool: ... + @property + def write_through(self) -> bool: ... + def reconfigure( + self, + *, + encoding: str | None = ..., + errors: str | None = ..., + newline: str | None = ..., + line_buffering: bool | None = ..., + write_through: bool | None = ..., + ) -> None: ... + # These are inherited from TextIOBase, but must exist in the stub to satisfy mypy. + def __enter__(self: Self) -> Self: ... + def __iter__(self) -> Iterator[str]: ... # type: ignore[override] + def __next__(self) -> str: ... # type: ignore[override] + def writelines(self, __lines: Iterable[str]) -> None: ... # type: ignore[override] + def readline(self, __size: int = ...) -> str: ... # type: ignore[override] + def readlines(self, __hint: int = ...) -> list[str]: ... # type: ignore[override] + def seek(self, __cookie: int, __whence: int = ...) -> int: ... # stubtest needs this + +class StringIO(TextIOWrapper): + def __init__(self, initial_value: str | None = ..., newline: str | None = ...) -> None: ... + # StringIO does not contain a "name" field. This workaround is necessary + # to allow StringIO sub-classes to add this field, as it is defined + # as a read-only property on IO[]. + name: Any + def getvalue(self) -> str: ... 
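+
+# Usage sketch for the in-memory streams stubbed above (BytesIO/StringIO);
+# the buffer contents are illustrative only.
+# >>> from io import BytesIO, StringIO
+# >>> buf = BytesIO()
+# >>> buf.write(b'abc')
+# 3
+# >>> buf.getvalue()
+# b'abc'
+# >>> StringIO('line1\nline2\n').readline()
+# 'line1\n'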
+ +class IncrementalNewlineDecoder(codecs.IncrementalDecoder): + def __init__(self, decoder: codecs.IncrementalDecoder | None, translate: bool, errors: str = ...) -> None: ... + def decode(self, input: ReadableBuffer | str, final: bool = ...) -> str: ... + @property + def newlines(self) -> str | tuple[str, ...] | None: ... + def setstate(self, __state: tuple[bytes, int]) -> None: ... diff --git a/.vscode/Pico-W-Stub/stdlib/numbers.pyi b/.vscode/Pico-W-Stub/stdlib/numbers.pyi new file mode 100644 index 0000000..55f2104 --- /dev/null +++ b/.vscode/Pico-W-Stub/stdlib/numbers.pyi @@ -0,0 +1,129 @@ +# Note: these stubs are incomplete. The more complex type +# signatures are currently omitted. + +from abc import ABCMeta, abstractmethod +from typing import Any, SupportsFloat, overload + +__all__ = ["Number", "Complex", "Real", "Rational", "Integral"] + +class Number(metaclass=ABCMeta): + @abstractmethod + def __hash__(self) -> int: ... + +class Complex(Number): + @abstractmethod + def __complex__(self) -> complex: ... + def __bool__(self) -> bool: ... + @property + @abstractmethod + def real(self) -> Any: ... + @property + @abstractmethod + def imag(self) -> Any: ... + @abstractmethod + def __add__(self, other: Any) -> Any: ... + @abstractmethod + def __radd__(self, other: Any) -> Any: ... + @abstractmethod + def __neg__(self) -> Any: ... + @abstractmethod + def __pos__(self) -> Any: ... + def __sub__(self, other: Any) -> Any: ... + def __rsub__(self, other: Any) -> Any: ... + @abstractmethod + def __mul__(self, other: Any) -> Any: ... + @abstractmethod + def __rmul__(self, other: Any) -> Any: ... + @abstractmethod + def __truediv__(self, other: Any) -> Any: ... + @abstractmethod + def __rtruediv__(self, other: Any) -> Any: ... + @abstractmethod + def __pow__(self, exponent: Any) -> Any: ... + @abstractmethod + def __rpow__(self, base: Any) -> Any: ... + @abstractmethod + def __abs__(self) -> Real: ... + @abstractmethod + def conjugate(self) -> Any: ... + @abstractmethod + def __eq__(self, other: object) -> bool: ... + +class Real(Complex, SupportsFloat): + @abstractmethod + def __float__(self) -> float: ... + @abstractmethod + def __trunc__(self) -> int: ... + @abstractmethod + def __floor__(self) -> int: ... + @abstractmethod + def __ceil__(self) -> int: ... + @abstractmethod + @overload + def __round__(self, ndigits: None = None) -> int: ... + @abstractmethod + @overload + def __round__(self, ndigits: int) -> Any: ... + def __divmod__(self, other: Any) -> Any: ... + def __rdivmod__(self, other: Any) -> Any: ... + @abstractmethod + def __floordiv__(self, other: Any) -> int: ... + @abstractmethod + def __rfloordiv__(self, other: Any) -> int: ... + @abstractmethod + def __mod__(self, other: Any) -> Any: ... + @abstractmethod + def __rmod__(self, other: Any) -> Any: ... + @abstractmethod + def __lt__(self, other: Any) -> bool: ... + @abstractmethod + def __le__(self, other: Any) -> bool: ... + def __complex__(self) -> complex: ... + @property + def real(self) -> Any: ... + @property + def imag(self) -> Any: ... + def conjugate(self) -> Any: ... + +class Rational(Real): + @property + @abstractmethod + def numerator(self) -> int: ... + @property + @abstractmethod + def denominator(self) -> int: ... + def __float__(self) -> float: ... + +class Integral(Rational): + @abstractmethod + def __int__(self) -> int: ... + def __index__(self) -> int: ... + @abstractmethod + def __pow__(self, exponent: Any, modulus: Any | None = None) -> Any: ... + @abstractmethod + def __lshift__(self, other: Any) -> Any: ... 
+ @abstractmethod + def __rlshift__(self, other: Any) -> Any: ... + @abstractmethod + def __rshift__(self, other: Any) -> Any: ... + @abstractmethod + def __rrshift__(self, other: Any) -> Any: ... + @abstractmethod + def __and__(self, other: Any) -> Any: ... + @abstractmethod + def __rand__(self, other: Any) -> Any: ... + @abstractmethod + def __xor__(self, other: Any) -> Any: ... + @abstractmethod + def __rxor__(self, other: Any) -> Any: ... + @abstractmethod + def __or__(self, other: Any) -> Any: ... + @abstractmethod + def __ror__(self, other: Any) -> Any: ... + @abstractmethod + def __invert__(self) -> Any: ... + def __float__(self) -> float: ... + @property + def numerator(self) -> int: ... + @property + def denominator(self) -> int: ... diff --git a/.vscode/Pico-W-Stub/stdlib/os/__init__.pyi b/.vscode/Pico-W-Stub/stdlib/os/__init__.pyi new file mode 100644 index 0000000..1f34a39 --- /dev/null +++ b/.vscode/Pico-W-Stub/stdlib/os/__init__.pyi @@ -0,0 +1,1132 @@ +import sys +from abc import abstractmethod # type: ignore[misc] +from builtins import OSError +from collections.abc import Callable, Iterable, Iterator, Mapping, MutableMapping, Sequence +from contextlib import AbstractContextManager +from io import BufferedRandom, BufferedReader, BufferedWriter, FileIO +from io import TextIOWrapper as _TextIOWrapper + +# from subprocess import Popen +from typing import ( + IO, + Any, + AnyStr, + BinaryIO, + Generic, + NoReturn, + Protocol, + TypeVar, + overload, + runtime_checkable, +) + +from _typeshed import ( + AnyStr_co, + BytesPath, + FileDescriptorLike, + GenericPath, + OpenBinaryMode, + OpenBinaryModeReading, + OpenBinaryModeUpdating, + OpenBinaryModeWriting, + OpenTextMode, + ReadableBuffer, + Self, + StrOrBytesPath, + StrPath, + SupportsLenAndGetItem, + WriteableBuffer, + structseq, +) +from typing_extensions import Final, Literal, TypeAlias, final + +from . import path as _path + +if sys.version_info >= (3, 9): + from types import GenericAlias + +# This unnecessary alias is to work around various errors +path = _path # type: ignore[assignment] + +_T = TypeVar("_T") +_T1 = TypeVar("_T1") +_T2 = TypeVar("_T2") + +# ----- os variables ----- + +error = OSError + +supports_bytes_environ: bool + +supports_dir_fd: set[Callable[..., Any]] +supports_fd: set[Callable[..., Any]] +supports_effective_ids: set[Callable[..., Any]] +supports_follow_symlinks: set[Callable[..., Any]] + +if sys.platform != "win32": + # Unix only + PRIO_PROCESS: int + PRIO_PGRP: int + PRIO_USER: int + + F_LOCK: int + F_TLOCK: int + F_ULOCK: int + F_TEST: int + + if sys.platform != "darwin": + POSIX_FADV_NORMAL: int + POSIX_FADV_SEQUENTIAL: int + POSIX_FADV_RANDOM: int + POSIX_FADV_NOREUSE: int + POSIX_FADV_WILLNEED: int + POSIX_FADV_DONTNEED: int + + SF_NODISKIO: int + SF_MNOWAIT: int + SF_SYNC: int + + if sys.platform == "linux": + XATTR_SIZE_MAX: int + XATTR_CREATE: int + XATTR_REPLACE: int + + P_PID: int + P_PGID: int + P_ALL: int + + if sys.platform == "linux" and sys.version_info >= (3, 9): + P_PIDFD: int + + WEXITED: int + WSTOPPED: int + WNOWAIT: int + + CLD_EXITED: int + CLD_DUMPED: int + CLD_TRAPPED: int + CLD_CONTINUED: int + + if sys.version_info >= (3, 9): + CLD_KILLED: int + CLD_STOPPED: int + + # TODO: SCHED_RESET_ON_FORK not available on darwin? + # TODO: SCHED_BATCH and SCHED_IDLE are linux only? 
+ SCHED_OTHER: int # some flavors of Unix + SCHED_BATCH: int # some flavors of Unix + SCHED_IDLE: int # some flavors of Unix + SCHED_SPORADIC: int # some flavors of Unix + SCHED_FIFO: int # some flavors of Unix + SCHED_RR: int # some flavors of Unix + SCHED_RESET_ON_FORK: int # some flavors of Unix + +if sys.platform != "win32": + RTLD_LAZY: int + RTLD_NOW: int + RTLD_GLOBAL: int + RTLD_LOCAL: int + RTLD_NODELETE: int + RTLD_NOLOAD: int + +if sys.platform == "linux": + RTLD_DEEPBIND: int + GRND_NONBLOCK: int + GRND_RANDOM: int + +SEEK_SET: int +SEEK_CUR: int +SEEK_END: int +if sys.platform != "win32": + SEEK_DATA: int # some flavors of Unix + SEEK_HOLE: int # some flavors of Unix + +O_RDONLY: int +O_WRONLY: int +O_RDWR: int +O_APPEND: int +O_CREAT: int +O_EXCL: int +O_TRUNC: int +# We don't use sys.platform for O_* flags to denote platform-dependent APIs because some codes, +# including tests for mypy, use a more finer way than sys.platform before using these APIs +# See https://github.com/python/typeshed/pull/2286 for discussions +O_DSYNC: int # Unix only +O_RSYNC: int # Unix only +O_SYNC: int # Unix only +O_NDELAY: int # Unix only +O_NONBLOCK: int # Unix only +O_NOCTTY: int # Unix only +O_CLOEXEC: int # Unix only +O_SHLOCK: int # Unix only +O_EXLOCK: int # Unix only +O_BINARY: int # Windows only +O_NOINHERIT: int # Windows only +O_SHORT_LIVED: int # Windows only +O_TEMPORARY: int # Windows only +O_RANDOM: int # Windows only +O_SEQUENTIAL: int # Windows only +O_TEXT: int # Windows only +O_ASYNC: int # Gnu extension if in C library +O_DIRECT: int # Gnu extension if in C library +O_DIRECTORY: int # Gnu extension if in C library +O_NOFOLLOW: int # Gnu extension if in C library +O_NOATIME: int # Gnu extension if in C library +O_PATH: int # Gnu extension if in C library +O_TMPFILE: int # Gnu extension if in C library +O_LARGEFILE: int # Gnu extension if in C library +O_ACCMODE: int # TODO: when does this exist? + +if sys.platform != "win32" and sys.platform != "darwin": + # posix, but apparently missing on macos + ST_APPEND: int + ST_MANDLOCK: int + ST_NOATIME: int + ST_NODEV: int + ST_NODIRATIME: int + ST_NOEXEC: int + ST_RELATIME: int + ST_SYNCHRONOUS: int + ST_WRITE: int + +if sys.platform != "win32": + NGROUPS_MAX: int + ST_NOSUID: int + ST_RDONLY: int + +curdir: str +pardir: str +sep: str +if sys.platform == "win32": + altsep: str +else: + altsep: str | None +extsep: str +pathsep: str +defpath: str +linesep: str +devnull: str +name: str + +F_OK: int +R_OK: int +W_OK: int +X_OK: int + +_EnvironCodeFunc: TypeAlias = Callable[[AnyStr], AnyStr] + +class _Environ(MutableMapping[AnyStr, AnyStr], Generic[AnyStr]): + encodekey: _EnvironCodeFunc[AnyStr] + decodekey: _EnvironCodeFunc[AnyStr] + encodevalue: _EnvironCodeFunc[AnyStr] + decodevalue: _EnvironCodeFunc[AnyStr] + if sys.version_info >= (3, 9): + def __init__( + self, + data: MutableMapping[AnyStr, AnyStr], + encodekey: _EnvironCodeFunc[AnyStr], + decodekey: _EnvironCodeFunc[AnyStr], + encodevalue: _EnvironCodeFunc[AnyStr], + decodevalue: _EnvironCodeFunc[AnyStr], + ) -> None: ... + else: + putenv: Callable[[AnyStr, AnyStr], object] + unsetenv: Callable[[AnyStr, AnyStr], object] + def __init__( + self, + data: MutableMapping[AnyStr, AnyStr], + encodekey: _EnvironCodeFunc[AnyStr], + decodekey: _EnvironCodeFunc[AnyStr], + encodevalue: _EnvironCodeFunc[AnyStr], + decodevalue: _EnvironCodeFunc[AnyStr], + putenv: Callable[[AnyStr, AnyStr], object], + unsetenv: Callable[[AnyStr, AnyStr], object], + ) -> None: ... 
+ + def setdefault(self, key: AnyStr, value: AnyStr) -> AnyStr: ... # type: ignore[override] + def copy(self) -> dict[AnyStr, AnyStr]: ... + def __delitem__(self, key: AnyStr) -> None: ... + def __getitem__(self, key: AnyStr) -> AnyStr: ... + def __setitem__(self, key: AnyStr, value: AnyStr) -> None: ... + def __iter__(self) -> Iterator[AnyStr]: ... + def __len__(self) -> int: ... + if sys.version_info >= (3, 9): + def __or__(self, other: Mapping[_T1, _T2]) -> dict[AnyStr | _T1, AnyStr | _T2]: ... + def __ror__(self, other: Mapping[_T1, _T2]) -> dict[AnyStr | _T1, AnyStr | _T2]: ... + # We use @overload instead of a Union for reasons similar to those given for + # overloading MutableMapping.update in stdlib/typing.pyi + # The type: ignore is needed due to incompatible __or__/__ior__ signatures + @overload # type: ignore[misc] + def __ior__(self: Self, other: Mapping[AnyStr, AnyStr]) -> Self: ... + @overload + def __ior__(self: Self, other: Iterable[tuple[AnyStr, AnyStr]]) -> Self: ... + +environ: _Environ[str] +if sys.platform != "win32": + environb: _Environ[bytes] + +if sys.platform != "win32": + confstr_names: dict[str, int] + pathconf_names: dict[str, int] + sysconf_names: dict[str, int] + + EX_OK: int + EX_USAGE: int + EX_DATAERR: int + EX_NOINPUT: int + EX_NOUSER: int + EX_NOHOST: int + EX_UNAVAILABLE: int + EX_SOFTWARE: int + EX_OSERR: int + EX_OSFILE: int + EX_CANTCREAT: int + EX_IOERR: int + EX_TEMPFAIL: int + EX_PROTOCOL: int + EX_NOPERM: int + EX_CONFIG: int + EX_NOTFOUND: int + +P_NOWAIT: int +P_NOWAITO: int +P_WAIT: int +if sys.platform == "win32": + P_DETACH: int + P_OVERLAY: int + +# wait()/waitpid() options +if sys.platform != "win32": + WNOHANG: int # Unix only + WCONTINUED: int # some Unix systems + WUNTRACED: int # Unix only + +TMP_MAX: int # Undocumented, but used by tempfile + +# ----- os classes (structures) ----- +@final +class stat_result(structseq[float], tuple[int, int, int, int, int, int, int, float, float, float]): + # The constructor of this class takes an iterable of variable length (though it must be at least 10). + # + # However, this class behaves like a tuple of 10 elements, + # no matter how long the iterable supplied to the constructor is. + # https://github.com/python/typeshed/pull/6560#discussion_r767162532 + # + # The 10 elements always present are st_mode, st_ino, st_dev, st_nlink, + # st_uid, st_gid, st_size, st_atime, st_mtime, st_ctime. + # + # More items may be added at the end by some implementations. + if sys.version_info >= (3, 10): + __match_args__: Final = ( + "st_mode", + "st_ino", + "st_dev", + "st_nlink", + "st_uid", + "st_gid", + "st_size", + ) + @property + def st_mode(self) -> int: ... # protection bits, + @property + def st_ino(self) -> int: ... # inode number, + @property + def st_dev(self) -> int: ... # device, + @property + def st_nlink(self) -> int: ... # number of hard links, + @property + def st_uid(self) -> int: ... # user id of owner, + @property + def st_gid(self) -> int: ... # group id of owner, + @property + def st_size(self) -> int: ... # size of file, in bytes, + @property + def st_atime(self) -> float: ... # time of most recent access, + @property + def st_mtime(self) -> float: ... # time of most recent content modification, + # platform dependent (time of most recent metadata change on Unix, or the time of creation on Windows) + @property + def st_ctime(self) -> float: ... + @property + def st_atime_ns(self) -> int: ... # time of most recent access, in nanoseconds + @property + def st_mtime_ns(self) -> int: ... 
# time of most recent content modification in nanoseconds + # platform dependent (time of most recent metadata change on Unix, or the time of creation on Windows) in nanoseconds + @property + def st_ctime_ns(self) -> int: ... + if sys.platform == "win32": + @property + def st_file_attributes(self) -> int: ... + if sys.version_info >= (3, 8): + @property + def st_reparse_tag(self) -> int: ... + else: + @property + def st_blocks(self) -> int: ... # number of blocks allocated for file + @property + def st_blksize(self) -> int: ... # filesystem blocksize + @property + def st_rdev(self) -> int: ... # type of device if an inode device + if sys.platform != "linux": + # These properties are available on MacOS, but not on Windows or Ubuntu. + # On other Unix systems (such as FreeBSD), the following attributes may be + # available (but may be only filled out if root tries to use them): + @property + def st_gen(self) -> int: ... # file generation number + @property + def st_birthtime(self) -> int: ... # time of file creation + if sys.platform == "darwin": + @property + def st_flags(self) -> int: ... # user defined flags for file + # Attributes documented as sometimes appearing, but deliberately omitted from the stub: `st_creator`, `st_rsize`, `st_type`. + # See https://github.com/python/typeshed/pull/6560#issuecomment-991253327 + +@runtime_checkable +class PathLike(Protocol[AnyStr_co]): + @abstractmethod + def __fspath__(self) -> AnyStr_co: ... + +@overload +def listdir(path: StrPath | None = ...) -> list[str]: ... +@overload +def listdir(path: BytesPath) -> list[bytes]: ... +@overload +def listdir(path: int) -> list[str]: ... + +_FdOrAnyPath: TypeAlias = int | StrOrBytesPath + +@final +class DirEntry(Generic[AnyStr]): + # This is what the scandir iterator yields + # The constructor is hidden + + @property + def name(self) -> AnyStr: ... + @property + def path(self) -> AnyStr: ... + def inode(self) -> int: ... + def is_dir(self, *, follow_symlinks: bool = ...) -> bool: ... + def is_file(self, *, follow_symlinks: bool = ...) -> bool: ... + def is_symlink(self) -> bool: ... + def stat(self, *, follow_symlinks: bool = ...) -> stat_result: ... + def __fspath__(self) -> AnyStr: ... + if sys.version_info >= (3, 9): + def __class_getitem__(cls, item: Any) -> GenericAlias: ... + +@final +class statvfs_result(structseq[int], tuple[int, int, int, int, int, int, int, int, int, int, int]): + if sys.version_info >= (3, 10): + __match_args__: Final = ( + "f_bsize", + "f_frsize", + "f_blocks", + "f_bfree", + "f_bavail", + "f_files", + "f_ffree", + "f_favail", + "f_flag", + "f_namemax", + ) + @property + def f_bsize(self) -> int: ... + @property + def f_frsize(self) -> int: ... + @property + def f_blocks(self) -> int: ... + @property + def f_bfree(self) -> int: ... + @property + def f_bavail(self) -> int: ... + @property + def f_files(self) -> int: ... + @property + def f_ffree(self) -> int: ... + @property + def f_favail(self) -> int: ... + @property + def f_flag(self) -> int: ... + @property + def f_namemax(self) -> int: ... + @property + def f_fsid(self) -> int: ... + +# ----- os function stubs ----- +def fsencode(filename: StrOrBytesPath) -> bytes: ... +def fsdecode(filename: StrOrBytesPath) -> str: ... +@overload +def fspath(path: str) -> str: ... +@overload +def fspath(path: bytes) -> bytes: ... +@overload +def fspath(path: PathLike[AnyStr]) -> AnyStr: ... +def get_exec_path(env: Mapping[str, str] | None = ...) -> list[str]: ... +def getlogin() -> str: ... +def getpid() -> int: ... +def getppid() -> int: ... 
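+
+# Usage sketch for the path/process helpers stubbed above; the file name is
+# illustrative only.
+# >>> import os
+# >>> os.fsencode('main.py')
+# b'main.py'
+# >>> os.fsdecode(b'main.py')
+# 'main.py'
+# >>> isinstance(os.getpid(), int)
+# True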
+def strerror(__code: int) -> str: ... +def umask(__mask: int) -> int: ... +@final +class uname_result(structseq[str], tuple[str, str, str, str, str]): + if sys.version_info >= (3, 10): + __match_args__: Final = ("sysname", "nodename", "release", "version", "machine") + @property + def sysname(self) -> str: ... + @property + def nodename(self) -> str: ... + @property + def release(self) -> str: ... + @property + def version(self) -> str: ... + @property + def machine(self) -> str: ... + +if sys.platform != "win32": + def ctermid() -> str: ... + def getegid() -> int: ... + def geteuid() -> int: ... + def getgid() -> int: ... + def getgrouplist(__user: str, __group: int) -> list[int]: ... + def getgroups() -> list[int]: ... # Unix only, behaves differently on Mac + def initgroups(__username: str, __gid: int) -> None: ... + def getpgid(pid: int) -> int: ... + def getpgrp() -> int: ... + def getpriority(which: int, who: int) -> int: ... + def setpriority(which: int, who: int, priority: int) -> None: ... + if sys.platform != "darwin": + def getresuid() -> tuple[int, int, int]: ... + def getresgid() -> tuple[int, int, int]: ... + + def getuid() -> int: ... + def setegid(__egid: int) -> None: ... + def seteuid(__euid: int) -> None: ... + def setgid(__gid: int) -> None: ... + def setgroups(__groups: Sequence[int]) -> None: ... + def setpgrp() -> None: ... + def setpgid(__pid: int, __pgrp: int) -> None: ... + def setregid(__rgid: int, __egid: int) -> None: ... + if sys.platform != "darwin": + def setresgid(rgid: int, egid: int, sgid: int) -> None: ... + def setresuid(ruid: int, euid: int, suid: int) -> None: ... + + def setreuid(__ruid: int, __euid: int) -> None: ... + def getsid(__pid: int) -> int: ... + def setsid() -> None: ... + def setuid(__uid: int) -> None: ... + def uname() -> uname_result: ... + +@overload +def getenv(key: str) -> str | None: ... +@overload +def getenv(key: str, default: _T) -> str | _T: ... + +if sys.platform != "win32": + @overload + def getenvb(key: bytes) -> bytes | None: ... + @overload + def getenvb(key: bytes, default: _T) -> bytes | _T: ... + def putenv(__name: StrOrBytesPath, __value: StrOrBytesPath) -> None: ... + def unsetenv(__name: StrOrBytesPath) -> None: ... + +else: + def putenv(__name: str, __value: str) -> None: ... + + if sys.version_info >= (3, 9): + def unsetenv(__name: str) -> None: ... + +_Opener: TypeAlias = Callable[[str, int], int] + +@overload +def fdopen( + fd: int, + mode: OpenTextMode = ..., + buffering: int = ..., + encoding: str | None = ..., + errors: str | None = ..., + newline: str | None = ..., + closefd: bool = ..., + opener: _Opener | None = ..., +) -> _TextIOWrapper: ... +@overload +def fdopen( + fd: int, + mode: OpenBinaryMode, + buffering: Literal[0], + encoding: None = ..., + errors: None = ..., + newline: None = ..., + closefd: bool = ..., + opener: _Opener | None = ..., +) -> FileIO: ... +@overload +def fdopen( + fd: int, + mode: OpenBinaryModeUpdating, + buffering: Literal[-1, 1] = ..., + encoding: None = ..., + errors: None = ..., + newline: None = ..., + closefd: bool = ..., + opener: _Opener | None = ..., +) -> BufferedRandom: ... +@overload +def fdopen( + fd: int, + mode: OpenBinaryModeWriting, + buffering: Literal[-1, 1] = ..., + encoding: None = ..., + errors: None = ..., + newline: None = ..., + closefd: bool = ..., + opener: _Opener | None = ..., +) -> BufferedWriter: ... 
+@overload +def fdopen( + fd: int, + mode: OpenBinaryModeReading, + buffering: Literal[-1, 1] = ..., + encoding: None = ..., + errors: None = ..., + newline: None = ..., + closefd: bool = ..., + opener: _Opener | None = ..., +) -> BufferedReader: ... +@overload +def fdopen( + fd: int, + mode: OpenBinaryMode, + buffering: int = ..., + encoding: None = ..., + errors: None = ..., + newline: None = ..., + closefd: bool = ..., + opener: _Opener | None = ..., +) -> BinaryIO: ... +@overload +def fdopen( + fd: int, + mode: str, + buffering: int = ..., + encoding: str | None = ..., + errors: str | None = ..., + newline: str | None = ..., + closefd: bool = ..., + opener: _Opener | None = ..., +) -> IO[Any]: ... +def close(fd: int) -> None: ... +def closerange(__fd_low: int, __fd_high: int) -> None: ... +def device_encoding(fd: int) -> str | None: ... +def dup(__fd: int) -> int: ... +def dup2(fd: int, fd2: int, inheritable: bool = ...) -> int: ... +def fstat(fd: int) -> stat_result: ... +def ftruncate(__fd: int, __length: int) -> None: ... +def fsync(fd: FileDescriptorLike) -> None: ... +def isatty(__fd: int) -> bool: ... + +if sys.platform != "win32" and sys.version_info >= (3, 11): + def login_tty(__fd: int) -> None: ... + +def lseek(__fd: int, __position: int, __how: int) -> int: ... +def open( + path: StrOrBytesPath, flags: int, mode: int = ..., *, dir_fd: int | None = ... +) -> int: ... +def pipe() -> tuple[int, int]: ... +def read(__fd: int, __length: int) -> bytes: ... + +if sys.platform != "win32": + def fchmod(fd: int, mode: int) -> None: ... + def fchown(fd: int, uid: int, gid: int) -> None: ... + def fpathconf(__fd: int, __name: str | int) -> int: ... + def fstatvfs(__fd: int) -> statvfs_result: ... + def get_blocking(__fd: int) -> bool: ... + def set_blocking(__fd: int, __blocking: bool) -> None: ... + def lockf(__fd: int, __command: int, __length: int) -> None: ... + def openpty() -> tuple[int, int]: ... # some flavors of Unix + if sys.platform != "darwin": + def fdatasync(fd: FileDescriptorLike) -> None: ... + def pipe2(__flags: int) -> tuple[int, int]: ... # some flavors of Unix + def posix_fallocate(__fd: int, __offset: int, __length: int) -> None: ... + def posix_fadvise(__fd: int, __offset: int, __length: int, __advice: int) -> None: ... + + def pread(__fd: int, __length: int, __offset: int) -> bytes: ... + def pwrite(__fd: int, __buffer: ReadableBuffer, __offset: int) -> int: ... + # In CI, stubtest sometimes reports that these are available on MacOS, sometimes not + def preadv( + __fd: int, + __buffers: SupportsLenAndGetItem[WriteableBuffer], + __offset: int, + __flags: int = ..., + ) -> int: ... + def pwritev( + __fd: int, + __buffers: SupportsLenAndGetItem[ReadableBuffer], + __offset: int, + __flags: int = ..., + ) -> int: ... + if sys.platform != "darwin": + if sys.version_info >= (3, 10): + RWF_APPEND: int # docs say available on 3.7+, stubtest says otherwise + RWF_DSYNC: int + RWF_SYNC: int + RWF_HIPRI: int + RWF_NOWAIT: int + @overload + def sendfile(out_fd: int, in_fd: int, offset: int | None, count: int) -> int: ... + @overload + def sendfile( + out_fd: int, + in_fd: int, + offset: int, + count: int, + headers: Sequence[ReadableBuffer] = ..., + trailers: Sequence[ReadableBuffer] = ..., + flags: int = ..., + ) -> int: ... # FreeBSD and Mac OS X only + def readv(__fd: int, __buffers: SupportsLenAndGetItem[WriteableBuffer]) -> int: ... + def writev(__fd: int, __buffers: SupportsLenAndGetItem[ReadableBuffer]) -> int: ... 
+ +@final +class terminal_size(structseq[int], tuple[int, int]): + if sys.version_info >= (3, 10): + __match_args__: Final = ("columns", "lines") + @property + def columns(self) -> int: ... + @property + def lines(self) -> int: ... + +def get_terminal_size(__fd: int = ...) -> terminal_size: ... +def get_inheritable(__fd: int) -> bool: ... +def set_inheritable(__fd: int, __inheritable: bool) -> None: ... + +if sys.platform == "win32": + def get_handle_inheritable(__handle: int) -> bool: ... + def set_handle_inheritable(__handle: int, __inheritable: bool) -> None: ... + +if sys.platform != "win32": + # Unix only + def tcgetpgrp(__fd: int) -> int: ... + def tcsetpgrp(__fd: int, __pgid: int) -> None: ... + def ttyname(__fd: int) -> str: ... + +def write(__fd: int, __data: ReadableBuffer) -> int: ... +def access( + path: _FdOrAnyPath, + mode: int, + *, + dir_fd: int | None = ..., + effective_ids: bool = ..., + follow_symlinks: bool = ..., +) -> bool: ... +def chdir(path: _FdOrAnyPath) -> None: ... + +if sys.platform != "win32": + def fchdir(fd: FileDescriptorLike) -> None: ... + +def getcwd() -> str: ... +def getcwdb() -> bytes: ... +def chmod( + path: _FdOrAnyPath, mode: int, *, dir_fd: int | None = ..., follow_symlinks: bool = ... +) -> None: ... + +if sys.platform != "win32" and sys.platform != "linux": + def chflags( + path: StrOrBytesPath, flags: int, follow_symlinks: bool = ... + ) -> None: ... # some flavors of Unix + def lchflags(path: StrOrBytesPath, flags: int) -> None: ... + def lchmod(path: StrOrBytesPath, mode: int) -> None: ... + +if sys.platform != "win32": + def chroot(path: StrOrBytesPath) -> None: ... + def chown( + path: _FdOrAnyPath, + uid: int, + gid: int, + *, + dir_fd: int | None = ..., + follow_symlinks: bool = ..., + ) -> None: ... + def lchown(path: StrOrBytesPath, uid: int, gid: int) -> None: ... + +def link( + src: StrOrBytesPath, + dst: StrOrBytesPath, + *, + src_dir_fd: int | None = ..., + dst_dir_fd: int | None = ..., + follow_symlinks: bool = ..., +) -> None: ... +def lstat(path: StrOrBytesPath, *, dir_fd: int | None = ...) -> stat_result: ... +def mkdir(path: StrOrBytesPath, mode: int = ..., *, dir_fd: int | None = ...) -> None: ... + +if sys.platform != "win32": + def mkfifo( + path: StrOrBytesPath, mode: int = ..., *, dir_fd: int | None = ... + ) -> None: ... # Unix only + +def makedirs(name: StrOrBytesPath, mode: int = ..., exist_ok: bool = ...) -> None: ... + +if sys.platform != "win32": + def mknod( + path: StrOrBytesPath, mode: int = ..., device: int = ..., *, dir_fd: int | None = ... + ) -> None: ... + def major(__device: int) -> int: ... + def minor(__device: int) -> int: ... + def makedev(__major: int, __minor: int) -> int: ... + def pathconf(path: _FdOrAnyPath, name: str | int) -> int: ... # Unix only + +def readlink(path: GenericPath[AnyStr], *, dir_fd: int | None = ...) -> AnyStr: ... +def remove(path: StrOrBytesPath, *, dir_fd: int | None = ...) -> None: ... +def removedirs(name: StrOrBytesPath) -> None: ... +def rename( + src: StrOrBytesPath, + dst: StrOrBytesPath, + *, + src_dir_fd: int | None = ..., + dst_dir_fd: int | None = ..., +) -> None: ... +def renames(old: StrOrBytesPath, new: StrOrBytesPath) -> None: ... +def replace( + src: StrOrBytesPath, + dst: StrOrBytesPath, + *, + src_dir_fd: int | None = ..., + dst_dir_fd: int | None = ..., +) -> None: ... +def rmdir(path: StrOrBytesPath, *, dir_fd: int | None = ...) -> None: ... 
+ +class _ScandirIterator( + Iterator[DirEntry[AnyStr]], AbstractContextManager[_ScandirIterator[AnyStr]] +): + def __next__(self) -> DirEntry[AnyStr]: ... + def __exit__(self, *args: object) -> None: ... + def close(self) -> None: ... + +@overload +def scandir(path: None = ...) -> _ScandirIterator[str]: ... +@overload +def scandir(path: int) -> _ScandirIterator[str]: ... +@overload +def scandir(path: GenericPath[AnyStr]) -> _ScandirIterator[AnyStr]: ... + +if sys.platform != "win32": + def statvfs(path: _FdOrAnyPath) -> statvfs_result: ... # Unix only + +def symlink( + src: StrOrBytesPath, + dst: StrOrBytesPath, + target_is_directory: bool = ..., + *, + dir_fd: int | None = ..., +) -> None: ... + +if sys.platform != "win32": + def sync() -> None: ... # Unix only + +def truncate(path: _FdOrAnyPath, length: int) -> None: ... # Unix only up to version 3.4 +def unlink(path: StrOrBytesPath, *, dir_fd: int | None = ...) -> None: ... +def utime( + path: _FdOrAnyPath, + times: tuple[int, int] | tuple[float, float] | None = ..., + *, + ns: tuple[int, int] = ..., + dir_fd: int | None = ..., + follow_symlinks: bool = ..., +) -> None: ... + +_OnError: TypeAlias = Callable[[OSError], object] + +def walk( + top: GenericPath[AnyStr], + topdown: bool = ..., + onerror: _OnError | None = ..., + followlinks: bool = ..., +) -> Iterator[tuple[AnyStr, list[AnyStr], list[AnyStr]]]: ... + +if sys.platform != "win32": + @overload + def fwalk( + top: StrPath = ..., + topdown: bool = ..., + onerror: _OnError | None = ..., + *, + follow_symlinks: bool = ..., + dir_fd: int | None = ..., + ) -> Iterator[tuple[str, list[str], list[str], int]]: ... + @overload + def fwalk( + top: BytesPath, + topdown: bool = ..., + onerror: _OnError | None = ..., + *, + follow_symlinks: bool = ..., + dir_fd: int | None = ..., + ) -> Iterator[tuple[bytes, list[bytes], list[bytes], int]]: ... + if sys.platform == "linux": + def getxattr( + path: _FdOrAnyPath, attribute: StrOrBytesPath, *, follow_symlinks: bool = ... + ) -> bytes: ... + def listxattr( + path: _FdOrAnyPath | None = ..., *, follow_symlinks: bool = ... + ) -> list[str]: ... + def removexattr( + path: _FdOrAnyPath, attribute: StrOrBytesPath, *, follow_symlinks: bool = ... + ) -> None: ... + def setxattr( + path: _FdOrAnyPath, + attribute: StrOrBytesPath, + value: ReadableBuffer, + flags: int = ..., + *, + follow_symlinks: bool = ..., + ) -> None: ... + +def abort() -> NoReturn: ... + +# These are defined as execl(file, *args) but the first *arg is mandatory. +def execl(file: StrOrBytesPath, __arg0: StrOrBytesPath, *args: StrOrBytesPath) -> NoReturn: ... +def execlp(file: StrOrBytesPath, __arg0: StrOrBytesPath, *args: StrOrBytesPath) -> NoReturn: ... + +# These are: execle(file, *args, env) but env is pulled from the last element of the args. +def execle(file: StrOrBytesPath, __arg0: StrOrBytesPath, *args: Any) -> NoReturn: ... +def execlpe(file: StrOrBytesPath, __arg0: StrOrBytesPath, *args: Any) -> NoReturn: ... + +# The docs say `args: tuple or list of strings` +# The implementation enforces tuple or list so we can't use Sequence. +# Not separating out PathLike[str] and PathLike[bytes] here because it doesn't make much difference +# in practice, and doing so would explode the number of combinations in this already long union. +# All these combinations are necessary due to list being invariant. +_ExecVArgs: TypeAlias = ( + tuple[StrOrBytesPath, ...] 
+ | list[bytes] + | list[str] + | list[PathLike[Any]] + | list[bytes | str] + | list[bytes | PathLike[Any]] + | list[str | PathLike[Any]] + | list[bytes | str | PathLike[Any]] +) +# Depending on the OS, the keys and values are passed either to +# PyUnicode_FSDecoder (which accepts str | ReadableBuffer) or to +# PyUnicode_FSConverter (which accepts StrOrBytesPath). For simplicity, +# we limit to str | bytes. +_ExecEnv: TypeAlias = Mapping[bytes, bytes | str] | Mapping[str, bytes | str] + +def execv(__path: StrOrBytesPath, __argv: _ExecVArgs) -> NoReturn: ... +def execve(path: _FdOrAnyPath, argv: _ExecVArgs, env: _ExecEnv) -> NoReturn: ... +def execvp(file: StrOrBytesPath, args: _ExecVArgs) -> NoReturn: ... +def execvpe(file: StrOrBytesPath, args: _ExecVArgs, env: _ExecEnv) -> NoReturn: ... +def _exit(status: int) -> NoReturn: ... +def kill(__pid: int, __signal: int) -> None: ... + +if sys.platform != "win32": + # Unix only + def fork() -> int: ... + def forkpty() -> tuple[int, int]: ... # some flavors of Unix + def killpg(__pgid: int, __signal: int) -> None: ... + def nice(__increment: int) -> int: ... + if sys.platform != "darwin": + def plock(__op: int) -> None: ... # ???op is int? + +class _wrap_close(_TextIOWrapper): + def __init__(self, stream: _TextIOWrapper, proc) -> None: ... + def close(self) -> int | None: ... # type: ignore[override] + +def popen(cmd: str, mode: str = ..., buffering: int = ...) -> _wrap_close: ... +def spawnl( + mode: int, file: StrOrBytesPath, arg0: StrOrBytesPath, *args: StrOrBytesPath +) -> int: ... +def spawnle( + mode: int, file: StrOrBytesPath, arg0: StrOrBytesPath, *args: Any +) -> int: ... # Imprecise sig + +if sys.platform != "win32": + def spawnv(mode: int, file: StrOrBytesPath, args: _ExecVArgs) -> int: ... + def spawnve(mode: int, file: StrOrBytesPath, args: _ExecVArgs, env: _ExecEnv) -> int: ... + +else: + def spawnv(__mode: int, __path: StrOrBytesPath, __argv: _ExecVArgs) -> int: ... + def spawnve( + __mode: int, __path: StrOrBytesPath, __argv: _ExecVArgs, __env: _ExecEnv + ) -> int: ... + +def system(command: StrOrBytesPath) -> int: ... +@final +class times_result(structseq[float], tuple[float, float, float, float, float]): + if sys.version_info >= (3, 10): + __match_args__: Final = ("user", "system", "children_user", "children_system", "elapsed") + @property + def user(self) -> float: ... + @property + def system(self) -> float: ... + @property + def children_user(self) -> float: ... + @property + def children_system(self) -> float: ... + @property + def elapsed(self) -> float: ... + +def times() -> times_result: ... +def waitpid(__pid: int, __options: int) -> tuple[int, int]: ... + +if sys.platform == "win32": + def startfile(path: StrOrBytesPath, operation: str | None = ...) -> None: ... + +else: + def spawnlp( + mode: int, file: StrOrBytesPath, arg0: StrOrBytesPath, *args: StrOrBytesPath + ) -> int: ... + def spawnlpe( + mode: int, file: StrOrBytesPath, arg0: StrOrBytesPath, *args: Any + ) -> int: ... # Imprecise signature + def spawnvp(mode: int, file: StrOrBytesPath, args: _ExecVArgs) -> int: ... + def spawnvpe(mode: int, file: StrOrBytesPath, args: _ExecVArgs, env: _ExecEnv) -> int: ... + def wait() -> tuple[int, int]: ... # Unix only + if sys.platform != "darwin": + @final + class waitid_result(structseq[int], tuple[int, int, int, int, int]): + if sys.version_info >= (3, 10): + __match_args__: Final = ("si_pid", "si_uid", "si_signo", "si_status", "si_code") + @property + def si_pid(self) -> int: ... 
+ @property + def si_uid(self) -> int: ... + @property + def si_signo(self) -> int: ... + @property + def si_status(self) -> int: ... + @property + def si_code(self) -> int: ... + + def waitid(__idtype: int, __ident: int, __options: int) -> waitid_result: ... + + def wait3(options: int) -> tuple[int, int, Any]: ... + def wait4(pid: int, options: int) -> tuple[int, int, Any]: ... + def WCOREDUMP(__status: int) -> bool: ... + def WIFCONTINUED(status: int) -> bool: ... + def WIFSTOPPED(status: int) -> bool: ... + def WIFSIGNALED(status: int) -> bool: ... + def WIFEXITED(status: int) -> bool: ... + def WEXITSTATUS(status: int) -> int: ... + def WSTOPSIG(status: int) -> int: ... + def WTERMSIG(status: int) -> int: ... + if sys.version_info >= (3, 8): + def posix_spawn( + path: StrOrBytesPath, + argv: _ExecVArgs, + env: _ExecEnv, + *, + file_actions: Sequence[tuple[Any, ...]] | None = ..., + setpgroup: int | None = ..., + resetids: bool = ..., + setsid: bool = ..., + setsigmask: Iterable[int] = ..., + setsigdef: Iterable[int] = ..., + scheduler: tuple[Any, sched_param] | None = ..., + ) -> int: ... + def posix_spawnp( + path: StrOrBytesPath, + argv: _ExecVArgs, + env: _ExecEnv, + *, + file_actions: Sequence[tuple[Any, ...]] | None = ..., + setpgroup: int | None = ..., + resetids: bool = ..., + setsid: bool = ..., + setsigmask: Iterable[int] = ..., + setsigdef: Iterable[int] = ..., + scheduler: tuple[Any, sched_param] | None = ..., + ) -> int: ... + POSIX_SPAWN_OPEN: int + POSIX_SPAWN_CLOSE: int + POSIX_SPAWN_DUP2: int + +if sys.platform != "win32": + @final + class sched_param(structseq[int], tuple[int]): + if sys.version_info >= (3, 10): + __match_args__: Final = ("sched_priority",) + def __new__(cls: type[Self], sched_priority: int) -> Self: ... + @property + def sched_priority(self) -> int: ... + + def sched_get_priority_min(policy: int) -> int: ... # some flavors of Unix + def sched_get_priority_max(policy: int) -> int: ... # some flavors of Unix + def sched_yield() -> None: ... # some flavors of Unix + if sys.platform != "darwin": + def sched_setscheduler( + __pid: int, __policy: int, __param: sched_param + ) -> None: ... # some flavors of Unix + def sched_getscheduler(__pid: int) -> int: ... # some flavors of Unix + def sched_rr_get_interval(__pid: int) -> float: ... # some flavors of Unix + def sched_setparam(__pid: int, __param: sched_param) -> None: ... # some flavors of Unix + def sched_getparam(__pid: int) -> sched_param: ... # some flavors of Unix + def sched_setaffinity( + __pid: int, __mask: Iterable[int] + ) -> None: ... # some flavors of Unix + def sched_getaffinity(__pid: int) -> set[int]: ... # some flavors of Unix + +def cpu_count() -> int | None: ... + +if sys.platform != "win32": + # Unix only + def confstr(__name: str | int) -> str | None: ... + def getloadavg() -> tuple[float, float, float]: ... + def sysconf(__name: str | int) -> int: ... + +if sys.platform == "linux": + def getrandom(size: int, flags: int = ...) -> bytes: ... + +def urandom(__size: int) -> bytes: ... + +if sys.platform != "win32": + def register_at_fork( + *, + before: Callable[..., Any] | None = ..., + after_in_parent: Callable[..., Any] | None = ..., + after_in_child: Callable[..., Any] | None = ..., + ) -> None: ... + +if sys.version_info >= (3, 8): + if sys.platform == "win32": + class _AddedDllDirectory: + path: str | None + def __init__( + self, path: str | None, cookie: _T, remove_dll_directory: Callable[[_T], object] + ) -> None: ... + def close(self) -> None: ... 
+ def __enter__(self: Self) -> Self: ... + def __exit__(self, *args: object) -> None: ... + + def add_dll_directory(path: str) -> _AddedDllDirectory: ... + if sys.platform == "linux": + MFD_CLOEXEC: int + MFD_ALLOW_SEALING: int + MFD_HUGETLB: int + MFD_HUGE_SHIFT: int + MFD_HUGE_MASK: int + MFD_HUGE_64KB: int + MFD_HUGE_512KB: int + MFD_HUGE_1MB: int + MFD_HUGE_2MB: int + MFD_HUGE_8MB: int + MFD_HUGE_16MB: int + MFD_HUGE_32MB: int + MFD_HUGE_256MB: int + MFD_HUGE_512MB: int + MFD_HUGE_1GB: int + MFD_HUGE_2GB: int + MFD_HUGE_16GB: int + def memfd_create(name: str, flags: int = ...) -> int: ... + def copy_file_range( + src: int, + dst: int, + count: int, + offset_src: int | None = ..., + offset_dst: int | None = ..., + ) -> int: ... + +if sys.version_info >= (3, 9): + def waitstatus_to_exitcode(status: int) -> int: ... + + if sys.platform == "linux": + def pidfd_open(pid: int, flags: int = ...) -> int: ... diff --git a/.vscode/Pico-W-Stub/stdlib/queue.pyi b/.vscode/Pico-W-Stub/stdlib/queue.pyi new file mode 100644 index 0000000..315a15c --- /dev/null +++ b/.vscode/Pico-W-Stub/stdlib/queue.pyi @@ -0,0 +1,58 @@ +import sys +from threading import Condition, Lock # type: ignore +from typing import Any, Generic, TypeVar + +if sys.version_info >= (3, 9): + from types import GenericAlias + +__all__ = ["Empty", "Full", "Queue", "PriorityQueue", "LifoQueue", "SimpleQueue"] + +_T = TypeVar("_T") + +class Empty(Exception): ... +class Full(Exception): ... + +class Queue(Generic[_T]): + maxsize: int + + mutex: Lock # undocumented # type: ignore + not_empty: Condition # undocumented + not_full: Condition # undocumented + all_tasks_done: Condition # undocumented + unfinished_tasks: int # undocumented + # Despite the fact that `queue` has `deque` type, + # we treat it as `Any` to allow different implementations in subtypes. + queue: Any # undocumented + def __init__(self, maxsize: int = 0) -> None: ... + def _init(self, maxsize: int) -> None: ... + def empty(self) -> bool: ... + def full(self) -> bool: ... + def get(self, block: bool = True, timeout: float | None = None) -> _T: ... + def get_nowait(self) -> _T: ... + def _get(self) -> _T: ... + def put(self, item: _T, block: bool = True, timeout: float | None = None) -> None: ... + def put_nowait(self, item: _T) -> None: ... + def _put(self, item: _T) -> None: ... + def join(self) -> None: ... + def qsize(self) -> int: ... + def _qsize(self) -> int: ... + def task_done(self) -> None: ... + if sys.version_info >= (3, 9): + def __class_getitem__(cls, item: Any) -> GenericAlias: ... + +class PriorityQueue(Queue[_T]): + queue: list[_T] + +class LifoQueue(Queue[_T]): + queue: list[_T] + +class SimpleQueue(Generic[_T]): + def __init__(self) -> None: ... + def empty(self) -> bool: ... + def get(self, block: bool = True, timeout: float | None = None) -> _T: ... + def get_nowait(self) -> _T: ... + def put(self, item: _T, block: bool = True, timeout: float | None = None) -> None: ... + def put_nowait(self, item: _T) -> None: ... + def qsize(self) -> int: ... + if sys.version_info >= (3, 9): + def __class_getitem__(cls, item: Any) -> GenericAlias: ... 
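+
+# Illustrative sketch (not part of the upstream typeshed stub): with the
+# Generic[_T] parameter declared above, a type checker resolves put()/get()
+# from the queue's declared item type, e.g.:
+#
+#     q: Queue[int] = Queue(maxsize=8)
+#     q.put(1)
+#     item: int = q.get()      # OK: _T is int here
+#     q.put("x")               # flagged: str is not int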
diff --git a/.vscode/Pico-W-Stub/stdlib/re.pyi b/.vscode/Pico-W-Stub/stdlib/re.pyi new file mode 100644 index 0000000..3e52d20 --- /dev/null +++ b/.vscode/Pico-W-Stub/stdlib/re.pyi @@ -0,0 +1,270 @@ +import enum +import sre_compile +import sys +from _typeshed import ReadableBuffer +from collections.abc import Callable, Iterator, Mapping +from sre_constants import error as error +from typing import Any, AnyStr, Generic, TypeVar, overload +from typing_extensions import Literal, TypeAlias, final + +if sys.version_info >= (3, 9): + from types import GenericAlias + +__all__ = [ + "match", + "fullmatch", + "search", + "sub", + "subn", + "split", + "findall", + "finditer", + "compile", + "purge", + "template", + "escape", + "error", + "A", + "I", + "L", + "M", + "S", + "X", + "U", + "ASCII", + "IGNORECASE", + "LOCALE", + "MULTILINE", + "DOTALL", + "VERBOSE", + "UNICODE", + "Match", + "Pattern", +] + +if sys.version_info >= (3, 11): + __all__ += ["NOFLAG", "RegexFlag"] + +_T = TypeVar("_T") + +@final +class Match(Generic[AnyStr]): + @property + def pos(self) -> int: ... + @property + def endpos(self) -> int: ... + @property + def lastindex(self) -> int | None: ... + @property + def lastgroup(self) -> str | None: ... + @property + def string(self) -> AnyStr: ... + + # The regular expression object whose match() or search() method produced + # this match instance. + @property + def re(self) -> Pattern[AnyStr]: ... + @overload + def expand(self: Match[str], template: str) -> str: ... + @overload + def expand(self: Match[bytes], template: ReadableBuffer) -> bytes: ... + # group() returns "AnyStr" or "AnyStr | None", depending on the pattern. + @overload + def group(self, __group: Literal[0] = ...) -> AnyStr: ... + @overload + def group(self, __group: str | int) -> AnyStr | Any: ... + @overload + def group(self, __group1: str | int, __group2: str | int, *groups: str | int) -> tuple[AnyStr | Any, ...]: ... + # Each item of groups()'s return tuple is either "AnyStr" or + # "AnyStr | None", depending on the pattern. + @overload + def groups(self) -> tuple[AnyStr | Any, ...]: ... + @overload + def groups(self, default: _T) -> tuple[AnyStr | _T, ...]: ... + # Each value in groupdict()'s return dict is either "AnyStr" or + # "AnyStr | None", depending on the pattern. + @overload + def groupdict(self) -> dict[str, AnyStr | Any]: ... + @overload + def groupdict(self, default: _T) -> dict[str, AnyStr | _T]: ... + def start(self, __group: int | str = ...) -> int: ... + def end(self, __group: int | str = ...) -> int: ... + def span(self, __group: int | str = ...) -> tuple[int, int]: ... + @property + def regs(self) -> tuple[tuple[int, int], ...]: ... # undocumented + # __getitem__() returns "AnyStr" or "AnyStr | None", depending on the pattern. + @overload + def __getitem__(self, __key: Literal[0]) -> AnyStr: ... + @overload + def __getitem__(self, __key: int | str) -> AnyStr | Any: ... + def __copy__(self) -> Match[AnyStr]: ... + def __deepcopy__(self, __memo: Any) -> Match[AnyStr]: ... + if sys.version_info >= (3, 9): + def __class_getitem__(cls, item: Any) -> GenericAlias: ... + +@final +class Pattern(Generic[AnyStr]): + @property + def flags(self) -> int: ... + @property + def groupindex(self) -> Mapping[str, int]: ... + @property + def groups(self) -> int: ... + @property + def pattern(self) -> AnyStr: ... + @overload + def search(self: Pattern[str], string: str, pos: int = ..., endpos: int = ...) -> Match[str] | None: ... 
+ @overload + def search(self: Pattern[bytes], string: ReadableBuffer, pos: int = ..., endpos: int = ...) -> Match[bytes] | None: ... + @overload + def match(self: Pattern[str], string: str, pos: int = ..., endpos: int = ...) -> Match[str] | None: ... + @overload + def match(self: Pattern[bytes], string: ReadableBuffer, pos: int = ..., endpos: int = ...) -> Match[bytes] | None: ... + @overload + def fullmatch(self: Pattern[str], string: str, pos: int = ..., endpos: int = ...) -> Match[str] | None: ... + @overload + def fullmatch(self: Pattern[bytes], string: ReadableBuffer, pos: int = ..., endpos: int = ...) -> Match[bytes] | None: ... + @overload + def split(self: Pattern[str], string: str, maxsplit: int = ...) -> list[str | Any]: ... + @overload + def split(self: Pattern[bytes], string: ReadableBuffer, maxsplit: int = ...) -> list[bytes | Any]: ... + # return type depends on the number of groups in the pattern + @overload + def findall(self: Pattern[str], string: str, pos: int = ..., endpos: int = ...) -> list[Any]: ... + @overload + def findall(self: Pattern[bytes], string: ReadableBuffer, pos: int = ..., endpos: int = ...) -> list[Any]: ... + @overload + def finditer(self: Pattern[str], string: str, pos: int = ..., endpos: int = ...) -> Iterator[Match[str]]: ... + @overload + def finditer(self: Pattern[bytes], string: ReadableBuffer, pos: int = ..., endpos: int = ...) -> Iterator[Match[bytes]]: ... + @overload + def sub(self: Pattern[str], repl: str | Callable[[Match[str]], str], string: str, count: int = ...) -> str: ... + @overload + def sub( + self: Pattern[bytes], + repl: ReadableBuffer | Callable[[Match[bytes]], ReadableBuffer], + string: ReadableBuffer, + count: int = ..., + ) -> bytes: ... + @overload + def subn(self: Pattern[str], repl: str | Callable[[Match[str]], str], string: str, count: int = ...) -> tuple[str, int]: ... + @overload + def subn( + self: Pattern[bytes], + repl: ReadableBuffer | Callable[[Match[bytes]], ReadableBuffer], + string: ReadableBuffer, + count: int = ..., + ) -> tuple[bytes, int]: ... + def __copy__(self) -> Pattern[AnyStr]: ... + def __deepcopy__(self, __memo: Any) -> Pattern[AnyStr]: ... + if sys.version_info >= (3, 9): + def __class_getitem__(cls, item: Any) -> GenericAlias: ... + +# ----- re variables and constants ----- + +class RegexFlag(enum.IntFlag): + A = sre_compile.SRE_FLAG_ASCII + ASCII = A + DEBUG = sre_compile.SRE_FLAG_DEBUG + I = sre_compile.SRE_FLAG_IGNORECASE + IGNORECASE = I + L = sre_compile.SRE_FLAG_LOCALE + LOCALE = L + M = sre_compile.SRE_FLAG_MULTILINE + MULTILINE = M + S = sre_compile.SRE_FLAG_DOTALL + DOTALL = S + X = sre_compile.SRE_FLAG_VERBOSE + VERBOSE = X + U = sre_compile.SRE_FLAG_UNICODE + UNICODE = U + T = sre_compile.SRE_FLAG_TEMPLATE + TEMPLATE = T + if sys.version_info >= (3, 11): + NOFLAG: int + +A = RegexFlag.A +ASCII = RegexFlag.ASCII +DEBUG = RegexFlag.DEBUG +I = RegexFlag.I +IGNORECASE = RegexFlag.IGNORECASE +L = RegexFlag.L +LOCALE = RegexFlag.LOCALE +M = RegexFlag.M +MULTILINE = RegexFlag.MULTILINE +S = RegexFlag.S +DOTALL = RegexFlag.DOTALL +X = RegexFlag.X +VERBOSE = RegexFlag.VERBOSE +U = RegexFlag.U +UNICODE = RegexFlag.UNICODE +T = RegexFlag.T +TEMPLATE = RegexFlag.TEMPLATE +if sys.version_info >= (3, 11): + NOFLAG = RegexFlag.NOFLAG +_FlagsType: TypeAlias = int | RegexFlag + +# Type-wise the compile() overloads are unnecessary, they could also be modeled using +# unions in the parameter types. 
However mypy has a bug regarding TypeVar +# constraints (https://github.com/python/mypy/issues/11880), +# which limits us here because AnyStr is a constrained TypeVar. + +# pattern arguments do *not* accept arbitrary buffers such as bytearray, +# because the pattern must be hashable. +@overload +def compile(pattern: AnyStr, flags: _FlagsType = ...) -> Pattern[AnyStr]: ... +@overload +def compile(pattern: Pattern[AnyStr], flags: _FlagsType = ...) -> Pattern[AnyStr]: ... +@overload +def search(pattern: str | Pattern[str], string: str, flags: _FlagsType = ...) -> Match[str] | None: ... +@overload +def search(pattern: bytes | Pattern[bytes], string: ReadableBuffer, flags: _FlagsType = ...) -> Match[bytes] | None: ... +@overload +def match(pattern: str | Pattern[str], string: str, flags: _FlagsType = ...) -> Match[str] | None: ... +@overload +def match(pattern: bytes | Pattern[bytes], string: ReadableBuffer, flags: _FlagsType = ...) -> Match[bytes] | None: ... +@overload +def fullmatch(pattern: str | Pattern[str], string: str, flags: _FlagsType = ...) -> Match[str] | None: ... +@overload +def fullmatch(pattern: bytes | Pattern[bytes], string: ReadableBuffer, flags: _FlagsType = ...) -> Match[bytes] | None: ... +@overload +def split(pattern: str | Pattern[str], string: str, maxsplit: int = ..., flags: _FlagsType = ...) -> list[str | Any]: ... +@overload +def split( + pattern: bytes | Pattern[bytes], string: ReadableBuffer, maxsplit: int = ..., flags: _FlagsType = ... +) -> list[bytes | Any]: ... +@overload +def findall(pattern: str | Pattern[str], string: str, flags: _FlagsType = ...) -> list[Any]: ... +@overload +def findall(pattern: bytes | Pattern[bytes], string: ReadableBuffer, flags: _FlagsType = ...) -> list[Any]: ... +@overload +def finditer(pattern: str | Pattern[str], string: str, flags: _FlagsType = ...) -> Iterator[Match[str]]: ... +@overload +def finditer(pattern: bytes | Pattern[bytes], string: ReadableBuffer, flags: _FlagsType = ...) -> Iterator[Match[bytes]]: ... +@overload +def sub( + pattern: str | Pattern[str], repl: str | Callable[[Match[str]], str], string: str, count: int = ..., flags: _FlagsType = ... +) -> str: ... +@overload +def sub( + pattern: bytes | Pattern[bytes], + repl: ReadableBuffer | Callable[[Match[bytes]], ReadableBuffer], + string: ReadableBuffer, + count: int = ..., + flags: _FlagsType = ..., +) -> bytes: ... +@overload +def subn( + pattern: str | Pattern[str], repl: str | Callable[[Match[str]], str], string: str, count: int = ..., flags: _FlagsType = ... +) -> tuple[str, int]: ... +@overload +def subn( + pattern: bytes | Pattern[bytes], + repl: ReadableBuffer | Callable[[Match[bytes]], ReadableBuffer], + string: ReadableBuffer, + count: int = ..., + flags: _FlagsType = ..., +) -> tuple[bytes, int]: ... +def escape(pattern: AnyStr) -> AnyStr: ... +def purge() -> None: ... +def template(pattern: AnyStr | Pattern[AnyStr], flags: _FlagsType = ...) -> Pattern[AnyStr]: ... 
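+
+# Illustrative sketch (not part of the upstream typeshed stub): per the
+# overloads above, group(0) is always AnyStr, while numbered or named groups
+# may be absent and therefore type as "AnyStr | Any", e.g.:
+#
+#     m = compile(r"(?P<word>\w+)?").match("")
+#     if m is not None:
+#         whole: str = m.group(0)     # AnyStr narrows to str for a str pattern
+#         maybe = m.group("word")     # str | Any; may be None at runtime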
diff --git a/.vscode/Pico-W-Stub/stdlib/selectors.pyi b/.vscode/Pico-W-Stub/stdlib/selectors.pyi new file mode 100644 index 0000000..90a923f --- /dev/null +++ b/.vscode/Pico-W-Stub/stdlib/selectors.pyi @@ -0,0 +1,73 @@ +import sys +from _typeshed import FileDescriptor, FileDescriptorLike, Unused +from abc import ABCMeta, abstractmethod +from collections.abc import Mapping +from typing import Any, NamedTuple +from typing_extensions import Self, TypeAlias + +_EventMask: TypeAlias = int + +EVENT_READ: _EventMask +EVENT_WRITE: _EventMask + +class SelectorKey(NamedTuple): + fileobj: FileDescriptorLike + fd: FileDescriptor + events: _EventMask + data: Any + +class BaseSelector(metaclass=ABCMeta): + @abstractmethod + def register(self, fileobj: FileDescriptorLike, events: _EventMask, data: Any = None) -> SelectorKey: ... + @abstractmethod + def unregister(self, fileobj: FileDescriptorLike) -> SelectorKey: ... + def modify(self, fileobj: FileDescriptorLike, events: _EventMask, data: Any = None) -> SelectorKey: ... + @abstractmethod + def select(self, timeout: float | None = None) -> list[tuple[SelectorKey, _EventMask]]: ... + def close(self) -> None: ... + def get_key(self, fileobj: FileDescriptorLike) -> SelectorKey: ... + @abstractmethod + def get_map(self) -> Mapping[FileDescriptorLike, SelectorKey]: ... + def __enter__(self) -> Self: ... + def __exit__(self, *args: Unused) -> None: ... + +class SelectSelector(BaseSelector): + def register(self, fileobj: FileDescriptorLike, events: _EventMask, data: Any = None) -> SelectorKey: ... + def unregister(self, fileobj: FileDescriptorLike) -> SelectorKey: ... + def select(self, timeout: float | None = None) -> list[tuple[SelectorKey, _EventMask]]: ... + def get_map(self) -> Mapping[FileDescriptorLike, SelectorKey]: ... + +if sys.platform != "win32": + class PollSelector(BaseSelector): + def register(self, fileobj: FileDescriptorLike, events: _EventMask, data: Any = None) -> SelectorKey: ... + def unregister(self, fileobj: FileDescriptorLike) -> SelectorKey: ... + def select(self, timeout: float | None = None) -> list[tuple[SelectorKey, _EventMask]]: ... + def get_map(self) -> Mapping[FileDescriptorLike, SelectorKey]: ... + +if sys.platform == "linux": + class EpollSelector(BaseSelector): + def fileno(self) -> int: ... + def register(self, fileobj: FileDescriptorLike, events: _EventMask, data: Any = None) -> SelectorKey: ... + def unregister(self, fileobj: FileDescriptorLike) -> SelectorKey: ... + def select(self, timeout: float | None = None) -> list[tuple[SelectorKey, _EventMask]]: ... + def get_map(self) -> Mapping[FileDescriptorLike, SelectorKey]: ... + +class DevpollSelector(BaseSelector): + def fileno(self) -> int: ... + def register(self, fileobj: FileDescriptorLike, events: _EventMask, data: Any = ...) -> SelectorKey: ... + def unregister(self, fileobj: FileDescriptorLike) -> SelectorKey: ... + def select(self, timeout: float | None = ...) -> list[tuple[SelectorKey, _EventMask]]: ... + def get_map(self) -> Mapping[FileDescriptorLike, SelectorKey]: ... + +class KqueueSelector(BaseSelector): + def fileno(self) -> int: ... + def register(self, fileobj: FileDescriptorLike, events: _EventMask, data: Any = None) -> SelectorKey: ... + def unregister(self, fileobj: FileDescriptorLike) -> SelectorKey: ... + def select(self, timeout: float | None = None) -> list[tuple[SelectorKey, _EventMask]]: ... + def get_map(self) -> Mapping[FileDescriptorLike, SelectorKey]: ... 
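+
+# Illustrative sketch (not part of the upstream typeshed stub): the abstract
+# register()/select() API above is typically driven through a concrete
+# implementation such as the DefaultSelector declared below (sock and
+# on_readable are placeholder names):
+#
+#     sel = DefaultSelector()
+#     key = sel.register(sock, EVENT_READ, data=on_readable)
+#     for key, events in sel.select(timeout=1.0):
+#         key.data(key.fileobj)       # SelectorKey.data is typed as Any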
+ +class DefaultSelector(BaseSelector): + def register(self, fileobj: FileDescriptorLike, events: _EventMask, data: Any = None) -> SelectorKey: ... + def unregister(self, fileobj: FileDescriptorLike) -> SelectorKey: ... + def select(self, timeout: float | None = None) -> list[tuple[SelectorKey, _EventMask]]: ... + def get_map(self) -> Mapping[FileDescriptorLike, SelectorKey]: ... diff --git a/.vscode/Pico-W-Stub/stdlib/socket.pyi b/.vscode/Pico-W-Stub/stdlib/socket.pyi new file mode 100644 index 0000000..da06ce2 --- /dev/null +++ b/.vscode/Pico-W-Stub/stdlib/socket.pyi @@ -0,0 +1,831 @@ +# Ideally, we'd just do "from _socket import *". Unfortunately, socket +# overrides some definitions from _socket incompatibly. mypy incorrectly +# prefers the definitions from _socket over those defined here. +import _socket +import sys +from _socket import ( + _FD, + EAI_AGAIN as EAI_AGAIN, + EAI_BADFLAGS as EAI_BADFLAGS, + EAI_FAIL as EAI_FAIL, + EAI_FAMILY as EAI_FAMILY, + EAI_MEMORY as EAI_MEMORY, + EAI_NODATA as EAI_NODATA, + EAI_NONAME as EAI_NONAME, + EAI_SERVICE as EAI_SERVICE, + EAI_SOCKTYPE as EAI_SOCKTYPE, + INADDR_ALLHOSTS_GROUP as INADDR_ALLHOSTS_GROUP, + INADDR_ANY as INADDR_ANY, + INADDR_BROADCAST as INADDR_BROADCAST, + INADDR_LOOPBACK as INADDR_LOOPBACK, + INADDR_MAX_LOCAL_GROUP as INADDR_MAX_LOCAL_GROUP, + INADDR_NONE as INADDR_NONE, + INADDR_UNSPEC_GROUP as INADDR_UNSPEC_GROUP, + IP_ADD_MEMBERSHIP as IP_ADD_MEMBERSHIP, + IP_DROP_MEMBERSHIP as IP_DROP_MEMBERSHIP, + IP_HDRINCL as IP_HDRINCL, + IP_MULTICAST_IF as IP_MULTICAST_IF, + IP_MULTICAST_LOOP as IP_MULTICAST_LOOP, + IP_MULTICAST_TTL as IP_MULTICAST_TTL, + IP_OPTIONS as IP_OPTIONS, + IP_RECVDSTADDR as IP_RECVDSTADDR, + IP_TOS as IP_TOS, + IP_TTL as IP_TTL, + IPPORT_RESERVED as IPPORT_RESERVED, + IPPORT_USERRESERVED as IPPORT_USERRESERVED, + IPPROTO_ICMP as IPPROTO_ICMP, + IPPROTO_IP as IPPROTO_IP, + IPPROTO_RAW as IPPROTO_RAW, + IPPROTO_TCP as IPPROTO_TCP, + IPPROTO_UDP as IPPROTO_UDP, + IPV6_CHECKSUM as IPV6_CHECKSUM, + IPV6_JOIN_GROUP as IPV6_JOIN_GROUP, + IPV6_LEAVE_GROUP as IPV6_LEAVE_GROUP, + IPV6_MULTICAST_HOPS as IPV6_MULTICAST_HOPS, + IPV6_MULTICAST_IF as IPV6_MULTICAST_IF, + IPV6_MULTICAST_LOOP as IPV6_MULTICAST_LOOP, + IPV6_RECVTCLASS as IPV6_RECVTCLASS, + IPV6_TCLASS as IPV6_TCLASS, + IPV6_UNICAST_HOPS as IPV6_UNICAST_HOPS, + IPV6_V6ONLY as IPV6_V6ONLY, + NI_DGRAM as NI_DGRAM, + NI_MAXHOST as NI_MAXHOST, + NI_MAXSERV as NI_MAXSERV, + NI_NAMEREQD as NI_NAMEREQD, + NI_NOFQDN as NI_NOFQDN, + NI_NUMERICHOST as NI_NUMERICHOST, + NI_NUMERICSERV as NI_NUMERICSERV, + SHUT_RD as SHUT_RD, + SHUT_RDWR as SHUT_RDWR, + SHUT_WR as SHUT_WR, + SO_ACCEPTCONN as SO_ACCEPTCONN, + SO_BROADCAST as SO_BROADCAST, + SO_DEBUG as SO_DEBUG, + SO_DONTROUTE as SO_DONTROUTE, + SO_ERROR as SO_ERROR, + SO_KEEPALIVE as SO_KEEPALIVE, + SO_LINGER as SO_LINGER, + SO_OOBINLINE as SO_OOBINLINE, + SO_RCVBUF as SO_RCVBUF, + SO_RCVLOWAT as SO_RCVLOWAT, + SO_RCVTIMEO as SO_RCVTIMEO, + SO_REUSEADDR as SO_REUSEADDR, + SO_SNDBUF as SO_SNDBUF, + SO_SNDLOWAT as SO_SNDLOWAT, + SO_SNDTIMEO as SO_SNDTIMEO, + SO_TYPE as SO_TYPE, + SO_USELOOPBACK as SO_USELOOPBACK, + SOL_IP as SOL_IP, + SOL_SOCKET as SOL_SOCKET, + SOL_TCP as SOL_TCP, + SOL_UDP as SOL_UDP, + SOMAXCONN as SOMAXCONN, + TCP_FASTOPEN as TCP_FASTOPEN, + TCP_KEEPCNT as TCP_KEEPCNT, + TCP_MAXSEG as TCP_MAXSEG, + TCP_NODELAY as TCP_NODELAY, + SocketType as SocketType, + _Address as _Address, + _RetAddress as _RetAddress, + dup as dup, + error as error, + gaierror as gaierror, + getdefaulttimeout as getdefaulttimeout, 
+ gethostbyaddr as gethostbyaddr, + gethostbyname as gethostbyname, + gethostbyname_ex as gethostbyname_ex, + gethostname as gethostname, + getnameinfo as getnameinfo, + getprotobyname as getprotobyname, + getservbyname as getservbyname, + getservbyport as getservbyport, + has_ipv6 as has_ipv6, + herror as herror, + htonl as htonl, + htons as htons, + inet_aton as inet_aton, + inet_ntoa as inet_ntoa, + inet_ntop as inet_ntop, + inet_pton as inet_pton, + ntohl as ntohl, + ntohs as ntohs, + setdefaulttimeout as setdefaulttimeout, + timeout as timeout, +) +from _typeshed import ReadableBuffer, Unused, WriteableBuffer +from collections.abc import Iterable +from enum import IntEnum, IntFlag +from io import BufferedReader, BufferedRWPair, BufferedWriter, IOBase, RawIOBase, TextIOWrapper +from typing import Any, Protocol, overload +from typing_extensions import Literal, Self + +if sys.platform != "darwin" or sys.version_info >= (3, 9): + from _socket import ( + IPV6_DONTFRAG as IPV6_DONTFRAG, + IPV6_HOPLIMIT as IPV6_HOPLIMIT, + IPV6_HOPOPTS as IPV6_HOPOPTS, + IPV6_PKTINFO as IPV6_PKTINFO, + IPV6_RECVRTHDR as IPV6_RECVRTHDR, + IPV6_RTHDR as IPV6_RTHDR, + ) + +if sys.platform != "darwin": + from _socket import SO_EXCLUSIVEADDRUSE as SO_EXCLUSIVEADDRUSE + +if sys.version_info >= (3, 10): + from _socket import IP_RECVTOS as IP_RECVTOS +elif sys.platform != "darwin" and sys.platform != "win32": + from _socket import IP_RECVTOS as IP_RECVTOS + +from _socket import TCP_KEEPINTVL as TCP_KEEPINTVL, close as close + +if sys.platform != "darwin": + from _socket import TCP_KEEPIDLE as TCP_KEEPIDLE + +if sys.platform != "win32" or sys.version_info >= (3, 8): + from _socket import ( + IPPROTO_AH as IPPROTO_AH, + IPPROTO_DSTOPTS as IPPROTO_DSTOPTS, + IPPROTO_EGP as IPPROTO_EGP, + IPPROTO_ESP as IPPROTO_ESP, + IPPROTO_FRAGMENT as IPPROTO_FRAGMENT, + IPPROTO_GGP as IPPROTO_GGP, + IPPROTO_HOPOPTS as IPPROTO_HOPOPTS, + IPPROTO_ICMPV6 as IPPROTO_ICMPV6, + IPPROTO_IDP as IPPROTO_IDP, + IPPROTO_IGMP as IPPROTO_IGMP, + IPPROTO_IPV4 as IPPROTO_IPV4, + IPPROTO_IPV6 as IPPROTO_IPV6, + IPPROTO_MAX as IPPROTO_MAX, + IPPROTO_ND as IPPROTO_ND, + IPPROTO_NONE as IPPROTO_NONE, + IPPROTO_PIM as IPPROTO_PIM, + IPPROTO_PUP as IPPROTO_PUP, + IPPROTO_ROUTING as IPPROTO_ROUTING, + IPPROTO_SCTP as IPPROTO_SCTP, + ) + + if sys.platform != "darwin": + from _socket import ( + IPPROTO_CBT as IPPROTO_CBT, + IPPROTO_ICLFXBM as IPPROTO_ICLFXBM, + IPPROTO_IGP as IPPROTO_IGP, + IPPROTO_L2TP as IPPROTO_L2TP, + IPPROTO_PGM as IPPROTO_PGM, + IPPROTO_RDP as IPPROTO_RDP, + IPPROTO_ST as IPPROTO_ST, + ) +if sys.platform != "win32" and sys.platform != "darwin": + from _socket import ( + IP_TRANSPARENT as IP_TRANSPARENT, + IPPROTO_BIP as IPPROTO_BIP, + IPPROTO_MOBILE as IPPROTO_MOBILE, + IPPROTO_VRRP as IPPROTO_VRRP, + IPX_TYPE as IPX_TYPE, + SCM_CREDENTIALS as SCM_CREDENTIALS, + SO_BINDTODEVICE as SO_BINDTODEVICE, + SO_MARK as SO_MARK, + SO_PASSCRED as SO_PASSCRED, + SO_PEERCRED as SO_PEERCRED, + SO_PRIORITY as SO_PRIORITY, + SO_SETFIB as SO_SETFIB, + SOL_ATALK as SOL_ATALK, + SOL_AX25 as SOL_AX25, + SOL_HCI as SOL_HCI, + SOL_IPX as SOL_IPX, + SOL_NETROM as SOL_NETROM, + SOL_ROSE as SOL_ROSE, + TCP_CORK as TCP_CORK, + TCP_DEFER_ACCEPT as TCP_DEFER_ACCEPT, + TCP_INFO as TCP_INFO, + TCP_LINGER2 as TCP_LINGER2, + TCP_QUICKACK as TCP_QUICKACK, + TCP_SYNCNT as TCP_SYNCNT, + TCP_USER_TIMEOUT as TCP_USER_TIMEOUT, + TCP_WINDOW_CLAMP as TCP_WINDOW_CLAMP, + ) +if sys.platform != "win32": + from _socket import ( + CMSG_LEN as CMSG_LEN, + CMSG_SPACE as 
CMSG_SPACE, + EAI_ADDRFAMILY as EAI_ADDRFAMILY, + EAI_BADHINTS as EAI_BADHINTS, + EAI_MAX as EAI_MAX, + EAI_OVERFLOW as EAI_OVERFLOW, + EAI_PROTOCOL as EAI_PROTOCOL, + EAI_SYSTEM as EAI_SYSTEM, + IP_DEFAULT_MULTICAST_LOOP as IP_DEFAULT_MULTICAST_LOOP, + IP_DEFAULT_MULTICAST_TTL as IP_DEFAULT_MULTICAST_TTL, + IP_MAX_MEMBERSHIPS as IP_MAX_MEMBERSHIPS, + IP_RECVOPTS as IP_RECVOPTS, + IP_RECVRETOPTS as IP_RECVRETOPTS, + IP_RETOPTS as IP_RETOPTS, + IPPROTO_EON as IPPROTO_EON, + IPPROTO_GRE as IPPROTO_GRE, + IPPROTO_HELLO as IPPROTO_HELLO, + IPPROTO_IPCOMP as IPPROTO_IPCOMP, + IPPROTO_IPIP as IPPROTO_IPIP, + IPPROTO_RSVP as IPPROTO_RSVP, + IPPROTO_TP as IPPROTO_TP, + IPPROTO_XTP as IPPROTO_XTP, + IPV6_RTHDR_TYPE_0 as IPV6_RTHDR_TYPE_0, + LOCAL_PEERCRED as LOCAL_PEERCRED, + SCM_CREDS as SCM_CREDS, + SCM_RIGHTS as SCM_RIGHTS, + SO_REUSEPORT as SO_REUSEPORT, + sethostname as sethostname, + ) + + if sys.platform != "darwin" or sys.version_info >= (3, 9): + from _socket import ( + IPV6_DSTOPTS as IPV6_DSTOPTS, + IPV6_NEXTHOP as IPV6_NEXTHOP, + IPV6_PATHMTU as IPV6_PATHMTU, + IPV6_RECVDSTOPTS as IPV6_RECVDSTOPTS, + IPV6_RECVHOPLIMIT as IPV6_RECVHOPLIMIT, + IPV6_RECVHOPOPTS as IPV6_RECVHOPOPTS, + IPV6_RECVPATHMTU as IPV6_RECVPATHMTU, + IPV6_RECVPKTINFO as IPV6_RECVPKTINFO, + IPV6_RTHDRDSTOPTS as IPV6_RTHDRDSTOPTS, + IPV6_USE_MIN_MTU as IPV6_USE_MIN_MTU, + ) + +if sys.platform != "win32" or sys.version_info >= (3, 8): + from _socket import if_indextoname as if_indextoname, if_nameindex as if_nameindex, if_nametoindex as if_nametoindex + +if sys.platform != "darwin": + if sys.platform != "win32" or sys.version_info >= (3, 9): + from _socket import BDADDR_ANY as BDADDR_ANY, BDADDR_LOCAL as BDADDR_LOCAL, BTPROTO_RFCOMM as BTPROTO_RFCOMM + +if sys.platform == "darwin" and sys.version_info >= (3, 10): + from _socket import TCP_KEEPALIVE as TCP_KEEPALIVE + +if sys.platform == "linux": + from _socket import ( + ALG_OP_DECRYPT as ALG_OP_DECRYPT, + ALG_OP_ENCRYPT as ALG_OP_ENCRYPT, + ALG_OP_SIGN as ALG_OP_SIGN, + ALG_OP_VERIFY as ALG_OP_VERIFY, + ALG_SET_AEAD_ASSOCLEN as ALG_SET_AEAD_ASSOCLEN, + ALG_SET_AEAD_AUTHSIZE as ALG_SET_AEAD_AUTHSIZE, + ALG_SET_IV as ALG_SET_IV, + ALG_SET_KEY as ALG_SET_KEY, + ALG_SET_OP as ALG_SET_OP, + ALG_SET_PUBKEY as ALG_SET_PUBKEY, + CAN_BCM as CAN_BCM, + CAN_BCM_RX_CHANGED as CAN_BCM_RX_CHANGED, + CAN_BCM_RX_DELETE as CAN_BCM_RX_DELETE, + CAN_BCM_RX_READ as CAN_BCM_RX_READ, + CAN_BCM_RX_SETUP as CAN_BCM_RX_SETUP, + CAN_BCM_RX_STATUS as CAN_BCM_RX_STATUS, + CAN_BCM_RX_TIMEOUT as CAN_BCM_RX_TIMEOUT, + CAN_BCM_TX_DELETE as CAN_BCM_TX_DELETE, + CAN_BCM_TX_EXPIRED as CAN_BCM_TX_EXPIRED, + CAN_BCM_TX_READ as CAN_BCM_TX_READ, + CAN_BCM_TX_SEND as CAN_BCM_TX_SEND, + CAN_BCM_TX_SETUP as CAN_BCM_TX_SETUP, + CAN_BCM_TX_STATUS as CAN_BCM_TX_STATUS, + CAN_EFF_FLAG as CAN_EFF_FLAG, + CAN_EFF_MASK as CAN_EFF_MASK, + CAN_ERR_FLAG as CAN_ERR_FLAG, + CAN_ERR_MASK as CAN_ERR_MASK, + CAN_RAW as CAN_RAW, + CAN_RAW_ERR_FILTER as CAN_RAW_ERR_FILTER, + CAN_RAW_FD_FRAMES as CAN_RAW_FD_FRAMES, + CAN_RAW_FILTER as CAN_RAW_FILTER, + CAN_RAW_LOOPBACK as CAN_RAW_LOOPBACK, + CAN_RAW_RECV_OWN_MSGS as CAN_RAW_RECV_OWN_MSGS, + CAN_RTR_FLAG as CAN_RTR_FLAG, + CAN_SFF_MASK as CAN_SFF_MASK, + NETLINK_ARPD as NETLINK_ARPD, + NETLINK_CRYPTO as NETLINK_CRYPTO, + NETLINK_DNRTMSG as NETLINK_DNRTMSG, + NETLINK_FIREWALL as NETLINK_FIREWALL, + NETLINK_IP6_FW as NETLINK_IP6_FW, + NETLINK_NFLOG as NETLINK_NFLOG, + NETLINK_ROUTE as NETLINK_ROUTE, + NETLINK_ROUTE6 as NETLINK_ROUTE6, + NETLINK_SKIP as NETLINK_SKIP, + 
NETLINK_TAPBASE as NETLINK_TAPBASE, + NETLINK_TCPDIAG as NETLINK_TCPDIAG, + NETLINK_USERSOCK as NETLINK_USERSOCK, + NETLINK_W1 as NETLINK_W1, + NETLINK_XFRM as NETLINK_XFRM, + PACKET_BROADCAST as PACKET_BROADCAST, + PACKET_FASTROUTE as PACKET_FASTROUTE, + PACKET_HOST as PACKET_HOST, + PACKET_LOOPBACK as PACKET_LOOPBACK, + PACKET_MULTICAST as PACKET_MULTICAST, + PACKET_OTHERHOST as PACKET_OTHERHOST, + PACKET_OUTGOING as PACKET_OUTGOING, + PF_CAN as PF_CAN, + PF_PACKET as PF_PACKET, + PF_RDS as PF_RDS, + RDS_CANCEL_SENT_TO as RDS_CANCEL_SENT_TO, + RDS_CMSG_RDMA_ARGS as RDS_CMSG_RDMA_ARGS, + RDS_CMSG_RDMA_DEST as RDS_CMSG_RDMA_DEST, + RDS_CMSG_RDMA_MAP as RDS_CMSG_RDMA_MAP, + RDS_CMSG_RDMA_STATUS as RDS_CMSG_RDMA_STATUS, + RDS_CMSG_RDMA_UPDATE as RDS_CMSG_RDMA_UPDATE, + RDS_CONG_MONITOR as RDS_CONG_MONITOR, + RDS_FREE_MR as RDS_FREE_MR, + RDS_GET_MR as RDS_GET_MR, + RDS_GET_MR_FOR_DEST as RDS_GET_MR_FOR_DEST, + RDS_RDMA_DONTWAIT as RDS_RDMA_DONTWAIT, + RDS_RDMA_FENCE as RDS_RDMA_FENCE, + RDS_RDMA_INVALIDATE as RDS_RDMA_INVALIDATE, + RDS_RDMA_NOTIFY_ME as RDS_RDMA_NOTIFY_ME, + RDS_RDMA_READWRITE as RDS_RDMA_READWRITE, + RDS_RDMA_SILENT as RDS_RDMA_SILENT, + RDS_RDMA_USE_ONCE as RDS_RDMA_USE_ONCE, + RDS_RECVERR as RDS_RECVERR, + SOL_ALG as SOL_ALG, + SOL_CAN_BASE as SOL_CAN_BASE, + SOL_CAN_RAW as SOL_CAN_RAW, + SOL_RDS as SOL_RDS, + SOL_TIPC as SOL_TIPC, + TIPC_ADDR_ID as TIPC_ADDR_ID, + TIPC_ADDR_NAME as TIPC_ADDR_NAME, + TIPC_ADDR_NAMESEQ as TIPC_ADDR_NAMESEQ, + TIPC_CFG_SRV as TIPC_CFG_SRV, + TIPC_CLUSTER_SCOPE as TIPC_CLUSTER_SCOPE, + TIPC_CONN_TIMEOUT as TIPC_CONN_TIMEOUT, + TIPC_CRITICAL_IMPORTANCE as TIPC_CRITICAL_IMPORTANCE, + TIPC_DEST_DROPPABLE as TIPC_DEST_DROPPABLE, + TIPC_HIGH_IMPORTANCE as TIPC_HIGH_IMPORTANCE, + TIPC_IMPORTANCE as TIPC_IMPORTANCE, + TIPC_LOW_IMPORTANCE as TIPC_LOW_IMPORTANCE, + TIPC_MEDIUM_IMPORTANCE as TIPC_MEDIUM_IMPORTANCE, + TIPC_NODE_SCOPE as TIPC_NODE_SCOPE, + TIPC_PUBLISHED as TIPC_PUBLISHED, + TIPC_SRC_DROPPABLE as TIPC_SRC_DROPPABLE, + TIPC_SUB_CANCEL as TIPC_SUB_CANCEL, + TIPC_SUB_PORTS as TIPC_SUB_PORTS, + TIPC_SUB_SERVICE as TIPC_SUB_SERVICE, + TIPC_SUBSCR_TIMEOUT as TIPC_SUBSCR_TIMEOUT, + TIPC_TOP_SRV as TIPC_TOP_SRV, + TIPC_WAIT_FOREVER as TIPC_WAIT_FOREVER, + TIPC_WITHDRAWN as TIPC_WITHDRAWN, + TIPC_ZONE_SCOPE as TIPC_ZONE_SCOPE, + ) +if sys.platform == "linux": + from _socket import ( + CAN_ISOTP as CAN_ISOTP, + IOCTL_VM_SOCKETS_GET_LOCAL_CID as IOCTL_VM_SOCKETS_GET_LOCAL_CID, + SO_VM_SOCKETS_BUFFER_MAX_SIZE as SO_VM_SOCKETS_BUFFER_MAX_SIZE, + SO_VM_SOCKETS_BUFFER_MIN_SIZE as SO_VM_SOCKETS_BUFFER_MIN_SIZE, + SO_VM_SOCKETS_BUFFER_SIZE as SO_VM_SOCKETS_BUFFER_SIZE, + VM_SOCKETS_INVALID_VERSION as VM_SOCKETS_INVALID_VERSION, + VMADDR_CID_ANY as VMADDR_CID_ANY, + VMADDR_CID_HOST as VMADDR_CID_HOST, + VMADDR_PORT_ANY as VMADDR_PORT_ANY, + ) +if sys.platform != "win32": + from _socket import TCP_NOTSENT_LOWAT as TCP_NOTSENT_LOWAT +if sys.platform == "linux" and sys.version_info >= (3, 8): + from _socket import ( + CAN_BCM_CAN_FD_FRAME as CAN_BCM_CAN_FD_FRAME, + CAN_BCM_RX_ANNOUNCE_RESUME as CAN_BCM_RX_ANNOUNCE_RESUME, + CAN_BCM_RX_CHECK_DLC as CAN_BCM_RX_CHECK_DLC, + CAN_BCM_RX_FILTER_ID as CAN_BCM_RX_FILTER_ID, + CAN_BCM_RX_NO_AUTOTIMER as CAN_BCM_RX_NO_AUTOTIMER, + CAN_BCM_RX_RTR_FRAME as CAN_BCM_RX_RTR_FRAME, + CAN_BCM_SETTIMER as CAN_BCM_SETTIMER, + CAN_BCM_STARTTIMER as CAN_BCM_STARTTIMER, + CAN_BCM_TX_ANNOUNCE as CAN_BCM_TX_ANNOUNCE, + CAN_BCM_TX_COUNTEVT as CAN_BCM_TX_COUNTEVT, + CAN_BCM_TX_CP_CAN_ID as CAN_BCM_TX_CP_CAN_ID, + 
CAN_BCM_TX_RESET_MULTI_IDX as CAN_BCM_TX_RESET_MULTI_IDX, + ) +if sys.platform == "linux" and sys.version_info >= (3, 9): + from _socket import ( + CAN_J1939 as CAN_J1939, + CAN_RAW_JOIN_FILTERS as CAN_RAW_JOIN_FILTERS, + J1939_EE_INFO_NONE as J1939_EE_INFO_NONE, + J1939_EE_INFO_TX_ABORT as J1939_EE_INFO_TX_ABORT, + J1939_FILTER_MAX as J1939_FILTER_MAX, + J1939_IDLE_ADDR as J1939_IDLE_ADDR, + J1939_MAX_UNICAST_ADDR as J1939_MAX_UNICAST_ADDR, + J1939_NLA_BYTES_ACKED as J1939_NLA_BYTES_ACKED, + J1939_NLA_PAD as J1939_NLA_PAD, + J1939_NO_ADDR as J1939_NO_ADDR, + J1939_NO_NAME as J1939_NO_NAME, + J1939_NO_PGN as J1939_NO_PGN, + J1939_PGN_ADDRESS_CLAIMED as J1939_PGN_ADDRESS_CLAIMED, + J1939_PGN_ADDRESS_COMMANDED as J1939_PGN_ADDRESS_COMMANDED, + J1939_PGN_MAX as J1939_PGN_MAX, + J1939_PGN_PDU1_MAX as J1939_PGN_PDU1_MAX, + J1939_PGN_REQUEST as J1939_PGN_REQUEST, + SCM_J1939_DEST_ADDR as SCM_J1939_DEST_ADDR, + SCM_J1939_DEST_NAME as SCM_J1939_DEST_NAME, + SCM_J1939_ERRQUEUE as SCM_J1939_ERRQUEUE, + SCM_J1939_PRIO as SCM_J1939_PRIO, + SO_J1939_ERRQUEUE as SO_J1939_ERRQUEUE, + SO_J1939_FILTER as SO_J1939_FILTER, + SO_J1939_PROMISC as SO_J1939_PROMISC, + SO_J1939_SEND_PRIO as SO_J1939_SEND_PRIO, + ) +if sys.platform == "linux" and sys.version_info >= (3, 10): + from _socket import IPPROTO_MPTCP as IPPROTO_MPTCP +if sys.platform == "linux" and sys.version_info >= (3, 11): + from _socket import SO_INCOMING_CPU as SO_INCOMING_CPU +if sys.platform == "win32": + from _socket import ( + RCVALL_MAX as RCVALL_MAX, + RCVALL_OFF as RCVALL_OFF, + RCVALL_ON as RCVALL_ON, + RCVALL_SOCKETLEVELONLY as RCVALL_SOCKETLEVELONLY, + SIO_KEEPALIVE_VALS as SIO_KEEPALIVE_VALS, + SIO_LOOPBACK_FAST_PATH as SIO_LOOPBACK_FAST_PATH, + SIO_RCVALL as SIO_RCVALL, + ) +if sys.version_info >= (3, 12): + from _socket import ( + IP_ADD_SOURCE_MEMBERSHIP as IP_ADD_SOURCE_MEMBERSHIP, + IP_BLOCK_SOURCE as IP_BLOCK_SOURCE, + IP_DROP_SOURCE_MEMBERSHIP as IP_DROP_SOURCE_MEMBERSHIP, + IP_PKTINFO as IP_PKTINFO, + IP_UNBLOCK_SOURCE as IP_UNBLOCK_SOURCE, + ) + + if sys.platform == "win32": + from _socket import ( + HV_GUID_BROADCAST as HV_GUID_BROADCAST, + HV_GUID_CHILDREN as HV_GUID_CHILDREN, + HV_GUID_LOOPBACK as HV_GUID_LOOPBACK, + HV_GUID_PARENT as HV_GUID_PARENT, + HV_GUID_WILDCARD as HV_GUID_WILDCARD, + HV_GUID_ZERO as HV_GUID_ZERO, + HV_PROTOCOL_RAW as HV_PROTOCOL_RAW, + HVSOCKET_ADDRESS_FLAG_PASSTHRU as HVSOCKET_ADDRESS_FLAG_PASSTHRU, + HVSOCKET_CONNECT_TIMEOUT as HVSOCKET_CONNECT_TIMEOUT, + HVSOCKET_CONNECT_TIMEOUT_MAX as HVSOCKET_CONNECT_TIMEOUT_MAX, + HVSOCKET_CONNECTED_SUSPEND as HVSOCKET_CONNECTED_SUSPEND, + ) + else: + from _socket import ( + ETHERTYPE_ARP as ETHERTYPE_ARP, + ETHERTYPE_IP as ETHERTYPE_IP, + ETHERTYPE_IPV6 as ETHERTYPE_IPV6, + ETHERTYPE_VLAN as ETHERTYPE_VLAN, + ) +if sys.version_info >= (3, 11) and sys.platform == "darwin": + from _socket import TCP_CONNECTION_INFO as TCP_CONNECTION_INFO + +# Re-exported from errno +EBADF: int +EAGAIN: int +EWOULDBLOCK: int + +class AddressFamily(IntEnum): + AF_INET: int + AF_INET6: int + AF_APPLETALK: int + AF_DECnet: int + AF_IPX: int + AF_SNA: int + AF_UNSPEC: int + if sys.platform != "darwin": + AF_IRDA: int + if sys.platform != "win32": + AF_ROUTE: int + AF_SYSTEM: int + AF_UNIX: int + if sys.platform != "darwin" and sys.platform != "win32": + AF_AAL5: int + AF_ASH: int + AF_ATMPVC: int + AF_ATMSVC: int + AF_AX25: int + AF_BRIDGE: int + AF_ECONET: int + AF_KEY: int + AF_LLC: int + AF_NETBEUI: int + AF_NETROM: int + AF_PPPOX: int + AF_ROSE: int + AF_SECURITY: int + 
AF_WANPIPE: int + AF_X25: int + if sys.platform == "linux": + AF_CAN: int + AF_PACKET: int + AF_RDS: int + AF_TIPC: int + AF_ALG: int + AF_NETLINK: int + AF_VSOCK: int + if sys.version_info >= (3, 8): + AF_QIPCRTR: int + if sys.platform != "win32" or sys.version_info >= (3, 9): + AF_LINK: int + if sys.platform != "darwin": + AF_BLUETOOTH: int + if sys.platform == "win32" and sys.version_info >= (3, 12): + AF_HYPERV: int + +AF_INET = AddressFamily.AF_INET +AF_INET6 = AddressFamily.AF_INET6 +AF_APPLETALK = AddressFamily.AF_APPLETALK +AF_DECnet = AddressFamily.AF_DECnet +AF_IPX = AddressFamily.AF_IPX +AF_SNA = AddressFamily.AF_SNA +AF_UNSPEC = AddressFamily.AF_UNSPEC + +if sys.platform != "darwin": + AF_IRDA = AddressFamily.AF_IRDA + +if sys.platform != "win32": + AF_ROUTE = AddressFamily.AF_ROUTE + AF_SYSTEM = AddressFamily.AF_SYSTEM + AF_UNIX = AddressFamily.AF_UNIX + +if sys.platform != "win32" and sys.platform != "darwin": + AF_AAL5 = AddressFamily.AF_AAL5 + AF_ASH = AddressFamily.AF_ASH + AF_ATMPVC = AddressFamily.AF_ATMPVC + AF_ATMSVC = AddressFamily.AF_ATMSVC + AF_AX25 = AddressFamily.AF_AX25 + AF_BRIDGE = AddressFamily.AF_BRIDGE + AF_ECONET = AddressFamily.AF_ECONET + AF_KEY = AddressFamily.AF_KEY + AF_LLC = AddressFamily.AF_LLC + AF_NETBEUI = AddressFamily.AF_NETBEUI + AF_NETROM = AddressFamily.AF_NETROM + AF_PPPOX = AddressFamily.AF_PPPOX + AF_ROSE = AddressFamily.AF_ROSE + AF_SECURITY = AddressFamily.AF_SECURITY + AF_WANPIPE = AddressFamily.AF_WANPIPE + AF_X25 = AddressFamily.AF_X25 + +if sys.platform == "linux": + AF_CAN = AddressFamily.AF_CAN + AF_PACKET = AddressFamily.AF_PACKET + AF_RDS = AddressFamily.AF_RDS + AF_TIPC = AddressFamily.AF_TIPC + AF_ALG = AddressFamily.AF_ALG + AF_NETLINK = AddressFamily.AF_NETLINK + AF_VSOCK = AddressFamily.AF_VSOCK + if sys.version_info >= (3, 8): + AF_QIPCRTR = AddressFamily.AF_QIPCRTR + +if sys.platform != "win32" or sys.version_info >= (3, 9): + AF_LINK = AddressFamily.AF_LINK + if sys.platform != "darwin": + AF_BLUETOOTH = AddressFamily.AF_BLUETOOTH + +if sys.platform == "win32" and sys.version_info >= (3, 12): + AF_HYPERV = AddressFamily.AF_HYPERV + +class SocketKind(IntEnum): + SOCK_STREAM: int + SOCK_DGRAM: int + SOCK_RAW: int + SOCK_RDM: int + SOCK_SEQPACKET: int + if sys.platform == "linux": + SOCK_CLOEXEC: int + SOCK_NONBLOCK: int + +SOCK_STREAM = SocketKind.SOCK_STREAM +SOCK_DGRAM = SocketKind.SOCK_DGRAM +SOCK_RAW = SocketKind.SOCK_RAW +SOCK_RDM = SocketKind.SOCK_RDM +SOCK_SEQPACKET = SocketKind.SOCK_SEQPACKET +if sys.platform == "linux": + SOCK_CLOEXEC = SocketKind.SOCK_CLOEXEC + SOCK_NONBLOCK = SocketKind.SOCK_NONBLOCK + +class MsgFlag(IntFlag): + MSG_CTRUNC: int + MSG_DONTROUTE: int + MSG_OOB: int + MSG_PEEK: int + MSG_TRUNC: int + MSG_WAITALL: int + + if sys.platform != "darwin": + MSG_BCAST: int + MSG_MCAST: int + MSG_ERRQUEUE: int + + if sys.platform != "win32" and sys.platform != "darwin": + MSG_BTAG: int + MSG_CMSG_CLOEXEC: int + MSG_CONFIRM: int + MSG_ETAG: int + MSG_FASTOPEN: int + MSG_MORE: int + MSG_NOTIFICATION: int + + if sys.platform != "win32": + MSG_DONTWAIT: int + MSG_EOF: int + MSG_EOR: int + MSG_NOSIGNAL: int # sometimes this exists on darwin, sometimes not + +MSG_CTRUNC = MsgFlag.MSG_CTRUNC +MSG_DONTROUTE = MsgFlag.MSG_DONTROUTE +MSG_OOB = MsgFlag.MSG_OOB +MSG_PEEK = MsgFlag.MSG_PEEK +MSG_TRUNC = MsgFlag.MSG_TRUNC +MSG_WAITALL = MsgFlag.MSG_WAITALL + +if sys.platform != "darwin": + MSG_BCAST = MsgFlag.MSG_BCAST + MSG_MCAST = MsgFlag.MSG_MCAST + MSG_ERRQUEUE = MsgFlag.MSG_ERRQUEUE + +if sys.platform != "win32": + 
MSG_DONTWAIT = MsgFlag.MSG_DONTWAIT + MSG_EOF = MsgFlag.MSG_EOF + MSG_EOR = MsgFlag.MSG_EOR + MSG_NOSIGNAL = MsgFlag.MSG_NOSIGNAL # Sometimes this exists on darwin, sometimes not + +if sys.platform != "win32" and sys.platform != "darwin": + MSG_BTAG = MsgFlag.MSG_BTAG + MSG_CMSG_CLOEXEC = MsgFlag.MSG_CMSG_CLOEXEC + MSG_CONFIRM = MsgFlag.MSG_CONFIRM + MSG_ETAG = MsgFlag.MSG_ETAG + MSG_FASTOPEN = MsgFlag.MSG_FASTOPEN + MSG_MORE = MsgFlag.MSG_MORE + MSG_NOTIFICATION = MsgFlag.MSG_NOTIFICATION + +class AddressInfo(IntFlag): + AI_ADDRCONFIG: int + AI_ALL: int + AI_CANONNAME: int + AI_NUMERICHOST: int + AI_NUMERICSERV: int + AI_PASSIVE: int + AI_V4MAPPED: int + if sys.platform != "win32": + AI_DEFAULT: int + AI_MASK: int + AI_V4MAPPED_CFG: int + +AI_ADDRCONFIG = AddressInfo.AI_ADDRCONFIG +AI_ALL = AddressInfo.AI_ALL +AI_CANONNAME = AddressInfo.AI_CANONNAME +AI_NUMERICHOST = AddressInfo.AI_NUMERICHOST +AI_NUMERICSERV = AddressInfo.AI_NUMERICSERV +AI_PASSIVE = AddressInfo.AI_PASSIVE +AI_V4MAPPED = AddressInfo.AI_V4MAPPED + +if sys.platform != "win32": + AI_DEFAULT = AddressInfo.AI_DEFAULT + AI_MASK = AddressInfo.AI_MASK + AI_V4MAPPED_CFG = AddressInfo.AI_V4MAPPED_CFG + +if sys.platform == "win32": + errorTab: dict[int, str] # undocumented + +class _SendableFile(Protocol): + def read(self, __size: int) -> bytes: ... + def seek(self, __offset: int) -> object: ... + + # optional fields: + # + # @property + # def mode(self) -> str: ... + # def fileno(self) -> int: ... + +class socket(_socket.socket): + def __init__( + self, family: AddressFamily | int = -1, type: SocketKind | int = -1, proto: int = -1, fileno: int | None = None + ) -> None: ... + def __enter__(self) -> Self: ... + def __exit__(self, *args: Unused) -> None: ... + def dup(self) -> Self: ... # noqa: F811 + def accept(self) -> tuple[socket, _RetAddress]: ... + # Note that the makefile's documented windows-specific behavior is not represented + # mode strings with duplicates are intentionally excluded + @overload + def makefile( + self, + mode: Literal["b", "rb", "br", "wb", "bw", "rwb", "rbw", "wrb", "wbr", "brw", "bwr"], + buffering: Literal[0], + *, + encoding: str | None = None, + errors: str | None = None, + newline: str | None = None, + ) -> SocketIO: ... + @overload + def makefile( + self, + mode: Literal["rwb", "rbw", "wrb", "wbr", "brw", "bwr"], + buffering: Literal[-1, 1] | None = None, + *, + encoding: str | None = None, + errors: str | None = None, + newline: str | None = None, + ) -> BufferedRWPair: ... + @overload + def makefile( + self, + mode: Literal["rb", "br"], + buffering: Literal[-1, 1] | None = None, + *, + encoding: str | None = None, + errors: str | None = None, + newline: str | None = None, + ) -> BufferedReader: ... + @overload + def makefile( + self, + mode: Literal["wb", "bw"], + buffering: Literal[-1, 1] | None = None, + *, + encoding: str | None = None, + errors: str | None = None, + newline: str | None = None, + ) -> BufferedWriter: ... + @overload + def makefile( + self, + mode: Literal["b", "rb", "br", "wb", "bw", "rwb", "rbw", "wrb", "wbr", "brw", "bwr"], + buffering: int, + *, + encoding: str | None = None, + errors: str | None = None, + newline: str | None = None, + ) -> IOBase: ... + @overload + def makefile( + self, + mode: Literal["r", "w", "rw", "wr", ""] = "r", + buffering: int | None = None, + *, + encoding: str | None = None, + errors: str | None = None, + newline: str | None = None, + ) -> TextIOWrapper: ... 
+ def sendfile(self, file: _SendableFile, offset: int = 0, count: int | None = None) -> int: ... + @property + def family(self) -> AddressFamily: ... + @property + def type(self) -> SocketKind: ... + def get_inheritable(self) -> bool: ... + def set_inheritable(self, inheritable: bool) -> None: ... + +def fromfd(fd: _FD, family: AddressFamily | int, type: SocketKind | int, proto: int = 0) -> socket: ... + +if sys.platform != "win32": + if sys.version_info >= (3, 9): + def send_fds( + sock: socket, buffers: Iterable[ReadableBuffer], fds: Iterable[int], flags: Unused = 0, address: Unused = None + ) -> int: ... + def recv_fds(sock: socket, bufsize: int, maxfds: int, flags: int = 0) -> tuple[bytes, list[int], int, Any]: ... + +if sys.platform == "win32": + def fromshare(info: bytes) -> socket: ... + +if sys.platform == "win32": + def socketpair(family: int = ..., type: int = ..., proto: int = 0) -> tuple[socket, socket]: ... + +else: + def socketpair( + family: int | AddressFamily | None = None, type: SocketType | int = ..., proto: int = 0 + ) -> tuple[socket, socket]: ... + +class SocketIO(RawIOBase): + def __init__(self, sock: socket, mode: Literal["r", "w", "rw", "rb", "wb", "rwb"]) -> None: ... + def readinto(self, b: WriteableBuffer) -> int | None: ... + def write(self, b: ReadableBuffer) -> int | None: ... + @property + def name(self) -> int: ... # return value is really "int" + @property + def mode(self) -> Literal["rb", "wb", "rwb"]: ... + +def getfqdn(name: str = "") -> str: ... + +if sys.version_info >= (3, 11): + def create_connection( + address: tuple[str | None, int], + timeout: float | None = ..., # noqa: F811 + source_address: _Address | None = None, + *, + all_errors: bool = False, + ) -> socket: ... + +else: + def create_connection( + address: tuple[str | None, int], timeout: float | None = ..., source_address: _Address | None = None # noqa: F811 + ) -> socket: ... + +if sys.version_info >= (3, 8): + def has_dualstack_ipv6() -> bool: ... + def create_server( + address: _Address, + *, + family: int = ..., + backlog: int | None = None, + reuse_port: bool = False, + dualstack_ipv6: bool = False, + ) -> socket: ... + +# the 5th tuple item is an address +def getaddrinfo( + host: bytes | str | None, port: bytes | str | int | None, family: int = 0, type: int = 0, proto: int = 0, flags: int = 0 +) -> list[tuple[AddressFamily, SocketKind, int, str, tuple[str, int] | tuple[str, int, int, int]]]: ... diff --git a/.vscode/Pico-W-Stub/stdlib/sre_compile.pyi b/.vscode/Pico-W-Stub/stdlib/sre_compile.pyi new file mode 100644 index 0000000..2d04a88 --- /dev/null +++ b/.vscode/Pico-W-Stub/stdlib/sre_compile.pyi @@ -0,0 +1,11 @@ +from re import Pattern +from sre_constants import * +from sre_constants import _NamedIntConstant +from sre_parse import SubPattern +from typing import Any + +MAXCODE: int + +def dis(code: list[_NamedIntConstant]) -> None: ... +def isstring(obj: Any) -> bool: ... +def compile(p: str | bytes | SubPattern, flags: int = 0) -> Pattern[Any]: ... 
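+
+# Illustrative sketch (not part of the upstream typeshed stub): in CPython,
+# re.compile() is backed by this module (re._compiler with a deprecation shim
+# named sre_compile on 3.11+), so calling it directly yields the same
+# Pattern type:
+#
+#     import sre_compile, sre_constants
+#     p = sre_compile.compile("a+", sre_constants.SRE_FLAG_IGNORECASE)
+#     assert p.match("AAA") is not None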
diff --git a/.vscode/Pico-W-Stub/stdlib/sre_constants.pyi b/.vscode/Pico-W-Stub/stdlib/sre_constants.pyi new file mode 100644 index 0000000..d522372 --- /dev/null +++ b/.vscode/Pico-W-Stub/stdlib/sre_constants.pyi @@ -0,0 +1,130 @@ +import sys +from typing import Any +from typing_extensions import Self + +MAXGROUPS: int + +MAGIC: int + +class error(Exception): + msg: str + pattern: str | bytes | None + pos: int | None + lineno: int + colno: int + def __init__(self, msg: str, pattern: str | bytes | None = None, pos: int | None = None) -> None: ... + +class _NamedIntConstant(int): + name: Any + def __new__(cls, value: int, name: str) -> Self: ... + +MAXREPEAT: _NamedIntConstant +OPCODES: list[_NamedIntConstant] +ATCODES: list[_NamedIntConstant] +CHCODES: list[_NamedIntConstant] +OP_IGNORE: dict[_NamedIntConstant, _NamedIntConstant] +OP_LOCALE_IGNORE: dict[_NamedIntConstant, _NamedIntConstant] +OP_UNICODE_IGNORE: dict[_NamedIntConstant, _NamedIntConstant] +AT_MULTILINE: dict[_NamedIntConstant, _NamedIntConstant] +AT_LOCALE: dict[_NamedIntConstant, _NamedIntConstant] +AT_UNICODE: dict[_NamedIntConstant, _NamedIntConstant] +CH_LOCALE: dict[_NamedIntConstant, _NamedIntConstant] +CH_UNICODE: dict[_NamedIntConstant, _NamedIntConstant] +SRE_FLAG_TEMPLATE: int +SRE_FLAG_IGNORECASE: int +SRE_FLAG_LOCALE: int +SRE_FLAG_MULTILINE: int +SRE_FLAG_DOTALL: int +SRE_FLAG_UNICODE: int +SRE_FLAG_VERBOSE: int +SRE_FLAG_DEBUG: int +SRE_FLAG_ASCII: int +SRE_INFO_PREFIX: int +SRE_INFO_LITERAL: int +SRE_INFO_CHARSET: int + +# Stubgen above; manually defined constants below (dynamic at runtime) + +# from OPCODES +FAILURE: _NamedIntConstant +SUCCESS: _NamedIntConstant +ANY: _NamedIntConstant +ANY_ALL: _NamedIntConstant +ASSERT: _NamedIntConstant +ASSERT_NOT: _NamedIntConstant +AT: _NamedIntConstant +BRANCH: _NamedIntConstant +if sys.version_info < (3, 11): + CALL: _NamedIntConstant +CATEGORY: _NamedIntConstant +CHARSET: _NamedIntConstant +BIGCHARSET: _NamedIntConstant +GROUPREF: _NamedIntConstant +GROUPREF_EXISTS: _NamedIntConstant +GROUPREF_IGNORE: _NamedIntConstant +IN: _NamedIntConstant +IN_IGNORE: _NamedIntConstant +INFO: _NamedIntConstant +JUMP: _NamedIntConstant +LITERAL: _NamedIntConstant +LITERAL_IGNORE: _NamedIntConstant +MARK: _NamedIntConstant +MAX_UNTIL: _NamedIntConstant +MIN_UNTIL: _NamedIntConstant +NOT_LITERAL: _NamedIntConstant +NOT_LITERAL_IGNORE: _NamedIntConstant +NEGATE: _NamedIntConstant +RANGE: _NamedIntConstant +REPEAT: _NamedIntConstant +REPEAT_ONE: _NamedIntConstant +SUBPATTERN: _NamedIntConstant +MIN_REPEAT_ONE: _NamedIntConstant +if sys.version_info >= (3, 11): + ATOMIC_GROUP: _NamedIntConstant + POSSESSIVE_REPEAT: _NamedIntConstant + POSSESSIVE_REPEAT_ONE: _NamedIntConstant +RANGE_UNI_IGNORE: _NamedIntConstant +GROUPREF_LOC_IGNORE: _NamedIntConstant +GROUPREF_UNI_IGNORE: _NamedIntConstant +IN_LOC_IGNORE: _NamedIntConstant +IN_UNI_IGNORE: _NamedIntConstant +LITERAL_LOC_IGNORE: _NamedIntConstant +LITERAL_UNI_IGNORE: _NamedIntConstant +NOT_LITERAL_LOC_IGNORE: _NamedIntConstant +NOT_LITERAL_UNI_IGNORE: _NamedIntConstant +MIN_REPEAT: _NamedIntConstant +MAX_REPEAT: _NamedIntConstant + +# from ATCODES +AT_BEGINNING: _NamedIntConstant +AT_BEGINNING_LINE: _NamedIntConstant +AT_BEGINNING_STRING: _NamedIntConstant +AT_BOUNDARY: _NamedIntConstant +AT_NON_BOUNDARY: _NamedIntConstant +AT_END: _NamedIntConstant +AT_END_LINE: _NamedIntConstant +AT_END_STRING: _NamedIntConstant +AT_LOC_BOUNDARY: _NamedIntConstant +AT_LOC_NON_BOUNDARY: _NamedIntConstant +AT_UNI_BOUNDARY: _NamedIntConstant 
+AT_UNI_NON_BOUNDARY: _NamedIntConstant + +# from CHCODES +CATEGORY_DIGIT: _NamedIntConstant +CATEGORY_NOT_DIGIT: _NamedIntConstant +CATEGORY_SPACE: _NamedIntConstant +CATEGORY_NOT_SPACE: _NamedIntConstant +CATEGORY_WORD: _NamedIntConstant +CATEGORY_NOT_WORD: _NamedIntConstant +CATEGORY_LINEBREAK: _NamedIntConstant +CATEGORY_NOT_LINEBREAK: _NamedIntConstant +CATEGORY_LOC_WORD: _NamedIntConstant +CATEGORY_LOC_NOT_WORD: _NamedIntConstant +CATEGORY_UNI_DIGIT: _NamedIntConstant +CATEGORY_UNI_NOT_DIGIT: _NamedIntConstant +CATEGORY_UNI_SPACE: _NamedIntConstant +CATEGORY_UNI_NOT_SPACE: _NamedIntConstant +CATEGORY_UNI_WORD: _NamedIntConstant +CATEGORY_UNI_NOT_WORD: _NamedIntConstant +CATEGORY_UNI_LINEBREAK: _NamedIntConstant +CATEGORY_UNI_NOT_LINEBREAK: _NamedIntConstant diff --git a/.vscode/Pico-W-Stub/stdlib/sre_parse.pyi b/.vscode/Pico-W-Stub/stdlib/sre_parse.pyi new file mode 100644 index 0000000..8ef6522 --- /dev/null +++ b/.vscode/Pico-W-Stub/stdlib/sre_parse.pyi @@ -0,0 +1,125 @@ +import sys +from collections.abc import Iterable +from re import Match, Pattern as _Pattern +from sre_constants import * +from sre_constants import _NamedIntConstant as _NIC, error as _Error +from typing import Any, overload +from typing_extensions import TypeAlias + +SPECIAL_CHARS: str +REPEAT_CHARS: str +DIGITS: frozenset[str] +OCTDIGITS: frozenset[str] +HEXDIGITS: frozenset[str] +ASCIILETTERS: frozenset[str] +WHITESPACE: frozenset[str] +ESCAPES: dict[str, tuple[_NIC, int]] +CATEGORIES: dict[str, tuple[_NIC, _NIC] | tuple[_NIC, list[tuple[_NIC, _NIC]]]] +FLAGS: dict[str, int] +TYPE_FLAGS: int +GLOBAL_FLAGS: int + +if sys.version_info < (3, 11): + class Verbose(Exception): ... + +class _State: + flags: int + groupdict: dict[str, int] + groupwidths: list[int | None] + lookbehindgroups: int | None + @property + def groups(self) -> int: ... + def opengroup(self, name: str | None = ...) -> int: ... + def closegroup(self, gid: int, p: SubPattern) -> None: ... + def checkgroup(self, gid: int) -> bool: ... + def checklookbehindgroup(self, gid: int, source: Tokenizer) -> None: ... + +if sys.version_info >= (3, 8): + State: TypeAlias = _State +else: + Pattern: TypeAlias = _State + +_OpSubpatternType: TypeAlias = tuple[int | None, int, int, SubPattern] +_OpGroupRefExistsType: TypeAlias = tuple[int, SubPattern, SubPattern] +_OpInType: TypeAlias = list[tuple[_NIC, int]] +_OpBranchType: TypeAlias = tuple[None, list[SubPattern]] +_AvType: TypeAlias = _OpInType | _OpBranchType | Iterable[SubPattern] | _OpGroupRefExistsType | _OpSubpatternType +_CodeType: TypeAlias = tuple[_NIC, _AvType] + +class SubPattern: + data: list[_CodeType] + width: int | None + + if sys.version_info >= (3, 8): + state: State + def __init__(self, state: State, data: list[_CodeType] | None = None) -> None: ... + else: + pattern: Pattern + def __init__(self, pattern: Pattern, data: list[_CodeType] | None = None) -> None: ... + + def dump(self, level: int = 0) -> None: ... + def __len__(self) -> int: ... + def __delitem__(self, index: int | slice) -> None: ... + def __getitem__(self, index: int | slice) -> SubPattern | _CodeType: ... + def __setitem__(self, index: int | slice, code: _CodeType) -> None: ... + def insert(self, index: int, code: _CodeType) -> None: ... + def append(self, code: _CodeType) -> None: ... + def getwidth(self) -> tuple[int, int]: ... + +class Tokenizer: + istext: bool + string: Any + decoded_string: str + index: int + next: str | None + def __init__(self, string: Any) -> None: ... + def match(self, char: str) -> bool: ... 
+ def get(self) -> str | None: ... + def getwhile(self, n: int, charset: Iterable[str]) -> str: ... + if sys.version_info >= (3, 8): + def getuntil(self, terminator: str, name: str) -> str: ... + else: + def getuntil(self, terminator: str) -> str: ... + + @property + def pos(self) -> int: ... + def tell(self) -> int: ... + def seek(self, index: int) -> None: ... + def error(self, msg: str, offset: int = 0) -> _Error: ... + + if sys.version_info >= (3, 12): + def checkgroupname(self, name: str, offset: int) -> None: ... + elif sys.version_info >= (3, 11): + def checkgroupname(self, name: str, offset: int, nested: int) -> None: ... + +def fix_flags(src: str | bytes, flags: int) -> int: ... + +_TemplateType: TypeAlias = tuple[list[tuple[int, int]], list[str | None]] +_TemplateByteType: TypeAlias = tuple[list[tuple[int, int]], list[bytes | None]] + +if sys.version_info >= (3, 12): + @overload + def parse_template(source: str, pattern: _Pattern[Any]) -> _TemplateType: ... + @overload + def parse_template(source: bytes, pattern: _Pattern[Any]) -> _TemplateByteType: ... + +elif sys.version_info >= (3, 8): + @overload + def parse_template(source: str, state: _Pattern[Any]) -> _TemplateType: ... + @overload + def parse_template(source: bytes, state: _Pattern[Any]) -> _TemplateByteType: ... + +else: + @overload + def parse_template(source: str, pattern: _Pattern[Any]) -> _TemplateType: ... + @overload + def parse_template(source: bytes, pattern: _Pattern[Any]) -> _TemplateByteType: ... + +if sys.version_info >= (3, 8): + def parse(str: str, flags: int = 0, state: State | None = None) -> SubPattern: ... + +else: + def parse(str: str, flags: int = 0, pattern: Pattern | None = None) -> SubPattern: ... + +if sys.version_info < (3, 12): + def expand_template(template: _TemplateType, match: Match[Any]) -> str: ... diff --git a/.vscode/Pico-W-Stub/stdlib/sys.pyi b/.vscode/Pico-W-Stub/stdlib/sys.pyi new file mode 100644 index 0000000..9045752 --- /dev/null +++ b/.vscode/Pico-W-Stub/stdlib/sys.pyi @@ -0,0 +1,369 @@ +import sys +from builtins import object as _object +from collections.abc import AsyncGenerator, Callable, Coroutine, Sequence +from importlib.abc import PathEntryFinder # type: ignore +from importlib.machinery import ModuleSpec # type: ignore +from io import TextIOWrapper +from types import FrameType, ModuleType, TracebackType +from typing import Any, NoReturn, Protocol, TextIO, TypeVar, overload + +from _typeshed import OptExcInfo, ProfileFunction, TraceFunction, structseq +from typing_extensions import Literal, TypeAlias, final + +_T = TypeVar("_T") + +# see https://github.com/python/typeshed/issues/8513#issue-1333671093 for the rationale behind this alias +_ExitCode: TypeAlias = str | int | None +_OptExcInfo: TypeAlias = OptExcInfo # noqa: Y047 # TODO: obsolete, remove fall 2022 or later + +# Intentionally omits one deprecated and one optional method of `importlib.abc.MetaPathFinder` +class _MetaPathFinder(Protocol): + def find_spec( + self, fullname: str, path: Sequence[str] | None, target: ModuleType | None = ... + ) -> ModuleSpec | None: ... 
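# Illustrative sketch (not part of the patch): a minimal object satisfying the
# _MetaPathFinder protocol declared above, of the kind entries on sys.meta_path
# are expected to provide. NullFinder is a made-up name; returning None simply
# defers the import to the next finder.
import sys
from importlib.machinery import ModuleSpec
from types import ModuleType
from typing import Optional, Sequence

class NullFinder:
    def find_spec(
        self, fullname: str, path: Optional[Sequence[str]], target: Optional[ModuleType] = None
    ) -> Optional[ModuleSpec]:
        return None  # never handles any module; the import machinery moves on

sys.meta_path.append(NullFinder())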
+ +# ----- sys variables ----- +if sys.platform != "win32": + abiflags: str +argv: list[str] +base_exec_prefix: str +base_prefix: str +byteorder: Literal["little", "big"] +builtin_module_names: Sequence[str] # actually a tuple of strings +copyright: str +if sys.platform == "win32": + dllhandle: int +dont_write_bytecode: bool +displayhook: Callable[[object], Any] +excepthook: Callable[[type[BaseException], BaseException, TracebackType | None], Any] +exec_prefix: str +executable: str +float_repr_style: Literal["short", "legacy"] +hexversion: int +last_type: type[BaseException] | None +last_value: BaseException | None +last_traceback: TracebackType | None +maxsize: int +maxunicode: int +meta_path: list[_MetaPathFinder] +modules: dict[str, ModuleType] +if sys.version_info >= (3, 10): + orig_argv: list[str] +path: list[str] +path_hooks: list[Callable[[str], PathEntryFinder]] +path_importer_cache: dict[str, PathEntryFinder | None] +platform: str +if sys.version_info >= (3, 9): + platlibdir: str +prefix: str +if sys.version_info >= (3, 8): + pycache_prefix: str | None +ps1: object +ps2: object +stdin: TextIO +stdout: TextIO +stderr: TextIO +if sys.version_info >= (3, 10): + stdlib_module_names: frozenset[str] +__stdin__: TextIOWrapper +__stdout__: TextIOWrapper +__stderr__: TextIOWrapper +tracebacklimit: int +version: str +api_version: int +warnoptions: Any +# Each entry is a tuple of the form (action, message, category, module, +# lineno) +if sys.platform == "win32": + winver: str +_xoptions: dict[Any, Any] + +# Type alias used as a mixin for structseq classes that cannot be instantiated at runtime +# This can't be represented in the type system, so we just use `structseq[Any]` +_UninstantiableStructseq: TypeAlias = structseq[Any] + +flags: _flags + +if sys.version_info >= (3, 10): + _FlagTuple: TypeAlias = tuple[ + int, int, int, int, int, int, int, int, int, int, int, int, int, bool, int, int + ] +else: + _FlagTuple: TypeAlias = tuple[ + int, int, int, int, int, int, int, int, int, int, int, int, int, bool, int + ] + +@final +class _flags(_UninstantiableStructseq, _FlagTuple): + @property + def debug(self) -> int: ... + @property + def inspect(self) -> int: ... + @property + def interactive(self) -> int: ... + @property + def optimize(self) -> int: ... + @property + def dont_write_bytecode(self) -> int: ... + @property + def no_user_site(self) -> int: ... + @property + def no_site(self) -> int: ... + @property + def ignore_environment(self) -> int: ... + @property + def verbose(self) -> int: ... + @property + def bytes_warning(self) -> int: ... + @property + def quiet(self) -> int: ... + @property + def hash_randomization(self) -> int: ... + @property + def isolated(self) -> int: ... + @property + def dev_mode(self) -> bool: ... + @property + def utf8_mode(self) -> int: ... + if sys.version_info >= (3, 10): + @property + def warn_default_encoding(self) -> int: ... # undocumented + if sys.version_info >= (3, 11): + @property + def safe_path(self) -> bool: ... + +float_info: _float_info + +@final +class _float_info( + structseq[float], tuple[float, int, int, float, int, int, int, int, float, int, int] +): + @property + def max(self) -> float: ... # DBL_MAX + @property + def max_exp(self) -> int: ... # DBL_MAX_EXP + @property + def max_10_exp(self) -> int: ... # DBL_MAX_10_EXP + @property + def min(self) -> float: ... # DBL_MIN + @property + def min_exp(self) -> int: ... # DBL_MIN_EXP + @property + def min_10_exp(self) -> int: ... # DBL_MIN_10_EXP + @property + def dig(self) -> int: ... 
# DBL_DIG + @property + def mant_dig(self) -> int: ... # DBL_MANT_DIG + @property + def epsilon(self) -> float: ... # DBL_EPSILON + @property + def radix(self) -> int: ... # FLT_RADIX + @property + def rounds(self) -> int: ... # FLT_ROUNDS + +hash_info: _hash_info + +@final +class _hash_info(structseq[Any | int], tuple[int, int, int, int, int, str, int, int, int]): + @property + def width(self) -> int: ... + @property + def modulus(self) -> int: ... + @property + def inf(self) -> int: ... + @property + def nan(self) -> int: ... + @property + def imag(self) -> int: ... + @property + def algorithm(self) -> str: ... + @property + def hash_bits(self) -> int: ... + @property + def seed_bits(self) -> int: ... + @property + def cutoff(self) -> int: ... # undocumented + +implementation: _implementation + +class _implementation: + name: str + version: _version_info + hexversion: int + cache_tag: str + # Define __getattr__, as the documentation states: + # > sys.implementation may contain additional attributes specific to the Python implementation. + # > These non-standard attributes must start with an underscore, and are not described here. + def __getattr__(self, name: str) -> Any: ... + +int_info: _int_info + +@final +class _int_info(structseq[int], tuple[int, int, int, int]): + @property + def bits_per_digit(self) -> int: ... + @property + def sizeof_digit(self) -> int: ... + @property + def default_max_str_digits(self) -> int: ... + @property + def str_digits_check_threshold(self) -> int: ... + +@final +class _version_info(_UninstantiableStructseq, tuple[int, int, int, str, int]): + @property + def major(self) -> int: ... + @property + def minor(self) -> int: ... + @property + def micro(self) -> int: ... + @property + def releaselevel(self) -> str: ... + @property + def serial(self) -> int: ... + +version_info: _version_info + +def call_tracing(__func: Callable[..., _T], __args: Any) -> _T: ... +def _clear_type_cache() -> None: ... +def _current_frames() -> dict[int, FrameType]: ... +def _getframe(__depth: int = ...) -> FrameType: ... +def _debugmallocstats() -> None: ... +def __displayhook__(__value: object) -> None: ... +def __excepthook__( + __exctype: type[BaseException], __value: BaseException, __traceback: TracebackType | None +) -> None: ... +def exc_info() -> OptExcInfo: ... + +if sys.version_info >= (3, 11): + def exception() -> BaseException | None: ... + +def exit(__status: _ExitCode = ...) -> NoReturn: ... +def getallocatedblocks() -> int: ... +def getdefaultencoding() -> str: ... + +if sys.platform != "win32": + def getdlopenflags() -> int: ... + +def getfilesystemencoding() -> str: ... +def getfilesystemencodeerrors() -> str: ... +def getrefcount(__object: Any) -> int: ... +def getrecursionlimit() -> int: ... +@overload +def getsizeof(obj: object) -> int: ... +@overload +def getsizeof(obj: object, default: int) -> int: ... +def getswitchinterval() -> float: ... +def getprofile() -> ProfileFunction | None: ... +def setprofile(profilefunc: ProfileFunction | None) -> None: ... +def gettrace() -> TraceFunction | None: ... +def settrace(tracefunc: TraceFunction | None) -> None: ... + +if sys.platform == "win32": + # A tuple of length 5, even though it has more than 5 attributes. + @final + class _WinVersion(_UninstantiableStructseq, tuple[int, int, int, int, str]): + @property + def major(self) -> int: ... + @property + def minor(self) -> int: ... + @property + def build(self) -> int: ... + @property + def platform(self) -> int: ... + @property + def service_pack(self) -> str: ... 
+ @property + def service_pack_minor(self) -> int: ... + @property + def service_pack_major(self) -> int: ... + @property + def suite_mask(self) -> int: ... + @property + def product_type(self) -> int: ... + @property + def platform_version(self) -> tuple[int, int, int]: ... + + def getwindowsversion() -> _WinVersion: ... + +def intern(__string: str) -> str: ... +def is_finalizing() -> bool: ... + +__breakpointhook__: Any # contains the original value of breakpointhook + +def breakpointhook(*args: Any, **kwargs: Any) -> Any: ... + +if sys.platform != "win32": + def setdlopenflags(__flags: int) -> None: ... + +def setrecursionlimit(__limit: int) -> None: ... +def setswitchinterval(__interval: float) -> None: ... +def gettotalrefcount() -> int: ... # Debug builds only + +if sys.version_info < (3, 9): + def getcheckinterval() -> int: ... # deprecated + def setcheckinterval(__n: int) -> None: ... # deprecated + +if sys.version_info < (3, 9): + # An 11-tuple or None + def callstats() -> tuple[int, int, int, int, int, int, int, int, int, int, int] | None: ... + +if sys.version_info >= (3, 8): + # Doesn't exist at runtime, but exported in the stubs so pytest etc. can annotate their code more easily. + class UnraisableHookArgs: + exc_type: type[BaseException] + exc_value: BaseException | None + exc_traceback: TracebackType | None + err_msg: str | None + object: _object | None + unraisablehook: Callable[[UnraisableHookArgs], Any] + def __unraisablehook__(__unraisable: UnraisableHookArgs) -> Any: ... + def addaudithook(hook: Callable[[str, tuple[Any, ...]], Any]) -> None: ... + def audit(__event: str, *args: Any) -> None: ... + +_AsyncgenHook: TypeAlias = Callable[[AsyncGenerator[Any, Any]], None] | None + +@final +class _asyncgen_hooks(structseq[_AsyncgenHook], tuple[_AsyncgenHook, _AsyncgenHook]): + @property + def firstiter(self) -> _AsyncgenHook: ... + @property + def finalizer(self) -> _AsyncgenHook: ... + +def get_asyncgen_hooks() -> _asyncgen_hooks: ... +def set_asyncgen_hooks(firstiter: _AsyncgenHook = ..., finalizer: _AsyncgenHook = ...) -> None: ... + +if sys.platform == "win32": + def _enablelegacywindowsfsencoding() -> None: ... + +def get_coroutine_origin_tracking_depth() -> int: ... +def set_coroutine_origin_tracking_depth(depth: int) -> None: ... + +if sys.version_info < (3, 8): + _CoroWrapper: TypeAlias = Callable[[Coroutine[Any, Any, Any]], Any] + def set_coroutine_wrapper(__wrapper: _CoroWrapper) -> None: ... + def get_coroutine_wrapper() -> _CoroWrapper: ... + +# The following two functions were added in 3.11.0, 3.10.7, 3.9.14, 3.8.14, & 3.7.14, +# as part of the response to CVE-2020-10735 +def set_int_max_str_digits(maxdigits: int) -> None: ... +def get_int_max_str_digits() -> int: ... + +# MicroPython specific functions +# Copyright (c) 2023 Jos Verlinde + +from typing import Optional +from _typeshed import Incomplete +def atexit(func:Optional[Callable[[],Any]]) -> Optional[Callable[[],Any]]: + """\ + Register func to be called upon termination. func must be a callable that takes no arguments, + or None to disable the call. The atexit function will return the previous value set by this function, + which is initially None. + + Ports: Unix, Windows + """ + ... + +def print_exception(exc, file=sys.stdout, /): + """Print exception with a traceback to a file-like object file (or sys.stdout by default).""" + ... 
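# Illustrative sketch (not part of the patch): using the MicroPython-specific
# entries stubbed above. `cleanup` is a made-up handler; per the docstring,
# sys.atexit is only provided by some ports (e.g. Unix/Windows), so this is a
# MicroPython-only snippet rather than CPython code.
import sys

def cleanup():
    print("shutting down")

previous = sys.atexit(cleanup)  # returns the previously registered handler, initially None

try:
    1 / 0
except Exception as exc:
    # Print the exception with a traceback to a file-like object (sys.stdout by default).
    sys.print_exception(exc, sys.stdout)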
\ No newline at end of file diff --git a/.vscode/Pico-W-Stub/stdlib/types.pyi b/.vscode/Pico-W-Stub/stdlib/types.pyi new file mode 100644 index 0000000..ba2b245 --- /dev/null +++ b/.vscode/Pico-W-Stub/stdlib/types.pyi @@ -0,0 +1,631 @@ +import sys +from collections.abc import ( + AsyncGenerator, + Awaitable, + Callable, + Coroutine, + Generator, + ItemsView, + Iterable, + Iterator, + KeysView, + MutableSequence, + ValuesView, +) +from importlib.machinery import ModuleSpec # type: ignore + +# pytype crashes if types.MappingProxyType inherits from collections.abc.Mapping instead of typing.Mapping +from typing import Any, ClassVar, Generic, Mapping, Protocol, TypeVar, overload # noqa: Y027 + +from _typeshed import SupportsKeysAndGetItem +from typing_extensions import Literal, ParamSpec, final + +__all__ = [ + "FunctionType", + "LambdaType", + "CodeType", + "MappingProxyType", + "SimpleNamespace", + "GeneratorType", + "CoroutineType", + "AsyncGeneratorType", + "MethodType", + "BuiltinFunctionType", + "ModuleType", + "TracebackType", + "FrameType", + "GetSetDescriptorType", + "MemberDescriptorType", + "new_class", + "prepare_class", + "DynamicClassAttribute", + "coroutine", + "BuiltinMethodType", + "ClassMethodDescriptorType", + "MethodDescriptorType", + "MethodWrapperType", + "WrapperDescriptorType", + "resolve_bases", +] + +if sys.version_info >= (3, 8): + __all__ += ["CellType"] + +if sys.version_info >= (3, 9): + __all__ += ["GenericAlias"] + +if sys.version_info >= (3, 10): + __all__ += ["EllipsisType", "NoneType", "NotImplementedType", "UnionType"] + +# Note, all classes "defined" here require special handling. + +_T1 = TypeVar("_T1") +_T2 = TypeVar("_T2") +_T_co = TypeVar("_T_co", covariant=True) +_T_contra = TypeVar("_T_contra", contravariant=True) +_KT = TypeVar("_KT") +_VT_co = TypeVar("_VT_co", covariant=True) +_V_co = TypeVar("_V_co", covariant=True) + +@final +class _Cell: + __hash__: ClassVar[None] # type: ignore[assignment] + cell_contents: Any + +# Make sure this class definition stays roughly in line with `builtins.function` +@final +class FunctionType: + @property + def __closure__(self) -> tuple[_Cell, ...] | None: ... + __code__: CodeType + __defaults__: tuple[Any, ...] | None + __dict__: dict[str, Any] + @property + def __globals__(self) -> dict[str, Any]: ... + __name__: str + __qualname__: str + __annotations__: dict[str, Any] + __kwdefaults__: dict[str, Any] + if sys.version_info >= (3, 10): + @property + def __builtins__(self) -> dict[str, Any]: ... + + __module__: str + def __init__( + self, + code: CodeType, + globals: dict[str, Any], + name: str | None = ..., + argdefs: tuple[object, ...] | None = ..., + closure: tuple[_Cell, ...] | None = ..., + ) -> None: ... + def __call__(self, *args: Any, **kwargs: Any) -> Any: ... + @overload + def __get__(self, obj: None, type: type) -> FunctionType: ... + @overload + def __get__(self, obj: object, type: type | None = ...) -> MethodType: ... + +LambdaType = FunctionType + +@final +class CodeType: + @property + def co_argcount(self) -> int: ... + if sys.version_info >= (3, 8): + @property + def co_posonlyargcount(self) -> int: ... + + @property + def co_kwonlyargcount(self) -> int: ... + @property + def co_nlocals(self) -> int: ... + @property + def co_stacksize(self) -> int: ... + @property + def co_flags(self) -> int: ... + @property + def co_code(self) -> bytes: ... + @property + def co_consts(self) -> tuple[Any, ...]: ... + @property + def co_names(self) -> tuple[str, ...]: ... 
+ @property + def co_varnames(self) -> tuple[str, ...]: ... + @property + def co_filename(self) -> str: ... + @property + def co_name(self) -> str: ... + @property + def co_firstlineno(self) -> int: ... + @property + def co_lnotab(self) -> bytes: ... + @property + def co_freevars(self) -> tuple[str, ...]: ... + @property + def co_cellvars(self) -> tuple[str, ...]: ... + if sys.version_info >= (3, 10): + @property + def co_linetable(self) -> bytes: ... + def co_lines(self) -> Iterator[tuple[int, int, int | None]]: ... + if sys.version_info >= (3, 11): + @property + def co_exceptiontable(self) -> bytes: ... + @property + def co_qualname(self) -> str: ... + def co_positions( + self, + ) -> Iterable[tuple[int | None, int | None, int | None, int | None]]: ... + + if sys.version_info >= (3, 11): + def __init__( + self, + __argcount: int, + __posonlyargcount: int, + __kwonlyargcount: int, + __nlocals: int, + __stacksize: int, + __flags: int, + __codestring: bytes, + __constants: tuple[object, ...], + __names: tuple[str, ...], + __varnames: tuple[str, ...], + __filename: str, + __name: str, + __qualname: str, + __firstlineno: int, + __linetable: bytes, + __exceptiontable: bytes, + __freevars: tuple[str, ...] = ..., + __cellvars: tuple[str, ...] = ..., + ) -> None: ... + elif sys.version_info >= (3, 10): + def __init__( + self, + __argcount: int, + __posonlyargcount: int, + __kwonlyargcount: int, + __nlocals: int, + __stacksize: int, + __flags: int, + __codestring: bytes, + __constants: tuple[object, ...], + __names: tuple[str, ...], + __varnames: tuple[str, ...], + __filename: str, + __name: str, + __firstlineno: int, + __linetable: bytes, + __freevars: tuple[str, ...] = ..., + __cellvars: tuple[str, ...] = ..., + ) -> None: ... + elif sys.version_info >= (3, 8): + def __init__( + self, + __argcount: int, + __posonlyargcount: int, + __kwonlyargcount: int, + __nlocals: int, + __stacksize: int, + __flags: int, + __codestring: bytes, + __constants: tuple[object, ...], + __names: tuple[str, ...], + __varnames: tuple[str, ...], + __filename: str, + __name: str, + __firstlineno: int, + __lnotab: bytes, + __freevars: tuple[str, ...] = ..., + __cellvars: tuple[str, ...] = ..., + ) -> None: ... + else: + def __init__( + self, + __argcount: int, + __kwonlyargcount: int, + __nlocals: int, + __stacksize: int, + __flags: int, + __codestring: bytes, + __constants: tuple[object, ...], + __names: tuple[str, ...], + __varnames: tuple[str, ...], + __filename: str, + __name: str, + __firstlineno: int, + __lnotab: bytes, + __freevars: tuple[str, ...] = ..., + __cellvars: tuple[str, ...] = ..., + ) -> None: ... + if sys.version_info >= (3, 11): + def replace( + self, + *, + co_argcount: int = ..., + co_posonlyargcount: int = ..., + co_kwonlyargcount: int = ..., + co_nlocals: int = ..., + co_stacksize: int = ..., + co_flags: int = ..., + co_firstlineno: int = ..., + co_code: bytes = ..., + co_consts: tuple[object, ...] = ..., + co_names: tuple[str, ...] = ..., + co_varnames: tuple[str, ...] = ..., + co_freevars: tuple[str, ...] = ..., + co_cellvars: tuple[str, ...] = ..., + co_filename: str = ..., + co_name: str = ..., + co_qualname: str = ..., + co_linetable: bytes = ..., + co_exceptiontable: bytes = ..., + ) -> CodeType: ... 
+ elif sys.version_info >= (3, 10): + def replace( + self, + *, + co_argcount: int = ..., + co_posonlyargcount: int = ..., + co_kwonlyargcount: int = ..., + co_nlocals: int = ..., + co_stacksize: int = ..., + co_flags: int = ..., + co_firstlineno: int = ..., + co_code: bytes = ..., + co_consts: tuple[object, ...] = ..., + co_names: tuple[str, ...] = ..., + co_varnames: tuple[str, ...] = ..., + co_freevars: tuple[str, ...] = ..., + co_cellvars: tuple[str, ...] = ..., + co_filename: str = ..., + co_name: str = ..., + co_linetable: bytes = ..., + ) -> CodeType: ... + elif sys.version_info >= (3, 8): + def replace( + self, + *, + co_argcount: int = ..., + co_posonlyargcount: int = ..., + co_kwonlyargcount: int = ..., + co_nlocals: int = ..., + co_stacksize: int = ..., + co_flags: int = ..., + co_firstlineno: int = ..., + co_code: bytes = ..., + co_consts: tuple[object, ...] = ..., + co_names: tuple[str, ...] = ..., + co_varnames: tuple[str, ...] = ..., + co_freevars: tuple[str, ...] = ..., + co_cellvars: tuple[str, ...] = ..., + co_filename: str = ..., + co_name: str = ..., + co_lnotab: bytes = ..., + ) -> CodeType: ... + +@final +class MappingProxyType(Mapping[_KT, _VT_co], Generic[_KT, _VT_co]): + __hash__: ClassVar[None] # type: ignore[assignment] + def __init__(self, mapping: SupportsKeysAndGetItem[_KT, _VT_co]) -> None: ... + def __getitem__(self, __key: _KT) -> _VT_co: ... + def __iter__(self) -> Iterator[_KT]: ... + def __len__(self) -> int: ... + def copy(self) -> dict[_KT, _VT_co]: ... + def keys(self) -> KeysView[_KT]: ... + def values(self) -> ValuesView[_VT_co]: ... + def items(self) -> ItemsView[_KT, _VT_co]: ... + if sys.version_info >= (3, 9): + def __class_getitem__(cls, item: Any) -> GenericAlias: ... + def __reversed__(self) -> Iterator[_KT]: ... + def __or__(self, __value: Mapping[_T1, _T2]) -> dict[_KT | _T1, _VT_co | _T2]: ... + def __ror__(self, __value: Mapping[_T1, _T2]) -> dict[_KT | _T1, _VT_co | _T2]: ... + +class SimpleNamespace: + __hash__: ClassVar[None] # type: ignore[assignment] + def __init__(self, **kwargs: Any) -> None: ... + def __getattribute__(self, __name: str) -> Any: ... + def __setattr__(self, __name: str, __value: Any) -> None: ... + def __delattr__(self, __name: str) -> None: ... + +class _LoaderProtocol(Protocol): + def load_module(self, fullname: str) -> ModuleType: ... + +class ModuleType: + __name__: str + __file__: str | None + @property + def __dict__(self) -> dict[str, Any]: ... # type: ignore[override] + __loader__: _LoaderProtocol | None + __package__: str | None + __path__: MutableSequence[str] + __spec__: ModuleSpec | None + def __init__(self, name: str, doc: str | None = ...) -> None: ... + # __getattr__ doesn't exist at runtime, + # but having it here in typeshed makes dynamic imports + # using `builtins.__import__` or `importlib.import_module` less painful + def __getattr__(self, name: str) -> Any: ... + +@final +class GeneratorType(Generator[_T_co, _T_contra, _V_co]): + @property + def gi_yieldfrom(self) -> GeneratorType[_T_co, _T_contra, Any] | None: ... + if sys.version_info >= (3, 11): + @property + def gi_suspended(self) -> bool: ... + __name__: str + __qualname__: str + def __iter__(self) -> GeneratorType[_T_co, _T_contra, _V_co]: ... + def __next__(self) -> _T_co: ... + def send(self, __arg: _T_contra) -> _T_co: ... + @overload + def throw( + self, + __typ: type[BaseException], + __val: BaseException | object = ..., + __tb: TracebackType | None = ..., + ) -> _T_co: ... 
+ @overload + def throw( + self, __typ: BaseException, __val: None = ..., __tb: TracebackType | None = ... + ) -> _T_co: ... + +@final +class AsyncGeneratorType(AsyncGenerator[_T_co, _T_contra]): + @property + def ag_await(self) -> Awaitable[Any] | None: ... + __name__: str + __qualname__: str + def __aiter__(self) -> AsyncGeneratorType[_T_co, _T_contra]: ... + def __anext__(self) -> Coroutine[Any, Any, _T_co]: ... + def asend(self, __val: _T_contra) -> Coroutine[Any, Any, _T_co]: ... + @overload + async def athrow( + self, + __typ: type[BaseException], + __val: BaseException | object = ..., + __tb: TracebackType | None = ..., + ) -> _T_co: ... + @overload + async def athrow( + self, __typ: BaseException, __val: None = ..., __tb: TracebackType | None = ... + ) -> _T_co: ... + def aclose(self) -> Coroutine[Any, Any, None]: ... + if sys.version_info >= (3, 9): + def __class_getitem__(cls, __item: Any) -> GenericAlias: ... + +@final +class CoroutineType(Coroutine[_T_co, _T_contra, _V_co]): + __name__: str + __qualname__: str + @property + def cr_origin(self) -> tuple[tuple[str, int, str], ...] | None: ... + if sys.version_info >= (3, 11): + @property + def cr_suspended(self) -> bool: ... + + def close(self) -> None: ... + def __await__(self) -> Generator[Any, None, _V_co]: ... + def send(self, __arg: _T_contra) -> _T_co: ... + @overload + def throw( + self, + __typ: type[BaseException], + __val: BaseException | object = ..., + __tb: TracebackType | None = ..., + ) -> _T_co: ... + @overload + def throw( + self, __typ: BaseException, __val: None = ..., __tb: TracebackType | None = ... + ) -> _T_co: ... + +class _StaticFunctionType: + # Fictional type to correct the type of MethodType.__func__. + # FunctionType is a descriptor, so mypy follows the descriptor protocol and + # converts MethodType.__func__ back to MethodType (the return type of + # FunctionType.__get__). But this is actually a special case; MethodType is + # implemented in C and its attribute access doesn't go through + # __getattribute__. + # By wrapping FunctionType in _StaticFunctionType, we get the right result; + # similar to wrapping a function in staticmethod() at runtime to prevent it + # being bound as a method. + def __get__(self, obj: object | None, type: type | None) -> FunctionType: ... + +@final +class MethodType: + @property + def __closure__(self) -> tuple[_Cell, ...] | None: ... # inherited from the added function + @property + def __defaults__(self) -> tuple[Any, ...] | None: ... # inherited from the added function + @property + def __func__(self) -> _StaticFunctionType: ... + @property + def __self__(self) -> object: ... + @property + def __name__(self) -> str: ... # inherited from the added function + @property + def __qualname__(self) -> str: ... # inherited from the added function + def __init__(self, __func: Callable[..., Any], __obj: object) -> None: ... + def __call__(self, *args: Any, **kwargs: Any) -> Any: ... + +@final +class BuiltinFunctionType: + @property + def __self__(self) -> object | ModuleType: ... + @property + def __name__(self) -> str: ... + @property + def __qualname__(self) -> str: ... + def __call__(self, *args: Any, **kwargs: Any) -> Any: ... + +BuiltinMethodType = BuiltinFunctionType + +@final +class WrapperDescriptorType: + @property + def __name__(self) -> str: ... + @property + def __qualname__(self) -> str: ... + @property + def __objclass__(self) -> type: ... + def __call__(self, *args: Any, **kwargs: Any) -> Any: ... + def __get__(self, __obj: Any, __type: type = ...) -> Any: ... 
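# Illustrative sketch (not part of the patch): the runtime behaviour that the
# _StaticFunctionType comment above is modelling. Demo and greet are made-up names.
import types

def greet(self):
    return "hello"

class Demo:
    bound = greet                  # looked up on an instance, FunctionType.__get__ binds it
    unbound = staticmethod(greet)  # staticmethod() suppresses that binding

d = Demo()
assert isinstance(d.bound, types.MethodType)
assert d.bound.__func__ is greet  # __func__ is the plain function, not another MethodType
assert d.unbound is greet         # no descriptor binding took place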
+ +@final +class MethodWrapperType: + @property + def __self__(self) -> object: ... + @property + def __name__(self) -> str: ... + @property + def __qualname__(self) -> str: ... + @property + def __objclass__(self) -> type: ... + def __call__(self, *args: Any, **kwargs: Any) -> Any: ... + def __eq__(self, __other: object) -> bool: ... + def __ne__(self, __other: object) -> bool: ... + +@final +class MethodDescriptorType: + @property + def __name__(self) -> str: ... + @property + def __qualname__(self) -> str: ... + @property + def __objclass__(self) -> type: ... + def __call__(self, *args: Any, **kwargs: Any) -> Any: ... + def __get__(self, obj: Any, type: type = ...) -> Any: ... + +@final +class ClassMethodDescriptorType: + @property + def __name__(self) -> str: ... + @property + def __qualname__(self) -> str: ... + @property + def __objclass__(self) -> type: ... + def __call__(self, *args: Any, **kwargs: Any) -> Any: ... + def __get__(self, obj: Any, type: type = ...) -> Any: ... + +@final +class TracebackType: + def __init__( + self, tb_next: TracebackType | None, tb_frame: FrameType, tb_lasti: int, tb_lineno: int + ) -> None: ... + tb_next: TracebackType | None + # the rest are read-only even in 3.7 + @property + def tb_frame(self) -> FrameType: ... + @property + def tb_lasti(self) -> int: ... + @property + def tb_lineno(self) -> int: ... + +@final +class FrameType: + @property + def f_back(self) -> FrameType | None: ... + @property + def f_builtins(self) -> dict[str, Any]: ... + @property + def f_code(self) -> CodeType: ... + @property + def f_globals(self) -> dict[str, Any]: ... + @property + def f_lasti(self) -> int: ... + # see discussion in #6769: f_lineno *can* sometimes be None, + # but you should probably file a bug report with CPython if you encounter it being None in the wild. + # An `int | None` annotation here causes too many false-positive errors. + @property + def f_lineno(self) -> int | Any: ... + @property + def f_locals(self) -> dict[str, Any]: ... + f_trace: Callable[[FrameType, str, Any], Any] | None + f_trace_lines: bool + f_trace_opcodes: bool + def clear(self) -> None: ... + +@final +class GetSetDescriptorType: + @property + def __name__(self) -> str: ... + @property + def __qualname__(self) -> str: ... + @property + def __objclass__(self) -> type: ... + def __get__(self, __obj: Any, __type: type = ...) -> Any: ... + def __set__(self, __instance: Any, __value: Any) -> None: ... + def __delete__(self, __obj: Any) -> None: ... + +@final +class MemberDescriptorType: + @property + def __name__(self) -> str: ... + @property + def __qualname__(self) -> str: ... + @property + def __objclass__(self) -> type: ... + def __get__(self, __obj: Any, __type: type = ...) -> Any: ... + def __set__(self, __instance: Any, __value: Any) -> None: ... + def __delete__(self, __obj: Any) -> None: ... + +def new_class( + name: str, + bases: Iterable[object] = ..., + kwds: dict[str, Any] | None = ..., + exec_body: Callable[[dict[str, Any]], object] | None = ..., +) -> type: ... +def resolve_bases(bases: Iterable[object]) -> tuple[Any, ...]: ... +def prepare_class( + name: str, bases: tuple[type, ...] = ..., kwds: dict[str, Any] | None = ... +) -> tuple[type, dict[str, Any], dict[str, Any]]: ... + +# Actually a different type, but `property` is special and we want that too. +DynamicClassAttribute = property + +_Fn = TypeVar("_Fn", bound=Callable[..., object]) +_R = TypeVar("_R") +_P = ParamSpec("_P") + +# it's not really an Awaitable, but can be used in an await expression. 
Real type: Generator & Awaitable +# The type: ignore is due to overlapping overloads, not the use of ParamSpec +@overload +def coroutine(func: Callable[_P, Generator[_R, Any, Any]]) -> Callable[_P, Awaitable[_R]]: ... # type: ignore[misc] +@overload +def coroutine(func: _Fn) -> _Fn: ... + +if sys.version_info >= (3, 8): + CellType = _Cell + +if sys.version_info >= (3, 9): + class GenericAlias: + @property + def __origin__(self) -> type: ... + @property + def __args__(self) -> tuple[Any, ...]: ... + @property + def __parameters__(self) -> tuple[Any, ...]: ... + def __init__(self, origin: type, args: Any) -> None: ... + def __getitem__(self, __typeargs: Any) -> GenericAlias: ... + if sys.version_info >= (3, 11): + @property + def __unpacked__(self) -> bool: ... + @property + def __typing_unpacked_tuple_args__(self) -> tuple[Any, ...] | None: ... + + # GenericAlias delegates attr access to `__origin__` + def __getattr__(self, name: str) -> Any: ... + +if sys.version_info >= (3, 10): + @final + class NoneType: + def __bool__(self) -> Literal[False]: ... + EllipsisType = ellipsis # noqa: F821 from builtins + from builtins import _NotImplementedType + + NotImplementedType = _NotImplementedType + @final + class UnionType: + @property + def __args__(self) -> tuple[Any, ...]: ... + def __or__(self, __obj: Any) -> UnionType: ... + def __ror__(self, __obj: Any) -> UnionType: ... diff --git a/.vscode/Pico-W-Stub/stdlib/typing.pyi b/.vscode/Pico-W-Stub/stdlib/typing.pyi new file mode 100644 index 0000000..cc27ae7 --- /dev/null +++ b/.vscode/Pico-W-Stub/stdlib/typing.pyi @@ -0,0 +1,840 @@ +import _typeshed +import collections # Needed by aliases like DefaultDict, see mypy issue 2986 +import sys +from _collections_abc import dict_items, dict_keys, dict_values +from _typeshed import IdentityFunction, Incomplete, SupportsKeysAndGetItem +from abc import ABCMeta, abstractmethod +from contextlib import AbstractAsyncContextManager, AbstractContextManager +from re import Match as Match, Pattern as Pattern +from types import ( + BuiltinFunctionType, + CodeType, + FrameType, + FunctionType, + MethodDescriptorType, + MethodType, + MethodWrapperType, + ModuleType, + TracebackType, + WrapperDescriptorType, +) +from typing_extensions import Never as _Never, ParamSpec as _ParamSpec, final as _final + +__all__ = [ + "AbstractSet", + "Any", + "AnyStr", + "AsyncContextManager", + "AsyncGenerator", + "AsyncIterable", + "AsyncIterator", + "Awaitable", + "ByteString", + "Callable", + "ChainMap", + "ClassVar", + "Collection", + "Container", + "ContextManager", + "Coroutine", + "Counter", + "DefaultDict", + "Deque", + "Dict", + "FrozenSet", + "Generator", + "Generic", + "Hashable", + "ItemsView", + "Iterable", + "Iterator", + "KeysView", + "List", + "Mapping", + "MappingView", + "MutableMapping", + "MutableSequence", + "MutableSet", + "NamedTuple", + "NewType", + "Optional", + "Reversible", + "Sequence", + "Set", + "Sized", + "SupportsAbs", + "SupportsBytes", + "SupportsComplex", + "SupportsFloat", + "SupportsInt", + "SupportsRound", + "Text", + "Tuple", + "Type", + "TypeVar", + "Union", + "ValuesView", + "TYPE_CHECKING", + "cast", + "get_type_hints", + "no_type_check", + "no_type_check_decorator", + "overload", + "ForwardRef", + "NoReturn", + "OrderedDict", +] + +if sys.version_info >= (3, 8): + __all__ += [ + "Final", + "Literal", + "Protocol", + "SupportsIndex", + "TypedDict", + "final", + "get_args", + "get_origin", + "runtime_checkable", + ] + +if sys.version_info >= (3, 9): + __all__ += ["Annotated", "BinaryIO", "IO", 
"Match", "Pattern", "TextIO"] + +if sys.version_info >= (3, 10): + __all__ += ["Concatenate", "ParamSpec", "ParamSpecArgs", "ParamSpecKwargs", "TypeAlias", "TypeGuard", "is_typeddict"] + +if sys.version_info >= (3, 11): + __all__ += [ + "LiteralString", + "Never", + "NotRequired", + "Required", + "Self", + "TypeVarTuple", + "Unpack", + "assert_never", + "assert_type", + "clear_overloads", + "dataclass_transform", + "get_overloads", + "reveal_type", + ] + +ContextManager = AbstractContextManager +AsyncContextManager = AbstractAsyncContextManager + +# This itself is only available during type checking +def type_check_only(func_or_cls: _F) -> _F: ... + +Any = object() + +@_final +class TypeVar: + __name__: str + __bound__: Any | None + __constraints__: tuple[Any, ...] + __covariant__: bool + __contravariant__: bool + def __init__( + self, name: str, *constraints: Any, bound: Any | None = ..., covariant: bool = ..., contravariant: bool = ... + ) -> None: ... + if sys.version_info >= (3, 10): + def __or__(self, right: Any) -> _SpecialForm: ... + def __ror__(self, left: Any) -> _SpecialForm: ... + if sys.version_info >= (3, 11): + def __typing_subst__(self, arg: Incomplete) -> Incomplete: ... + +# Used for an undocumented mypy feature. Does not exist at runtime. +_promote = object() + +# N.B. Keep this definition in sync with typing_extensions._SpecialForm +@_final +class _SpecialForm: + def __getitem__(self, parameters: Any) -> object: ... + if sys.version_info >= (3, 10): + def __or__(self, other: Any) -> _SpecialForm: ... + def __ror__(self, other: Any) -> _SpecialForm: ... + +_F = TypeVar("_F", bound=Callable[..., Any]) +_P = _ParamSpec("_P") +_T = TypeVar("_T") + +def overload(func: _F) -> _F: ... + +# Unlike the vast majority module-level objects in stub files, +# these `_SpecialForm` objects in typing need the default value `= ...`, +# due to the fact that they are used elswhere in the same file. +# Otherwise, flake8 erroneously flags them as undefined. +# `_SpecialForm` objects in typing.py that are not used elswhere in the same file +# do not need the default value assignment. +Union: _SpecialForm = ... +Generic: _SpecialForm = ... +# Protocol is only present in 3.8 and later, but mypy needs it unconditionally +Protocol: _SpecialForm = ... +Callable: _SpecialForm = ... +Type: _SpecialForm = ... +NoReturn: _SpecialForm = ... +ClassVar: _SpecialForm = ... + +Optional: _SpecialForm +Tuple: _SpecialForm +if sys.version_info >= (3, 8): + Final: _SpecialForm + def final(f: _T) -> _T: ... + Literal: _SpecialForm + # TypedDict is a (non-subscriptable) special form. + TypedDict: object + +if sys.version_info >= (3, 11): + Self: _SpecialForm + Never: _SpecialForm = ... + Unpack: _SpecialForm + Required: _SpecialForm + NotRequired: _SpecialForm + LiteralString: _SpecialForm + + class TypeVarTuple: + __name__: str + def __init__(self, name: str) -> None: ... + def __iter__(self) -> Any: ... + def __typing_subst__(self, arg: Never) -> Never: ... + def __typing_prepare_subst__(self, alias: Incomplete, args: Incomplete) -> Incomplete: ... + +if sys.version_info >= (3, 10): + class ParamSpecArgs: + __origin__: ParamSpec + def __init__(self, origin: ParamSpec) -> None: ... + + class ParamSpecKwargs: + __origin__: ParamSpec + def __init__(self, origin: ParamSpec) -> None: ... + + class ParamSpec: + __name__: str + __bound__: Any | None + __covariant__: bool + __contravariant__: bool + def __init__(self, name: str, *, bound: Any | None = ..., contravariant: bool = ..., covariant: bool = ...) -> None: ... 
+ @property + def args(self) -> ParamSpecArgs: ... + @property + def kwargs(self) -> ParamSpecKwargs: ... + if sys.version_info >= (3, 11): + def __typing_subst__(self, arg: Incomplete) -> Incomplete: ... + def __typing_prepare_subst__(self, alias: Incomplete, args: Incomplete) -> Incomplete: ... + + def __or__(self, right: Any) -> _SpecialForm: ... + def __ror__(self, left: Any) -> _SpecialForm: ... + Concatenate: _SpecialForm + TypeAlias: _SpecialForm + TypeGuard: _SpecialForm + + class NewType: + def __init__(self, name: str, tp: Any) -> None: ... + def __call__(self, x: _T) -> _T: ... + def __or__(self, other: Any) -> _SpecialForm: ... + def __ror__(self, other: Any) -> _SpecialForm: ... + __supertype__: type + +else: + def NewType(name: str, tp: Any) -> Any: ... + +# These type variables are used by the container types. +_S = TypeVar("_S") +_KT = TypeVar("_KT") # Key type. +_VT = TypeVar("_VT") # Value type. +_T_co = TypeVar("_T_co", covariant=True) # Any type covariant containers. +_V_co = TypeVar("_V_co", covariant=True) # Any type covariant containers. +_KT_co = TypeVar("_KT_co", covariant=True) # Key type covariant containers. +_VT_co = TypeVar("_VT_co", covariant=True) # Value type covariant containers. +_T_contra = TypeVar("_T_contra", contravariant=True) # Ditto contravariant. +_TC = TypeVar("_TC", bound=Type[object]) + +def no_type_check(arg: _F) -> _F: ... +def no_type_check_decorator(decorator: Callable[_P, _T]) -> Callable[_P, _T]: ... # type: ignore[misc] + +# Type aliases and type constructors + +class _Alias: + # Class for defining generic aliases for library types. + def __getitem__(self, typeargs: Any) -> Any: ... + +List = _Alias() +Dict = _Alias() +DefaultDict = _Alias() +Set = _Alias() +FrozenSet = _Alias() +Counter = _Alias() +Deque = _Alias() +ChainMap = _Alias() + +OrderedDict = _Alias() + +if sys.version_info >= (3, 9): + Annotated: _SpecialForm + +# Predefined type variables. +AnyStr = TypeVar("AnyStr", str, bytes) # noqa: Y001 + +# Technically in 3.7 this inherited from GenericMeta. But let's not reflect that, since +# type checkers tend to assume that Protocols all have the ABCMeta metaclass. +class _ProtocolMeta(ABCMeta): ... + +# Abstract base classes. + +def runtime_checkable(cls: _TC) -> _TC: ... +@runtime_checkable +class SupportsInt(Protocol, metaclass=ABCMeta): + @abstractmethod + def __int__(self) -> int: ... + +@runtime_checkable +class SupportsFloat(Protocol, metaclass=ABCMeta): + @abstractmethod + def __float__(self) -> float: ... + +@runtime_checkable +class SupportsComplex(Protocol, metaclass=ABCMeta): + @abstractmethod + def __complex__(self) -> complex: ... + +@runtime_checkable +class SupportsBytes(Protocol, metaclass=ABCMeta): + @abstractmethod + def __bytes__(self) -> bytes: ... + +if sys.version_info >= (3, 8): + @runtime_checkable + class SupportsIndex(Protocol, metaclass=ABCMeta): + @abstractmethod + def __index__(self) -> int: ... + +@runtime_checkable +class SupportsAbs(Protocol[_T_co]): + @abstractmethod + def __abs__(self) -> _T_co: ... + +@runtime_checkable +class SupportsRound(Protocol[_T_co]): + @overload + @abstractmethod + def __round__(self) -> int: ... + @overload + @abstractmethod + def __round__(self, __ndigits: int) -> _T_co: ... + +@runtime_checkable +class Sized(Protocol): + @abstractmethod + def __len__(self) -> int: ... + +@runtime_checkable +class Hashable(Protocol, metaclass=ABCMeta): + # TODO: This is special, in that a subclass of a hashable class may not be hashable + # (for example, list vs. object). 
It's not obvious how to represent this. This class + # is currently mostly useless for static checking. + @abstractmethod + def __hash__(self) -> int: ... + +@runtime_checkable +class Iterable(Protocol[_T_co]): + @abstractmethod + def __iter__(self) -> Iterator[_T_co]: ... + +@runtime_checkable +class Iterator(Iterable[_T_co], Protocol[_T_co]): + @abstractmethod + def __next__(self) -> _T_co: ... + def __iter__(self) -> Iterator[_T_co]: ... + +@runtime_checkable +class Reversible(Iterable[_T_co], Protocol[_T_co]): + @abstractmethod + def __reversed__(self) -> Iterator[_T_co]: ... + +class Generator(Iterator[_T_co], Generic[_T_co, _T_contra, _V_co]): + def __next__(self) -> _T_co: ... + @abstractmethod + def send(self, __value: _T_contra) -> _T_co: ... + @overload + @abstractmethod + def throw( + self, __typ: Type[BaseException], __val: BaseException | object = ..., __tb: TracebackType | None = ... + ) -> _T_co: ... + @overload + @abstractmethod + def throw(self, __typ: BaseException, __val: None = ..., __tb: TracebackType | None = ...) -> _T_co: ... + def close(self) -> None: ... + def __iter__(self) -> Generator[_T_co, _T_contra, _V_co]: ... + @property + def gi_code(self) -> CodeType: ... + @property + def gi_frame(self) -> FrameType: ... + @property + def gi_running(self) -> bool: ... + @property + def gi_yieldfrom(self) -> Generator[Any, Any, Any] | None: ... + +@runtime_checkable +class Awaitable(Protocol[_T_co]): + @abstractmethod + def __await__(self) -> Generator[Any, None, _T_co]: ... + +class Coroutine(Awaitable[_V_co], Generic[_T_co, _T_contra, _V_co]): + __name__: str + __qualname__: str + @property + def cr_await(self) -> Any | None: ... + @property + def cr_code(self) -> CodeType: ... + @property + def cr_frame(self) -> FrameType: ... + @property + def cr_running(self) -> bool: ... + @abstractmethod + def send(self, __value: _T_contra) -> _T_co: ... + @overload + @abstractmethod + def throw( + self, __typ: Type[BaseException], __val: BaseException | object = ..., __tb: TracebackType | None = ... + ) -> _T_co: ... + @overload + @abstractmethod + def throw(self, __typ: BaseException, __val: None = ..., __tb: TracebackType | None = ...) -> _T_co: ... + @abstractmethod + def close(self) -> None: ... + +# NOTE: This type does not exist in typing.py or PEP 484 but mypy needs it to exist. +# The parameters correspond to Generator, but the 4th is the original type. +@type_check_only +class AwaitableGenerator( + Awaitable[_V_co], Generator[_T_co, _T_contra, _V_co], Generic[_T_co, _T_contra, _V_co, _S], metaclass=ABCMeta +): ... + +@runtime_checkable +class AsyncIterable(Protocol[_T_co]): + @abstractmethod + def __aiter__(self) -> AsyncIterator[_T_co]: ... + +@runtime_checkable +class AsyncIterator(AsyncIterable[_T_co], Protocol[_T_co]): + @abstractmethod + def __anext__(self) -> Awaitable[_T_co]: ... + def __aiter__(self) -> AsyncIterator[_T_co]: ... + +class AsyncGenerator(AsyncIterator[_T_co], Generic[_T_co, _T_contra]): + def __anext__(self) -> Awaitable[_T_co]: ... + @abstractmethod + def asend(self, __value: _T_contra) -> Awaitable[_T_co]: ... + @overload + @abstractmethod + def athrow( + self, __typ: Type[BaseException], __val: BaseException | object = ..., __tb: TracebackType | None = ... + ) -> Awaitable[_T_co]: ... + @overload + @abstractmethod + def athrow(self, __typ: BaseException, __val: None = ..., __tb: TracebackType | None = ...) -> Awaitable[_T_co]: ... + def aclose(self) -> Awaitable[None]: ... + @property + def ag_await(self) -> Any: ... 
+ @property + def ag_code(self) -> CodeType: ... + @property + def ag_frame(self) -> FrameType: ... + @property + def ag_running(self) -> bool: ... + +@runtime_checkable +class Container(Protocol[_T_co]): + # This is generic more on vibes than anything else + @abstractmethod + def __contains__(self, __x: object) -> bool: ... + +@runtime_checkable +class Collection(Sized, Iterable[_T_co], Container[_T_co], Protocol[_T_co]): ... + +class Sequence(Collection[_T_co], Reversible[_T_co], Generic[_T_co]): + @overload + @abstractmethod + def __getitem__(self, index: int) -> _T_co: ... + @overload + @abstractmethod + def __getitem__(self, index: slice) -> Sequence[_T_co]: ... + # Mixin methods + def index(self, value: Any, start: int = ..., stop: int = ...) -> int: ... + def count(self, value: Any) -> int: ... + def __contains__(self, value: object) -> bool: ... + def __iter__(self) -> Iterator[_T_co]: ... + def __reversed__(self) -> Iterator[_T_co]: ... + +class MutableSequence(Sequence[_T], Generic[_T]): + @abstractmethod + def insert(self, index: int, value: _T) -> None: ... + @overload + @abstractmethod + def __getitem__(self, index: int) -> _T: ... + @overload + @abstractmethod + def __getitem__(self, index: slice) -> MutableSequence[_T]: ... + @overload + @abstractmethod + def __setitem__(self, index: int, value: _T) -> None: ... + @overload + @abstractmethod + def __setitem__(self, index: slice, value: Iterable[_T]) -> None: ... + @overload + @abstractmethod + def __delitem__(self, index: int) -> None: ... + @overload + @abstractmethod + def __delitem__(self, index: slice) -> None: ... + # Mixin methods + def append(self, value: _T) -> None: ... + def clear(self) -> None: ... + def extend(self, values: Iterable[_T]) -> None: ... + def reverse(self) -> None: ... + def pop(self, index: int = ...) -> _T: ... + def remove(self, value: _T) -> None: ... + def __iadd__(self: _typeshed.Self, values: Iterable[_T]) -> _typeshed.Self: ... + +class AbstractSet(Collection[_T_co], Generic[_T_co]): + @abstractmethod + def __contains__(self, x: object) -> bool: ... + def _hash(self) -> int: ... + # Mixin methods + def __le__(self, other: AbstractSet[Any]) -> bool: ... + def __lt__(self, other: AbstractSet[Any]) -> bool: ... + def __gt__(self, other: AbstractSet[Any]) -> bool: ... + def __ge__(self, other: AbstractSet[Any]) -> bool: ... + def __and__(self, other: AbstractSet[Any]) -> AbstractSet[_T_co]: ... + def __or__(self, other: AbstractSet[_T]) -> AbstractSet[_T_co | _T]: ... + def __sub__(self, other: AbstractSet[Any]) -> AbstractSet[_T_co]: ... + def __xor__(self, other: AbstractSet[_T]) -> AbstractSet[_T_co | _T]: ... + def isdisjoint(self, other: Iterable[Any]) -> bool: ... + +class MutableSet(AbstractSet[_T], Generic[_T]): + @abstractmethod + def add(self, value: _T) -> None: ... + @abstractmethod + def discard(self, value: _T) -> None: ... + # Mixin methods + def clear(self) -> None: ... + def pop(self) -> _T: ... + def remove(self, value: _T) -> None: ... + def __ior__(self: _typeshed.Self, it: AbstractSet[_T]) -> _typeshed.Self: ... # type: ignore[override,misc] + def __iand__(self: _typeshed.Self, it: AbstractSet[Any]) -> _typeshed.Self: ... + def __ixor__(self: _typeshed.Self, it: AbstractSet[_T]) -> _typeshed.Self: ... # type: ignore[override,misc] + def __isub__(self: _typeshed.Self, it: AbstractSet[Any]) -> _typeshed.Self: ... + +class MappingView(Sized): + def __init__(self, mapping: Mapping[Any, Any]) -> None: ... # undocumented + def __len__(self) -> int: ... 
+ +class ItemsView(MappingView, AbstractSet[tuple[_KT_co, _VT_co]], Generic[_KT_co, _VT_co]): + def __init__(self, mapping: Mapping[_KT_co, _VT_co]) -> None: ... # undocumented + def __and__(self, other: Iterable[Any]) -> set[tuple[_KT_co, _VT_co]]: ... + def __rand__(self, other: Iterable[_T]) -> set[_T]: ... + def __contains__(self, item: object) -> bool: ... + def __iter__(self) -> Iterator[tuple[_KT_co, _VT_co]]: ... + if sys.version_info >= (3, 8): + def __reversed__(self) -> Iterator[tuple[_KT_co, _VT_co]]: ... + + def __or__(self, other: Iterable[_T]) -> set[tuple[_KT_co, _VT_co] | _T]: ... + def __ror__(self, other: Iterable[_T]) -> set[tuple[_KT_co, _VT_co] | _T]: ... + def __sub__(self, other: Iterable[Any]) -> set[tuple[_KT_co, _VT_co]]: ... + def __rsub__(self, other: Iterable[_T]) -> set[_T]: ... + def __xor__(self, other: Iterable[_T]) -> set[tuple[_KT_co, _VT_co] | _T]: ... + def __rxor__(self, other: Iterable[_T]) -> set[tuple[_KT_co, _VT_co] | _T]: ... + +class KeysView(MappingView, AbstractSet[_KT_co], Generic[_KT_co]): + def __init__(self, mapping: Mapping[_KT_co, Any]) -> None: ... # undocumented + def __and__(self, other: Iterable[Any]) -> set[_KT_co]: ... + def __rand__(self, other: Iterable[_T]) -> set[_T]: ... + def __contains__(self, key: object) -> bool: ... + def __iter__(self) -> Iterator[_KT_co]: ... + if sys.version_info >= (3, 8): + def __reversed__(self) -> Iterator[_KT_co]: ... + + def __or__(self, other: Iterable[_T]) -> set[_KT_co | _T]: ... + def __ror__(self, other: Iterable[_T]) -> set[_KT_co | _T]: ... + def __sub__(self, other: Iterable[Any]) -> set[_KT_co]: ... + def __rsub__(self, other: Iterable[_T]) -> set[_T]: ... + def __xor__(self, other: Iterable[_T]) -> set[_KT_co | _T]: ... + def __rxor__(self, other: Iterable[_T]) -> set[_KT_co | _T]: ... + +class ValuesView(MappingView, Collection[_VT_co], Generic[_VT_co]): + def __init__(self, mapping: Mapping[Any, _VT_co]) -> None: ... # undocumented + def __contains__(self, value: object) -> bool: ... + def __iter__(self) -> Iterator[_VT_co]: ... + if sys.version_info >= (3, 8): + def __reversed__(self) -> Iterator[_VT_co]: ... + +class Mapping(Collection[_KT], Generic[_KT, _VT_co]): + # TODO: We wish the key type could also be covariant, but that doesn't work, + # see discussion in https://github.com/python/typing/pull/273. + @abstractmethod + def __getitem__(self, __key: _KT) -> _VT_co: ... + # Mixin methods + @overload + def get(self, __key: _KT) -> _VT_co | None: ... + @overload + def get(self, __key: _KT, default: _VT_co | _T) -> _VT_co | _T: ... + def items(self) -> ItemsView[_KT, _VT_co]: ... + def keys(self) -> KeysView[_KT]: ... + def values(self) -> ValuesView[_VT_co]: ... + def __contains__(self, __o: object) -> bool: ... + +class MutableMapping(Mapping[_KT, _VT], Generic[_KT, _VT]): + @abstractmethod + def __setitem__(self, __key: _KT, __value: _VT) -> None: ... + @abstractmethod + def __delitem__(self, __key: _KT) -> None: ... + def clear(self) -> None: ... + @overload + def pop(self, __key: _KT) -> _VT: ... + @overload + def pop(self, __key: _KT, default: _VT | _T) -> _VT | _T: ... + def popitem(self) -> tuple[_KT, _VT]: ... + # This overload should be allowed only if the value type is compatible with None. + # Keep OrderedDict.setdefault in line with MutableMapping.setdefault, modulo positional-only differences. + @overload + def setdefault(self: MutableMapping[_KT, _T | None], __key: _KT) -> _T | None: ... + @overload + def setdefault(self, __key: _KT, __default: _VT) -> _VT: ... 
+ # 'update' used to take a Union, but using overloading is better. + # The second overloaded type here is a bit too general, because + # Mapping[tuple[_KT, _VT], W] is a subclass of Iterable[tuple[_KT, _VT]], + # but will always have the behavior of the first overloaded type + # at runtime, leading to keys of a mix of types _KT and tuple[_KT, _VT]. + # We don't currently have any way of forcing all Mappings to use + # the first overload, but by using overloading rather than a Union, + # mypy will commit to using the first overload when the argument is + # known to be a Mapping with unknown type parameters, which is closer + # to the behavior we want. See mypy issue #1430. + # + # Various mapping classes have __ior__ methods that should be kept roughly in line with .update(): + # -- dict.__ior__ + # -- os._Environ.__ior__ + # -- collections.UserDict.__ior__ + # -- collections.ChainMap.__ior__ + # -- peewee.attrdict.__add__ + # -- peewee.attrdict.__iadd__ + # -- weakref.WeakValueDictionary.__ior__ + # -- weakref.WeakKeyDictionary.__ior__ + @overload + def update(self, __m: SupportsKeysAndGetItem[_KT, _VT], **kwargs: _VT) -> None: ... + @overload + def update(self, __m: Iterable[tuple[_KT, _VT]], **kwargs: _VT) -> None: ... + @overload + def update(self, **kwargs: _VT) -> None: ... + +Text = str + +TYPE_CHECKING: bool + +# In stubs, the arguments of the IO class are marked as positional-only. +# This differs from runtime, but better reflects the fact that in reality +# classes deriving from IO use different names for the arguments. +class IO(Iterator[AnyStr], Generic[AnyStr]): + # At runtime these are all abstract properties, + # but making them abstract in the stub is hugely disruptive, for not much gain. + # See #8726 + @property + def mode(self) -> str: ... + @property + def name(self) -> str: ... + @abstractmethod + def close(self) -> None: ... + @property + def closed(self) -> bool: ... + @abstractmethod + def fileno(self) -> int: ... + @abstractmethod + def flush(self) -> None: ... + @abstractmethod + def isatty(self) -> bool: ... + @abstractmethod + def read(self, __n: int = ...) -> AnyStr: ... + @abstractmethod + def readable(self) -> bool: ... + @abstractmethod + def readline(self, __limit: int = ...) -> AnyStr: ... + @abstractmethod + def readlines(self, __hint: int = ...) -> list[AnyStr]: ... + @abstractmethod + def seek(self, __offset: int, __whence: int = ...) -> int: ... + @abstractmethod + def seekable(self) -> bool: ... + @abstractmethod + def tell(self) -> int: ... + @abstractmethod + def truncate(self, __size: int | None = ...) -> int: ... + @abstractmethod + def writable(self) -> bool: ... + @abstractmethod + def write(self, __s: AnyStr) -> int: ... + @abstractmethod + def writelines(self, __lines: Iterable[AnyStr]) -> None: ... + @abstractmethod + def __next__(self) -> AnyStr: ... + @abstractmethod + def __iter__(self) -> Iterator[AnyStr]: ... + @abstractmethod + def __enter__(self) -> IO[AnyStr]: ... + @abstractmethod + def __exit__( + self, __t: Type[BaseException] | None, __value: BaseException | None, __traceback: TracebackType | None + ) -> None: ... + +class BinaryIO(IO[bytes]): + @abstractmethod + def __enter__(self) -> BinaryIO: ... + +class TextIO(IO[str]): + # See comment regarding the @properties in the `IO` class + @property + def buffer(self) -> BinaryIO: ... + @property + def encoding(self) -> str: ... + @property + def errors(self) -> str | None: ... + @property + def line_buffering(self) -> int: ... 
# int on PyPy, bool on CPython + @property + def newlines(self) -> Any: ... # None, str or tuple + @abstractmethod + def __enter__(self) -> TextIO: ... + +class ByteString(Sequence[int], metaclass=ABCMeta): ... + +# Functions + +_get_type_hints_obj_allowed_types = ( # noqa: Y026 # TODO: Use TypeAlias once mypy bugs are fixed + object + | Callable[..., Any] + | FunctionType + | BuiltinFunctionType + | MethodType + | ModuleType + | WrapperDescriptorType + | MethodWrapperType + | MethodDescriptorType +) + +if sys.version_info >= (3, 9): + def get_type_hints( + obj: _get_type_hints_obj_allowed_types, + globalns: dict[str, Any] | None = ..., + localns: dict[str, Any] | None = ..., + include_extras: bool = ..., + ) -> dict[str, Any]: ... + +else: + def get_type_hints( + obj: _get_type_hints_obj_allowed_types, globalns: dict[str, Any] | None = ..., localns: dict[str, Any] | None = ... + ) -> dict[str, Any]: ... + +if sys.version_info >= (3, 8): + def get_origin(tp: Any) -> Any | None: ... + def get_args(tp: Any) -> tuple[Any, ...]: ... + +@overload +def cast(typ: Type[_T], val: Any) -> _T: ... +@overload +def cast(typ: str, val: Any) -> Any: ... +@overload +def cast(typ: object, val: Any) -> Any: ... + +if sys.version_info >= (3, 11): + def reveal_type(__obj: _T) -> _T: ... + def assert_never(__arg: Never) -> Never: ... + def assert_type(__val: _T, __typ: Any) -> _T: ... + def clear_overloads() -> None: ... + def get_overloads(func: Callable[..., object]) -> Sequence[Callable[..., object]]: ... + def dataclass_transform( + *, + eq_default: bool = ..., + order_default: bool = ..., + kw_only_default: bool = ..., + field_specifiers: tuple[type[Any] | Callable[..., Any], ...] = ..., + **kwargs: Any, + ) -> IdentityFunction: ... + +# Type constructors + +class NamedTuple(tuple[Any, ...]): + if sys.version_info < (3, 8): + _field_types: collections.OrderedDict[str, type] + elif sys.version_info < (3, 9): + _field_types: dict[str, type] + _field_defaults: dict[str, Any] + _fields: tuple[str, ...] + _source: str + @overload + def __init__(self, typename: str, fields: Iterable[tuple[str, Any]] = ...) -> None: ... + @overload + def __init__(self, typename: str, fields: None = ..., **kwargs: Any) -> None: ... + @classmethod + def _make(cls: Type[_T], iterable: Iterable[Any]) -> _T: ... + if sys.version_info >= (3, 8): + def _asdict(self) -> dict[str, Any]: ... + else: + def _asdict(self) -> collections.OrderedDict[str, Any]: ... + + def _replace(self: _typeshed.Self, **kwargs: Any) -> _typeshed.Self: ... + +# Internal mypy fallback type for all typed dicts (does not exist at runtime) +# N.B. Keep this mostly in sync with typing_extensions._TypedDict/mypy_extensions._TypedDict +@type_check_only +class _TypedDict(Mapping[str, object], metaclass=ABCMeta): + __total__: ClassVar[bool] + if sys.version_info >= (3, 9): + __required_keys__: ClassVar[frozenset[str]] + __optional_keys__: ClassVar[frozenset[str]] + def copy(self: _typeshed.Self) -> _typeshed.Self: ... + # Using Never so that only calls using mypy plugin hook that specialize the signature + # can go through. + def setdefault(self, k: _Never, default: object) -> object: ... + # Mypy plugin hook for 'pop' expects that 'default' has a type variable type. + def pop(self, k: _Never, default: _T = ...) -> object: ... # pyright: ignore[reportInvalidTypeVarUse] + def update(self: _T, __m: _T) -> None: ... + def __delitem__(self, k: _Never) -> None: ... + def items(self) -> dict_items[str, object]: ... + def keys(self) -> dict_keys[str, object]: ... 
+ def values(self) -> dict_values[str, object]: ... + if sys.version_info >= (3, 9): + def __or__(self: _typeshed.Self, __value: _typeshed.Self) -> _typeshed.Self: ... + def __ior__(self: _typeshed.Self, __value: _typeshed.Self) -> _typeshed.Self: ... + +@_final +class ForwardRef: + __forward_arg__: str + __forward_code__: CodeType + __forward_evaluated__: bool + __forward_value__: Any | None + __forward_is_argument__: bool + __forward_is_class__: bool + __forward_module__: Any | None + if sys.version_info >= (3, 9): + # The module and is_class arguments were added in later Python 3.9 versions. + def __init__(self, arg: str, is_argument: bool = ..., module: Any | None = ..., *, is_class: bool = ...) -> None: ... + else: + def __init__(self, arg: str, is_argument: bool = ...) -> None: ... + + if sys.version_info >= (3, 9): + def _evaluate( + self, globalns: dict[str, Any] | None, localns: dict[str, Any] | None, recursive_guard: frozenset[str] + ) -> Any | None: ... + else: + def _evaluate(self, globalns: dict[str, Any] | None, localns: dict[str, Any] | None) -> Any | None: ... + + def __eq__(self, other: object) -> bool: ... + if sys.version_info >= (3, 11): + def __or__(self, other: Any) -> _SpecialForm: ... + def __ror__(self, other: Any) -> _SpecialForm: ... + +if sys.version_info >= (3, 10): + def is_typeddict(tp: object) -> bool: ... + +def _type_repr(obj: object) -> str: ... diff --git a/.vscode/Pico-W-Stub/stdlib/typing_extensions.pyi b/.vscode/Pico-W-Stub/stdlib/typing_extensions.pyi new file mode 100644 index 0000000..32be3c0 --- /dev/null +++ b/.vscode/Pico-W-Stub/stdlib/typing_extensions.pyi @@ -0,0 +1,495 @@ +import abc +import collections +import sys +import typing +from _collections_abc import dict_items, dict_keys, dict_values +from _typeshed import IdentityFunction, Incomplete +from typing import ( # noqa: Y022,Y037,Y038,Y039 + IO as IO, + TYPE_CHECKING as TYPE_CHECKING, + AbstractSet as AbstractSet, + Any as Any, + AnyStr as AnyStr, + AsyncContextManager as AsyncContextManager, + AsyncGenerator as AsyncGenerator, + AsyncIterable as AsyncIterable, + AsyncIterator as AsyncIterator, + Awaitable as Awaitable, + BinaryIO as BinaryIO, + Callable as Callable, + ChainMap as ChainMap, + ClassVar as ClassVar, + Collection as Collection, + Container as Container, + ContextManager as ContextManager, + Coroutine as Coroutine, + Counter as Counter, + DefaultDict as DefaultDict, + Deque as Deque, + Dict as Dict, + ForwardRef as ForwardRef, + FrozenSet as FrozenSet, + Generator as Generator, + Generic as Generic, + Hashable as Hashable, + ItemsView as ItemsView, + Iterable as Iterable, + Iterator as Iterator, + KeysView as KeysView, + List as List, + Mapping as Mapping, + MappingView as MappingView, + Match as Match, + MutableMapping as MutableMapping, + MutableSequence as MutableSequence, + MutableSet as MutableSet, + NoReturn as NoReturn, + Optional as Optional, + Pattern as Pattern, + Reversible as Reversible, + Sequence as Sequence, + Set as Set, + Sized as Sized, + SupportsAbs as SupportsAbs, + SupportsBytes as SupportsBytes, + SupportsComplex as SupportsComplex, + SupportsFloat as SupportsFloat, + SupportsInt as SupportsInt, + SupportsRound as SupportsRound, + Text as Text, + TextIO as TextIO, + Tuple as Tuple, + Type as Type, + Union as Union, + ValuesView as ValuesView, + _Alias, + cast as cast, + no_type_check as no_type_check, + no_type_check_decorator as no_type_check_decorator, + overload as overload, + type_check_only, +) + +if sys.version_info >= (3, 10): + from types import 
UnionType +if sys.version_info >= (3, 9): + from types import GenericAlias + +__all__ = [ + "Any", + "Buffer", + "ClassVar", + "Concatenate", + "Final", + "LiteralString", + "ParamSpec", + "ParamSpecArgs", + "ParamSpecKwargs", + "Self", + "Type", + "TypeVar", + "TypeVarTuple", + "Unpack", + "Awaitable", + "AsyncIterator", + "AsyncIterable", + "Coroutine", + "AsyncGenerator", + "AsyncContextManager", + "ChainMap", + "ContextManager", + "Counter", + "Deque", + "DefaultDict", + "NamedTuple", + "OrderedDict", + "TypedDict", + "SupportsIndex", + "SupportsAbs", + "SupportsRound", + "SupportsBytes", + "SupportsComplex", + "SupportsFloat", + "SupportsInt", + "Annotated", + "assert_never", + "assert_type", + "dataclass_transform", + "deprecated", + "final", + "IntVar", + "is_typeddict", + "Literal", + "NewType", + "overload", + "override", + "Protocol", + "reveal_type", + "runtime", + "runtime_checkable", + "Text", + "TypeAlias", + "TypeAliasType", + "TypeGuard", + "TYPE_CHECKING", + "Never", + "NoReturn", + "Required", + "NotRequired", + "clear_overloads", + "get_args", + "get_origin", + "get_original_bases", + "get_overloads", + "get_type_hints", + "AbstractSet", + "AnyStr", + "BinaryIO", + "Callable", + "Collection", + "Container", + "Dict", + "ForwardRef", + "FrozenSet", + "Generator", + "Generic", + "Hashable", + "IO", + "ItemsView", + "Iterable", + "Iterator", + "KeysView", + "List", + "Mapping", + "MappingView", + "Match", + "MutableMapping", + "MutableSequence", + "MutableSet", + "Optional", + "Pattern", + "Reversible", + "Sequence", + "Set", + "Sized", + "TextIO", + "Tuple", + "Union", + "ValuesView", + "cast", + "get_protocol_members", + "is_protocol", + "no_type_check", + "no_type_check_decorator", +] + +_T = typing.TypeVar("_T") +_F = typing.TypeVar("_F", bound=Callable[..., Any]) +_TC = typing.TypeVar("_TC", bound=type[object]) + +# unfortunately we have to duplicate this class definition from typing.pyi or we break pytype +class _SpecialForm: + def __getitem__(self, parameters: Any) -> object: ... + if sys.version_info >= (3, 10): + def __or__(self, other: Any) -> _SpecialForm: ... + def __ror__(self, other: Any) -> _SpecialForm: ... + +# Do not import (and re-export) Protocol or runtime_checkable from +# typing module because type checkers need to be able to distinguish +# typing.Protocol and typing_extensions.Protocol so they can properly +# warn users about potential runtime exceptions when using typing.Protocol +# on older versions of Python. +Protocol: _SpecialForm + +def runtime_checkable(cls: _TC) -> _TC: ... + +# This alias for above is kept here for backwards compatibility. +runtime = runtime_checkable +Final: _SpecialForm + +def final(f: _F) -> _F: ... + +Literal: _SpecialForm + +def IntVar(name: str) -> Any: ... # returns a new TypeVar + +# Internal mypy fallback type for all typed dicts (does not exist at runtime) +# N.B. Keep this mostly in sync with typing._TypedDict/mypy_extensions._TypedDict +@type_check_only +class _TypedDict(Mapping[str, object], metaclass=abc.ABCMeta): + __required_keys__: ClassVar[frozenset[str]] + __optional_keys__: ClassVar[frozenset[str]] + __total__: ClassVar[bool] + __orig_bases__: ClassVar[tuple[Any, ...]] + def copy(self) -> Self: ... + # Using Never so that only calls using mypy plugin hook that specialize the signature + # can go through. + def setdefault(self, k: Never, default: object) -> object: ... + # Mypy plugin hook for 'pop' expects that 'default' has a type variable type. + def pop(self, k: Never, default: _T = ...) -> object: ... 
# pyright: ignore[reportInvalidTypeVarUse] + def update(self: _T, __m: _T) -> None: ... + def items(self) -> dict_items[str, object]: ... + def keys(self) -> dict_keys[str, object]: ... + def values(self) -> dict_values[str, object]: ... + def __delitem__(self, k: Never) -> None: ... + if sys.version_info >= (3, 9): + @overload + def __or__(self, __value: Self) -> Self: ... + @overload + def __or__(self, __value: dict[str, Any]) -> dict[str, object]: ... + @overload + def __ror__(self, __value: Self) -> Self: ... + @overload + def __ror__(self, __value: dict[str, Any]) -> dict[str, object]: ... + # supposedly incompatible definitions of `__ior__` and `__or__`: + def __ior__(self, __value: Self) -> Self: ... # type: ignore[misc] + +# TypedDict is a (non-subscriptable) special form. +TypedDict: object + +OrderedDict = _Alias() + +def get_type_hints( + obj: Callable[..., Any], + globalns: dict[str, Any] | None = None, + localns: dict[str, Any] | None = None, + include_extras: bool = False, +) -> dict[str, Any]: ... +def get_args(tp: Any) -> tuple[Any, ...]: ... + +if sys.version_info >= (3, 10): + @overload + def get_origin(tp: UnionType) -> type[UnionType]: ... + +if sys.version_info >= (3, 9): + @overload + def get_origin(tp: GenericAlias) -> type: ... + +@overload +def get_origin(tp: ParamSpecArgs | ParamSpecKwargs) -> ParamSpec: ... +@overload +def get_origin(tp: Any) -> Any | None: ... + +Annotated: _SpecialForm +_AnnotatedAlias: Any # undocumented + +@runtime_checkable +class SupportsIndex(Protocol, metaclass=abc.ABCMeta): + @abc.abstractmethod + def __index__(self) -> int: ... + +# New and changed things in 3.10 +if sys.version_info >= (3, 10): + from typing import ( + Concatenate as Concatenate, + NewType as NewType, + ParamSpecArgs as ParamSpecArgs, + ParamSpecKwargs as ParamSpecKwargs, + TypeAlias as TypeAlias, + TypeGuard as TypeGuard, + is_typeddict as is_typeddict, + ) +else: + @final + class ParamSpecArgs: + @property + def __origin__(self) -> ParamSpec: ... + def __init__(self, origin: ParamSpec) -> None: ... + + @final + class ParamSpecKwargs: + @property + def __origin__(self) -> ParamSpec: ... + def __init__(self, origin: ParamSpec) -> None: ... + + Concatenate: _SpecialForm + TypeAlias: _SpecialForm + TypeGuard: _SpecialForm + def is_typeddict(tp: object) -> bool: ... + + class NewType: + def __init__(self, name: str, tp: Any) -> None: ... + def __call__(self, __x: _T) -> _T: ... + __supertype__: type + +# New things in 3.11 +# NamedTuples are not new, but the ability to create generic NamedTuples is new in 3.11 +if sys.version_info >= (3, 11): + from typing import ( + LiteralString as LiteralString, + NamedTuple as NamedTuple, + Never as Never, + NotRequired as NotRequired, + Required as Required, + Self as Self, + Unpack as Unpack, + assert_never as assert_never, + assert_type as assert_type, + clear_overloads as clear_overloads, + dataclass_transform as dataclass_transform, + get_overloads as get_overloads, + reveal_type as reveal_type, + ) +else: + Self: _SpecialForm + Never: _SpecialForm + def reveal_type(__obj: _T) -> _T: ... + def assert_never(__arg: Never) -> Never: ... + def assert_type(__val: _T, __typ: Any) -> _T: ... + def clear_overloads() -> None: ... + def get_overloads(func: Callable[..., object]) -> Sequence[Callable[..., object]]: ... 
+ + Required: _SpecialForm + NotRequired: _SpecialForm + LiteralString: _SpecialForm + Unpack: _SpecialForm + + def dataclass_transform( + *, + eq_default: bool = True, + order_default: bool = False, + kw_only_default: bool = False, + frozen_default: bool = False, + field_specifiers: tuple[type[Any] | Callable[..., Any], ...] = (), + **kwargs: object, + ) -> IdentityFunction: ... + + class NamedTuple(tuple[Any, ...]): + if sys.version_info < (3, 8): + _field_types: ClassVar[collections.OrderedDict[str, type]] + elif sys.version_info < (3, 9): + _field_types: ClassVar[dict[str, type]] + _field_defaults: ClassVar[dict[str, Any]] + _fields: ClassVar[tuple[str, ...]] + __orig_bases__: ClassVar[tuple[Any, ...]] + @overload + def __init__(self, typename: str, fields: Iterable[tuple[str, Any]] = ...) -> None: ... + @overload + def __init__(self, typename: str, fields: None = None, **kwargs: Any) -> None: ... + @classmethod + def _make(cls, iterable: Iterable[Any]) -> Self: ... + if sys.version_info >= (3, 8): + def _asdict(self) -> dict[str, Any]: ... + else: + def _asdict(self) -> collections.OrderedDict[str, Any]: ... + + def _replace(self, **kwargs: Any) -> Self: ... + +# New things in 3.xx +# The `default` parameter was added to TypeVar, ParamSpec, and TypeVarTuple (PEP 696) +# The `infer_variance` parameter was added to TypeVar in 3.12 (PEP 695) +# typing_extensions.override (PEP 698) +@final +class TypeVar: + @property + def __name__(self) -> str: ... + @property + def __bound__(self) -> Any | None: ... + @property + def __constraints__(self) -> tuple[Any, ...]: ... + @property + def __covariant__(self) -> bool: ... + @property + def __contravariant__(self) -> bool: ... + @property + def __infer_variance__(self) -> bool: ... + @property + def __default__(self) -> Any | None: ... + def __init__( + self, + name: str, + *constraints: Any, + bound: Any | None = None, + covariant: bool = False, + contravariant: bool = False, + default: Any | None = None, + infer_variance: bool = False, + ) -> None: ... + if sys.version_info >= (3, 10): + def __or__(self, right: Any) -> _SpecialForm: ... + def __ror__(self, left: Any) -> _SpecialForm: ... + if sys.version_info >= (3, 11): + def __typing_subst__(self, arg: Incomplete) -> Incomplete: ... + +@final +class ParamSpec: + @property + def __name__(self) -> str: ... + @property + def __bound__(self) -> Any | None: ... + @property + def __covariant__(self) -> bool: ... + @property + def __contravariant__(self) -> bool: ... + @property + def __infer_variance__(self) -> bool: ... + @property + def __default__(self) -> Any | None: ... + def __init__( + self, + name: str, + *, + bound: None | type[Any] | str = None, + contravariant: bool = False, + covariant: bool = False, + default: type[Any] | str | None = None, + ) -> None: ... + @property + def args(self) -> ParamSpecArgs: ... + @property + def kwargs(self) -> ParamSpecKwargs: ... + +@final +class TypeVarTuple: + @property + def __name__(self) -> str: ... + @property + def __default__(self) -> Any | None: ... + def __init__(self, name: str, *, default: Any | None = None) -> None: ... + def __iter__(self) -> Any: ... # Unpack[Self] + +def deprecated(__msg: str, *, category: type[Warning] | None = ..., stacklevel: int = 1) -> Callable[[_T], _T]: ... + +if sys.version_info >= (3, 12): + from collections.abc import Buffer as Buffer + from types import get_original_bases as get_original_bases + from typing import TypeAliasType as TypeAliasType, override as override +else: + def override(__arg: _F) -> _F: ... 
+ def get_original_bases(__cls: type) -> tuple[Any, ...]: ... + @final + class TypeAliasType: + def __init__( + self, name: str, value: Any, *, type_params: tuple[TypeVar | ParamSpec | TypeVarTuple, ...] = () + ) -> None: ... + @property + def __value__(self) -> Any: ... + @property + def __type_params__(self) -> tuple[TypeVar | ParamSpec | TypeVarTuple, ...]: ... + @property + def __parameters__(self) -> tuple[Any, ...]: ... + @property + def __name__(self) -> str: ... + # It's writable on types, but not on instances of TypeAliasType. + @property + def __module__(self) -> str | None: ... # type: ignore[override] + def __getitem__(self, parameters: Any) -> Any: ... + if sys.version_info >= (3, 10): + def __or__(self, right: Any) -> _SpecialForm: ... + def __ror__(self, left: Any) -> _SpecialForm: ... + + @runtime_checkable + class Buffer(Protocol): + # Not actually a Protocol at runtime; see + # https://github.com/python/typeshed/issues/10224 for why we're defining it this way + def __buffer__(self, __flags: int) -> memoryview: ... + +if sys.version_info >= (3, 13): + from typing import get_protocol_members as get_protocol_members, is_protocol as is_protocol +else: + def is_protocol(__tp: type) -> bool: ... + def get_protocol_members(__tp: type) -> frozenset[str]: ... + +# PEP 705 +ReadOnly: _SpecialForm + diff --git a/.vscode/Pico-W-Stub/struct.pyi b/.vscode/Pico-W-Stub/struct.pyi new file mode 100644 index 0000000..8638bc5 --- /dev/null +++ b/.vscode/Pico-W-Stub/struct.pyi @@ -0,0 +1,93 @@ +""" +Pack and unpack primitive data types. + +MicroPython module: https://docs.micropython.org/en/v1.21.0/library/struct.html + +CPython module: :mod:`python:struct` https://docs.python.org/3/library/struct.html . + +The following byte orders are supported: + ++-----------+------------------------+----------+-----------+ +| Character | Byte order | Size | Alignment | ++===========+========================+==========+===========+ +| @ | native | native | native | ++-----------+------------------------+----------+-----------+ +| < | little-endian | standard | none | ++-----------+------------------------+----------+-----------+ +| > | big-endian | standard | none | ++-----------+------------------------+----------+-----------+ +| ! 
| network (= big-endian) | standard | none | ++-----------+------------------------+----------+-----------+ + +The following data types are supported: + ++--------+--------------------+-------------------+---------------+ +| Format | C Type | Python type | Standard size | ++========+====================+===================+===============+ +| b | signed char | integer | 1 | ++--------+--------------------+-------------------+---------------+ +| B | unsigned char | integer | 1 | ++--------+--------------------+-------------------+---------------+ +| h | short | integer | 2 | ++--------+--------------------+-------------------+---------------+ +| H | unsigned short | integer | 2 | ++--------+--------------------+-------------------+---------------+ +| i | int | integer (`1`) | 4 | ++--------+--------------------+-------------------+---------------+ +| I | unsigned int | integer (`1`) | 4 | ++--------+--------------------+-------------------+---------------+ +| l | long | integer (`1`) | 4 | ++--------+--------------------+-------------------+---------------+ +| L | unsigned long | integer (`1`) | 4 | ++--------+--------------------+-------------------+---------------+ +| q | long long | integer (`1`) | 8 | ++--------+--------------------+-------------------+---------------+ +| Q | unsigned long long | integer (`1`) | 8 | ++--------+--------------------+-------------------+---------------+ +| f | float | float (`2`) | 4 | ++--------+--------------------+-------------------+---------------+ +| d | double | float (`2`) | 8 | ++--------+--------------------+-------------------+---------------+ +| s | char[] | bytes | | ++--------+--------------------+-------------------+---------------+ +| P | void * | integer | | ++--------+--------------------+-------------------+---------------+ +""" +from _typeshed import Incomplete, Incomplete as Incomplete +from typing import Tuple + +def pack_into(fmt, buffer, offset, v1, *args, **kwargs) -> Incomplete: + """ + Pack the values *v1*, *v2*, ... according to the format string *fmt* + into a *buffer* starting at *offset*. *offset* may be negative to count + from the end of *buffer*. + """ + ... + +def unpack(fmt, data) -> Tuple: + """ + Unpack from the *data* according to the format string *fmt*. + The return value is a tuple of the unpacked values. + """ + ... + +def unpack_from(fmt, data, offset=0, /) -> Tuple: + """ + Unpack from the *data* starting at *offset* according to the format string + *fmt*. *offset* may be negative to count from the end of *data*. The return + value is a tuple of the unpacked values. + """ + ... + +def pack(fmt, v1, *args, **kwargs) -> bytes: + """ + Pack the values *v1*, *v2*, ... according to the format string *fmt*. + The return value is a bytes object encoding the values. + """ + ... + +def calcsize(fmt) -> int: + """ + Return the number of bytes needed to store the given *fmt*. + """ + ... diff --git a/.vscode/Pico-W-Stub/sys.pyi b/.vscode/Pico-W-Stub/sys.pyi new file mode 100644 index 0000000..9cabee8 --- /dev/null +++ b/.vscode/Pico-W-Stub/sys.pyi @@ -0,0 +1,49 @@ +""" +System specific functions. + +MicroPython module: https://docs.micropython.org/en/v1.21.0/library/sys.html + +CPython module: :mod:`python:sys` https://docs.python.org/3/library/sys.html . 
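+
+As an illustrative sketch (not part of the upstream documentation): the
+attributes and functions below are typically used like this on a board,
+where the exact values reported by ``platform`` and ``implementation``
+depend on the port::
+
+    import sys
+
+    print(sys.platform)        # e.g. 'rp2' on a Raspberry Pi Pico W
+    print(sys.implementation)  # port name and version information
+
+    try:
+        1 / 0
+    except Exception as exc:
+        # Print a traceback to stderr instead of the default stdout.
+        sys.print_exception(exc, sys.stderr)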
+""" +from _typeshed import Incomplete, Incomplete as Incomplete +from typing import Dict, List, Tuple + +platform: str +version_info: tuple +path: list +version: str +ps1: str +ps2: str +byteorder: str +modules: dict +argv: list +implementation: tuple +maxsize: int + +def print_exception(exc, file=stdout, /) -> None: + """ + Print exception with a traceback to a file-like object *file* (or + `sys.stdout` by default). + + Difference to CPython + + This is simplified version of a function which appears in the + ``traceback`` module in CPython. Unlike ``traceback.print_exception()``, + this function takes just exception value instead of exception type, + exception value, and traceback object; *file* argument should be + positional; further arguments are not supported. CPython-compatible + ``traceback`` module can be found in `micropython-lib`. + """ + ... + +def exit(retval=0, /) -> Incomplete: + """ + Terminate current program with a given exit code. Underlyingly, this + function raise as `SystemExit` exception. If an argument is given, its + value given as an argument to `SystemExit`. + """ + ... + +stderr: Incomplete +stdout: Incomplete +stdin: Incomplete diff --git a/.vscode/Pico-W-Stub/time.pyi b/.vscode/Pico-W-Stub/time.pyi new file mode 100644 index 0000000..39399c1 --- /dev/null +++ b/.vscode/Pico-W-Stub/time.pyi @@ -0,0 +1,290 @@ +""" +Time related functions. + +MicroPython module: https://docs.micropython.org/en/v1.21.0/library/time.html + +CPython module: :mod:`python:time` https://docs.python.org/3/library/time.html . + +The ``time`` module provides functions for getting the current time and date, +measuring time intervals, and for delays. + +**Time Epoch**: Unix port uses standard for POSIX systems epoch of +1970-01-01 00:00:00 UTC. However, some embedded ports use epoch of +2000-01-01 00:00:00 UTC. Epoch year may be determined with ``gmtime(0)[0]``. + +**Maintaining actual calendar date/time**: This requires a +Real Time Clock (RTC). On systems with underlying OS (including some +RTOS), an RTC may be implicit. Setting and maintaining actual calendar +time is responsibility of OS/RTOS and is done outside of MicroPython, +it just uses OS API to query date/time. On baremetal ports however +system time depends on ``machine.RTC()`` object. The current calendar time +may be set using ``machine.RTC().datetime(tuple)`` function, and maintained +by following means: + +* By a backup battery (which may be an additional, optional component for + a particular board). +* Using networked time protocol (requires setup by a port/user). +* Set manually by a user on each power-up (many boards then maintain + RTC time across hard resets, though some may require setting it again + in such case). + +If actual calendar time is not maintained with a system/MicroPython RTC, +functions below which require reference to current absolute time may +behave not as expected. +""" +from _typeshed import Incomplete, Incomplete as Incomplete +from typing import Any, Optional, Tuple + +def ticks_diff(ticks1, ticks2) -> int: + """ + Measure ticks difference between values returned from `ticks_ms()`, `ticks_us()`, + or `ticks_cpu()` functions, as a signed value which may wrap around. + + The argument order is the same as for subtraction + operator, ``ticks_diff(ticks1, ticks2)`` has the same meaning as ``ticks1 - ticks2``. + However, values returned by `ticks_ms()`, etc. functions may wrap around, so + directly using subtraction on them will produce incorrect result. 
That is why
+    `ticks_diff()` is needed: it implements modular (or more specifically, ring)
+    arithmetic to produce a correct result even for wrap-around values (as long as they are
+    not too far apart; see below). The function returns a **signed** value in the range
+    [*-TICKS_PERIOD/2* .. *TICKS_PERIOD/2-1*] (that's a typical range definition for
+    two's-complement signed binary integers). If the result is negative, it means that
+    *ticks1* occurred earlier in time than *ticks2*. Otherwise, it means that
+    *ticks1* occurred after *ticks2*. This holds **only** if *ticks1* and *ticks2*
+    are apart from each other by no more than *TICKS_PERIOD/2-1* ticks. If that does
+    not hold, an incorrect result will be returned. Specifically, if two tick values are
+    apart by *TICKS_PERIOD/2-1* ticks, that value will be returned by the function.
+    However, if *TICKS_PERIOD/2* of real-time ticks have passed between them, the
+    function will return *-TICKS_PERIOD/2* instead, i.e. the result will wrap around
+    to the negative range of possible values.
+
+    Informal rationale of the constraints above: Suppose you are locked in a room with no
+    means of monitoring the passing of time except a standard 12-notch clock. If you look at
+    the dial now and don't look again for another 13 hours (e.g., because you fall into a
+    long sleep), then once you finally look again, it may seem to you that only 1 hour
+    has passed. To avoid this mistake, just look at the clock regularly. Your application
+    should do the same. The "too long sleep" metaphor also maps directly to application
+    behaviour: don't let your application run any single task for too long. Run tasks
+    in steps, and do time-keeping in between.
+
+    `ticks_diff()` is designed to accommodate various usage patterns, among them:
+
+    * Polling with timeout. In this case, the order of events is known, and you will deal
+      only with positive results of `ticks_diff()`::
+
+        # Wait for GPIO pin to be asserted, but at most 500us
+        start = time.ticks_us()
+        while pin.value() == 0:
+            if time.ticks_diff(time.ticks_us(), start) > 500:
+                raise TimeoutError
+
+    * Scheduling events. In this case, `ticks_diff()` result may be negative
+      if an event is overdue::
+
+        # This code snippet is not optimized
+        now = time.ticks_ms()
+        scheduled_time = task.scheduled_time()
+        if ticks_diff(scheduled_time, now) > 0:
+            print("Too early, let's nap")
+            sleep_ms(ticks_diff(scheduled_time, now))
+            task.run()
+        elif ticks_diff(scheduled_time, now) == 0:
+            print("Right at time!")
+            task.run()
+        elif ticks_diff(scheduled_time, now) < 0:
+            print("Oops, running late, tell task to run faster!")
+            task.run(run_faster=True)
+
+    Note: Do not pass `time()` values to `ticks_diff()`; you should use
+    normal mathematical operations on them. But note that `time()` may (and will)
+    also overflow. This is known as https://en.wikipedia.org/wiki/Year_2038_problem .
+    """
+    ...
+
+def ticks_add(ticks, delta) -> Incomplete:
+    """
+    Offset a ticks value by a given number, which can be either positive or negative.
+    Given a *ticks* value, this function allows you to calculate the ticks value *delta*
+    ticks before or after it, following the modular-arithmetic definition of tick values
+    (see `ticks_ms()` above). The *ticks* parameter must be a direct result of a call
+    to `ticks_ms()`, `ticks_us()`, or `ticks_cpu()` functions (or from a previous
+    call to `ticks_add()`). However, *delta* can be an arbitrary integer number
+    or numeric expression. `ticks_add()` is useful for calculating deadlines for
+    events/tasks.
(Note: you must use `ticks_diff()` function to work with + deadlines.) + + Examples:: + + # Find out what ticks value there was 100ms ago + print(ticks_add(time.ticks_ms(), -100)) + + # Calculate deadline for operation and test for it + deadline = ticks_add(time.ticks_ms(), 200) + while ticks_diff(deadline, time.ticks_ms()) > 0: + do_a_little_of_something() + + # Find out TICKS_MAX used by this port + print(ticks_add(0, -1)) + """ + ... + +def ticks_cpu() -> Incomplete: + """ + Similar to `ticks_ms()` and `ticks_us()`, but with the highest possible resolution + in the system. This is usually CPU clocks, and that's why the function is named that + way. But it doesn't have to be a CPU clock, some other timing source available in a + system (e.g. high-resolution timer) can be used instead. The exact timing unit + (resolution) of this function is not specified on ``time`` module level, but + documentation for a specific port may provide more specific information. This + function is intended for very fine benchmarking or very tight real-time loops. + Avoid using it in portable code. + + Availability: Not every port implements this function. + """ + ... + +def time() -> int: + """ + Returns the number of seconds, as an integer, since the Epoch, assuming that + underlying RTC is set and maintained as described above. If an RTC is not set, this + function returns number of seconds since a port-specific reference point in time (for + embedded boards without a battery-backed RTC, usually since power up or reset). If you + want to develop portable MicroPython application, you should not rely on this function + to provide higher than second precision. If you need higher precision, absolute + timestamps, use `time_ns()`. If relative times are acceptable then use the + `ticks_ms()` and `ticks_us()` functions. If you need calendar time, `gmtime()` or + `localtime()` without an argument is a better choice. + + Difference to CPython + + In CPython, this function returns number of + seconds since Unix epoch, 1970-01-01 00:00 UTC, as a floating-point, + usually having microsecond precision. With MicroPython, only Unix port + uses the same Epoch, and if floating-point precision allows, + returns sub-second precision. Embedded hardware usually doesn't have + floating-point precision to represent both long time ranges and subsecond + precision, so they use integer value with second precision. Some embedded + hardware also lacks battery-powered RTC, so returns number of seconds + since last power-up or from other relative, hardware-specific point + (e.g. reset). + """ + ... + +def ticks_ms() -> int: + """ + Returns an increasing millisecond counter with an arbitrary reference point, that + wraps around after some value. + + The wrap-around value is not explicitly exposed, but we will + refer to it as *TICKS_MAX* to simplify discussion. Period of the values is + *TICKS_PERIOD = TICKS_MAX + 1*. *TICKS_PERIOD* is guaranteed to be a power of + two, but otherwise may differ from port to port. The same period value is used + for all of `ticks_ms()`, `ticks_us()`, `ticks_cpu()` functions (for + simplicity). Thus, these functions will return a value in range [*0* .. + *TICKS_MAX*], inclusive, total *TICKS_PERIOD* values. Note that only + non-negative values are used. For the most part, you should treat values returned + by these functions as opaque. The only operations available for them are + `ticks_diff()` and `ticks_add()` functions described below. 
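+
+    As an illustrative sketch (not part of the upstream documentation), the
+    common pattern for measuring an elapsed interval in a wrap-around-safe way
+    is (``do_work()`` is a placeholder for application code)::
+
+        start = time.ticks_ms()
+        do_work()
+        elapsed_ms = time.ticks_diff(time.ticks_ms(), start)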
+ + Note: Performing standard mathematical operations (+, -) or relational + operators (<, <=, >, >=) directly on these value will lead to invalid + result. Performing mathematical operations and then passing their results + as arguments to `ticks_diff()` or `ticks_add()` will also lead to + invalid results from the latter functions. + """ + ... + +def ticks_us() -> Incomplete: + """ + Just like `ticks_ms()` above, but in microseconds. + """ + ... + +def time_ns() -> int: + """ + Similar to `time()` but returns nanoseconds since the Epoch, as an integer (usually + a big integer, so will allocate on the heap). + """ + ... + +def localtime(secs: Optional[Any] = None) -> Tuple: + """ + Convert the time *secs* expressed in seconds since the Epoch (see above) into an + 8-tuple which contains: ``(year, month, mday, hour, minute, second, weekday, yearday)`` + If *secs* is not provided or None, then the current time from the RTC is used. + + The `gmtime()` function returns a date-time tuple in UTC, and `localtime()` returns a + date-time tuple in local time. + + The format of the entries in the 8-tuple are: + + * year includes the century (for example 2014). + * month is 1-12 + * mday is 1-31 + * hour is 0-23 + * minute is 0-59 + * second is 0-59 + * weekday is 0-6 for Mon-Sun + * yearday is 1-366 + """ + ... + +def sleep_us(us) -> None: + """ + Delay for given number of microseconds, should be positive or 0. + + This function attempts to provide an accurate delay of at least *us* + microseconds, but it may take longer if the system has other higher priority + processing to perform. + """ + ... + +def gmtime(secs: Optional[Any] = None) -> Tuple: + """ + Convert the time *secs* expressed in seconds since the Epoch (see above) into an + 8-tuple which contains: ``(year, month, mday, hour, minute, second, weekday, yearday)`` + If *secs* is not provided or None, then the current time from the RTC is used. + + The `gmtime()` function returns a date-time tuple in UTC, and `localtime()` returns a + date-time tuple in local time. + + The format of the entries in the 8-tuple are: + + * year includes the century (for example 2014). + * month is 1-12 + * mday is 1-31 + * hour is 0-23 + * minute is 0-59 + * second is 0-59 + * weekday is 0-6 for Mon-Sun + * yearday is 1-366 + """ + ... + +def sleep_ms(ms) -> None: + """ + Delay for given number of milliseconds, should be positive or 0. + + This function will delay for at least the given number of milliseconds, but + may take longer than that if other processing must take place, for example + interrupt handlers or other threads. Passing in 0 for *ms* will still allow + this other processing to occur. Use `sleep_us()` for more precise delays. + """ + ... + +def mktime() -> int: + """ + This is inverse function of localtime. It's argument is a full 8-tuple + which expresses a time as per localtime. It returns an integer which is + the number of seconds since Jan 1, 2000. + """ + ... + +def sleep(seconds) -> Incomplete: + """ + Sleep for the given number of seconds. Some boards may accept *seconds* as a + floating-point number to sleep for a fractional number of seconds. Note that + other boards may not accept a floating-point argument, for compatibility with + them use `sleep_ms()` and `sleep_us()` functions. + """ + ... diff --git a/.vscode/Pico-W-Stub/uarray.pyi b/.vscode/Pico-W-Stub/uarray.pyi new file mode 100644 index 0000000..8841b77 --- /dev/null +++ b/.vscode/Pico-W-Stub/uarray.pyi @@ -0,0 +1,33 @@ +""" +Efficient arrays of numeric data. 
+ +MicroPython module: https://docs.micropython.org/en/v1.21.0/library/array.html + +CPython module: :mod:`python:array` https://docs.python.org/3/library/array.html . + +Supported format codes: ``b``, ``B``, ``h``, ``H``, ``i``, ``I``, ``l``, +``L``, ``q``, ``Q``, ``f``, ``d`` (the latter 2 depending on the +floating-point support). +""" +from _typeshed import Incomplete, Incomplete as Incomplete +from typing import Any, List, Optional + +class array: + """ + Create array with elements of given type. Initial contents of the + array are given by *iterable*. If it is not provided, an empty + array is created. + """ + + def extend(self, iterable) -> Incomplete: + """ + Append new elements as contained in *iterable* to the end of + array, growing it. + """ + ... + def append(self, val) -> Incomplete: + """ + Append new element *val* to the end of array, growing it. + """ + ... + def __init__(self, typecode, iterable: Optional[Any] = None) -> None: ... diff --git a/.vscode/Pico-W-Stub/uasyncio.pyi b/.vscode/Pico-W-Stub/uasyncio.pyi new file mode 100644 index 0000000..d53bcfb --- /dev/null +++ b/.vscode/Pico-W-Stub/uasyncio.pyi @@ -0,0 +1 @@ +def __getattr__(attr): ... diff --git a/.vscode/Pico-W-Stub/uasyncio/__init__.pyi b/.vscode/Pico-W-Stub/uasyncio/__init__.pyi new file mode 100644 index 0000000..e80dd01 --- /dev/null +++ b/.vscode/Pico-W-Stub/uasyncio/__init__.pyi @@ -0,0 +1,45 @@ +from _typeshed import Incomplete as Incomplete + +def ticks_diff(*args, **kwargs) -> Incomplete: ... +def run_until_complete(*args, **kwargs) -> Incomplete: ... +def create_task(*args, **kwargs) -> Incomplete: ... +def wait_for_ms(*args, **kwargs) -> Incomplete: ... +def run(*args, **kwargs) -> Incomplete: ... +def new_event_loop(*args, **kwargs) -> Incomplete: ... +def current_task(*args, **kwargs) -> Incomplete: ... +def get_event_loop(*args, **kwargs) -> Incomplete: ... +def ticks(*args, **kwargs) -> Incomplete: ... +def sleep_ms(*args, **kwargs) -> Incomplete: ... +def ticks_add(*args, **kwargs) -> Incomplete: ... +def sleep(*args, **kwargs) -> Incomplete: ... + +wait_for: Incomplete +gather: Incomplete + +class Loop: + def call_exception_handler(self, *args, **kwargs) -> Incomplete: ... + def run_forever(self, *args, **kwargs) -> Incomplete: ... + def set_exception_handler(self, *args, **kwargs) -> Incomplete: ... + def get_exception_handler(self, *args, **kwargs) -> Incomplete: ... + def default_exception_handler(self, *args, **kwargs) -> Incomplete: ... + def run_until_complete(self, *args, **kwargs) -> Incomplete: ... + def close(self, *args, **kwargs) -> Incomplete: ... + def stop(self, *args, **kwargs) -> Incomplete: ... + def create_task(self, *args, **kwargs) -> Incomplete: ... + def __init__(self, *argv, **kwargs) -> None: ... + +class IOQueue: + def queue_write(self, *args, **kwargs) -> Incomplete: ... + def queue_read(self, *args, **kwargs) -> Incomplete: ... + def wait_io_event(self, *args, **kwargs) -> Incomplete: ... + def remove(self, *args, **kwargs) -> Incomplete: ... + def __init__(self, *argv, **kwargs) -> None: ... + +class Event: + def set(self, *args, **kwargs) -> Incomplete: ... + def is_set(self, *args, **kwargs) -> Incomplete: ... + def clear(self, *args, **kwargs) -> Incomplete: ... + wait: Incomplete + def __init__(self, *argv, **kwargs) -> None: ... + +class CancelledError(Exception): ... 
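+
+# Illustrative sketch (not part of the generated stub): a typical use of this
+# API on a Pico W, assuming the on-board LED; kept as a comment so the stub
+# itself stays declaration-only.
+#
+#   import uasyncio
+#   from machine import Pin
+#
+#   led = Pin("LED", Pin.OUT)
+#
+#   async def blink(period_ms):
+#       while True:
+#           led.toggle()
+#           await uasyncio.sleep_ms(period_ms)
+#
+#   async def main():
+#       uasyncio.create_task(blink(250))
+#       await uasyncio.sleep(10)  # let it blink for about 10 seconds
+#
+#   uasyncio.run(main())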
diff --git a/.vscode/Pico-W-Stub/uasyncio/core.pyi b/.vscode/Pico-W-Stub/uasyncio/core.pyi new file mode 100644 index 0000000..713515f --- /dev/null +++ b/.vscode/Pico-W-Stub/uasyncio/core.pyi @@ -0,0 +1,25 @@ +from _typeshed import Incomplete as Incomplete + +def ticks(*args, **kwargs) -> Incomplete: ... +def run_until_complete(*args, **kwargs) -> Incomplete: ... +def create_task(*args, **kwargs) -> Incomplete: ... +def ticks_diff(*args, **kwargs) -> Incomplete: ... +def run(*args, **kwargs) -> Incomplete: ... +def new_event_loop(*args, **kwargs) -> Incomplete: ... +def current_task(*args, **kwargs) -> Incomplete: ... +def get_event_loop(*args, **kwargs) -> Incomplete: ... +def sleep_ms(*args, **kwargs) -> Incomplete: ... +def ticks_add(*args, **kwargs) -> Incomplete: ... +def sleep(*args, **kwargs) -> Incomplete: ... + +class TaskQueue: + def push(self, *args, **kwargs) -> Incomplete: ... + def peek(self, *args, **kwargs) -> Incomplete: ... + def remove(self, *args, **kwargs) -> Incomplete: ... + def pop(self, *args, **kwargs) -> Incomplete: ... + def __init__(self, *argv, **kwargs) -> None: ... + +class Task: + def __init__(self, *argv, **kwargs) -> None: ... + +class CancelledError(Exception): ... diff --git a/.vscode/Pico-W-Stub/uasyncio/event.pyi b/.vscode/Pico-W-Stub/uasyncio/event.pyi new file mode 100644 index 0000000..9f62c5c --- /dev/null +++ b/.vscode/Pico-W-Stub/uasyncio/event.pyi @@ -0,0 +1,15 @@ +from _typeshed import Incomplete as Incomplete + +class ThreadSafeFlag: + def set(self, *args, **kwargs) -> Incomplete: ... + def ioctl(self, *args, **kwargs) -> Incomplete: ... + def clear(self, *args, **kwargs) -> Incomplete: ... + wait: Incomplete + def __init__(self, *argv, **kwargs) -> None: ... + +class Event: + def set(self, *args, **kwargs) -> Incomplete: ... + def is_set(self, *args, **kwargs) -> Incomplete: ... + def clear(self, *args, **kwargs) -> Incomplete: ... + wait: Incomplete + def __init__(self, *argv, **kwargs) -> None: ... diff --git a/.vscode/Pico-W-Stub/uasyncio/funcs.pyi b/.vscode/Pico-W-Stub/uasyncio/funcs.pyi new file mode 100644 index 0000000..55caa41 --- /dev/null +++ b/.vscode/Pico-W-Stub/uasyncio/funcs.pyi @@ -0,0 +1,6 @@ +from _typeshed import Incomplete as Incomplete + +def wait_for_ms(*args, **kwargs) -> Incomplete: ... + +gather: Incomplete +wait_for: Incomplete diff --git a/.vscode/Pico-W-Stub/uasyncio/lock.pyi b/.vscode/Pico-W-Stub/uasyncio/lock.pyi new file mode 100644 index 0000000..6661216 --- /dev/null +++ b/.vscode/Pico-W-Stub/uasyncio/lock.pyi @@ -0,0 +1,7 @@ +from _typeshed import Incomplete as Incomplete + +class Lock: + def locked(self, *args, **kwargs) -> Incomplete: ... + def release(self, *args, **kwargs) -> Incomplete: ... + acquire: Incomplete + def __init__(self, *argv, **kwargs) -> None: ... diff --git a/.vscode/Pico-W-Stub/uasyncio/stream.pyi b/.vscode/Pico-W-Stub/uasyncio/stream.pyi new file mode 100644 index 0000000..0f7ea9f --- /dev/null +++ b/.vscode/Pico-W-Stub/uasyncio/stream.pyi @@ -0,0 +1,56 @@ +from _typeshed import Incomplete as Incomplete + +stream_awrite: Incomplete + +class StreamWriter: + def get_extra_info(self, *args, **kwargs) -> Incomplete: ... + def write(self, *args, **kwargs) -> Incomplete: ... + def close(self, *args, **kwargs) -> Incomplete: ... + awrite: Incomplete + readexactly: Incomplete + awritestr: Incomplete + drain: Incomplete + readinto: Incomplete + read: Incomplete + aclose: Incomplete + readline: Incomplete + wait_closed: Incomplete + def __init__(self, *argv, **kwargs) -> None: ... 
+ +class Stream: + def get_extra_info(self, *args, **kwargs) -> Incomplete: ... + def write(self, *args, **kwargs) -> Incomplete: ... + def close(self, *args, **kwargs) -> Incomplete: ... + awrite: Incomplete + readexactly: Incomplete + awritestr: Incomplete + drain: Incomplete + readinto: Incomplete + read: Incomplete + aclose: Incomplete + readline: Incomplete + wait_closed: Incomplete + def __init__(self, *argv, **kwargs) -> None: ... + +class Server: + def close(self, *args, **kwargs) -> Incomplete: ... + wait_closed: Incomplete + def __init__(self, *argv, **kwargs) -> None: ... + +class StreamReader: + def get_extra_info(self, *args, **kwargs) -> Incomplete: ... + def write(self, *args, **kwargs) -> Incomplete: ... + def close(self, *args, **kwargs) -> Incomplete: ... + awrite: Incomplete + readexactly: Incomplete + awritestr: Incomplete + drain: Incomplete + readinto: Incomplete + read: Incomplete + aclose: Incomplete + readline: Incomplete + wait_closed: Incomplete + def __init__(self, *argv, **kwargs) -> None: ... + +open_connection: Incomplete +start_server: Incomplete diff --git a/.vscode/Pico-W-Stub/ubinascii.pyi b/.vscode/Pico-W-Stub/ubinascii.pyi new file mode 100644 index 0000000..4026fc1 --- /dev/null +++ b/.vscode/Pico-W-Stub/ubinascii.pyi @@ -0,0 +1,46 @@ +""" +Binary/ASCII conversions. + +MicroPython module: https://docs.micropython.org/en/v1.21.0/library/binascii.html + +CPython module: :mod:`python:binascii` https://docs.python.org/3/library/binascii.html . + +This module implements conversions between binary data and various +encodings of it in ASCII form (in both directions). +""" +from _typeshed import Incomplete, Incomplete as Incomplete +from typing import Any, Optional + +def crc32(*args, **kwargs) -> Incomplete: ... +def hexlify(data, sep: Optional[Any] = None) -> bytes: + """ + Convert the bytes in the *data* object to a hexadecimal representation. + Returns a bytes object. + + If the additional argument *sep* is supplied it is used as a separator + between hexadecimal values. + """ + ... + +def unhexlify(data) -> bytes: + """ + Convert hexadecimal data to binary representation. Returns bytes string. + (i.e. inverse of hexlify) + """ + ... + +def b2a_base64(data, *, newline=True) -> bytes: + """ + Encode binary data in base64 format, as in `RFC 3548 + `_. Returns the encoded data + followed by a newline character if newline is true, as a bytes object. + """ + ... + +def a2b_base64(data) -> bytes: + """ + Decode base64-encoded data, ignoring invalid characters in the input. + Conforms to `RFC 2045 s.6.8 `_. + Returns a bytes object. + """ + ... diff --git a/.vscode/Pico-W-Stub/ubluetooth.pyi b/.vscode/Pico-W-Stub/ubluetooth.pyi new file mode 100644 index 0000000..ab39369 --- /dev/null +++ b/.vscode/Pico-W-Stub/ubluetooth.pyi @@ -0,0 +1,592 @@ +""" +Low-level Bluetooth radio functionality. + +MicroPython module: https://docs.micropython.org/en/v1.21.0/library/bluetooth.html + +This module provides an interface to a Bluetooth controller on a board. +Currently this supports Bluetooth Low Energy (BLE) in Central, Peripheral, +Broadcaster, and Observer roles, as well as GATT Server and Client and L2CAP +connection-oriented-channels. A device may operate in multiple roles +concurrently. Pairing (and bonding) is supported on some ports. + +This API is intended to match the low-level Bluetooth protocol and provide +building-blocks for higher-level abstractions such as specific device types. 
+ +``Note:`` For most applications, we recommend using the higher-level + `aioble library `_. + +``Note:`` This module is still under development and its classes, functions, + methods and constants are subject to change. +""" +from _typeshed import Incomplete, Incomplete as Incomplete +from typing import Any, Optional, Tuple + +FLAG_NOTIFY: int +FLAG_READ: int +FLAG_WRITE: int +FLAG_INDICATE: int +FLAG_WRITE_NO_RESPONSE: int + +class UUID: + """ + Creates a UUID instance with the specified **value**. + + The **value** can be either: + + - A 16-bit integer. e.g. ``0x2908``. + - A 128-bit UUID string. e.g. ``'6E400001-B5A3-F393-E0A9-E50E24DCCA9E'``. + """ + + def __init__(self, value, /) -> None: ... + +class BLE: + """ + Returns the singleton BLE object. + """ + + def gatts_notify(self, conn_handle, value_handle, data=None, /) -> None: + """ + Sends a notification request to a connected client. + + If *data* is ``None`` (the default), then the current local value (as set + with :meth:`gatts_write `) will be sent. + + Otherwise, if *data* is not ``None``, then that value is sent to the client + as part of the notification. The local value will not be modified. + + **Note:** The notification will be sent regardless of the subscription + status of the client to this characteristic. + """ + ... + def gatts_indicate(self, conn_handle, value_handle, data=None, /) -> None: + """ + Sends a indication request to a connected client. + + If *data* is ``None`` (the default), then the current local value (as set + with :meth:`gatts_write `) will be sent. + + Otherwise, if *data* is not ``None``, then that value is sent to the client + as part of the indication. The local value will not be modified. + + On acknowledgment (or failure, e.g. timeout), the + ``_IRQ_GATTS_INDICATE_DONE`` event will be raised. + + **Note:** The indication will be sent regardless of the subscription + status of the client to this characteristic. + """ + ... + def gattc_write(self, conn_handle, value_handle, data, mode=0, /) -> None: + """ + Issue a remote write to a connected server for the specified + characteristic or descriptor handle. + + The argument *mode* specifies the write behaviour, with the currently + supported values being: + + * ``mode=0`` (default) is a write-without-response: the write will + be sent to the remote server but no confirmation will be + returned, and no event will be raised. + * ``mode=1`` is a write-with-response: the remote server is + requested to send a response/acknowledgement that it received the + data. + + If a response is received from the remote server the + ``_IRQ_GATTC_WRITE_DONE`` event will be raised. + """ + ... + def gattc_read(self, conn_handle, value_handle, /) -> None: + """ + Issue a remote read to a connected server for the specified + characteristic or descriptor handle. + + When a value is available, the ``_IRQ_GATTC_READ_RESULT`` event will be + raised. Additionally, the ``_IRQ_GATTC_READ_DONE`` will be raised. + """ + ... + def gattc_exchange_mtu(self, conn_handle, /) -> Incomplete: + """ + Initiate MTU exchange with a connected server, using the preferred MTU + set using ``BLE.config(mtu=value)``. + + The ``_IRQ_MTU_EXCHANGED`` event will be raised when MTU exchange + completes. + + **Note:** MTU exchange is typically initiated by the central. When using + the BlueKitchen stack in the central role, it does not support a remote + peripheral initiating the MTU exchange. NimBLE works for both roles. + """ + ... 
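+    # Illustrative sketch (not part of the generated stub): a peripheral that has
+    # registered its services (see gatts_register_services() below, from which the
+    # `hr` value handle is taken) typically pushes new data to a subscribed client
+    # with a write/notify pair, where `ble` is the BLE() singleton, `heart_rate_bytes`
+    # is the encoded measurement, and `conn_handle` comes from an
+    # _IRQ_CENTRAL_CONNECT event:
+    #
+    #   ble.gatts_write(hr, heart_rate_bytes)
+    #   ble.gatts_notify(conn_handle, hr)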
+ def gatts_read(self, value_handle, /) -> Incomplete: + """ + Reads the local value for this handle (which has either been written by + :meth:`gatts_write ` or by a remote client). + """ + ... + def gatts_write(self, value_handle, data, send_update=False, /) -> None: + """ + Writes the local value for this handle, which can be read by a client. + + If *send_update* is ``True``, then any subscribed clients will be notified + (or indicated, depending on what they're subscribed to and which operations + the characteristic supports) about this write. + """ + ... + def gatts_set_buffer(self, value_handle, len, append=False, /) -> None: + """ + Sets the internal buffer size for a value in bytes. This will limit the + largest possible write that can be received. The default is 20. + + Setting *append* to ``True`` will make all remote writes append to, rather + than replace, the current value. At most *len* bytes can be buffered in + this way. When you use :meth:`gatts_read `, the value will + be cleared after reading. This feature is useful when implementing something + like the Nordic UART Service. + """ + ... + def gatts_register_services(self, services_definition, /) -> Incomplete: + """ + Configures the server with the specified services, replacing any + existing services. + + *services_definition* is a list of **services**, where each **service** is a + two-element tuple containing a UUID and a list of **characteristics**. + + Each **characteristic** is a two-or-three-element tuple containing a UUID, a + **flags** value, and optionally a list of *descriptors*. + + Each **descriptor** is a two-element tuple containing a UUID and a **flags** + value. + + The **flags** are a bitwise-OR combination of the flags defined below. These + set both the behaviour of the characteristic (or descriptor) as well as the + security and privacy requirements. + + The return value is a list (one element per service) of tuples (each element + is a value handle). Characteristics and descriptor handles are flattened + into the same tuple, in the order that they are defined. + + The following example registers two services (Heart Rate, and Nordic UART):: + + HR_UUID = bluetooth.UUID(0x180D) + HR_CHAR = (bluetooth.UUID(0x2A37), bluetooth.FLAG_READ | bluetooth.FLAG_NOTIFY,) + HR_SERVICE = (HR_UUID, (HR_CHAR,),) + UART_UUID = bluetooth.UUID('6E400001-B5A3-F393-E0A9-E50E24DCCA9E') + UART_TX = (bluetooth.UUID('6E400003-B5A3-F393-E0A9-E50E24DCCA9E'), bluetooth.FLAG_READ | bluetooth.FLAG_NOTIFY,) + UART_RX = (bluetooth.UUID('6E400002-B5A3-F393-E0A9-E50E24DCCA9E'), bluetooth.FLAG_WRITE,) + UART_SERVICE = (UART_UUID, (UART_TX, UART_RX,),) + SERVICES = (HR_SERVICE, UART_SERVICE,) + ( (hr,), (tx, rx,), ) = bt.gatts_register_services(SERVICES) + + The three value handles (``hr``, ``tx``, ``rx``) can be used with + :meth:`gatts_read `, :meth:`gatts_write `, :meth:`gatts_notify `, and + :meth:`gatts_indicate `. + + **Note:** Advertising must be stopped before registering services. 
+ + Available flags for characteristics and descriptors are:: + + from micropython import const + _FLAG_BROADCAST = const(0x0001) + _FLAG_READ = const(0x0002) + _FLAG_WRITE_NO_RESPONSE = const(0x0004) + _FLAG_WRITE = const(0x0008) + _FLAG_NOTIFY = const(0x0010) + _FLAG_INDICATE = const(0x0020) + _FLAG_AUTHENTICATED_SIGNED_WRITE = const(0x0040) + + _FLAG_AUX_WRITE = const(0x0100) + _FLAG_READ_ENCRYPTED = const(0x0200) + _FLAG_READ_AUTHENTICATED = const(0x0400) + _FLAG_READ_AUTHORIZED = const(0x0800) + _FLAG_WRITE_ENCRYPTED = const(0x1000) + _FLAG_WRITE_AUTHENTICATED = const(0x2000) + _FLAG_WRITE_AUTHORIZED = const(0x4000) + + As for the IRQs above, any required constants should be added to your Python code. + """ + ... + def irq(self, handler, /) -> int: + """ + Registers a callback for events from the BLE stack. The *handler* takes two + arguments, ``event`` (which will be one of the codes below) and ``data`` + (which is an event-specific tuple of values). + + **Note:** As an optimisation to prevent unnecessary allocations, the ``addr``, + ``adv_data``, ``char_data``, ``notify_data``, and ``uuid`` entries in the + tuples are read-only memoryview instances pointing to :mod:`bluetooth`'s internal + ringbuffer, and are only valid during the invocation of the IRQ handler + function. If your program needs to save one of these values to access after + the IRQ handler has returned (e.g. by saving it in a class instance or global + variable), then it needs to take a copy of the data, either by using ``bytes()`` + or ``bluetooth.UUID()``, like this:: + + connected_addr = bytes(addr) # equivalently: adv_data, char_data, or notify_data + matched_uuid = bluetooth.UUID(uuid) + + For example, the IRQ handler for a scan result might inspect the ``adv_data`` + to decide if it's the correct device, and only then copy the address data to be + used elsewhere in the program. And to print data from within the IRQ handler, + ``print(bytes(addr))`` will be needed. + + An event handler showing all possible events:: + + def bt_irq(event, data): + if event == _IRQ_CENTRAL_CONNECT: + # A central has connected to this peripheral. + conn_handle, addr_type, addr = data + elif event == _IRQ_CENTRAL_DISCONNECT: + # A central has disconnected from this peripheral. + conn_handle, addr_type, addr = data + elif event == _IRQ_GATTS_WRITE: + # A client has written to this characteristic or descriptor. + conn_handle, attr_handle = data + elif event == _IRQ_GATTS_READ_REQUEST: + # A client has issued a read. Note: this is only supported on STM32. + # Return a non-zero integer to deny the read (see below), or zero (or None) + # to accept the read. + conn_handle, attr_handle = data + elif event == _IRQ_SCAN_RESULT: + # A single scan result. + addr_type, addr, adv_type, rssi, adv_data = data + elif event == _IRQ_SCAN_DONE: + # Scan duration finished or manually stopped. + pass + elif event == _IRQ_PERIPHERAL_CONNECT: + # A successful gap_connect(). + conn_handle, addr_type, addr = data + elif event == _IRQ_PERIPHERAL_DISCONNECT: + # Connected peripheral has disconnected. + conn_handle, addr_type, addr = data + elif event == _IRQ_GATTC_SERVICE_RESULT: + # Called for each service found by gattc_discover_services(). + conn_handle, start_handle, end_handle, uuid = data + elif event == _IRQ_GATTC_SERVICE_DONE: + # Called once service discovery is complete. + # Note: Status will be zero on success, implementation-specific value otherwise. 
+ conn_handle, status = data + elif event == _IRQ_GATTC_CHARACTERISTIC_RESULT: + # Called for each characteristic found by gattc_discover_services(). + conn_handle, end_handle, value_handle, properties, uuid = data + elif event == _IRQ_GATTC_CHARACTERISTIC_DONE: + # Called once service discovery is complete. + # Note: Status will be zero on success, implementation-specific value otherwise. + conn_handle, status = data + elif event == _IRQ_GATTC_DESCRIPTOR_RESULT: + # Called for each descriptor found by gattc_discover_descriptors(). + conn_handle, dsc_handle, uuid = data + elif event == _IRQ_GATTC_DESCRIPTOR_DONE: + # Called once service discovery is complete. + # Note: Status will be zero on success, implementation-specific value otherwise. + conn_handle, status = data + elif event == _IRQ_GATTC_READ_RESULT: + # A gattc_read() has completed. + conn_handle, value_handle, char_data = data + elif event == _IRQ_GATTC_READ_DONE: + # A gattc_read() has completed. + # Note: Status will be zero on success, implementation-specific value otherwise. + conn_handle, value_handle, status = data + elif event == _IRQ_GATTC_WRITE_DONE: + # A gattc_write() has completed. + # Note: Status will be zero on success, implementation-specific value otherwise. + conn_handle, value_handle, status = data + elif event == _IRQ_GATTC_NOTIFY: + # A server has sent a notify request. + conn_handle, value_handle, notify_data = data + elif event == _IRQ_GATTC_INDICATE: + # A server has sent an indicate request. + conn_handle, value_handle, notify_data = data + elif event == _IRQ_GATTS_INDICATE_DONE: + # A client has acknowledged the indication. + # Note: Status will be zero on successful acknowledgment, implementation-specific value otherwise. + conn_handle, value_handle, status = data + elif event == _IRQ_MTU_EXCHANGED: + # ATT MTU exchange complete (either initiated by us or the remote device). + conn_handle, mtu = data + elif event == _IRQ_L2CAP_ACCEPT: + # A new channel has been accepted. + # Return a non-zero integer to reject the connection, or zero (or None) to accept. + conn_handle, cid, psm, our_mtu, peer_mtu = data + elif event == _IRQ_L2CAP_CONNECT: + # A new channel is now connected (either as a result of connecting or accepting). + conn_handle, cid, psm, our_mtu, peer_mtu = data + elif event == _IRQ_L2CAP_DISCONNECT: + # Existing channel has disconnected (status is zero), or a connection attempt failed (non-zero status). + conn_handle, cid, psm, status = data + elif event == _IRQ_L2CAP_RECV: + # New data is available on the channel. Use l2cap_recvinto to read. + conn_handle, cid = data + elif event == _IRQ_L2CAP_SEND_READY: + # A previous l2cap_send that returned False has now completed and the channel is ready to send again. + # If status is non-zero, then the transmit buffer overflowed and the application should re-send the data. + conn_handle, cid, status = data + elif event == _IRQ_CONNECTION_UPDATE: + # The remote device has updated connection parameters. + conn_handle, conn_interval, conn_latency, supervision_timeout, status = data + elif event == _IRQ_ENCRYPTION_UPDATE: + # The encryption state has changed (likely as a result of pairing or bonding). + conn_handle, encrypted, authenticated, bonded, key_size = data + elif event == _IRQ_GET_SECRET: + # Return a stored secret. + # If key is None, return the index'th value of this sec_type. + # Otherwise return the corresponding value for this sec_type and key. 
+ sec_type, index, key = data + return value + elif event == _IRQ_SET_SECRET: + # Save a secret to the store for this sec_type and key. + sec_type, key, value = data + return True + elif event == _IRQ_PASSKEY_ACTION: + # Respond to a passkey request during pairing. + # See gap_passkey() for details. + # action will be an action that is compatible with the configured "io" config. + # passkey will be non-zero if action is "numeric comparison". + conn_handle, action, passkey = data + + + The event codes are:: + + from micropython import const + _IRQ_CENTRAL_CONNECT = const(1) + _IRQ_CENTRAL_DISCONNECT = const(2) + _IRQ_GATTS_WRITE = const(3) + _IRQ_GATTS_READ_REQUEST = const(4) + _IRQ_SCAN_RESULT = const(5) + _IRQ_SCAN_DONE = const(6) + _IRQ_PERIPHERAL_CONNECT = const(7) + _IRQ_PERIPHERAL_DISCONNECT = const(8) + _IRQ_GATTC_SERVICE_RESULT = const(9) + _IRQ_GATTC_SERVICE_DONE = const(10) + _IRQ_GATTC_CHARACTERISTIC_RESULT = const(11) + _IRQ_GATTC_CHARACTERISTIC_DONE = const(12) + _IRQ_GATTC_DESCRIPTOR_RESULT = const(13) + _IRQ_GATTC_DESCRIPTOR_DONE = const(14) + _IRQ_GATTC_READ_RESULT = const(15) + _IRQ_GATTC_READ_DONE = const(16) + _IRQ_GATTC_WRITE_DONE = const(17) + _IRQ_GATTC_NOTIFY = const(18) + _IRQ_GATTC_INDICATE = const(19) + _IRQ_GATTS_INDICATE_DONE = const(20) + _IRQ_MTU_EXCHANGED = const(21) + _IRQ_L2CAP_ACCEPT = const(22) + _IRQ_L2CAP_CONNECT = const(23) + _IRQ_L2CAP_DISCONNECT = const(24) + _IRQ_L2CAP_RECV = const(25) + _IRQ_L2CAP_SEND_READY = const(26) + _IRQ_CONNECTION_UPDATE = const(27) + _IRQ_ENCRYPTION_UPDATE = const(28) + _IRQ_GET_SECRET = const(29) + _IRQ_SET_SECRET = const(30) + + For the ``_IRQ_GATTS_READ_REQUEST`` event, the available return codes are:: + + _GATTS_NO_ERROR = const(0x00) + _GATTS_ERROR_READ_NOT_PERMITTED = const(0x02) + _GATTS_ERROR_WRITE_NOT_PERMITTED = const(0x03) + _GATTS_ERROR_INSUFFICIENT_AUTHENTICATION = const(0x05) + _GATTS_ERROR_INSUFFICIENT_AUTHORIZATION = const(0x08) + _GATTS_ERROR_INSUFFICIENT_ENCRYPTION = const(0x0f) + + For the ``_IRQ_PASSKEY_ACTION`` event, the available actions are:: + + _PASSKEY_ACTION_NONE = const(0) + _PASSKEY_ACTION_INPUT = const(2) + _PASSKEY_ACTION_DISPLAY = const(3) + _PASSKEY_ACTION_NUMERIC_COMPARISON = const(4) + + In order to save space in the firmware, these constants are not included on the + :mod:`bluetooth` module. Add the ones that you need from the list above to your + program. + """ + ... + def gap_connect(self, addr_type, addr, scan_duration_ms=2000, min_conn_interval_us=None, max_conn_interval_us=None, /) -> None: + """ + Connect to a peripheral. + + See :meth:`gap_scan ` for details about address types. + + To cancel an outstanding connection attempt early, call + ``gap_connect(None)``. + + On success, the ``_IRQ_PERIPHERAL_CONNECT`` event will be raised. If + cancelling a connection attempt, the ``_IRQ_PERIPHERAL_DISCONNECT`` event + will be raised. + + The device will wait up to *scan_duration_ms* to receive an advertising + payload from the device. + + The connection interval can be configured in **micro** seconds using either + or both of *min_conn_interval_us* and *max_conn_interval_us*. Otherwise a + default interval will be chosen, typically between 30000 and 50000 + microseconds. A shorter interval will increase throughput, at the expense + of power usage. + """ + ... + def gap_advertise(self, interval_us, adv_data=None, *, resp_data=None, connectable=True) -> Incomplete: + """ + Starts advertising at the specified interval (in **micro** seconds). 
This + interval will be rounded down to the nearest 625us. To stop advertising, set + *interval_us* to ``None``. + + *adv_data* and *resp_data* can be any type that implements the buffer + protocol (e.g. ``bytes``, ``bytearray``, ``str``). *adv_data* is included + in all broadcasts, and *resp_data* is send in reply to an active scan. + + **Note:** if *adv_data* (or *resp_data*) is ``None``, then the data passed + to the previous call to ``gap_advertise`` will be re-used. This allows a + broadcaster to resume advertising with just ``gap_advertise(interval_us)``. + To clear the advertising payload pass an empty ``bytes``, i.e. ``b''``. + """ + ... + def config(self, param, /) -> Tuple: + """ + Get or set configuration values of the BLE interface. To get a value the + parameter name should be quoted as a string, and just one parameter is + queried at a time. To set values use the keyword syntax, and one or more + parameter can be set at a time. + + Currently supported values are: + + - ``'mac'``: The current address in use, depending on the current address mode. + This returns a tuple of ``(addr_type, addr)``. + + See :meth:`gatts_write ` for details about address type. + + This may only be queried while the interface is currently active. + + - ``'addr_mode'``: Sets the address mode. Values can be: + + * 0x00 - PUBLIC - Use the controller's public address. + * 0x01 - RANDOM - Use a generated static address. + * 0x02 - RPA - Use resolvable private addresses. + * 0x03 - NRPA - Use non-resolvable private addresses. + + By default the interface mode will use a PUBLIC address if available, otherwise + it will use a RANDOM address. + + - ``'gap_name'``: Get/set the GAP device name used by service 0x1800, + characteristic 0x2a00. This can be set at any time and changed multiple + times. + + - ``'rxbuf'``: Get/set the size in bytes of the internal buffer used to store + incoming events. This buffer is global to the entire BLE driver and so + handles incoming data for all events, including all characteristics. + Increasing this allows better handling of bursty incoming data (for + example scan results) and the ability to receive larger characteristic values. + + - ``'mtu'``: Get/set the MTU that will be used during a ATT MTU exchange. The + resulting MTU will be the minimum of this and the remote device's MTU. + ATT MTU exchange will not happen automatically (unless the remote device initiates + it), and must be manually initiated with + :meth:`gattc_exchange_mtu`. + Use the ``_IRQ_MTU_EXCHANGED`` event to discover the MTU for a given connection. + + - ``'bond'``: Sets whether bonding will be enabled during pairing. When + enabled, pairing requests will set the "bond" flag and the keys will be stored + by both devices. + + - ``'mitm'``: Sets whether MITM-protection is required for pairing. + + - ``'io'``: Sets the I/O capabilities of this device. + + Available options are:: + + _IO_CAPABILITY_DISPLAY_ONLY = const(0) + _IO_CAPABILITY_DISPLAY_YESNO = const(1) + _IO_CAPABILITY_KEYBOARD_ONLY = const(2) + _IO_CAPABILITY_NO_INPUT_OUTPUT = const(3) + _IO_CAPABILITY_KEYBOARD_DISPLAY = const(4) + + - ``'le_secure'``: Sets whether "LE Secure" pairing is required. Default is + false (i.e. allow "Legacy Pairing"). + """ + ... + def active(self, active: Optional[Any] = None, /) -> Incomplete: + """ + Optionally changes the active state of the BLE radio, and returns the + current state. + + The radio must be made active before using any other methods on this class. + """ + ... 
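A minimal sketch of activating the radio, applying some of the ``config`` values described above, and starting connectable advertising. The raw advertising payload (flags plus complete local name) is assembled by hand here purely for illustration::

    import bluetooth

    ble = bluetooth.BLE()
    ble.active(True)                   # the radio must be active before other calls
    ble.config(gap_name="pico-w")      # GAP device name (service 0x1800)

    name = b"pico-w"
    adv = bytes([2, 0x01, 0x06])                  # AD: flags (general discoverable, BR/EDR unsupported)
    adv += bytes([len(name) + 1, 0x09]) + name    # AD: complete local name
    ble.gap_advertise(250_000, adv_data=adv)      # interval in microseconds (~250 ms)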
+ def gattc_discover_services(self, conn_handle, uuid=None, /) -> Incomplete: + """ + Query a connected server for its services. + + Optionally specify a service *uuid* to query for that service only. + + For each service discovered, the ``_IRQ_GATTC_SERVICE_RESULT`` event will + be raised, followed by ``_IRQ_GATTC_SERVICE_DONE`` on completion. + """ + ... + def gap_disconnect(self, conn_handle, /) -> bool: + """ + Disconnect the specified connection handle. This can either be a + central that has connected to this device (if acting as a peripheral) + or a peripheral that was previously connected to by this device (if acting + as a central). + + On success, the ``_IRQ_PERIPHERAL_DISCONNECT`` or ``_IRQ_CENTRAL_DISCONNECT`` + event will be raised. + + Returns ``False`` if the connection handle wasn't connected, and ``True`` + otherwise. + """ + ... + def gattc_discover_descriptors(self, conn_handle, start_handle, end_handle, /) -> Incomplete: + """ + Query a connected server for descriptors in the specified range. + + For each descriptor discovered, the ``_IRQ_GATTC_DESCRIPTOR_RESULT`` event + will be raised, followed by ``_IRQ_GATTC_DESCRIPTOR_DONE`` on completion. + """ + ... + def gattc_discover_characteristics(self, conn_handle, start_handle, end_handle, uuid=None, /) -> Incomplete: + """ + Query a connected server for characteristics in the specified range. + + Optionally specify a characteristic *uuid* to query for that + characteristic only. + + You can use ``start_handle=1``, ``end_handle=0xffff`` to search for a + characteristic in any service. + + For each characteristic discovered, the ``_IRQ_GATTC_CHARACTERISTIC_RESULT`` + event will be raised, followed by ``_IRQ_GATTC_CHARACTERISTIC_DONE`` on completion. + """ + ... + def gap_scan(self, duration_ms, interval_us=1280000, window_us=11250, active=False, /) -> Incomplete: + """ + Run a scan operation lasting for the specified duration (in **milli** seconds). + + To scan indefinitely, set *duration_ms* to ``0``. + + To stop scanning, set *duration_ms* to ``None``. + + Use *interval_us* and *window_us* to optionally configure the duty cycle. + The scanner will run for *window_us* **micro** seconds every *interval_us* + **micro** seconds for a total of *duration_ms* **milli** seconds. The default + interval and window are 1.28 seconds and 11.25 milliseconds respectively + (background scanning). + + For each scan result the ``_IRQ_SCAN_RESULT`` event will be raised, with event + data ``(addr_type, addr, adv_type, rssi, adv_data)``. + + ``addr_type`` values indicate public or random addresses: + * 0x00 - PUBLIC + * 0x01 - RANDOM (either static, RPA, or NRPA, the type is encoded in the address itself) + + ``adv_type`` values correspond to the Bluetooth Specification: + + * 0x00 - ADV_IND - connectable and scannable undirected advertising + * 0x01 - ADV_DIRECT_IND - connectable directed advertising + * 0x02 - ADV_SCAN_IND - scannable undirected advertising + * 0x03 - ADV_NONCONN_IND - non-connectable undirected advertising + * 0x04 - SCAN_RSP - scan response + + ``active`` can be set ``True`` if you want to receive scan responses in the results. + + When scanning is stopped (either due to the duration finishing or when + explicitly stopped), the ``_IRQ_SCAN_DONE`` event will be raised. + """ + ... + def __init__(self) -> None: ... 
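Tying the scanning and IRQ machinery above together, a minimal central-role sketch that collects scan results. Note that ``addr`` is copied with ``bytes()`` before being stored, because the IRQ tuple entries are read-only memoryviews into the internal ring buffer::

    import bluetooth
    from micropython import const

    _IRQ_SCAN_RESULT = const(5)
    _IRQ_SCAN_DONE = const(6)

    found = []

    def bt_irq(event, data):
        if event == _IRQ_SCAN_RESULT:
            addr_type, addr, adv_type, rssi, adv_data = data
            found.append((addr_type, bytes(addr), rssi))   # copy addr out of the ring buffer
        elif event == _IRQ_SCAN_DONE:
            print("scan finished:", len(found), "results")

    ble = bluetooth.BLE()
    ble.active(True)
    ble.irq(bt_irq)
    ble.gap_scan(5000, 1280000, 11250, True)   # 5 s, default duty cycle, active scan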
diff --git a/.vscode/Pico-W-Stub/ucollections.pyi b/.vscode/Pico-W-Stub/ucollections.pyi new file mode 100644 index 0000000..b4597a6 --- /dev/null +++ b/.vscode/Pico-W-Stub/ucollections.pyi @@ -0,0 +1,105 @@ +""" +Collection and container types. + +MicroPython module: https://docs.micropython.org/en/v1.21.0/library/collections.html + +CPython module: :mod:`python:collections` https://docs.python.org/3/library/collections.html . + +This module implements advanced collection and container types to +hold/accumulate various objects. +""" +from _typeshed import Incomplete, Incomplete as Incomplete +from stdlib.collections import OrderedDict as stdlib_OrderedDict, deque as stdlib_deque +from typing_extensions import NamedTuple as stdlib_NamedTuple +from typing import Any, Optional + +def namedtuple(name, fields) -> stdlib_NamedTuple: + """ + This is factory function to create a new namedtuple type with a specific + name and set of fields. A namedtuple is a subclass of tuple which allows + to access its fields not just by numeric index, but also with an attribute + access syntax using symbolic field names. Fields is a sequence of strings + specifying field names. For compatibility with CPython it can also be a + a string with space-separated field named (but this is less efficient). + Example of use:: + + from collections import namedtuple + + MyTuple = namedtuple("MyTuple", ("id", "name")) + t1 = MyTuple(1, "foo") + t2 = MyTuple(2, "bar") + print(t1.name) + assert t2.name == t2[1] + """ + ... + +class OrderedDict(stdlib_OrderedDict): + """ + ``dict`` type subclass which remembers and preserves the order of keys + added. When ordered dict is iterated over, keys/items are returned in + the order they were added:: + + from collections import OrderedDict + + # To make benefit of ordered keys, OrderedDict should be initialized + # from sequence of (key, value) pairs. + d = OrderedDict([("z", 1), ("a", 2)]) + # More items can be added as usual + d["w"] = 5 + d["b"] = 3 + for k, v in d.items(): + print(k, v) + + Output:: + + z 1 + a 2 + w 5 + b 3 + """ + + def popitem(self, *args, **kwargs) -> Incomplete: ... + def pop(self, *args, **kwargs) -> Incomplete: ... + def values(self, *args, **kwargs) -> Incomplete: ... + def setdefault(self, *args, **kwargs) -> Incomplete: ... + def update(self, *args, **kwargs) -> Incomplete: ... + def copy(self, *args, **kwargs) -> Incomplete: ... + def clear(self, *args, **kwargs) -> Incomplete: ... + def keys(self, *args, **kwargs) -> Incomplete: ... + def get(self, *args, **kwargs) -> Incomplete: ... + def items(self, *args, **kwargs) -> Incomplete: ... + @classmethod + def fromkeys(cls, *args, **kwargs) -> Incomplete: ... + def __init__(self, *args, **kwargs) -> None: ... + +class deque(stdlib_deque): + """ + Deques (double-ended queues) are a list-like container that support O(1) + appends and pops from either side of the deque. New deques are created + using the following arguments: + + - *iterable* must be the empty tuple, and the new deque is created empty. + + - *maxlen* must be specified and the deque will be bounded to this + maximum length. Once the deque is full, any new items added will + discard items from the opposite end. + + - The optional *flags* can be 1 to check for overflow when adding items. + + As well as supporting `bool` and `len`, deque objects have the following + methods: + """ + + def popleft(self) -> Incomplete: + """ + Remove and return an item from the left side of the deque. + Raises IndexError if no items are present. + """ + ... 
+ def append(self, x) -> Incomplete: + """ + Add *x* to the right side of the deque. + Raises IndexError if overflow checking is enabled and there is no more room left. + """ + ... + def __init__(self, iterable, maxlen, flags: Optional[Any] = None) -> None: ... diff --git a/.vscode/Pico-W-Stub/ucryptolib.pyi b/.vscode/Pico-W-Stub/ucryptolib.pyi new file mode 100644 index 0000000..eeb2919 --- /dev/null +++ b/.vscode/Pico-W-Stub/ucryptolib.pyi @@ -0,0 +1,42 @@ +""" +Cryptographic ciphers. + +MicroPython module: https://docs.micropython.org/en/v1.21.0/library/cryptolib.html +""" +from _typeshed import Incomplete, Incomplete as Incomplete +from typing import Any, Optional + +class aes: + def encrypt(self, in_buf, out_buf: Optional[Any] = None) -> Incomplete: + """ + Encrypt *in_buf*. If no *out_buf* is given result is returned as a + newly allocated `bytes` object. Otherwise, result is written into + mutable buffer *out_buf*. *in_buf* and *out_buf* can also refer + to the same mutable buffer, in which case data is encrypted in-place. + """ + ... + def decrypt(self, in_buf, out_buf: Optional[Any] = None) -> Incomplete: + """ + Like `encrypt()`, but for decryption. + """ + ... + def __init__(self, key, mode, IV: Optional[Any] = None) -> None: + """ + Initialize cipher object, suitable for encryption/decryption. Note: + after initialization, cipher object can be use only either for + encryption or decryption. Running decrypt() operation after encrypt() + or vice versa is not supported. + + Parameters are: + + * *key* is an encryption/decryption key (bytes-like). + * *mode* is: + + * ``1`` (or ``cryptolib.MODE_ECB`` if it exists) for Electronic Code Book (ECB). + * ``2`` (or ``cryptolib.MODE_CBC`` if it exists) for Cipher Block Chaining (CBC). + * ``6`` (or ``cryptolib.MODE_CTR`` if it exists) for Counter mode (CTR). + + * *IV* is an initialization vector for CBC mode. + * For Counter mode, *IV* is the initial value for the counter. + """ + ... diff --git a/.vscode/Pico-W-Stub/uctypes.pyi b/.vscode/Pico-W-Stub/uctypes.pyi new file mode 100644 index 0000000..c51909b --- /dev/null +++ b/.vscode/Pico-W-Stub/uctypes.pyi @@ -0,0 +1,88 @@ +""" +Access binary data in a structured way. + +MicroPython module: https://docs.micropython.org/en/v1.21.0/library/uctypes.html + +This module implements "foreign data interface" for MicroPython. The idea +behind it is similar to CPython's ``ctypes`` modules, but the actual API is +different, streamlined and optimized for small size. The basic idea of the +module is to define data structure layout with about the same power as the +C language allows, and then access it using familiar dot-syntax to reference +sub-fields. +""" +from _typeshed import Incomplete, Incomplete as Incomplete + +VOID: int +NATIVE: int +PTR: int +SHORT: int +LONGLONG: int +INT8: int +LITTLE_ENDIAN: int +LONG: int +UINT: int +ULONG: int +ULONGLONG: int +USHORT: int +UINT8: int +UINT16: int +UINT32: int +UINT64: int +INT64: int +BFUINT16: int +BFUINT32: int +BFUINT8: int +BFINT8: int +ARRAY: int +BFINT16: int +BFINT32: int +BF_LEN: int +INT: int +INT16: int +INT32: int +FLOAT64: int +BF_POS: int +BIG_ENDIAN: int +FLOAT32: int + +def sizeof(struct, layout_type=NATIVE, /) -> int: + """ + Return size of data structure in bytes. The *struct* argument can be + either a structure class or a specific instantiated structure object + (or its aggregate field). + """ + ... + +def bytes_at(addr, size) -> bytes: + """ + Capture memory at the given address and size as bytes object. 
As bytes + object is immutable, memory is actually duplicated and copied into + bytes object, so if memory contents change later, created object + retains original value. + """ + ... + +def bytearray_at(addr, size) -> bytearray: + """ + Capture memory at the given address and size as bytearray object. + Unlike bytes_at() function above, memory is captured by reference, + so it can be both written too, and you will access current value + at the given memory address. + """ + ... + +def addressof(obj) -> int: + """ + Return address of an object. Argument should be bytes, bytearray or + other object supporting buffer protocol (and address of this buffer + is what actually returned). + """ + ... + +class struct: + """ + Instantiate a "foreign data structure" object based on structure address in + memory, descriptor (encoded as a dictionary), and layout type (see below). + """ + + def __init__(self, addr, descriptor, layout_type=NATIVE, /) -> None: ... diff --git a/.vscode/Pico-W-Stub/uerrno.pyi b/.vscode/Pico-W-Stub/uerrno.pyi new file mode 100644 index 0000000..b1c95b3 --- /dev/null +++ b/.vscode/Pico-W-Stub/uerrno.pyi @@ -0,0 +1,36 @@ +""" +System error codes. + +MicroPython module: https://docs.micropython.org/en/v1.21.0/library/errno.html + +CPython module: :mod:`python:errno` https://docs.python.org/3/library/errno.html . + +This module provides access to symbolic error codes for `OSError` exception. +A particular inventory of codes depends on :term:`MicroPython port`. +""" +from _typeshed import Incomplete, Incomplete as Incomplete +from typing import Dict + +ENOBUFS: int +ENODEV: int +ENOENT: int +EISDIR: int +EIO: int +EINVAL: int +EPERM: int +ETIMEDOUT: int +ENOMEM: int +EOPNOTSUPP: int +ENOTCONN: int +errorcode: dict +EAGAIN: int +EALREADY: int +EBADF: int +EADDRINUSE: int +EACCES: int +EINPROGRESS: int +EEXIST: int +EHOSTUNREACH: int +ECONNABORTED: int +ECONNRESET: int +ECONNREFUSED: int diff --git a/.vscode/Pico-W-Stub/uhashlib.pyi b/.vscode/Pico-W-Stub/uhashlib.pyi new file mode 100644 index 0000000..11bff03 --- /dev/null +++ b/.vscode/Pico-W-Stub/uhashlib.pyi @@ -0,0 +1,45 @@ +""" +Hashing algorithms. + +MicroPython module: https://docs.micropython.org/en/v1.21.0/library/hashlib.html + +CPython module: :mod:`python:hashlib` https://docs.python.org/3/library/hashlib.html . + +This module implements binary data hashing algorithms. The exact inventory +of available algorithms depends on a board. Among the algorithms which may +be implemented: + +* SHA256 - The current generation, modern hashing algorithm (of SHA2 series). + It is suitable for cryptographically-secure purposes. Included in the + MicroPython core and any board is recommended to provide this, unless + it has particular code size constraints. + +* SHA1 - A previous generation algorithm. Not recommended for new usages, + but SHA1 is a part of number of Internet standards and existing + applications, so boards targeting network connectivity and + interoperability will try to provide this. + +* MD5 - A legacy algorithm, not considered cryptographically secure. Only + selected boards, targeting interoperability with legacy applications, + will offer this. +""" +from _typeshed import Incomplete, Incomplete as Incomplete +from typing import Any, Optional + +class sha256: + """ + Create an SHA256 hasher object and optionally feed ``data`` into it. + """ + + def digest(self, *args, **kwargs) -> Incomplete: ... + def update(self, *args, **kwargs) -> Incomplete: ... + def __init__(self, data: Optional[Any] = None) -> None: ... 
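A short sketch of the hashing API stubbed above: feed data incrementally and render the 32-byte digest as hex::

    import hashlib
    import binascii

    h = hashlib.sha256(b"hello ")
    h.update(b"pico")
    print(binascii.hexlify(h.digest()))   # hex-encoded SHA-256 digest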
+ +class sha1: + """ + Create an SHA1 hasher object and optionally feed ``data`` into it. + """ + + def digest(self, *args, **kwargs) -> Incomplete: ... + def update(self, *args, **kwargs) -> Incomplete: ... + def __init__(self, data: Optional[Any] = None) -> None: ... diff --git a/.vscode/Pico-W-Stub/uheapq.pyi b/.vscode/Pico-W-Stub/uheapq.pyi new file mode 100644 index 0000000..0ae5364 --- /dev/null +++ b/.vscode/Pico-W-Stub/uheapq.pyi @@ -0,0 +1,35 @@ +""" +Heap queue algorithm. + +MicroPython module: https://docs.micropython.org/en/v1.21.0/library/heapq.html + +CPython module: :mod:`python:heapq` https://docs.python.org/3/library/heapq.html . + +This module implements the +`min heap queue algorithm `_. + +A heap queue is essentially a list that has its elements stored in such a way +that the first item of the list is always the smallest. +""" +from _typeshed import Incomplete, Incomplete as Incomplete + +def heappop(heap) -> Incomplete: + """ + Pop the first item from the ``heap``, and return it. Raise ``IndexError`` if + ``heap`` is empty. + + The returned item will be the smallest item in the ``heap``. + """ + ... + +def heappush(heap, item) -> Incomplete: + """ + Push the ``item`` onto the ``heap``. + """ + ... + +def heapify(x) -> Incomplete: + """ + Convert the list ``x`` into a heap. This is an in-place operation. + """ + ... diff --git a/.vscode/Pico-W-Stub/uio.pyi b/.vscode/Pico-W-Stub/uio.pyi new file mode 100644 index 0000000..343844d --- /dev/null +++ b/.vscode/Pico-W-Stub/uio.pyi @@ -0,0 +1,64 @@ +""" +Input/output streams. + +MicroPython module: https://docs.micropython.org/en/v1.21.0/library/io.html + +CPython module: :mod:`python:io` https://docs.python.org/3/library/io.html . + +This module contains additional types of `stream` (file-like) objects +and helper functions. +""" +from _typeshed import Incomplete, Incomplete as Incomplete +from stdlib.io import * +from typing import Any, IO, Optional + +def open(name, mode="r", **kwargs) -> Incomplete: + """ + Open a file. Builtin ``open()`` function is aliased to this function. + All ports (which provide access to file system) are required to support + *mode* parameter, but support for other arguments vary by port. + """ + ... + +class IOBase: + def __init__(self, *argv, **kwargs) -> None: ... + +class StringIO(IO): + def write(self, *args, **kwargs) -> Incomplete: ... + def flush(self, *args, **kwargs) -> Incomplete: ... + def getvalue(self, *args, **kwargs) -> Incomplete: ... + def seek(self, *args, **kwargs) -> Incomplete: ... + def tell(self, *args, **kwargs) -> Incomplete: ... + def readline(self, *args, **kwargs) -> Incomplete: ... + def close(self, *args, **kwargs) -> Incomplete: ... + def read(self, *args, **kwargs) -> Incomplete: ... + def readinto(self, *args, **kwargs) -> Incomplete: ... + def __init__(self, string: Optional[Any] = None) -> None: ... + +class BytesIO(IO): + """ + In-memory file-like objects for input/output. `StringIO` is used for + text-mode I/O (similar to a normal file opened with "t" modifier). + `BytesIO` is used for binary-mode I/O (similar to a normal file + opened with "b" modifier). Initial contents of file-like objects + can be specified with *string* parameter (should be normal string + for `StringIO` or bytes object for `BytesIO`). All the usual file + methods like ``read()``, ``write()``, ``seek()``, ``flush()``, + ``close()`` are available on these objects, and additionally, a + following method: + """ + + def write(self, *args, **kwargs) -> Incomplete: ... 
+ def flush(self, *args, **kwargs) -> Incomplete: ... + def getvalue(self) -> Incomplete: + """ + Get the current contents of the underlying buffer which holds data. + """ + ... + def seek(self, *args, **kwargs) -> Incomplete: ... + def tell(self, *args, **kwargs) -> Incomplete: ... + def readline(self, *args, **kwargs) -> Incomplete: ... + def close(self, *args, **kwargs) -> Incomplete: ... + def read(self, *args, **kwargs) -> Incomplete: ... + def readinto(self, *args, **kwargs) -> Incomplete: ... + def __init__(self, string: Optional[Any] = None) -> None: ... diff --git a/.vscode/Pico-W-Stub/ujson.pyi b/.vscode/Pico-W-Stub/ujson.pyi new file mode 100644 index 0000000..f77f758 --- /dev/null +++ b/.vscode/Pico-W-Stub/ujson.pyi @@ -0,0 +1,47 @@ +""" +JSON encoding and decoding. + +MicroPython module: https://docs.micropython.org/en/v1.21.0/library/json.html + +CPython module: :mod:`python:json` https://docs.python.org/3/library/json.html . + +This modules allows to convert between Python objects and the JSON +data format. +""" +from _typeshed import Incomplete, Incomplete as Incomplete + +def loads(str) -> Incomplete: + """ + Parse the JSON *str* and return an object. Raises :exc:`ValueError` if the + string is not correctly formed. + """ + ... + +def load(stream) -> Incomplete: + """ + Parse the given *stream*, interpreting it as a JSON string and + deserialising the data to a Python object. The resulting object is + returned. + + Parsing continues until end-of-file is encountered. + A :exc:`ValueError` is raised if the data in *stream* is not correctly formed. + """ + ... + +def dumps(obj, separators=None) -> str: + """ + Return *obj* represented as a JSON string. + + The arguments have the same meaning as in `dump`. + """ + ... + +def dump(obj, stream, separators=None) -> Incomplete: + """ + Serialise *obj* to a JSON string, writing it to the given *stream*. + + If specified, separators should be an ``(item_separator, key_separator)`` + tuple. The default is ``(', ', ': ')``. To get the most compact JSON + representation, you should specify ``(',', ':')`` to eliminate whitespace. + """ + ... diff --git a/.vscode/Pico-W-Stub/umachine.pyi b/.vscode/Pico-W-Stub/umachine.pyi new file mode 100644 index 0000000..66b5aa7 --- /dev/null +++ b/.vscode/Pico-W-Stub/umachine.pyi @@ -0,0 +1,1193 @@ +""" +Functions related to the hardware. + +MicroPython module: https://docs.micropython.org/en/v1.21.0/library/machine.html + +The ``machine`` module contains specific functions related to the hardware +on a particular board. Most functions in this module allow to achieve direct +and unrestricted access to and control of hardware blocks on a system +(like CPU, timers, buses, etc.). Used incorrectly, this can lead to +malfunction, lockups, crashes of your board, and in extreme cases, hardware +damage. +""" +from _typeshed import Incomplete, Incomplete as Incomplete +from typing import Any, Callable, List, NoReturn, Optional, Tuple, Union + +WDT_RESET: int +PWRON_RESET: int + +def dht_readinto(*args, **kwargs) -> Incomplete: ... +def enable_irq(state) -> Incomplete: + """ + Re-enable interrupt requests. + The *state* parameter should be the value that was returned from the most + recent call to the `disable_irq()` function. + """ + ... + +def disable_irq() -> Incomplete: + """ + Disable interrupt requests. + Returns the previous IRQ state which should be considered an opaque value. 
+ This return value should be passed to the `enable_irq()` function to restore + interrupts to their original state, before `disable_irq()` was called. + """ + ... + +def bitstream(pin, encoding, timing, data, /) -> Incomplete: + """ + Transmits *data* by bit-banging the specified *pin*. The *encoding* argument + specifies how the bits are encoded, and *timing* is an encoding-specific timing + specification. + + The supported encodings are: + + - ``0`` for "high low" pulse duration modulation. This will transmit 0 and + 1 bits as timed pulses, starting with the most significant bit. + The *timing* must be a four-tuple of nanoseconds in the format + ``(high_time_0, low_time_0, high_time_1, low_time_1)``. For example, + ``(400, 850, 800, 450)`` is the timing specification for WS2812 RGB LEDs + at 800kHz. + + The accuracy of the timing varies between ports. On Cortex M0 at 48MHz, it is + at best +/- 120ns, however on faster MCUs (ESP8266, ESP32, STM32, Pyboard), it + will be closer to +/-30ns. + + ``Note:`` For controlling WS2812 / NeoPixel strips, see the :mod:`neopixel` + module for a higher-level API. + """ + ... + +def deepsleep(time_ms: Optional[Any] = None) -> NoReturn: + """ + Stops execution in an attempt to enter a low power state. + + If *time_ms* is specified then this will be the maximum time in milliseconds that + the sleep will last for. Otherwise the sleep can last indefinitely. + + With or without a timeout, execution may resume at any time if there are events + that require processing. Such events, or wake sources, should be configured before + sleeping, like `Pin` change or `RTC` timeout. + + The precise behaviour and power-saving capabilities of lightsleep and deepsleep is + highly dependent on the underlying hardware, but the general properties are: + + * A lightsleep has full RAM and state retention. Upon wake execution is resumed + from the point where the sleep was requested, with all subsystems operational. + + * A deepsleep may not retain RAM or any other state of the system (for example + peripherals or network interfaces). Upon wake execution is resumed from the main + script, similar to a hard or power-on reset. The `reset_cause()` function will + return `machine.DEEPSLEEP` and this can be used to distinguish a deepsleep wake + from other resets. + """ + ... + +def bootloader(value: Optional[Any] = None) -> None: + """ + Reset the device and enter its bootloader. This is typically used to put the + device into a state where it can be programmed with new firmware. + + Some ports support passing in an optional *value* argument which can control + which bootloader to enter, what to pass to it, or other things. + """ + ... + +def soft_reset() -> NoReturn: + """ + Performs a soft reset of the interpreter, deleting all Python objects and + resetting the Python heap. It tries to retain the method by which the user + is connected to the MicroPython REPL (eg serial, USB, Wifi). + """ + ... + +def reset() -> NoReturn: + """ + Resets the device in a manner similar to pushing the external RESET + button. + """ + ... + +def freq(hz: Optional[Any] = None) -> Incomplete: + """ + Returns the CPU frequency in hertz. + + On some ports this can also be used to set the CPU frequency by passing in *hz*. + """ + ... + +def reset_cause() -> int: + """ + Get the reset cause. See :ref:`constants ` for the possible return values. + """ + ... + +def idle() -> Incomplete: + """ + Gates the clock to the CPU, useful to reduce power consumption at any time during + short or long periods. 
Peripherals continue working and execution resumes as soon + as any interrupt is triggered (on many ports this includes system timer + interrupt occurring at regular intervals on the order of millisecond). + """ + ... + +def time_pulse_us(pin, pulse_level, timeout_us=1000000, /) -> int: + """ + Time a pulse on the given *pin*, and return the duration of the pulse in + microseconds. The *pulse_level* argument should be 0 to time a low pulse + or 1 to time a high pulse. + + If the current input value of the pin is different to *pulse_level*, + the function first (*) waits until the pin input becomes equal to *pulse_level*, + then (**) times the duration that the pin is equal to *pulse_level*. + If the pin is already equal to *pulse_level* then timing starts straight away. + + The function will return -2 if there was timeout waiting for condition marked + (*) above, and -1 if there was timeout during the main measurement, marked (**) + above. The timeout is the same for both cases and given by *timeout_us* (which + is in microseconds). + """ + ... + +def lightsleep(time_ms: Optional[Any] = None) -> Incomplete: + """ + Stops execution in an attempt to enter a low power state. + + If *time_ms* is specified then this will be the maximum time in milliseconds that + the sleep will last for. Otherwise the sleep can last indefinitely. + + With or without a timeout, execution may resume at any time if there are events + that require processing. Such events, or wake sources, should be configured before + sleeping, like `Pin` change or `RTC` timeout. + + The precise behaviour and power-saving capabilities of lightsleep and deepsleep is + highly dependent on the underlying hardware, but the general properties are: + + * A lightsleep has full RAM and state retention. Upon wake execution is resumed + from the point where the sleep was requested, with all subsystems operational. + + * A deepsleep may not retain RAM or any other state of the system (for example + peripherals or network interfaces). Upon wake execution is resumed from the main + script, similar to a hard or power-on reset. The `reset_cause()` function will + return `machine.DEEPSLEEP` and this can be used to distinguish a deepsleep wake + from other resets. + """ + ... + +def unique_id() -> bytes: + """ + Returns a byte string with a unique identifier of a board/SoC. It will vary + from a board/SoC instance to another, if underlying hardware allows. Length + varies by hardware (so use substring of a full value if you expect a short + ID). In some MicroPython ports, ID corresponds to the network MAC address. + """ + ... + +class WDT: + """ + Create a WDT object and start it. The timeout must be given in milliseconds. + Once it is running the timeout cannot be changed and the WDT cannot be stopped either. + + Notes: On the esp8266 a timeout cannot be specified, it is determined by the underlying system. + On rp2040 devices, the maximum timeout is 8388 ms. + """ + + def feed(self) -> None: + """ + Feed the WDT to prevent it from resetting the system. The application + should place this call in a sensible place ensuring that the WDT is + only fed after verifying that everything is functioning correctly. + """ + ... + def __init__(self, id=0, timeout=5000) -> None: ... 
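A minimal sketch of the watchdog described above: create it with a timeout and feed it from the main loop, so that a stalled loop resets the board::

    import time
    from machine import WDT

    wdt = WDT(timeout=8000)    # milliseconds; rp2 caps this at 8388 ms (see note above)

    while True:
        # ... the application's real work goes here ...
        wdt.feed()             # prove the loop is still alive
        time.sleep_ms(500)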
+ +mem8: Incomplete +mem32: Incomplete +mem16: Incomplete + +class PWM: + """ + Construct and return a new PWM object using the following parameters: + + - *dest* is the entity on which the PWM is output, which is usually a + :ref:`machine.Pin ` object, but a port may allow other values, + like integers. + - *freq* should be an integer which sets the frequency in Hz for the + PWM cycle. + - *duty_u16* sets the duty cycle as a ratio ``duty_u16 / 65535``. + - *duty_ns* sets the pulse width in nanoseconds. + - *invert* inverts the respective output if the value is True + + Setting *freq* may affect other PWM objects if the objects share the same + underlying PWM generator (this is hardware specific). + Only one of *duty_u16* and *duty_ns* should be specified at a time. + *invert* is not available at all ports. + """ + + def duty_u16(self, value: Optional[Any] = None) -> int: + """ + Get or set the current duty cycle of the PWM output, as an unsigned 16-bit + value in the range 0 to 65535 inclusive. + + With no arguments the duty cycle is returned. + + With a single *value* argument the duty cycle is set to that value, measured + as the ratio ``value / 65535``. + """ + ... + def freq(self, value: Optional[Any] = None) -> Incomplete: + """ + Get or set the current frequency of the PWM output. + + With no arguments the frequency in Hz is returned. + + With a single *value* argument the frequency is set to that value in Hz. The + method may raise a ``ValueError`` if the frequency is outside the valid range. + """ + ... + def init(self, *, freq, duty_u16, duty_ns) -> None: + """ + Modify settings for the PWM object. See the above constructor for details + about the parameters. + """ + ... + def duty_ns(self, value: Optional[Any] = None) -> int: + """ + Get or set the current pulse width of the PWM output, as a value in nanoseconds. + + With no arguments the pulse width in nanoseconds is returned. + + With a single *value* argument the pulse width is set to that value. + """ + ... + def deinit(self) -> None: + """ + Disable the PWM output. + """ + ... + def __init__(self, dest, *, freq=0, duty=0, duty_u16=0, duty_ns=0, invert=False) -> None: ... + +class ADC: + """ + Access the ADC associated with a source identified by *id*. This + *id* may be an integer (usually specifying a channel number), a + :ref:`Pin ` object, or other value supported by the + underlying machine. + + If additional keyword-arguments are given then they will configure + various aspects of the ADC. If not given, these settings will take + previous or default values. The settings are: + + - *sample_ns* is the sampling time in nanoseconds. + + - *atten* specifies the input attenuation. + """ + + CORE_TEMP: int + def read_u16(self) -> int: + """ + Take an analog reading and return an integer in the range 0-65535. + The return value represents the raw reading taken by the ADC, scaled + such that the minimum value is 0 and the maximum value is 65535. + """ + ... + def __init__(self, id, *, sample_ns: Optional[int] = 0, atten: Optional[int] = ATTN_0DB) -> None: ... + +class I2C: + """ + Construct and return a new I2C object using the following parameters: + + - *id* identifies a particular I2C peripheral. Allowed values for + depend on the particular port/board + - *scl* should be a pin object specifying the pin to use for SCL. + - *sda* should be a pin object specifying the pin to use for SDA. + - *freq* should be an integer which sets the maximum frequency + for SCL. 
+ - *timeout* is the maximum time in microseconds to allow for I2C + transactions. This parameter is not allowed on some ports. + + Note that some ports/boards will have default values of *scl* and *sda* + that can be changed in this constructor. Others will have fixed values + of *scl* and *sda* that cannot be changed. + """ + + def readfrom_mem_into(self, addr, memaddr, buf, *, addrsize=8) -> None: + """ + Read into *buf* from the peripheral specified by *addr* starting from the + memory address specified by *memaddr*. The number of bytes read is the + length of *buf*. + The argument *addrsize* specifies the address size in bits (on ESP8266 + this argument is not recognised and the address size is always 8 bits). + + The method returns ``None``. + """ + ... + def readfrom_into(self, addr, buf, stop=True, /) -> None: + """ + Read into *buf* from the peripheral specified by *addr*. + The number of bytes read will be the length of *buf*. + If *stop* is true then a STOP condition is generated at the end of the transfer. + + The method returns ``None``. + """ + ... + def readfrom_mem(self, addr, memaddr, nbytes, *, addrsize=8) -> bytes: + """ + Read *nbytes* from the peripheral specified by *addr* starting from the memory + address specified by *memaddr*. + The argument *addrsize* specifies the address size in bits. + Returns a `bytes` object with the data read. + """ + ... + def writeto_mem(self, addr, memaddr, buf, *, addrsize=8) -> None: + """ + Write *buf* to the peripheral specified by *addr* starting from the + memory address specified by *memaddr*. + The argument *addrsize* specifies the address size in bits (on ESP8266 + this argument is not recognised and the address size is always 8 bits). + + The method returns ``None``. + """ + ... + def scan(self) -> List: + """ + Scan all I2C addresses between 0x08 and 0x77 inclusive and return a list of + those that respond. A device responds if it pulls the SDA line low after + its address (including a write bit) is sent on the bus. + """ + ... + def writeto(self, addr, buf, stop=True, /) -> int: + """ + Write the bytes from *buf* to the peripheral specified by *addr*. If a + NACK is received following the write of a byte from *buf* then the + remaining bytes are not sent. If *stop* is true then a STOP condition is + generated at the end of the transfer, even if a NACK is received. + The function returns the number of ACKs that were received. + """ + ... + def writevto(self, addr, vector, stop=True, /) -> int: + """ + Write the bytes contained in *vector* to the peripheral specified by *addr*. + *vector* should be a tuple or list of objects with the buffer protocol. + The *addr* is sent once and then the bytes from each object in *vector* + are written out sequentially. The objects in *vector* may be zero bytes + in length in which case they don't contribute to the output. + + If a NACK is received following the write of a byte from one of the + objects in *vector* then the remaining bytes, and any remaining objects, + are not sent. If *stop* is true then a STOP condition is generated at + the end of the transfer, even if a NACK is received. The function + returns the number of ACKs that were received. + """ + ... + def start(self) -> None: + """ + Generate a START condition on the bus (SDA transitions to low while SCL is high). + """ + ... + def readfrom(self, addr, nbytes, stop=True, /) -> bytes: + """ + Read *nbytes* from the peripheral specified by *addr*. + If *stop* is true then a STOP condition is generated at the end of the transfer. 
+ Returns a `bytes` object with the data read. + """ + ... + def readinto(self, buf, nack=True, /) -> Incomplete: + """ + Reads bytes from the bus and stores them into *buf*. The number of bytes + read is the length of *buf*. An ACK will be sent on the bus after + receiving all but the last byte. After the last byte is received, if *nack* + is true then a NACK will be sent, otherwise an ACK will be sent (and in this + case the peripheral assumes more bytes are going to be read in a later call). + """ + ... + def init(self, scl, sda, *, freq=400000) -> None: + """ + Initialise the I2C bus with the given arguments: + + - *scl* is a pin object for the SCL line + - *sda* is a pin object for the SDA line + - *freq* is the SCL clock rate + + In the case of hardware I2C the actual clock frequency may be lower than the + requested frequency. This is dependent on the platform hardware. The actual + rate may be determined by printing the I2C object. + """ + ... + def stop(self) -> None: + """ + Generate a STOP condition on the bus (SDA transitions to high while SCL is high). + """ + ... + def write(self, buf) -> int: + """ + Write the bytes from *buf* to the bus. Checks that an ACK is received + after each byte and stops transmitting the remaining bytes if a NACK is + received. The function returns the number of ACKs that were received. + """ + ... + def __init__( + self, + id: Union[int, str] = -1, + *, + scl: Optional[Union[Pin, str]] = None, + sda: Optional[Union[Pin, str]] = None, + freq=400_000, + timeout=50000, + ) -> None: ... + +class I2S: + """ + Construct an I2S object of the given id: + + - ``id`` identifies a particular I2S bus; it is board and port specific + + Keyword-only parameters that are supported on all ports: + + - ``sck`` is a pin object for the serial clock line + - ``ws`` is a pin object for the word select line + - ``sd`` is a pin object for the serial data line + - ``mck`` is a pin object for the master clock line; + master clock frequency is sampling rate * 256 + - ``mode`` specifies receive or transmit + - ``bits`` specifies sample size (bits), 16 or 32 + - ``format`` specifies channel format, STEREO or MONO + - ``rate`` specifies audio sampling rate (Hz); + this is the frequency of the ``ws`` signal + - ``ibuf`` specifies internal buffer length (bytes) + + For all ports, DMA runs continuously in the background and allows user applications to perform other operations while + sample data is transferred between the internal buffer and the I2S peripheral unit. + Increasing the size of the internal buffer has the potential to increase the time that user applications can perform non-I2S operations + before underflow (e.g. ``write`` method) or overflow (e.g. ``readinto`` method). + """ + + RX: int + MONO: int + STEREO: int + TX: int + @staticmethod + def shift(*, buf, bits, shift) -> Incomplete: + """ + bitwise shift of all samples contained in ``buf``. ``bits`` specifies sample size in bits. ``shift`` specifies the number of bits to shift each sample. + Positive for left shift, negative for right shift. + Typically used for volume control. Each bit shift changes sample volume by 6dB. + """ + ... + def init(self, sck, *args, **kwargs) -> Incomplete: + """ + see Constructor for argument descriptions + """ + ... + def irq(self, handler) -> Incomplete: + """ + Set a callback. ``handler`` is called when ``buf`` is emptied (``write`` method) or becomes full (``readinto`` method). + Setting a callback changes the ``write`` and ``readinto`` methods to non-blocking operation. 
+ ``handler`` is called in the context of the MicroPython scheduler. + """ + ... + def readinto(self, buf) -> int: + """ + Read audio samples into the buffer specified by ``buf``. ``buf`` must support the buffer protocol, such as bytearray or array. + "buf" byte ordering is little-endian. For Stereo format, left channel sample precedes right channel sample. For Mono format, + the left channel sample data is used. + Returns number of bytes read + """ + ... + def deinit(self) -> Incomplete: + """ + Deinitialize the I2S bus + """ + ... + def write(self, buf) -> int: + """ + Write audio samples contained in ``buf``. ``buf`` must support the buffer protocol, such as bytearray or array. + "buf" byte ordering is little-endian. For Stereo format, left channel sample precedes right channel sample. For Mono format, + the sample data is written to both the right and left channels. + Returns number of bytes written + """ + ... + def __init__(self, id, *, sck, ws, sd, mck=None, mode, bits, format, rate, ibuf) -> None: ... + +class Pin: + """ + Access the pin peripheral (GPIO pin) associated with the given ``id``. If + additional arguments are given in the constructor then they are used to initialise + the pin. Any settings that are not specified will remain in their previous state. + + The arguments are: + + - ``id`` is mandatory and can be an arbitrary object. Among possible value + types are: int (an internal Pin identifier), str (a Pin name), and tuple + (pair of [port, pin]). + + - ``mode`` specifies the pin mode, which can be one of: + + - ``Pin.IN`` - Pin is configured for input. If viewed as an output the pin + is in high-impedance state. + + - ``Pin.OUT`` - Pin is configured for (normal) output. + + - ``Pin.OPEN_DRAIN`` - Pin is configured for open-drain output. Open-drain + output works in the following way: if the output value is set to 0 the pin + is active at a low level; if the output value is 1 the pin is in a high-impedance + state. Not all ports implement this mode, or some might only on certain pins. + + - ``Pin.ALT`` - Pin is configured to perform an alternative function, which is + port specific. For a pin configured in such a way any other Pin methods + (except :meth:`Pin.init`) are not applicable (calling them will lead to undefined, + or a hardware-specific, result). Not all ports implement this mode. + + - ``Pin.ALT_OPEN_DRAIN`` - The Same as ``Pin.ALT``, but the pin is configured as + open-drain. Not all ports implement this mode. + + - ``Pin.ANALOG`` - Pin is configured for analog input, see the :class:`ADC` class. + + - ``pull`` specifies if the pin has a (weak) pull resistor attached, and can be + one of: + + - ``None`` - No pull up or down resistor. + - ``Pin.PULL_UP`` - Pull up resistor enabled. + - ``Pin.PULL_DOWN`` - Pull down resistor enabled. + + - ``value`` is valid only for Pin.OUT and Pin.OPEN_DRAIN modes and specifies initial + output pin value if given, otherwise the state of the pin peripheral remains + unchanged. + + - ``drive`` specifies the output power of the pin and can be one of: ``Pin.DRIVE_0``, + ``Pin.DRIVE_1``, etc., increasing in drive strength. The actual current driving + capabilities are port dependent. Not all ports implement this argument. + + - ``alt`` specifies an alternate function for the pin and the values it can take are + port dependent. This argument is valid only for ``Pin.ALT`` and ``Pin.ALT_OPEN_DRAIN`` + modes. It may be used when a pin supports more than one alternate function. 
If only + one pin alternate function is supported the this argument is not required. Not all + ports implement this argument. + + As specified above, the Pin class allows to set an alternate function for a particular + pin, but it does not specify any further operations on such a pin. Pins configured in + alternate-function mode are usually not used as GPIO but are instead driven by other + hardware peripherals. The only operation supported on such a pin is re-initialising, + by calling the constructor or :meth:`Pin.init` method. If a pin that is configured in + alternate-function mode is re-initialised with ``Pin.IN``, ``Pin.OUT``, or + ``Pin.OPEN_DRAIN``, the alternate function will be removed from the pin. + """ + + ALT_SPI: int + IN: int + ALT_USB: int + ALT_UART: int + IRQ_FALLING: int + OUT: int + OPEN_DRAIN: int + IRQ_RISING: int + PULL_DOWN: int + ALT_SIO: int + ALT_GPCK: int + ALT: int + PULL_UP: int + ALT_I2C: int + ALT_PWM: int + ALT_PIO1: int + ALT_PIO0: int + def low(self) -> None: + """ + Set pin to "0" output level. + + Availability: nrf, rp2, stm32 ports. + """ + ... + def irq(self, handler=None, trigger=IRQ_FALLING, *, priority=1, wake=None, hard=False) -> Callable[..., Incomplete]: + """ + Configure an interrupt handler to be called when the trigger source of the + pin is active. If the pin mode is ``Pin.IN`` then the trigger source is + the external value on the pin. If the pin mode is ``Pin.OUT`` then the + trigger source is the output buffer of the pin. Otherwise, if the pin mode + is ``Pin.OPEN_DRAIN`` then the trigger source is the output buffer for + state '0' and the external pin value for state '1'. + + The arguments are: + + - ``handler`` is an optional function to be called when the interrupt + triggers. The handler must take exactly one argument which is the + ``Pin`` instance. + + - ``trigger`` configures the event which can generate an interrupt. + Possible values are: + + - ``Pin.IRQ_FALLING`` interrupt on falling edge. + - ``Pin.IRQ_RISING`` interrupt on rising edge. + - ``Pin.IRQ_LOW_LEVEL`` interrupt on low level. + - ``Pin.IRQ_HIGH_LEVEL`` interrupt on high level. + + These values can be OR'ed together to trigger on multiple events. + + - ``priority`` sets the priority level of the interrupt. The values it + can take are port-specific, but higher values always represent higher + priorities. + + - ``wake`` selects the power mode in which this interrupt can wake up the + system. It can be ``machine.IDLE``, ``machine.SLEEP`` or ``machine.DEEPSLEEP``. + These values can also be OR'ed together to make a pin generate interrupts in + more than one power mode. + + - ``hard`` if true a hardware interrupt is used. This reduces the delay + between the pin change and the handler being called. Hard interrupt + handlers may not allocate memory; see :ref:`isr_rules`. + Not all ports support this argument. + + This method returns a callback object. + + The following methods are not part of the core Pin API and only implemented on certain ports. + """ + ... + def toggle(self, *args, **kwargs) -> Incomplete: ... + def off(self) -> None: + """ + Set pin to "0" output level. + """ + ... + def on(self) -> None: + """ + Set pin to "1" output level. + """ + ... + def init(self, mode=-1, pull=-1, *, value=None, drive=0, alt=-1) -> None: + """ + Re-initialise the pin using the given parameters. Only those arguments that + are specified will be set. The rest of the pin peripheral state will remain + unchanged. See the constructor documentation for details of the arguments. 
+ + Returns ``None``. + """ + ... + def value(self, x: Optional[Any] = None) -> int: + """ + This method allows to set and get the value of the pin, depending on whether + the argument ``x`` is supplied or not. + + If the argument is omitted then this method gets the digital logic level of + the pin, returning 0 or 1 corresponding to low and high voltage signals + respectively. The behaviour of this method depends on the mode of the pin: + + - ``Pin.IN`` - The method returns the actual input value currently present + on the pin. + - ``Pin.OUT`` - The behaviour and return value of the method is undefined. + - ``Pin.OPEN_DRAIN`` - If the pin is in state '0' then the behaviour and + return value of the method is undefined. Otherwise, if the pin is in + state '1', the method returns the actual input value currently present + on the pin. + + If the argument is supplied then this method sets the digital logic level of + the pin. The argument ``x`` can be anything that converts to a boolean. + If it converts to ``True``, the pin is set to state '1', otherwise it is set + to state '0'. The behaviour of this method depends on the mode of the pin: + + - ``Pin.IN`` - The value is stored in the output buffer for the pin. The + pin state does not change, it remains in the high-impedance state. The + stored value will become active on the pin as soon as it is changed to + ``Pin.OUT`` or ``Pin.OPEN_DRAIN`` mode. + - ``Pin.OUT`` - The output buffer is set to the given value immediately. + - ``Pin.OPEN_DRAIN`` - If the value is '0' the pin is set to a low voltage + state. Otherwise the pin is set to high-impedance state. + + When setting the value this method returns ``None``. + """ + ... + def high(self) -> None: + """ + Set pin to "1" output level. + + Availability: nrf, rp2, stm32 ports. + """ + ... + + class cpu: + GPIO20: Incomplete + GPIO25: Incomplete + GPIO26: Incomplete + GPIO27: Incomplete + GPIO24: Incomplete + GPIO21: Incomplete + GPIO22: Incomplete + GPIO23: Incomplete + GPIO28: Incomplete + GPIO6: Incomplete + GPIO7: Incomplete + GPIO8: Incomplete + GPIO5: Incomplete + GPIO29: Incomplete + GPIO3: Incomplete + GPIO4: Incomplete + GPIO9: Incomplete + GPIO2: Incomplete + GPIO1: Incomplete + GPIO10: Incomplete + GPIO11: Incomplete + GPIO0: Incomplete + EXT_GPIO0: Incomplete + EXT_GPIO1: Incomplete + EXT_GPIO2: Incomplete + GPIO12: Incomplete + GPIO17: Incomplete + GPIO18: Incomplete + GPIO19: Incomplete + GPIO16: Incomplete + GPIO13: Incomplete + GPIO14: Incomplete + GPIO15: Incomplete + def __init__(self, *argv, **kwargs) -> None: ... + + class board: + GP3: Incomplete + GP28: Incomplete + GP4: Incomplete + GP5: Incomplete + GP22: Incomplete + GP27: Incomplete + GP26: Incomplete + WL_GPIO2: Incomplete + WL_GPIO0: Incomplete + LED: Incomplete + WL_GPIO1: Incomplete + GP6: Incomplete + GP7: Incomplete + GP9: Incomplete + GP8: Incomplete + GP12: Incomplete + GP11: Incomplete + GP13: Incomplete + GP14: Incomplete + GP0: Incomplete + GP10: Incomplete + GP1: Incomplete + GP21: Incomplete + GP2: Incomplete + GP19: Incomplete + GP20: Incomplete + GP15: Incomplete + GP16: Incomplete + GP18: Incomplete + GP17: Incomplete + def __init__(self, *argv, **kwargs) -> None: ... + + def __init__(self, id, mode=-1, pull=-1, *, value=None, drive=0, alt=-1) -> None: ... + def __call__(self, x: Optional[Any] = None) -> Incomplete: + """ + Pin objects are callable. The call method provides a (fast) shortcut to set + and get the value of the pin. It is equivalent to Pin.value([x]). 
+ See :meth:`Pin.value` for more details. + """ + ... + +class SoftSPI: + """ + Construct a new software SPI object. Additional parameters must be + given, usually at least *sck*, *mosi* and *miso*, and these are used + to initialise the bus. See `SPI.init` for a description of the parameters. + """ + + LSB: int + MSB: int + def deinit(self, *args, **kwargs) -> Incomplete: ... + def init(self, *args, **kwargs) -> Incomplete: ... + def write_readinto(self, *args, **kwargs) -> Incomplete: ... + def read(self, *args, **kwargs) -> Incomplete: ... + def write(self, *args, **kwargs) -> Incomplete: ... + def readinto(self, *args, **kwargs) -> Incomplete: ... + def __init__(self, baudrate=500000, *, polarity=0, phase=0, bits=8, firstbit=MSB, sck=None, mosi=None, miso=None) -> None: ... + +class Timer: + """ + Construct a new timer object of the given ``id``. ``id`` of -1 constructs a + virtual timer (if supported by a board). + ``id`` shall not be passed as a keyword argument. + + See ``init`` for parameters of initialisation. + """ + + PERIODIC: int + ONE_SHOT: int + def init(self, *, mode=PERIODIC, freq=-1, period=-1, callback=None) -> None: + """ + Initialise the timer. Example:: + + def mycallback(t): + pass + + # periodic at 1kHz + tim.init(mode=Timer.PERIODIC, freq=1000, callback=mycallback) + + # periodic with 100ms period + tim.init(period=100, callback=mycallback) + + # one shot firing after 1000ms + tim.init(mode=Timer.ONE_SHOT, period=1000, callback=mycallback) + + Keyword arguments: + + - ``mode`` can be one of: + + - ``Timer.ONE_SHOT`` - The timer runs once until the configured + period of the channel expires. + - ``Timer.PERIODIC`` - The timer runs periodically at the configured + frequency of the channel. + + - ``freq`` - The timer frequency, in units of Hz. The upper bound of + the frequency is dependent on the port. When both the ``freq`` and + ``period`` arguments are given, ``freq`` has a higher priority and + ``period`` is ignored. + + - ``period`` - The timer period, in milliseconds. + + - ``callback`` - The callable to call upon expiration of the timer period. + The callback must take one argument, which is passed the Timer object. + The ``callback`` argument shall be specified. Otherwise an exception + will occur upon timer expiration: + ``TypeError: 'NoneType' object isn't callable`` + """ + ... + def deinit(self) -> None: + """ + Deinitialises the timer. Stops the timer, and disables the timer peripheral. + """ + ... + def __init__(self, id=-1, *args, **kwargs) -> None: ... + +class UART: + """ + Construct a UART object of the given id. + """ + + INV_TX: int + RTS: int + CTS: int + INV_RX: int + def deinit(self) -> None: + """ + Turn off the UART bus. + + .. note:: + You will not be able to call ``init()`` on the object after ``deinit()``. + A new instance needs to be created in that case. + """ + ... + def sendbreak(self) -> None: + """ + Send a break condition on the bus. This drives the bus low for a duration + longer than required for a normal transmission of a character. + """ + ... + def init(self, baudrate=9600, bits=8, parity=None, stop=1, *args, **kwargs) -> None: + """ + Initialise the UART bus with the given parameters: + + - *baudrate* is the clock rate. + - *bits* is the number of bits per character, 7, 8 or 9. + - *parity* is the parity, ``None``, 0 (even) or 1 (odd). + - *stop* is the number of stop bits, 1 or 2. + + Additional keyword-only parameters that may be supported by a port are: + + - *tx* specifies the TX pin to use. 
+ - *rx* specifies the RX pin to use. + - *rts* specifies the RTS (output) pin to use for hardware receive flow control. + - *cts* specifies the CTS (input) pin to use for hardware transmit flow control. + - *txbuf* specifies the length in characters of the TX buffer. + - *rxbuf* specifies the length in characters of the RX buffer. + - *timeout* specifies the time to wait for the first character (in ms). + - *timeout_char* specifies the time to wait between characters (in ms). + - *invert* specifies which lines to invert. + + - ``0`` will not invert lines (idle state of both lines is logic high). + - ``UART.INV_TX`` will invert TX line (idle state of TX line now logic low). + - ``UART.INV_RX`` will invert RX line (idle state of RX line now logic low). + - ``UART.INV_TX | UART.INV_RX`` will invert both lines (idle state at logic low). + + - *flow* specifies which hardware flow control signals to use. The value + is a bitmask. + + - ``0`` will ignore hardware flow control signals. + - ``UART.RTS`` will enable receive flow control by using the RTS output pin to + signal if the receive FIFO has sufficient space to accept more data. + - ``UART.CTS`` will enable transmit flow control by pausing transmission when the + CTS input pin signals that the receiver is running low on buffer space. + - ``UART.RTS | UART.CTS`` will enable both, for full hardware flow control. + + On the WiPy only the following keyword-only parameter is supported: + + - *pins* is a 4 or 2 item list indicating the TX, RX, RTS and CTS pins (in that order). + Any of the pins can be None if one wants the UART to operate with limited functionality. + If the RTS pin is given the the RX pin must be given as well. The same applies to CTS. + When no pins are given, then the default set of TX and RX pins is taken, and hardware + flow control will be disabled. If *pins* is ``None``, no pin assignment will be made. + + .. note:: + It is possible to call ``init()`` multiple times on the same object in + order to reconfigure UART on the fly. That allows using single UART + peripheral to serve different devices attached to different GPIO pins. + Only one device can be served at a time in that case. + Also do not call ``deinit()`` as it will prevent calling ``init()`` + again. + """ + ... + def flush(self) -> Incomplete: + """ + Waits until all data has been sent. In case of a timeout, an exception is raised. The timeout + duration depends on the tx buffer size and the baud rate. Unless flow control is enabled, a timeout + should not occur. + + .. note:: + + For the rp2, esp8266 and nrf ports the call returns while the last byte is sent. + If required, a one character wait time has to be added in the calling script. + + Availability: rp2, esp32, esp8266, mimxrt, cc3200, stm32, nrf ports, renesas-ra + """ + ... + def txdone(self) -> bool: + """ + Tells whether all data has been sent or no data transfer is happening. In this case, + it returns ``True``. If a data transmission is ongoing it returns ``False``. + + .. note:: + + For the rp2, esp8266 and nrf ports the call may return ``True`` even if the last byte + of a transfer is still being sent. If required, a one character wait time has to be + added in the calling script. + + Availability: rp2, esp32, esp8266, mimxrt, cc3200, stm32, nrf ports, renesas-ra + """ + ... + def read(self, nbytes: Optional[Any] = None) -> bytes: + """ + Read characters. If ``nbytes`` is specified then read at most that many bytes, + otherwise read as much data as possible. 
It may return sooner if a timeout + is reached. The timeout is configurable in the constructor. + + Return value: a bytes object containing the bytes read in. Returns ``None`` + on timeout. + """ + ... + def any(self) -> int: + """ + Returns an integer counting the number of characters that can be read without + blocking. It will return 0 if there are no characters available and a positive + number if there are characters. The method may return 1 even if there is more + than one character available for reading. + + For more sophisticated querying of available characters use select.poll:: + + poll = select.poll() + poll.register(uart, select.POLLIN) + poll.poll(timeout) + """ + ... + def write(self, buf) -> int: + """ + Write the buffer of bytes to the bus. + + Return value: number of bytes written or ``None`` on timeout. + """ + ... + def readinto(self, buf, nbytes: Optional[Any] = None) -> int: + """ + Read bytes into the ``buf``. If ``nbytes`` is specified then read at most + that many bytes. Otherwise, read at most ``len(buf)`` bytes. It may return sooner if a timeout + is reached. The timeout is configurable in the constructor. + + Return value: number of bytes read and stored into ``buf`` or ``None`` on + timeout. + """ + ... + def readline(self) -> None: + """ + Read a line, ending in a newline character. It may return sooner if a timeout + is reached. The timeout is configurable in the constructor. + + Return value: the line read or ``None`` on timeout. + """ + ... + def __init__(self, id, *args, **kwargs) -> None: ... + +class SoftI2C(I2C): + """ + Construct a new software I2C object. The parameters are: + + - *scl* should be a pin object specifying the pin to use for SCL. + - *sda* should be a pin object specifying the pin to use for SDA. + - *freq* should be an integer which sets the maximum frequency + for SCL. + - *timeout* is the maximum time in microseconds to wait for clock + stretching (SCL held low by another device on the bus), after + which an ``OSError(ETIMEDOUT)`` exception is raised. + """ + + def readfrom_mem_into(self, *args, **kwargs) -> Incomplete: ... + def readfrom_into(self, *args, **kwargs) -> Incomplete: ... + def readfrom_mem(self, *args, **kwargs) -> Incomplete: ... + def writeto_mem(self, *args, **kwargs) -> Incomplete: ... + def scan(self, *args, **kwargs) -> Incomplete: ... + def writeto(self, *args, **kwargs) -> Incomplete: ... + def writevto(self, *args, **kwargs) -> Incomplete: ... + def start(self, *args, **kwargs) -> Incomplete: ... + def readfrom(self, *args, **kwargs) -> Incomplete: ... + def readinto(self, *args, **kwargs) -> Incomplete: ... + def init(self, *args, **kwargs) -> Incomplete: ... + def stop(self, *args, **kwargs) -> Incomplete: ... + def write(self, *args, **kwargs) -> Incomplete: ... + def __init__(self, scl, sda, *, freq=400000, timeout=50000) -> None: ... + +class RTC: + """ + Create an RTC object. See init for parameters of initialization. + """ + + def datetime(self, datetimetuple: Optional[Any] = None) -> Tuple: + """ + Get or set the date and time of the RTC. + + With no arguments, this method returns an 8-tuple with the current + date and time. With 1 argument (being an 8-tuple) it sets the date + and time. + + The 8-tuple has the following format: + + (year, month, day, weekday, hours, minutes, seconds, subseconds) + + The meaning of the ``subseconds`` field is hardware dependent. + """ + ... + def __init__(self, id=0, *args, **kwargs) -> None: ... + +class SPI: + """ + Construct an SPI object on the given bus, *id*. 
Values of *id* depend + on a particular port and its hardware. Values 0, 1, etc. are commonly used + to select hardware SPI block #0, #1, etc. + + With no additional parameters, the SPI object is created but not + initialised (it has the settings from the last initialisation of + the bus, if any). If extra arguments are given, the bus is initialised. + See ``init`` for parameters of initialisation. + """ + + LSB: int + MSB: int + def deinit(self) -> None: + """ + Turn off the SPI bus. + """ + ... + def init( + self, baudrate=1000000, *, polarity=0, phase=0, bits=8, firstbit=MSB, sck=None, mosi=None, miso=None, pins: Optional[Tuple] + ) -> None: + """ + Initialise the SPI bus with the given parameters: + + - ``baudrate`` is the SCK clock rate. + - ``polarity`` can be 0 or 1, and is the level the idle clock line sits at. + - ``phase`` can be 0 or 1 to sample data on the first or second clock edge + respectively. + - ``bits`` is the width in bits of each transfer. Only 8 is guaranteed to be supported by all hardware. + - ``firstbit`` can be ``SPI.MSB`` or ``SPI.LSB``. + - ``sck``, ``mosi``, ``miso`` are pins (machine.Pin) objects to use for bus signals. For most + hardware SPI blocks (as selected by ``id`` parameter to the constructor), pins are fixed + and cannot be changed. In some cases, hardware blocks allow 2-3 alternative pin sets for + a hardware SPI block. Arbitrary pin assignments are possible only for a bitbanging SPI driver + (``id`` = -1). + - ``pins`` - WiPy port doesn't ``sck``, ``mosi``, ``miso`` arguments, and instead allows to + specify them as a tuple of ``pins`` parameter. + + In the case of hardware SPI the actual clock frequency may be lower than the + requested baudrate. This is dependent on the platform hardware. The actual + rate may be determined by printing the SPI object. + """ + ... + def write_readinto(self, write_buf, read_buf) -> int: + """ + Write the bytes from ``write_buf`` while reading into ``read_buf``. The + buffers can be the same or different, but both buffers must have the + same length. + Returns ``None``. + + Note: on WiPy this function returns the number of bytes written. + """ + ... + def read(self, nbytes, write=0x00) -> bytes: + """ + Read a number of bytes specified by ``nbytes`` while continuously writing + the single byte given by ``write``. + Returns a ``bytes`` object with the data that was read. + """ + ... + def write(self, buf) -> int: + """ + Write the bytes contained in ``buf``. + Returns ``None``. + + Note: on WiPy this function returns the number of bytes written. + """ + ... + def readinto(self, buf, write=0x00) -> int: + """ + Read into the buffer specified by ``buf`` while continuously writing the + single byte given by ``write``. + Returns ``None``. + + Note: on WiPy this function returns the number of bytes read. + """ + ... + def __init__(self, id, *args, **kwargs) -> None: ... + +class Signal(Pin): + """ + Signal(pin_arguments..., *, invert=False) + + Create a Signal object. There're two ways to create it: + + * By wrapping existing Pin object - universal method which works for + any board. + * By passing required Pin parameters directly to Signal constructor, + skipping the need to create intermediate Pin object. Available on + many, but not all boards. + + The arguments are: + + - ``pin_obj`` is existing Pin object. + + - ``pin_arguments`` are the same arguments as can be passed to Pin constructor. + + - ``invert`` - if True, the signal will be inverted (active low). + """ + + def off(self) -> None: + """ + Deactivate signal. 
+ """ + ... + def on(self) -> None: + """ + Activate signal. + """ + ... + def value(self, x: Optional[Any] = None) -> int: + """ + This method allows to set and get the value of the signal, depending on whether + the argument ``x`` is supplied or not. + + If the argument is omitted then this method gets the signal level, 1 meaning + signal is asserted (active) and 0 - signal inactive. + + If the argument is supplied then this method sets the signal level. The + argument ``x`` can be anything that converts to a boolean. If it converts + to ``True``, the signal is active, otherwise it is inactive. + + Correspondence between signal being active and actual logic level on the + underlying pin depends on whether signal is inverted (active-low) or not. + For non-inverted signal, active status corresponds to logical 1, inactive - + to logical 0. For inverted/active-low signal, active status corresponds + to logical 0, while inactive - to logical 1. + """ + ... + def __init__(self, pin_obj, *args, invert=False) -> None: ... diff --git a/.vscode/Pico-W-Stub/uos.pyi b/.vscode/Pico-W-Stub/uos.pyi new file mode 100644 index 0000000..17fd6a2 --- /dev/null +++ b/.vscode/Pico-W-Stub/uos.pyi @@ -0,0 +1,253 @@ +""" +Basic "operating system" services. + +MicroPython module: https://docs.micropython.org/en/v1.21.0/library/os.html + +CPython module: :mod:`python:os` https://docs.python.org/3/library/os.html . + +The ``os`` module contains functions for filesystem access and mounting, +terminal redirection and duplication, and the ``uname`` and ``urandom`` +functions. +""" +from _typeshed import Incomplete, Incomplete as Incomplete +from stdlib.os import * +from typing import Any, IO, Iterator, Optional, Tuple + +def statvfs(path) -> Tuple: + """ + Get the status of a filesystem. + + Returns a tuple with the filesystem information in the following order: + + * ``f_bsize`` -- file system block size + * ``f_frsize`` -- fragment size + * ``f_blocks`` -- size of fs in f_frsize units + * ``f_bfree`` -- number of free blocks + * ``f_bavail`` -- number of free blocks for unprivileged users + * ``f_files`` -- number of inodes + * ``f_ffree`` -- number of free inodes + * ``f_favail`` -- number of free inodes for unprivileged users + * ``f_flag`` -- mount flags + * ``f_namemax`` -- maximum filename length + + Parameters related to inodes: ``f_files``, ``f_ffree``, ``f_avail`` + and the ``f_flags`` parameter may return ``0`` as they can be unavailable + in a port-specific implementation. + """ + ... + +def stat(path) -> Incomplete: + """ + Get the status of a file or directory. + """ + ... + +def rmdir(path) -> None: + """ + Remove a directory. + """ + ... + +def rename(old_path, new_path) -> None: + """ + Rename a file. + """ + ... + +def mount(fsobj, mount_point, *, readonly=False) -> Incomplete: + """ + Mount the filesystem object *fsobj* at the location in the VFS given by the + *mount_point* string. *fsobj* can be a a VFS object that has a ``mount()`` + method, or a block device. If it's a block device then the filesystem type + is automatically detected (an exception is raised if no filesystem was + recognised). *mount_point* may be ``'/'`` to mount *fsobj* at the root, + or ``'/'`` to mount it at a subdirectory under the root. + + If *readonly* is ``True`` then the filesystem is mounted read-only. + + During the mount process the method ``mount()`` is called on the filesystem + object. + + Will raise ``OSError(EPERM)`` if *mount_point* is already mounted. + """ + ... 
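+
+# Illustrative sketch (an editorial addition, not part of the generated stub):
+# mounting a block device and checking free space with the mount()/statvfs()
+# functions documented above. The sdcard driver from micropython-lib and the
+# SPI/CS wiring shown here are assumptions for the example only.
+import os
+
+import machine
+import sdcard  # micropython-lib SD card driver (assumed to be installed)
+
+sd = sdcard.SDCard(machine.SPI(1), machine.Pin(13))  # hypothetical wiring
+os.mount(sd, "/sd", readonly=False)
+st = os.statvfs("/sd")
+free_kib = st[0] * st[3] // 1024  # f_bsize * f_bfree, in KiB
+print("free space on /sd:", free_kib, "KiB")
+os.umount("/sd")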
+ +def sync() -> None: + """ + Sync all filesystems. + """ + ... + +def unlink(*args, **kwargs) -> Incomplete: ... +def uname() -> uname_result: + """ + Return a tuple (possibly a named tuple) containing information about the + underlying machine and/or its operating system. The tuple has five fields + in the following order, each of them being a string: + + * ``sysname`` -- the name of the underlying system + * ``nodename`` -- the network name (can be the same as ``sysname``) + * ``release`` -- the version of the underlying system + * ``version`` -- the MicroPython version and build date + * ``machine`` -- an identifier for the underlying hardware (eg board, CPU) + """ + ... + +def umount(mount_point) -> Incomplete: + """ + Unmount a filesystem. *mount_point* can be a string naming the mount location, + or a previously-mounted filesystem object. During the unmount process the + method ``umount()`` is called on the filesystem object. + + Will raise ``OSError(EINVAL)`` if *mount_point* is not found. + """ + ... + +def urandom(n) -> bytes: + """ + Return a bytes object with *n* random bytes. Whenever possible, it is + generated by the hardware random number generator. + """ + ... + +def chdir(path) -> Incomplete: + """ + Change current directory. + """ + ... + +def dupterm(stream_object, index=0, /) -> IO: + """ + Duplicate or switch the MicroPython terminal (the REPL) on the given `stream`-like + object. The *stream_object* argument must be a native stream object, or derive + from ``io.IOBase`` and implement the ``readinto()`` and + ``write()`` methods. The stream should be in non-blocking mode and + ``readinto()`` should return ``None`` if there is no data available for reading. + + After calling this function all terminal output is repeated on this stream, + and any input that is available on the stream is passed on to the terminal input. + + The *index* parameter should be a non-negative integer and specifies which + duplication slot is set. A given port may implement more than one slot (slot 0 + will always be available) and in that case terminal input and output is + duplicated on all the slots that are set. + + If ``None`` is passed as the *stream_object* then duplication is cancelled on + the slot given by *index*. + + The function returns the previous stream-like object in the given slot. + """ + ... + +def remove(path) -> None: + """ + Remove a file. + """ + ... + +def mkdir(path) -> Incomplete: + """ + Create a new directory. + """ + ... + +def getcwd() -> Incomplete: + """ + Get the current directory. + """ + ... + +def listdir(dir: Optional[Any] = None) -> Incomplete: + """ + With no argument, list the current directory. Otherwise list the given directory. + """ + ... + +def ilistdir(dir: Optional[Any] = None) -> Iterator[Tuple]: + """ + This function returns an iterator which then yields tuples corresponding to + the entries in the directory that it is listing. With no argument it lists the + current directory, otherwise it lists the directory given by *dir*. + + The tuples have the form *(name, type, inode[, size])*: + + - *name* is a string (or bytes if *dir* is a bytes object) and is the name of + the entry; + - *type* is an integer that specifies the type of the entry, with 0x4000 for + directories and 0x8000 for regular files; + - *inode* is an integer corresponding to the inode of the file, and may be 0 + for filesystems that don't have such a notion. + - Some platforms may return a 4-tuple that includes the entry's *size*. 
For + file entries, *size* is an integer representing the size of the file + or -1 if unknown. Its meaning is currently undefined for directory + entries. + """ + ... + +class VfsLfs2: + """ + Create a filesystem object that uses the `littlefs v2 filesystem format`_. + Storage of the littlefs filesystem is provided by *block_dev*, which must + support the :ref:`extended interface `. + Objects created by this constructor can be mounted using :func:`mount`. + + The *mtime* argument enables modification timestamps for files, stored using + littlefs attributes. This option can be disabled or enabled differently each + mount time and timestamps will only be added or updated if *mtime* is enabled, + otherwise the timestamps will remain untouched. Littlefs v2 filesystems without + timestamps will work without reformatting and timestamps will be added + transparently to existing files once they are opened for writing. When *mtime* + is enabled `os.stat` on files without timestamps will return 0 for the timestamp. + + See :ref:`filesystem` for more information. + """ + + def rename(self, *args, **kwargs) -> Incomplete: ... + @staticmethod + def mkfs(block_dev, readsize=32, progsize=32, lookahead=32) -> None: + """ + Build a Lfs2 filesystem on *block_dev*. + + ``Note:`` There are reports of littlefs v2 failing in certain situations, + for details see `littlefs issue 295`_. + """ + ... + def mount(self, *args, **kwargs) -> Incomplete: ... + def statvfs(self, *args, **kwargs) -> Incomplete: ... + def rmdir(self, *args, **kwargs) -> Incomplete: ... + def stat(self, *args, **kwargs) -> Incomplete: ... + def umount(self, *args, **kwargs) -> Incomplete: ... + def remove(self, *args, **kwargs) -> Incomplete: ... + def mkdir(self, *args, **kwargs) -> Incomplete: ... + def open(self, *args, **kwargs) -> Incomplete: ... + def ilistdir(self, *args, **kwargs) -> Incomplete: ... + def chdir(self, *args, **kwargs) -> Incomplete: ... + def getcwd(self, *args, **kwargs) -> Incomplete: ... + def __init__(self, block_dev, readsize=32, progsize=32, lookahead=32, mtime=True) -> None: ... + +class VfsFat: + """ + Create a filesystem object that uses the FAT filesystem format. Storage of + the FAT filesystem is provided by *block_dev*. + Objects created by this constructor can be mounted using :func:`mount`. + """ + + def rename(self, *args, **kwargs) -> Incomplete: ... + @staticmethod + def mkfs(block_dev) -> None: + """ + Build a FAT filesystem on *block_dev*. + """ + ... + def mount(self, *args, **kwargs) -> Incomplete: ... + def statvfs(self, *args, **kwargs) -> Incomplete: ... + def rmdir(self, *args, **kwargs) -> Incomplete: ... + def stat(self, *args, **kwargs) -> Incomplete: ... + def umount(self, *args, **kwargs) -> Incomplete: ... + def remove(self, *args, **kwargs) -> Incomplete: ... + def mkdir(self, *args, **kwargs) -> Incomplete: ... + def open(self, *args, **kwargs) -> Incomplete: ... + def ilistdir(self, *args, **kwargs) -> Incomplete: ... + def chdir(self, *args, **kwargs) -> Incomplete: ... + def getcwd(self, *args, **kwargs) -> Incomplete: ... + def __init__(self, block_dev) -> None: ... diff --git a/.vscode/Pico-W-Stub/uplatform.pyi b/.vscode/Pico-W-Stub/uplatform.pyi new file mode 100644 index 0000000..8a66667 --- /dev/null +++ b/.vscode/Pico-W-Stub/uplatform.pyi @@ -0,0 +1,43 @@ +""" +Access to underlying platform’s identifying data. 
+ +MicroPython module: https://docs.micropython.org/en/v1.21.0/library/platform.html + +CPython module: :mod:`python:platform` https://docs.python.org/3/library/platform.html . + +This module tries to retrieve as much platform-identifying data as possible. It +makes this information available via function APIs. +""" +from _typeshed import Incomplete, Incomplete as Incomplete +from typing import Tuple + +def platform() -> str: + """ + Returns a string identifying the underlying platform. This string is composed + of several substrings in the following order, delimited by dashes (``-``): + + - the name of the platform system (e.g. Unix, Windows or MicroPython) + - the MicroPython version + - the architecture of the platform + - the version of the underlying platform + - the concatenation of the name of the libc that MicroPython is linked to + and its corresponding version. + + For example, this could be + ``"MicroPython-1.20.0-xtensa-IDFv4.2.4-with-newlib3.0.0"``. + """ + ... + +def python_compiler() -> str: + """ + Returns a string identifying the compiler used for compiling MicroPython. + """ + ... + +def libc_ver() -> Tuple: + """ + Returns a tuple of strings *(lib, version)*, where *lib* is the name of the + libc that MicroPython is linked to, and *version* the corresponding version + of this libc. + """ + ... diff --git a/.vscode/Pico-W-Stub/urandom.pyi b/.vscode/Pico-W-Stub/urandom.pyi new file mode 100644 index 0000000..c0123b6 --- /dev/null +++ b/.vscode/Pico-W-Stub/urandom.pyi @@ -0,0 +1,84 @@ +""" +Random numbers. + +MicroPython module: https://docs.micropython.org/en/v1.21.0/library/random.html + +This module implements a pseudo-random number generator (PRNG). + +CPython module: :mod:`python:random` https://docs.python.org/3/library/random.html . . + +.. note:: + + The following notation is used for intervals: + + - () are open interval brackets and do not include their endpoints. + For example, (0, 1) means greater than 0 and less than 1. + In set notation: (0, 1) = {x | 0 < x < 1}. + + - [] are closed interval brackets which include all their limit points. + For example, [0, 1] means greater than or equal to 0 and less than + or equal to 1. + In set notation: [0, 1] = {x | 0 <= x <= 1}. + +.. note:: + + The :func:`randrange`, :func:`randint` and :func:`choice` functions are only + available if the ``MICROPY_PY_RANDOM_EXTRA_FUNCS`` configuration option is + enabled. +""" +from _typeshed import Incomplete, Incomplete as Incomplete +from typing import Any, Optional + +def randrange(start, stop, step: Optional[Any] = None) -> int: + """ + The first form returns a random integer from the range [0, *stop*). + The second form returns a random integer from the range [*start*, *stop*). + The third form returns a random integer from the range [*start*, *stop*) in + steps of *step*. For instance, calling ``randrange(1, 10, 2)`` will + return odd numbers between 1 and 9 inclusive. + """ + ... + +def random() -> int: + """ + Return a random floating point number in the range [0.0, 1.0). + """ + ... + +def seed(n=None, /) -> None: + """ + Initialise the random number generator module with the seed *n* which should + be an integer. When no argument (or ``None``) is passed in it will (if + supported by the port) initialise the PRNG with a true random number + (usually a hardware generated random number). + + The ``None`` case only works if ``MICROPY_PY_RANDOM_SEED_INIT_FUNC`` is + enabled by the port, otherwise it raises ``ValueError``. + """ + ... 
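+
+# Illustrative sketch (an editorial addition, not part of the generated stub):
+# seeding the PRNG and drawing values with the functions documented in this
+# module. Assumes a port with MICROPY_PY_RANDOM_EXTRA_FUNCS and hardware
+# seeding enabled, as noted above.
+import random
+
+random.seed()                  # seed from the hardware RNG where supported
+die = random.randrange(1, 7)   # integer in [1, 6]
+coin = random.getrandbits(1)   # 0 or 1
+x = random.random()            # float in [0.0, 1.0)
+print(die, coin, x)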
+ +def uniform(a, b) -> int: + """ + Return a random floating point number N such that *a* <= N <= *b* for *a* <= *b*, + and *b* <= N <= *a* for *b* < *a*. + """ + ... + +def choice(sequence) -> Incomplete: + """ + Chooses and returns one item at random from *sequence* (tuple, list or + any object that supports the subscript operation). + """ + ... + +def randint(a, b) -> int: + """ + Return a random integer in the range [*a*, *b*]. + """ + ... + +def getrandbits(n) -> int: + """ + Return an integer with *n* random bits (0 <= n <= 32). + """ + ... diff --git a/.vscode/Pico-W-Stub/ure.pyi b/.vscode/Pico-W-Stub/ure.pyi new file mode 100644 index 0000000..6cd43ad --- /dev/null +++ b/.vscode/Pico-W-Stub/ure.pyi @@ -0,0 +1,6 @@ +from _typeshed import Incomplete as Incomplete + +def sub(*args, **kwargs) -> Incomplete: ... +def search(*args, **kwargs) -> Incomplete: ... +def match(*args, **kwargs) -> Incomplete: ... +def compile(*args, **kwargs) -> Incomplete: ... diff --git a/.vscode/Pico-W-Stub/urequests.pyi b/.vscode/Pico-W-Stub/urequests.pyi new file mode 100644 index 0000000..d53bcfb --- /dev/null +++ b/.vscode/Pico-W-Stub/urequests.pyi @@ -0,0 +1 @@ +def __getattr__(attr): ... diff --git a/.vscode/Pico-W-Stub/uselect.pyi b/.vscode/Pico-W-Stub/uselect.pyi new file mode 100644 index 0000000..9456f0d --- /dev/null +++ b/.vscode/Pico-W-Stub/uselect.pyi @@ -0,0 +1,103 @@ +""" +Wait for events on a set of streams. + +MicroPython module: https://docs.micropython.org/en/v1.21.0/library/select.html + +CPython module: :mod:`python:select` https://docs.python.org/3/library/select.html . + +This module provides functions to efficiently wait for events on multiple +`streams ` (select streams which are ready for operations). +""" +from _typeshed import Incomplete, Incomplete as Incomplete +from typing import Any, Iterator, List, Optional, Tuple + +POLLOUT: int +POLLIN: int +POLLHUP: int +POLLERR: int + +def select(rlist, wlist, xlist, timeout: Optional[Any] = None) -> None: + """ + Wait for activity on a set of objects. + + This function is provided by some MicroPython ports for compatibility + and is not efficient. Usage of :class:`Poll` is recommended instead. + """ + ... + +class poll: + """ + Create an instance of the Poll class. + """ + + def __init__(self) -> None: ... + def register(self, obj, eventmask: Optional[Any] = None) -> None: + """ + Register `stream` *obj* for polling. *eventmask* is logical OR of: + + * ``select.POLLIN`` - data available for reading + * ``select.POLLOUT`` - more data can be written + + Note that flags like ``select.POLLHUP`` and ``select.POLLERR`` are + *not* valid as input eventmask (these are unsolicited events which + will be returned from `poll()` regardless of whether they are asked + for). This semantics is per POSIX. + + *eventmask* defaults to ``select.POLLIN | select.POLLOUT``. + + It is OK to call this function multiple times for the same *obj*. + Successive calls will update *obj*'s eventmask to the value of + *eventmask* (i.e. will behave as `modify()`). + """ + ... + def unregister(self, obj) -> Incomplete: + """ + Unregister *obj* from polling. + """ + ... + def modify(self, obj, eventmask) -> None: + """ + Modify the *eventmask* for *obj*. If *obj* is not registered, `OSError` + is raised with error of ENOENT. + """ + ... 
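+    # Illustrative sketch (an editorial addition, not part of the generated
+    # stub): register a stream for reading and service it without blocking.
+    # The UART object and its settings are assumptions for the example only.
+    #
+    #   import select
+    #   from machine import UART
+    #
+    #   uart = UART(0, 115200)
+    #   poller = select.poll()
+    #   poller.register(uart, select.POLLIN)
+    #   for event in poller.poll(100):        # wait up to 100 ms
+    #       obj, flags = event[0], event[1]   # tuples may have extra elements
+    #       if flags & select.POLLIN:
+    #           print(obj.read())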
+ def poll(self, timeout=-1, /) -> List: + """ + Wait for at least one of the registered objects to become ready or have an + exceptional condition, with optional timeout in milliseconds (if *timeout* + arg is not specified or -1, there is no timeout). + + Returns list of (``obj``, ``event``, ...) tuples. There may be other elements in + tuple, depending on a platform and version, so don't assume that its size is 2. + The ``event`` element specifies which events happened with a stream and + is a combination of ``select.POLL*`` constants described above. Note that + flags ``select.POLLHUP`` and ``select.POLLERR`` can be returned at any time + (even if were not asked for), and must be acted on accordingly (the + corresponding stream unregistered from poll and likely closed), because + otherwise all further invocations of `poll()` may return immediately with + these flags set for this stream again. + + In case of timeout, an empty list is returned. + + Difference to CPython + + Tuples returned may contain more than 2 elements as described above. + """ + ... + def ipoll(self, timeout=-1, flags=0, /) -> Iterator[Tuple]: + """ + Like :meth:`poll.poll`, but instead returns an iterator which yields a + `callee-owned tuple`. This function provides an efficient, allocation-free + way to poll on streams. + + If *flags* is 1, one-shot behaviour for events is employed: streams for + which events happened will have their event masks automatically reset + (equivalent to ``poll.modify(obj, 0)``), so new events for such a stream + won't be processed until new mask is set with `poll.modify()`. This + behaviour is useful for asynchronous I/O schedulers. + + Difference to CPython + + This function is a MicroPython extension. + """ + ... diff --git a/.vscode/Pico-W-Stub/usocket.pyi b/.vscode/Pico-W-Stub/usocket.pyi new file mode 100644 index 0000000..86a1183 --- /dev/null +++ b/.vscode/Pico-W-Stub/usocket.pyi @@ -0,0 +1,271 @@ +""" +Socket module. + +MicroPython module: https://docs.micropython.org/en/v1.21.0/library/socket.html + +CPython module: :mod:`python:socket` https://docs.python.org/3/library/socket.html . + +This module provides access to the BSD socket interface. + +Difference to CPython + + For efficiency and consistency, socket objects in MicroPython implement a `stream` + (file-like) interface directly. In CPython, you need to convert a socket to + a file-like object using `makefile()` method. This method is still supported + by MicroPython (but is a no-op), so where compatibility with CPython matters, + be sure to use it. +""" +from _typeshed import Incomplete, Incomplete as Incomplete +from stdlib.socket import * +from typing import Any, IO, Optional, Tuple + +SOCK_STREAM: int +SOCK_RAW: int +SOCK_DGRAM: int +SOL_SOCKET: int +SO_BROADCAST: int +SO_REUSEADDR: int +AF_INET6: int +AF_INET: int +IP_DROP_MEMBERSHIP: int +IPPROTO_IP: int +IP_ADD_MEMBERSHIP: int + +def reset(*args, **kwargs) -> Incomplete: ... +def print_pcbs(*args, **kwargs) -> Incomplete: ... +def getaddrinfo(host, port, af=0, type=0, proto=0, flags=0, /) -> Incomplete: + """ + Translate the host/port argument into a sequence of 5-tuples that contain all the + necessary arguments for creating a socket connected to that service. Arguments + *af*, *type*, and *proto* (which have the same meaning as for the `socket()` function) + can be used to filter which kind of addresses are returned. If a parameter is not + specified or zero, all combinations of addresses can be returned (requiring + filtering on the user side). 
+ + The resulting list of 5-tuples has the following structure:: + + (family, type, proto, canonname, sockaddr) + + The following example shows how to connect to a given url:: + + s = socket.socket() + # This assumes that if "type" is not specified, an address for + # SOCK_STREAM will be returned, which may be not true + s.connect(socket.getaddrinfo('www.micropython.org', 80)[0][-1]) + + Recommended use of filtering params:: + + s = socket.socket() + # Guaranteed to return an address which can be connect'ed to for + # stream operation. + s.connect(socket.getaddrinfo('www.micropython.org', 80, 0, SOCK_STREAM)[0][-1]) + + Difference to CPython + + CPython raises a ``socket.gaierror`` exception (`OSError` subclass) in case + of error in this function. MicroPython doesn't have ``socket.gaierror`` + and raises OSError directly. Note that error numbers of `getaddrinfo()` + form a separate namespace and may not match error numbers from + the :mod:`errno` module. To distinguish `getaddrinfo()` errors, they are + represented by negative numbers, whereas standard system errors are + positive numbers (error numbers are accessible using ``e.args[0]`` property + from an exception object). The use of negative values is a provisional + detail which may change in the future. + """ + ... + +def callback(*args, **kwargs) -> Incomplete: ... + +class socket: + """ + Create a new socket using the given address family, socket type and + protocol number. Note that specifying *proto* in most cases is not + required (and not recommended, as some MicroPython ports may omit + ``IPPROTO_*`` constants). Instead, *type* argument will select needed + protocol automatically:: + + # Create STREAM TCP socket + socket(AF_INET, SOCK_STREAM) + # Create DGRAM UDP socket + socket(AF_INET, SOCK_DGRAM) + """ + + def recvfrom(self, bufsize) -> Tuple: + """ + Receive data from the socket. The return value is a pair *(bytes, address)* where *bytes* is a + bytes object representing the data received and *address* is the address of the socket sending + the data. + """ + ... + def recv(self, bufsize) -> bytes: + """ + Receive data from the socket. The return value is a bytes object representing the data + received. The maximum amount of data to be received at once is specified by bufsize. + """ + ... + def makefile(self, mode="rb", buffering=0, /) -> IO: + """ + Return a file object associated with the socket. The exact returned type depends on the arguments + given to makefile(). The support is limited to binary modes only ('rb', 'wb', and 'rwb'). + CPython's arguments: *encoding*, *errors* and *newline* are not supported. + + Difference to CPython + + As MicroPython doesn't support buffered streams, values of *buffering* + parameter is ignored and treated as if it was 0 (unbuffered). + + Difference to CPython + + Closing the file object returned by makefile() WILL close the + original socket as well. + """ + ... + def listen(self, backlog: Optional[Any] = None) -> None: + """ + Enable a server to accept connections. If *backlog* is specified, it must be at least 0 + (if it's lower, it will be set to 0); and specifies the number of unaccepted connections + that the system will allow before refusing new connections. If not specified, a default + reasonable value is chosen. + """ + ... + def settimeout(self, value) -> Incomplete: + """ + **Note**: Not every port supports this method, see below. + + Set a timeout on blocking socket operations. The value argument can be a nonnegative floating + point number expressing seconds, or None. 
If a non-zero value is given, subsequent socket operations + will raise an `OSError` exception if the timeout period value has elapsed before the operation has + completed. If zero is given, the socket is put in non-blocking mode. If None is given, the socket + is put in blocking mode. + + Not every :term:`MicroPython port` supports this method. A more portable and + generic solution is to use `select.poll` object. This allows to wait on + multiple objects at the same time (and not just on sockets, but on generic + `stream` objects which support polling). Example:: + + # Instead of: + s.settimeout(1.0) # time in seconds + s.read(10) # may timeout + + # Use: + poller = select.poll() + poller.register(s, select.POLLIN) + res = poller.poll(1000) # time in milliseconds + if not res: + # s is still not ready for input, i.e. operation timed out + + Difference to CPython + + CPython raises a ``socket.timeout`` exception in case of timeout, + which is an `OSError` subclass. MicroPython raises an OSError directly + instead. If you use ``except OSError:`` to catch the exception, + your code will work both in MicroPython and CPython. + """ + ... + def sendall(self, bytes) -> int: + """ + Send all data to the socket. The socket must be connected to a remote socket. + Unlike `send()`, this method will try to send all of data, by sending data + chunk by chunk consecutively. + + The behaviour of this method on non-blocking sockets is undefined. Due to this, + on MicroPython, it's recommended to use `write()` method instead, which + has the same "no short writes" policy for blocking sockets, and will return + number of bytes sent on non-blocking sockets. + """ + ... + def setsockopt(self, level, optname, value) -> None: + """ + Set the value of the given socket option. The needed symbolic constants are defined in the + socket module (SO_* etc.). The *value* can be an integer or a bytes-like object representing + a buffer. + """ + ... + def setblocking(self, flag) -> Incomplete: + """ + Set blocking or non-blocking mode of the socket: if flag is false, the socket is set to non-blocking, + else to blocking mode. + + This method is a shorthand for certain `settimeout()` calls: + + * ``sock.setblocking(True)`` is equivalent to ``sock.settimeout(None)`` + * ``sock.setblocking(False)`` is equivalent to ``sock.settimeout(0)`` + """ + ... + def sendto(self, bytes, address) -> None: + """ + Send data to the socket. The socket should not be connected to a remote socket, since the + destination socket is specified by *address*. + """ + ... + def readline(self) -> Incomplete: + """ + Read a line, ending in a newline character. + + Return value: the line read. + """ + ... + def readinto(self, buf, nbytes: Optional[Any] = None) -> int: + """ + Read bytes into the *buf*. If *nbytes* is specified then read at most + that many bytes. Otherwise, read at most *len(buf)* bytes. Just as + `read()`, this method follows "no short reads" policy. + + Return value: number of bytes read and stored into *buf*. + """ + ... + def read(self, size: Optional[Any] = None) -> bytes: + """ + Read up to size bytes from the socket. Return a bytes object. If *size* is not given, it + reads all data available from the socket until EOF; as such the method will not return until + the socket is closed. This function tries to read as much data as + requested (no "short reads"). This may be not possible with + non-blocking socket though, and then less data will be returned. + """ + ... 
+ def close(self) -> Incomplete: + """ + Mark the socket closed and release all resources. Once that happens, all future operations + on the socket object will fail. The remote end will receive EOF indication if + supported by protocol. + + Sockets are automatically closed when they are garbage-collected, but it is recommended + to `close()` them explicitly as soon you finished working with them. + """ + ... + def connect(self, address) -> None: + """ + Connect to a remote socket at *address*. + """ + ... + def send(self, bytes) -> int: + """ + Send data to the socket. The socket must be connected to a remote socket. + Returns number of bytes sent, which may be smaller than the length of data + ("short write"). + """ + ... + def bind(self, address) -> Incomplete: + """ + Bind the socket to *address*. The socket must not already be bound. + """ + ... + def accept(self) -> Tuple: + """ + Accept a connection. The socket must be bound to an address and listening for connections. + The return value is a pair (conn, address) where conn is a new socket object usable to send + and receive data on the connection, and address is the address bound to the socket on the + other end of the connection. + """ + ... + def write(self, buf) -> int: + """ + Write the buffer of bytes to the socket. This function will try to + write all data to a socket (no "short writes"). This may be not possible + with a non-blocking socket though, and returned value will be less than + the length of *buf*. + + Return value: number of bytes written. + """ + ... + def __init__(self, af=AF_INET, type=SOCK_STREAM, proto=IPPROTO_TCP, /) -> None: ... diff --git a/.vscode/Pico-W-Stub/ussl.pyi b/.vscode/Pico-W-Stub/ussl.pyi new file mode 100644 index 0000000..60fa240 --- /dev/null +++ b/.vscode/Pico-W-Stub/ussl.pyi @@ -0,0 +1,74 @@ +""" +TLS/SSL wrapper for socket objects. + +MicroPython module: https://docs.micropython.org/en/v1.21.0/library/ssl.html + +CPython module: :mod:`python:ssl` https://docs.python.org/3/library/ssl.html . + +This module provides access to Transport Layer Security (previously and +widely known as “Secure Sockets Layer”) encryption and peer authentication +facilities for network sockets, both client-side and server-side. +""" +from _typeshed import Incomplete, Incomplete as Incomplete +from stdlib.ssl import * +from typing import IO + +CERT_REQUIRED: int +PROTOCOL_TLS_CLIENT: int +PROTOCOL_TLS_SERVER: int +CERT_OPTIONAL: int +CERT_NONE: int + +def wrap_socket( + sock, server_side=False, keyfile=None, certfile=None, cert_reqs=None, cadata=None, server_hostname=None, do_handshake=True +) -> IO: + """ + Wrap the given *sock* and return a new wrapped-socket object. The implementation + of this function is to first create an `SSLContext` and then call the `SSLContext.wrap_socket` + method on that context object. The arguments *sock*, *server_side* and *server_hostname* are + passed through unchanged to the method call. The argument *do_handshake* is passed through as + *do_handshake_on_connect*. The remaining arguments have the following behaviour: + + - *cert_reqs* determines whether the peer (server or client) must present a valid certificate. + Note that for mbedtls based ports, ``ssl.CERT_NONE`` and ``ssl.CERT_OPTIONAL`` will not + validate any certificate, only ``ssl.CERT_REQUIRED`` will. + + - *cadata* is a bytes object containing the CA certificate chain (in DER format) that will + validate the peer's certificate. Currently only a single DER-encoded certificate is supported. 
+ + Depending on the underlying module implementation in a particular + :term:`MicroPython port`, some or all keyword arguments above may be not supported. + """ + ... + +class SSLContext: + """ + Create a new SSLContext instance. The *protocol* argument must be one of the ``PROTOCOL_*`` + constants. + """ + + def wrap_socket(self, sock, *, server_side=False, do_handshake_on_connect=True, server_hostname=None) -> Incomplete: + """ + Takes a `stream` *sock* (usually socket.socket instance of ``SOCK_STREAM`` type), + and returns an instance of ssl.SSLSocket, wrapping the underlying stream. + The returned object has the usual `stream` interface methods like + ``read()``, ``write()``, etc. + + - *server_side* selects whether the wrapped socket is on the server or client side. + A server-side SSL socket should be created from a normal socket returned from + :meth:`~socket.socket.accept()` on a non-SSL listening server socket. + + - *do_handshake_on_connect* determines whether the handshake is done as part of the ``wrap_socket`` + or whether it is deferred to be done as part of the initial reads or writes + For blocking sockets doing the handshake immediately is standard. For non-blocking + sockets (i.e. when the *sock* passed into ``wrap_socket`` is in non-blocking mode) + the handshake should generally be deferred because otherwise ``wrap_socket`` blocks + until it completes. Note that in AXTLS the handshake can be deferred until the first + read or write but it then blocks until completion. + + - *server_hostname* is for use as a client, and sets the hostname to check against the received + server certificate. It also sets the name for Server Name Indication (SNI), allowing the server + to present the proper certificate. + """ + ... + def __init__(self, protocol, /) -> None: ... diff --git a/.vscode/Pico-W-Stub/ustruct.pyi b/.vscode/Pico-W-Stub/ustruct.pyi new file mode 100644 index 0000000..8638bc5 --- /dev/null +++ b/.vscode/Pico-W-Stub/ustruct.pyi @@ -0,0 +1,93 @@ +""" +Pack and unpack primitive data types. + +MicroPython module: https://docs.micropython.org/en/v1.21.0/library/struct.html + +CPython module: :mod:`python:struct` https://docs.python.org/3/library/struct.html . + +The following byte orders are supported: + ++-----------+------------------------+----------+-----------+ +| Character | Byte order | Size | Alignment | ++===========+========================+==========+===========+ +| @ | native | native | native | ++-----------+------------------------+----------+-----------+ +| < | little-endian | standard | none | ++-----------+------------------------+----------+-----------+ +| > | big-endian | standard | none | ++-----------+------------------------+----------+-----------+ +| ! 
| network (= big-endian) | standard | none | ++-----------+------------------------+----------+-----------+ + +The following data types are supported: + ++--------+--------------------+-------------------+---------------+ +| Format | C Type | Python type | Standard size | ++========+====================+===================+===============+ +| b | signed char | integer | 1 | ++--------+--------------------+-------------------+---------------+ +| B | unsigned char | integer | 1 | ++--------+--------------------+-------------------+---------------+ +| h | short | integer | 2 | ++--------+--------------------+-------------------+---------------+ +| H | unsigned short | integer | 2 | ++--------+--------------------+-------------------+---------------+ +| i | int | integer (`1`) | 4 | ++--------+--------------------+-------------------+---------------+ +| I | unsigned int | integer (`1`) | 4 | ++--------+--------------------+-------------------+---------------+ +| l | long | integer (`1`) | 4 | ++--------+--------------------+-------------------+---------------+ +| L | unsigned long | integer (`1`) | 4 | ++--------+--------------------+-------------------+---------------+ +| q | long long | integer (`1`) | 8 | ++--------+--------------------+-------------------+---------------+ +| Q | unsigned long long | integer (`1`) | 8 | ++--------+--------------------+-------------------+---------------+ +| f | float | float (`2`) | 4 | ++--------+--------------------+-------------------+---------------+ +| d | double | float (`2`) | 8 | ++--------+--------------------+-------------------+---------------+ +| s | char[] | bytes | | ++--------+--------------------+-------------------+---------------+ +| P | void * | integer | | ++--------+--------------------+-------------------+---------------+ +""" +from _typeshed import Incomplete, Incomplete as Incomplete +from typing import Tuple + +def pack_into(fmt, buffer, offset, v1, *args, **kwargs) -> Incomplete: + """ + Pack the values *v1*, *v2*, ... according to the format string *fmt* + into a *buffer* starting at *offset*. *offset* may be negative to count + from the end of *buffer*. + """ + ... + +def unpack(fmt, data) -> Tuple: + """ + Unpack from the *data* according to the format string *fmt*. + The return value is a tuple of the unpacked values. + """ + ... + +def unpack_from(fmt, data, offset=0, /) -> Tuple: + """ + Unpack from the *data* starting at *offset* according to the format string + *fmt*. *offset* may be negative to count from the end of *data*. The return + value is a tuple of the unpacked values. + """ + ... + +def pack(fmt, v1, *args, **kwargs) -> bytes: + """ + Pack the values *v1*, *v2*, ... according to the format string *fmt*. + The return value is a bytes object encoding the values. + """ + ... + +def calcsize(fmt) -> int: + """ + Return the number of bytes needed to store the given *fmt*. + """ + ... diff --git a/.vscode/Pico-W-Stub/usys.pyi b/.vscode/Pico-W-Stub/usys.pyi new file mode 100644 index 0000000..9cabee8 --- /dev/null +++ b/.vscode/Pico-W-Stub/usys.pyi @@ -0,0 +1,49 @@ +""" +System specific functions. + +MicroPython module: https://docs.micropython.org/en/v1.21.0/library/sys.html + +CPython module: :mod:`python:sys` https://docs.python.org/3/library/sys.html . 
+""" +from _typeshed import Incomplete, Incomplete as Incomplete +from typing import Dict, List, Tuple + +platform: str +version_info: tuple +path: list +version: str +ps1: str +ps2: str +byteorder: str +modules: dict +argv: list +implementation: tuple +maxsize: int + +def print_exception(exc, file=stdout, /) -> None: + """ + Print exception with a traceback to a file-like object *file* (or + `sys.stdout` by default). + + Difference to CPython + + This is simplified version of a function which appears in the + ``traceback`` module in CPython. Unlike ``traceback.print_exception()``, + this function takes just exception value instead of exception type, + exception value, and traceback object; *file* argument should be + positional; further arguments are not supported. CPython-compatible + ``traceback`` module can be found in `micropython-lib`. + """ + ... + +def exit(retval=0, /) -> Incomplete: + """ + Terminate current program with a given exit code. Underlyingly, this + function raise as `SystemExit` exception. If an argument is given, its + value given as an argument to `SystemExit`. + """ + ... + +stderr: Incomplete +stdout: Incomplete +stdin: Incomplete diff --git a/.vscode/Pico-W-Stub/utime.pyi b/.vscode/Pico-W-Stub/utime.pyi new file mode 100644 index 0000000..39399c1 --- /dev/null +++ b/.vscode/Pico-W-Stub/utime.pyi @@ -0,0 +1,290 @@ +""" +Time related functions. + +MicroPython module: https://docs.micropython.org/en/v1.21.0/library/time.html + +CPython module: :mod:`python:time` https://docs.python.org/3/library/time.html . + +The ``time`` module provides functions for getting the current time and date, +measuring time intervals, and for delays. + +**Time Epoch**: Unix port uses standard for POSIX systems epoch of +1970-01-01 00:00:00 UTC. However, some embedded ports use epoch of +2000-01-01 00:00:00 UTC. Epoch year may be determined with ``gmtime(0)[0]``. + +**Maintaining actual calendar date/time**: This requires a +Real Time Clock (RTC). On systems with underlying OS (including some +RTOS), an RTC may be implicit. Setting and maintaining actual calendar +time is responsibility of OS/RTOS and is done outside of MicroPython, +it just uses OS API to query date/time. On baremetal ports however +system time depends on ``machine.RTC()`` object. The current calendar time +may be set using ``machine.RTC().datetime(tuple)`` function, and maintained +by following means: + +* By a backup battery (which may be an additional, optional component for + a particular board). +* Using networked time protocol (requires setup by a port/user). +* Set manually by a user on each power-up (many boards then maintain + RTC time across hard resets, though some may require setting it again + in such case). + +If actual calendar time is not maintained with a system/MicroPython RTC, +functions below which require reference to current absolute time may +behave not as expected. +""" +from _typeshed import Incomplete, Incomplete as Incomplete +from typing import Any, Optional, Tuple + +def ticks_diff(ticks1, ticks2) -> int: + """ + Measure ticks difference between values returned from `ticks_ms()`, `ticks_us()`, + or `ticks_cpu()` functions, as a signed value which may wrap around. + + The argument order is the same as for subtraction + operator, ``ticks_diff(ticks1, ticks2)`` has the same meaning as ``ticks1 - ticks2``. + However, values returned by `ticks_ms()`, etc. functions may wrap around, so + directly using subtraction on them will produce incorrect result. 
That is why + `ticks_diff()` is needed, it implements modular (or more specifically, ring) + arithmetic to produce correct result even for wrap-around values (as long as they not + too distant in between, see below). The function returns **signed** value in the range + [*-TICKS_PERIOD/2* .. *TICKS_PERIOD/2-1*] (that's a typical range definition for + two's-complement signed binary integers). If the result is negative, it means that + *ticks1* occurred earlier in time than *ticks2*. Otherwise, it means that + *ticks1* occurred after *ticks2*. This holds **only** if *ticks1* and *ticks2* + are apart from each other for no more than *TICKS_PERIOD/2-1* ticks. If that does + not hold, incorrect result will be returned. Specifically, if two tick values are + apart for *TICKS_PERIOD/2-1* ticks, that value will be returned by the function. + However, if *TICKS_PERIOD/2* of real-time ticks has passed between them, the + function will return *-TICKS_PERIOD/2* instead, i.e. result value will wrap around + to the negative range of possible values. + + Informal rationale of the constraints above: Suppose you are locked in a room with no + means to monitor passing of time except a standard 12-notch clock. Then if you look at + dial-plate now, and don't look again for another 13 hours (e.g., if you fall for a + long sleep), then once you finally look again, it may seem to you that only 1 hour + has passed. To avoid this mistake, just look at the clock regularly. Your application + should do the same. "Too long sleep" metaphor also maps directly to application + behaviour: don't let your application run any single task for too long. Run tasks + in steps, and do time-keeping in between. + + `ticks_diff()` is designed to accommodate various usage patterns, among them: + + * Polling with timeout. In this case, the order of events is known, and you will deal + only with positive results of `ticks_diff()`:: + + # Wait for GPIO pin to be asserted, but at most 500us + start = time.ticks_us() + while pin.value() == 0: + if time.ticks_diff(time.ticks_us(), start) > 500: + raise TimeoutError + + * Scheduling events. In this case, `ticks_diff()` result may be negative + if an event is overdue:: + + # This code snippet is not optimized + now = time.ticks_ms() + scheduled_time = task.scheduled_time() + if ticks_diff(scheduled_time, now) > 0: + print("Too early, let's nap") + sleep_ms(ticks_diff(scheduled_time, now)) + task.run() + elif ticks_diff(scheduled_time, now) == 0: + print("Right at time!") + task.run() + elif ticks_diff(scheduled_time, now) < 0: + print("Oops, running late, tell task to run faster!") + task.run(run_faster=true) + + Note: Do not pass `time()` values to `ticks_diff()`, you should use + normal mathematical operations on them. But note that `time()` may (and will) + also overflow. This is known as https://en.wikipedia.org/wiki/Year_2038_problem . + """ + ... + +def ticks_add(ticks, delta) -> Incomplete: + """ + Offset ticks value by a given number, which can be either positive or negative. + Given a *ticks* value, this function allows to calculate ticks value *delta* + ticks before or after it, following modular-arithmetic definition of tick values + (see `ticks_ms()` above). *ticks* parameter must be a direct result of call + to `ticks_ms()`, `ticks_us()`, or `ticks_cpu()` functions (or from previous + call to `ticks_add()`). However, *delta* can be an arbitrary integer number + or numeric expression. `ticks_add()` is useful for calculating deadlines for + events/tasks. 
(Note: you must use `ticks_diff()` function to work with + deadlines.) + + Examples:: + + # Find out what ticks value there was 100ms ago + print(ticks_add(time.ticks_ms(), -100)) + + # Calculate deadline for operation and test for it + deadline = ticks_add(time.ticks_ms(), 200) + while ticks_diff(deadline, time.ticks_ms()) > 0: + do_a_little_of_something() + + # Find out TICKS_MAX used by this port + print(ticks_add(0, -1)) + """ + ... + +def ticks_cpu() -> Incomplete: + """ + Similar to `ticks_ms()` and `ticks_us()`, but with the highest possible resolution + in the system. This is usually CPU clocks, and that's why the function is named that + way. But it doesn't have to be a CPU clock, some other timing source available in a + system (e.g. high-resolution timer) can be used instead. The exact timing unit + (resolution) of this function is not specified on ``time`` module level, but + documentation for a specific port may provide more specific information. This + function is intended for very fine benchmarking or very tight real-time loops. + Avoid using it in portable code. + + Availability: Not every port implements this function. + """ + ... + +def time() -> int: + """ + Returns the number of seconds, as an integer, since the Epoch, assuming that + underlying RTC is set and maintained as described above. If an RTC is not set, this + function returns number of seconds since a port-specific reference point in time (for + embedded boards without a battery-backed RTC, usually since power up or reset). If you + want to develop portable MicroPython application, you should not rely on this function + to provide higher than second precision. If you need higher precision, absolute + timestamps, use `time_ns()`. If relative times are acceptable then use the + `ticks_ms()` and `ticks_us()` functions. If you need calendar time, `gmtime()` or + `localtime()` without an argument is a better choice. + + Difference to CPython + + In CPython, this function returns number of + seconds since Unix epoch, 1970-01-01 00:00 UTC, as a floating-point, + usually having microsecond precision. With MicroPython, only Unix port + uses the same Epoch, and if floating-point precision allows, + returns sub-second precision. Embedded hardware usually doesn't have + floating-point precision to represent both long time ranges and subsecond + precision, so they use integer value with second precision. Some embedded + hardware also lacks battery-powered RTC, so returns number of seconds + since last power-up or from other relative, hardware-specific point + (e.g. reset). + """ + ... + +def ticks_ms() -> int: + """ + Returns an increasing millisecond counter with an arbitrary reference point, that + wraps around after some value. + + The wrap-around value is not explicitly exposed, but we will + refer to it as *TICKS_MAX* to simplify discussion. Period of the values is + *TICKS_PERIOD = TICKS_MAX + 1*. *TICKS_PERIOD* is guaranteed to be a power of + two, but otherwise may differ from port to port. The same period value is used + for all of `ticks_ms()`, `ticks_us()`, `ticks_cpu()` functions (for + simplicity). Thus, these functions will return a value in range [*0* .. + *TICKS_MAX*], inclusive, total *TICKS_PERIOD* values. Note that only + non-negative values are used. For the most part, you should treat values returned + by these functions as opaque. The only operations available for them are + `ticks_diff()` and `ticks_add()` functions described below. 
+ + Note: Performing standard mathematical operations (+, -) or relational + operators (<, <=, >, >=) directly on these value will lead to invalid + result. Performing mathematical operations and then passing their results + as arguments to `ticks_diff()` or `ticks_add()` will also lead to + invalid results from the latter functions. + """ + ... + +def ticks_us() -> Incomplete: + """ + Just like `ticks_ms()` above, but in microseconds. + """ + ... + +def time_ns() -> int: + """ + Similar to `time()` but returns nanoseconds since the Epoch, as an integer (usually + a big integer, so will allocate on the heap). + """ + ... + +def localtime(secs: Optional[Any] = None) -> Tuple: + """ + Convert the time *secs* expressed in seconds since the Epoch (see above) into an + 8-tuple which contains: ``(year, month, mday, hour, minute, second, weekday, yearday)`` + If *secs* is not provided or None, then the current time from the RTC is used. + + The `gmtime()` function returns a date-time tuple in UTC, and `localtime()` returns a + date-time tuple in local time. + + The format of the entries in the 8-tuple are: + + * year includes the century (for example 2014). + * month is 1-12 + * mday is 1-31 + * hour is 0-23 + * minute is 0-59 + * second is 0-59 + * weekday is 0-6 for Mon-Sun + * yearday is 1-366 + """ + ... + +def sleep_us(us) -> None: + """ + Delay for given number of microseconds, should be positive or 0. + + This function attempts to provide an accurate delay of at least *us* + microseconds, but it may take longer if the system has other higher priority + processing to perform. + """ + ... + +def gmtime(secs: Optional[Any] = None) -> Tuple: + """ + Convert the time *secs* expressed in seconds since the Epoch (see above) into an + 8-tuple which contains: ``(year, month, mday, hour, minute, second, weekday, yearday)`` + If *secs* is not provided or None, then the current time from the RTC is used. + + The `gmtime()` function returns a date-time tuple in UTC, and `localtime()` returns a + date-time tuple in local time. + + The format of the entries in the 8-tuple are: + + * year includes the century (for example 2014). + * month is 1-12 + * mday is 1-31 + * hour is 0-23 + * minute is 0-59 + * second is 0-59 + * weekday is 0-6 for Mon-Sun + * yearday is 1-366 + """ + ... + +def sleep_ms(ms) -> None: + """ + Delay for given number of milliseconds, should be positive or 0. + + This function will delay for at least the given number of milliseconds, but + may take longer than that if other processing must take place, for example + interrupt handlers or other threads. Passing in 0 for *ms* will still allow + this other processing to occur. Use `sleep_us()` for more precise delays. + """ + ... + +def mktime() -> int: + """ + This is inverse function of localtime. It's argument is a full 8-tuple + which expresses a time as per localtime. It returns an integer which is + the number of seconds since Jan 1, 2000. + """ + ... + +def sleep(seconds) -> Incomplete: + """ + Sleep for the given number of seconds. Some boards may accept *seconds* as a + floating-point number to sleep for a fractional number of seconds. Note that + other boards may not accept a floating-point argument, for compatibility with + them use `sleep_ms()` and `sleep_us()` functions. + """ + ... 
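A minimal sketch of the non-blocking polling pattern that the ``ticks_ms()``, ``ticks_add()`` and ``ticks_diff()`` docstrings above recommend. It assumes the Pico W's onboard LED is addressable as ``Pin("LED")`` (board-specific; adjust for your hardware)::

    import time
    from machine import Pin

    led = Pin("LED", Pin.OUT)   # assumption: onboard LED alias on the Pico W
    PERIOD_MS = 500

    deadline = time.ticks_add(time.ticks_ms(), PERIOD_MS)
    while True:
        # ticks_diff() uses modular arithmetic, so this comparison stays
        # correct even after ticks_ms() wraps around.
        if time.ticks_diff(deadline, time.ticks_ms()) <= 0:
            led.toggle()
            deadline = time.ticks_add(deadline, PERIOD_MS)
        # keep other work here short; do time-keeping between steps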
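The ``time()``, ``gmtime()``, ``localtime()`` and ``mktime()`` docstrings above note that the epoch is port-dependent (1970 on the Unix port, 2000 on most embedded ports) and that ``mktime()`` is the inverse of ``localtime()``. A short sanity check, assuming the board's RTC has been set::

    import time

    epoch_year = time.gmtime(0)[0]      # 1970 or 2000, depending on the port
    now = time.time()                   # whole seconds since that epoch
    fields = time.localtime(now)        # (year, month, mday, hour, minute, second, weekday, yearday)
    assert time.mktime(fields) == now   # mktime() round-trips localtime()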
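For the ``usys`` stub earlier in this patch: ``print_exception()`` takes only an exception value and an optional positional file argument, unlike CPython's ``traceback.print_exception()``. An illustrative snippet::

    import sys

    try:
        1 // 0
    except Exception as exc:
        sys.print_exception(exc)              # defaults to sys.stdout
        sys.print_exception(exc, sys.stderr)  # file argument is positional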
diff --git a/.vscode/Pico-W-Stub/uwebsocket.pyi b/.vscode/Pico-W-Stub/uwebsocket.pyi new file mode 100644 index 0000000..8fd1ed6 --- /dev/null +++ b/.vscode/Pico-W-Stub/uwebsocket.pyi @@ -0,0 +1,10 @@ +from _typeshed import Incomplete as Incomplete + +class websocket: + def readline(self, *args, **kwargs) -> Incomplete: ... + def ioctl(self, *args, **kwargs) -> Incomplete: ... + def write(self, *args, **kwargs) -> Incomplete: ... + def close(self, *args, **kwargs) -> Incomplete: ... + def readinto(self, *args, **kwargs) -> Incomplete: ... + def read(self, *args, **kwargs) -> Incomplete: ... + def __init__(self, *argv, **kwargs) -> None: ... diff --git a/.vscode/Pico-W-Stub/webrepl.pyi b/.vscode/Pico-W-Stub/webrepl.pyi new file mode 100644 index 0000000..1898247 --- /dev/null +++ b/.vscode/Pico-W-Stub/webrepl.pyi @@ -0,0 +1,15 @@ +from _typeshed import Incomplete + +listen_s: Incomplete +client_s: Incomplete +DEBUG: int +_DEFAULT_STATIC_HOST: Incomplete +static_host = _DEFAULT_STATIC_HOST + +def server_handshake(cl): ... +def send_html(cl) -> None: ... +def setup_conn(port, accept_handler): ... +def accept_conn(listen_sock): ... +def stop() -> None: ... +def start(port: int = ..., password: Incomplete | None = ..., accept_handler=...) -> None: ... +def start_foreground(port: int = ..., password: Incomplete | None = ...) -> None: ... diff --git a/.vscode/Pico-W-Stub/webrepl_setup.pyi b/.vscode/Pico-W-Stub/webrepl_setup.pyi new file mode 100644 index 0000000..4fb5ba7 --- /dev/null +++ b/.vscode/Pico-W-Stub/webrepl_setup.pyi @@ -0,0 +1,10 @@ +RC: str +CONFIG: str + +def input_choice(prompt, choices): ... +def getpass(prompt): ... +def input_pass(): ... +def exists(fname): ... +def get_daemon_status(): ... +def change_daemon(action) -> None: ... +def main() -> None: ... diff --git a/.vscode/Pico-W-Stub/websocket.pyi b/.vscode/Pico-W-Stub/websocket.pyi new file mode 100644 index 0000000..8fd1ed6 --- /dev/null +++ b/.vscode/Pico-W-Stub/websocket.pyi @@ -0,0 +1,10 @@ +from _typeshed import Incomplete as Incomplete + +class websocket: + def readline(self, *args, **kwargs) -> Incomplete: ... + def ioctl(self, *args, **kwargs) -> Incomplete: ... + def write(self, *args, **kwargs) -> Incomplete: ... + def close(self, *args, **kwargs) -> Incomplete: ... + def readinto(self, *args, **kwargs) -> Incomplete: ... + def read(self, *args, **kwargs) -> Incomplete: ... + def __init__(self, *argv, **kwargs) -> None: ... diff --git a/.vscode/extensions.json b/.vscode/extensions.json new file mode 100644 index 0000000..fbc7999 --- /dev/null +++ b/.vscode/extensions.json @@ -0,0 +1,8 @@ +{ + "recommendations": [ + "ms-python.python", + "visualstudioexptteam.vscodeintellicode", + "ms-python.vscode-pylance", + "paulober.pico-w-go" + ] +} \ No newline at end of file diff --git a/.vscode/settings.json b/.vscode/settings.json new file mode 100644 index 0000000..dc3612e --- /dev/null +++ b/.vscode/settings.json @@ -0,0 +1,16 @@ +{ + "python.linting.enabled": true, + "python.languageServer": "Pylance", + "python.analysis.typeCheckingMode": "basic", + "python.analysis.diagnosticSeverityOverrides": { + "reportMissingModuleSource": "none" + }, + "python.analysis.typeshedPaths": [ + ".vscode\\Pico-W-Stub" + ], + "python.analysis.extraPaths": [ + ".vscode\\Pico-W-Stub" + ], + "micropico.syncFolder": "", + "micropico.openOnStart": true +} \ No newline at end of file
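As a closing usage note for the ``struct`` stub earlier in this patch, whose docstrings document ``pack()``, ``unpack()`` and ``calcsize()`` together with the format-code table: a minimal sketch with an illustrative format string::

    import struct

    fmt = "<HHl"                                   # little-endian: two unsigned shorts, one signed 32-bit int
    payload = struct.pack(fmt, 1, 2, -3)
    assert len(payload) == struct.calcsize(fmt)    # 2 + 2 + 4 = 8 bytes
    a, b, c = struct.unpack(fmt, payload)          # -> (1, 2, -3)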