Micropython project base

Ádám Kovács
2023-11-10 15:09:19 +01:00
parent 25215733a6
commit 8aa7fb473b
171 changed files with 23640 additions and 0 deletions

161
.gitignore vendored Normal file

@@ -0,0 +1,161 @@
# Byte-compiled / optimized / DLL files
__pycache__/
*.py[cod]
*$py.class
# C extensions
*.so
# Distribution / packaging
.Python
build/
develop-eggs/
dist/
downloads/
eggs/
.eggs/
lib/
lib64/
parts/
sdist/
var/
wheels/
share/python-wheels/
*.egg-info/
.installed.cfg
*.egg
MANIFEST
# PyInstaller
# Usually these files are written by a python script from a template
# before PyInstaller builds the exe, so as to inject date/other infos into it.
*.manifest
*.spec
# Installer logs
pip-log.txt
pip-delete-this-directory.txt
# Unit test / coverage reports
htmlcov/
.tox/
.nox/
.coverage
.coverage.*
.cache
nosetests.xml
coverage.xml
*.cover
*.py,cover
.hypothesis/
.pytest_cache/
cover/
# Translations
*.mo
*.pot
# Django stuff:
*.log
local_settings.py
db.sqlite3
db.sqlite3-journal
# Flask stuff:
instance/
.webassets-cache
# Scrapy stuff:
.scrapy
# Sphinx documentation
docs/_build/
# PyBuilder
.pybuilder/
target/
# Jupyter Notebook
.ipynb_checkpoints
# IPython
profile_default/
ipython_config.py
# pyenv
# For a library or package, you might want to ignore these files since the code is
# intended to run in multiple environments; otherwise, check them in:
# .python-version
# pipenv
# According to pypa/pipenv#598, it is recommended to include Pipfile.lock in version control.
# However, in case of collaboration, if having platform-specific dependencies or dependencies
# having no cross-platform support, pipenv may install dependencies that don't work, or not
# install all needed dependencies.
#Pipfile.lock
# poetry
# Similar to Pipfile.lock, it is generally recommended to include poetry.lock in version control.
# This is especially recommended for binary packages to ensure reproducibility, and is more
# commonly ignored for libraries.
# https://python-poetry.org/docs/basic-usage/#commit-your-poetrylock-file-to-version-control
#poetry.lock
# pdm
# Similar to Pipfile.lock, it is generally recommended to include pdm.lock in version control.
#pdm.lock
# pdm stores project-wide configurations in .pdm.toml, but it is recommended to not include it
# in version control.
# https://pdm.fming.dev/#use-with-ide
.pdm.toml
# PEP 582; used by e.g. github.com/David-OConnor/pyflow and github.com/pdm-project/pdm
__pypackages__/
# Celery stuff
celerybeat-schedule
celerybeat.pid
# SageMath parsed files
*.sage.py
# Environments
.env
.venv
env/
venv/
ENV/
env.bak/
venv.bak/
# Spyder project settings
.spyderproject
.spyproject
# Rope project settings
.ropeproject
# mkdocs documentation
/site
# mypy
.mypy_cache/
.dmypy.json
dmypy.json
# Pyre type checker
.pyre/
# pytype static type analyzer
.pytype/
# Cython debug symbols
cython_debug/
# PyCharm
# JetBrains specific template is maintained in a separate JetBrains.gitignore that can
# be found at https://github.com/github/gitignore/blob/main/Global/JetBrains.gitignore
# and can be added to the global gitignore or merged into this file. For a more nuclear
# option (not recommended) you can uncomment the following to ignore the entire idea folder.
#.idea/
secrets.py

3
.micropico Normal file

@@ -0,0 +1,3 @@
{
"info": "This file is just used to identify a project folder."
}

27
.vscode/Pico-W-Stub/__builtins__.pyi vendored Normal file

@@ -0,0 +1,27 @@
# allows for type checking of additional builtins by pyright
from typing import Tuple, TypeVar
Const_T = TypeVar("Const_T", int, float, str, bytes, Tuple) # constant
def const(expr: Const_T) -> Const_T:
"""
Used to declare that the expression is a constant so that the compiler can
optimise it. The use of this function should be as follows::
from micropython import const
CONST_X = const(123)
CONST_Y = const(2 * CONST_X + 1)
Constants declared this way are still accessible as global variables from
outside the module they are declared in. On the other hand, if a constant
begins with an underscore then it is hidden, it is not available as a global
variable, and does not take up any memory during execution.
This `const` function is recognised directly by the MicroPython parser and is
provided as part of the :mod:`micropython` module mainly so that scripts can be
written which run under both CPython and MicroPython, by following the above
pattern.
"""
...
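
Editor's aside, not part of the stub: the dual-runtime pattern the docstring describes is usually written with an import fallback so the same module also runs under CPython, where micropython.const does not exist. The fallback and the constant names below are illustrative assumptions:

    try:
        from micropython import const   # MicroPython: the compiler folds these constants
    except ImportError:                 # CPython fallback (assumed helper, identity only)
        def const(x):
            return x

    _STACK_SIZE = const(4096)               # leading underscore: hidden, uses no RAM on MicroPython
    STACK_WORDS = const(_STACK_SIZE // 4)   # derived constant, still folded at compile time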

11
.vscode/Pico-W-Stub/_asyncio.pyi vendored Normal file

@@ -0,0 +1,11 @@
from _typeshed import Incomplete as Incomplete
class TaskQueue:
def push(self, *args, **kwargs) -> Incomplete: ...
def peek(self, *args, **kwargs) -> Incomplete: ...
def remove(self, *args, **kwargs) -> Incomplete: ...
def pop(self, *args, **kwargs) -> Incomplete: ...
def __init__(self, *argv, **kwargs) -> None: ...
class Task:
def __init__(self, *argv, **kwargs) -> None: ...

4
.vscode/Pico-W-Stub/_boot.pyi vendored Normal file

@@ -0,0 +1,4 @@
from _typeshed import Incomplete
bdev: Incomplete
vfs: Incomplete

4
.vscode/Pico-W-Stub/_boot_fat.pyi vendored Normal file

@@ -0,0 +1,4 @@
from _typeshed import Incomplete
bdev: Incomplete
vfs: Incomplete

8
.vscode/Pico-W-Stub/_onewire.pyi vendored Normal file

@@ -0,0 +1,8 @@
from _typeshed import Incomplete as Incomplete
def reset(*args, **kwargs) -> Incomplete: ...
def writebyte(*args, **kwargs) -> Incomplete: ...
def writebit(*args, **kwargs) -> Incomplete: ...
def crc8(*args, **kwargs) -> Incomplete: ...
def readbyte(*args, **kwargs) -> Incomplete: ...
def readbit(*args, **kwargs) -> Incomplete: ...

42
.vscode/Pico-W-Stub/_rp2.pyi vendored Normal file

@@ -0,0 +1,42 @@
from _typeshed import Incomplete as Incomplete
def country(*args, **kwargs) -> Incomplete: ...
def bootsel_button(*args, **kwargs) -> Incomplete: ...
class Flash:
def readblocks(self, *args, **kwargs) -> Incomplete: ...
def writeblocks(self, *args, **kwargs) -> Incomplete: ...
def ioctl(self, *args, **kwargs) -> Incomplete: ...
def __init__(self, *argv, **kwargs) -> None: ...
class PIO:
JOIN_TX: int
JOIN_NONE: int
JOIN_RX: int
SHIFT_LEFT: int
OUT_HIGH: int
OUT_LOW: int
SHIFT_RIGHT: int
IN_LOW: int
IRQ_SM3: int
IN_HIGH: int
IRQ_SM2: int
IRQ_SM0: int
IRQ_SM1: int
def state_machine(self, *args, **kwargs) -> Incomplete: ...
def remove_program(self, *args, **kwargs) -> Incomplete: ...
def irq(self, *args, **kwargs) -> Incomplete: ...
def add_program(self, *args, **kwargs) -> Incomplete: ...
def __init__(self, *argv, **kwargs) -> None: ...
class StateMachine:
def irq(self, *args, **kwargs) -> Incomplete: ...
def put(self, *args, **kwargs) -> Incomplete: ...
def restart(self, *args, **kwargs) -> Incomplete: ...
def rx_fifo(self, *args, **kwargs) -> Incomplete: ...
def tx_fifo(self, *args, **kwargs) -> Incomplete: ...
def init(self, *args, **kwargs) -> Incomplete: ...
def exec(self, *args, **kwargs) -> Incomplete: ...
def get(self, *args, **kwargs) -> Incomplete: ...
def active(self, *args, **kwargs) -> Incomplete: ...
def __init__(self, *argv, **kwargs) -> None: ...

25
.vscode/Pico-W-Stub/_thread.pyi vendored Normal file

@@ -0,0 +1,25 @@
"""
Multithreading support.
MicroPython module: https://docs.micropython.org/en/v1.21.0/library/_thread.html
CPython module: :mod:`python:_thread` https://docs.python.org/3/library/_thread.html .
This module implements multithreading support.
This module is highly experimental and its API is not yet fully settled
and not yet described in this documentation.
"""
from _typeshed import Incomplete, Incomplete as Incomplete
def get_ident(*args, **kwargs) -> Incomplete: ...
def start_new_thread(*args, **kwargs) -> Incomplete: ...
def stack_size(*args, **kwargs) -> Incomplete: ...
def exit(*args, **kwargs) -> Incomplete: ...
def allocate_lock(*args, **kwargs) -> Incomplete: ...
class LockType:
def locked(self, *args, **kwargs) -> Incomplete: ...
def release(self, *args, **kwargs) -> Incomplete: ...
def acquire(self, *args, **kwargs) -> Incomplete: ...
def __init__(self, *argv, **kwargs) -> None: ...
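
A minimal usage sketch for these declarations, assuming an RP2 board where start_new_thread() runs the function on the second core; the worker/counter names are illustrative:

    import _thread
    import time

    lock = _thread.allocate_lock()
    counter = 0

    def worker(loops):
        global counter
        for _ in range(loops):
            lock.acquire()        # protect the shared counter across both cores
            counter += 1
            lock.release()
            time.sleep_ms(1)

    _thread.start_new_thread(worker, (100,))   # second core
    worker(100)                                # main core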

15
.vscode/Pico-W-Stub/aioble/__init__.pyi vendored Normal file

@@ -0,0 +1,15 @@
from .central import scan as scan
from .core import GattError as GattError, config as config, log_error as log_error, log_warn as log_warn, stop as stop
from .device import Device as Device, DeviceDisconnectedError as DeviceDisconnectedError
from .peripheral import advertise as advertise
from .server import (
BufferedCharacteristic as BufferedCharacteristic,
Characteristic as Characteristic,
Descriptor as Descriptor,
Service as Service,
register_services as register_services,
)
from _typeshed import Incomplete
ADDR_PUBLIC: Incomplete
ADDR_RANDOM: Incomplete

71
.vscode/Pico-W-Stub/aioble/central.pyi vendored Normal file

@@ -0,0 +1,71 @@
from .core import (
ble as ble,
ensure_active as ensure_active,
log_error as log_error,
log_info as log_info,
log_warn as log_warn,
register_irq_handler as register_irq_handler,
)
from .device import Device as Device, DeviceConnection as DeviceConnection, DeviceTimeout as DeviceTimeout
from _typeshed import Incomplete
from collections.abc import Generator
_IRQ_SCAN_RESULT: Incomplete
_IRQ_SCAN_DONE: Incomplete
_IRQ_PERIPHERAL_CONNECT: Incomplete
_IRQ_PERIPHERAL_DISCONNECT: Incomplete
_ADV_IND: Incomplete
_ADV_DIRECT_IND: Incomplete
_ADV_SCAN_IND: Incomplete
_ADV_NONCONN_IND: Incomplete
_SCAN_RSP: Incomplete
_ADV_TYPE_FLAGS: Incomplete
_ADV_TYPE_NAME: Incomplete
_ADV_TYPE_SHORT_NAME: Incomplete
_ADV_TYPE_UUID16_INCOMPLETE: Incomplete
_ADV_TYPE_UUID16_COMPLETE: Incomplete
_ADV_TYPE_UUID32_INCOMPLETE: Incomplete
_ADV_TYPE_UUID32_COMPLETE: Incomplete
_ADV_TYPE_UUID128_INCOMPLETE: Incomplete
_ADV_TYPE_UUID128_COMPLETE: Incomplete
_ADV_TYPE_APPEARANCE: Incomplete
_ADV_TYPE_MANUFACTURER: Incomplete
_active_scanner: Incomplete
_connecting: Incomplete
def _central_irq(event, data) -> None: ...
def _central_shutdown() -> None: ...
async def _cancel_pending() -> None: ...
async def _connect(connection, timeout_ms) -> None: ...
class ScanResult:
device: Incomplete
adv_data: Incomplete
resp_data: Incomplete
rssi: Incomplete
connectable: bool
def __init__(self, device) -> None: ...
def _update(self, adv_type, rssi, adv_data): ...
def __str__(self): ...
def _decode_field(self, *adv_type) -> Generator[Incomplete, None, None]: ...
def name(self): ...
def services(self) -> Generator[Incomplete, None, None]: ...
def manufacturer(self, filter: Incomplete | None = ...) -> Generator[Incomplete, None, None]: ...
class scan:
_queue: Incomplete
_event: Incomplete
_done: bool
_results: Incomplete
_duration_ms: Incomplete
_interval_us: Incomplete
_window_us: Incomplete
_active: Incomplete
def __init__(
self, duration_ms, interval_us: Incomplete | None = ..., window_us: Incomplete | None = ..., active: bool = ...
) -> None: ...
async def __aenter__(self): ...
async def __aexit__(self, exc_type, exc_val, exc_traceback) -> None: ...
def __aiter__(self): ...
async def __anext__(self): ...
async def cancel(self) -> None: ...
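
A hedged sketch of how this scan API is typically driven, modeled on the micropython-lib aioble examples; the timing values are illustrative only:

    import asyncio
    import aioble

    async def find_devices():
        # 5 s active scan; scan() is both an async context manager and an async iterator
        async with aioble.scan(5000, interval_us=30000, window_us=30000, active=True) as scanner:
            async for result in scanner:
                print(result.device, result.rssi, result.name())

    asyncio.run(find_devices())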

100
.vscode/Pico-W-Stub/aioble/client.pyi vendored Normal file

@@ -0,0 +1,100 @@
from .core import GattError as GattError, ble as ble, register_irq_handler as register_irq_handler
from .device import DeviceConnection as DeviceConnection
from _typeshed import Incomplete
_IRQ_GATTC_SERVICE_RESULT: Incomplete
_IRQ_GATTC_SERVICE_DONE: Incomplete
_IRQ_GATTC_CHARACTERISTIC_RESULT: Incomplete
_IRQ_GATTC_CHARACTERISTIC_DONE: Incomplete
_IRQ_GATTC_DESCRIPTOR_RESULT: Incomplete
_IRQ_GATTC_DESCRIPTOR_DONE: Incomplete
_IRQ_GATTC_READ_RESULT: Incomplete
_IRQ_GATTC_READ_DONE: Incomplete
_IRQ_GATTC_WRITE_DONE: Incomplete
_IRQ_GATTC_NOTIFY: Incomplete
_IRQ_GATTC_INDICATE: Incomplete
_CCCD_UUID: Incomplete
_CCCD_NOTIFY: Incomplete
_CCCD_INDICATE: Incomplete
_FLAG_READ: Incomplete
_FLAG_WRITE_NO_RESPONSE: Incomplete
_FLAG_WRITE: Incomplete
_FLAG_NOTIFY: Incomplete
_FLAG_INDICATE: Incomplete
def _client_irq(event, data) -> None: ...
class ClientDiscover:
_connection: Incomplete
_queue: Incomplete
_status: Incomplete
_event: Incomplete
_disc_type: Incomplete
_parent: Incomplete
_timeout_ms: Incomplete
_args: Incomplete
def __init__(self, connection, disc_type, parent, timeout_ms, *args) -> None: ...
async def _start(self) -> None: ...
def __aiter__(self): ...
async def __anext__(self): ...
def _discover_result(conn_handle, *args) -> None: ...
def _discover_done(conn_handle, status) -> None: ...
class ClientService:
connection: Incomplete
_start_handle: Incomplete
_end_handle: Incomplete
uuid: Incomplete
def __init__(self, connection, start_handle, end_handle, uuid) -> None: ...
def __str__(self): ...
async def characteristic(self, uuid, timeout_ms: int = ...): ...
def characteristics(self, uuid: Incomplete | None = ..., timeout_ms: int = ...): ...
def _start_discovery(connection, uuid: Incomplete | None = ...) -> None: ...
class BaseClientCharacteristic:
_value_handle: Incomplete
properties: Incomplete
uuid: Incomplete
_read_event: Incomplete
_read_data: Incomplete
_read_status: Incomplete
_write_event: Incomplete
_write_status: Incomplete
def __init__(self, value_handle, properties, uuid) -> None: ...
def _register_with_connection(self) -> None: ...
def _find(conn_handle, value_handle): ...
def _check(self, flag) -> None: ...
async def read(self, timeout_ms: int = ...): ...
def _read_result(conn_handle, value_handle, data) -> None: ...
def _read_done(conn_handle, value_handle, status) -> None: ...
async def write(self, data, response: Incomplete | None = ..., timeout_ms: int = ...) -> None: ...
def _write_done(conn_handle, value_handle, status) -> None: ...
class ClientCharacteristic(BaseClientCharacteristic):
service: Incomplete
connection: Incomplete
_end_handle: Incomplete
_notify_event: Incomplete
_notify_queue: Incomplete
_indicate_event: Incomplete
_indicate_queue: Incomplete
def __init__(self, service, end_handle, value_handle, properties, uuid) -> None: ...
def __str__(self): ...
def _connection(self): ...
async def descriptor(self, uuid, timeout_ms: int = ...): ...
def descriptors(self, timeout_ms: int = ...): ...
def _start_discovery(service, uuid: Incomplete | None = ...) -> None: ...
async def _notified_indicated(self, queue, event, timeout_ms): ...
async def notified(self, timeout_ms: Incomplete | None = ...): ...
def _on_notify_indicate(self, queue, event, data) -> None: ...
def _on_notify(conn_handle, value_handle, notify_data) -> None: ...
async def indicated(self, timeout_ms: Incomplete | None = ...): ...
def _on_indicate(conn_handle, value_handle, indicate_data) -> None: ...
async def subscribe(self, notify: bool = ..., indicate: bool = ...) -> None: ...
class ClientDescriptor(BaseClientCharacteristic):
characteristic: Incomplete
def __init__(self, characteristic, dsc_handle, uuid) -> None: ...
def __str__(self): ...
def _connection(self): ...
def _start_discovery(characteristic, uuid: Incomplete | None = ...) -> None: ...

23
.vscode/Pico-W-Stub/aioble/core.pyi vendored Normal file

@@ -0,0 +1,23 @@
from _typeshed import Incomplete
log_level: int
def log_error(*args) -> None: ...
def log_warn(*args) -> None: ...
def log_info(*args) -> None: ...
class GattError(Exception):
_status: Incomplete
def __init__(self, status) -> None: ...
def ensure_active() -> None: ...
def config(*args, **kwargs): ...
_irq_handlers: Incomplete
_shutdown_handlers: Incomplete
def register_irq_handler(irq, shutdown) -> None: ...
def stop() -> None: ...
def ble_irq(event, data): ...
ble: Incomplete

62
.vscode/Pico-W-Stub/aioble/device.pyi vendored Normal file

@@ -0,0 +1,62 @@
from .core import ble as ble, log_error as log_error, register_irq_handler as register_irq_handler
from _typeshed import Incomplete
_IRQ_MTU_EXCHANGED: Incomplete
class DeviceDisconnectedError(Exception): ...
def _device_irq(event, data) -> None: ...
class DeviceTimeout:
_connection: Incomplete
_timeout_ms: Incomplete
_timeout_task: Incomplete
_task: Incomplete
def __init__(self, connection, timeout_ms) -> None: ...
async def _timeout_sleep(self) -> None: ...
def __enter__(self) -> None: ...
def __exit__(self, exc_type, exc_val, exc_traceback) -> None: ...
class Device:
addr_type: Incomplete
addr: Incomplete
_connection: Incomplete
def __init__(self, addr_type, addr) -> None: ...
def __eq__(self, rhs): ...
def __hash__(self): ...
def __str__(self): ...
def addr_hex(self): ...
async def connect(self, timeout_ms: int = ...): ...
class DeviceConnection:
_connected: Incomplete
device: Incomplete
encrypted: bool
authenticated: bool
bonded: bool
key_size: bool
mtu: Incomplete
_conn_handle: Incomplete
_event: Incomplete
_mtu_event: Incomplete
_discover: Incomplete
_characteristics: Incomplete
_task: Incomplete
_timeouts: Incomplete
_pair_event: Incomplete
_l2cap_channel: Incomplete
def __init__(self, device) -> None: ...
async def device_task(self) -> None: ...
def _run_task(self) -> None: ...
async def disconnect(self, timeout_ms: int = ...) -> None: ...
async def disconnected(self, timeout_ms: int = ..., disconnect: bool = ...) -> None: ...
async def service(self, uuid, timeout_ms: int = ...): ...
def services(self, uuid: Incomplete | None = ..., timeout_ms: int = ...): ...
async def pair(self, *args, **kwargs) -> None: ...
def is_connected(self): ...
def timeout(self, timeout_ms): ...
async def exchange_mtu(self, mtu: Incomplete | None = ..., timeout_ms: int = ...): ...
async def l2cap_accept(self, psm, mtu, timeout_ms: Incomplete | None = ...): ...
async def l2cap_connect(self, psm, mtu, timeout_ms: int = ...): ...
async def __aenter__(self): ...
async def __aexit__(self, exc_type, exc_val, exc_traceback) -> None: ...

39
.vscode/Pico-W-Stub/aioble/l2cap.pyi vendored Normal file

@@ -0,0 +1,39 @@
from .core import ble as ble, log_error as log_error, register_irq_handler as register_irq_handler
from .device import DeviceConnection as DeviceConnection
from _typeshed import Incomplete
_IRQ_L2CAP_ACCEPT: Incomplete
_IRQ_L2CAP_CONNECT: Incomplete
_IRQ_L2CAP_DISCONNECT: Incomplete
_IRQ_L2CAP_RECV: Incomplete
_IRQ_L2CAP_SEND_READY: Incomplete
_listening: bool
def _l2cap_irq(event, data) -> None: ...
def _l2cap_shutdown() -> None: ...
class L2CAPDisconnectedError(Exception): ...
class L2CAPConnectionError(Exception): ...
class L2CAPChannel:
_connection: Incomplete
our_mtu: int
peer_mtu: int
_cid: Incomplete
_status: int
_stalled: bool
_data_ready: bool
_event: Incomplete
def __init__(self, connection) -> None: ...
def _assert_connected(self) -> None: ...
async def recvinto(self, buf, timeout_ms: Incomplete | None = ...): ...
def available(self): ...
async def send(self, buf, timeout_ms: Incomplete | None = ..., chunk_size: Incomplete | None = ...) -> None: ...
async def flush(self, timeout_ms: Incomplete | None = ...) -> None: ...
async def disconnect(self, timeout_ms: int = ...) -> None: ...
async def disconnected(self, timeout_ms: int = ...) -> None: ...
async def __aenter__(self): ...
async def __aexit__(self, exc_type, exc_val, exc_traceback) -> None: ...
async def accept(connection, psm, mtu, timeout_ms): ...
async def connect(connection, psm, mtu, timeout_ms): ...

43
.vscode/Pico-W-Stub/aioble/peripheral.pyi vendored Normal file

@@ -0,0 +1,43 @@
from .core import (
ble as ble,
ensure_active as ensure_active,
log_error as log_error,
log_info as log_info,
log_warn as log_warn,
register_irq_handler as register_irq_handler,
)
from .device import Device as Device, DeviceConnection as DeviceConnection, DeviceTimeout as DeviceTimeout
from _typeshed import Incomplete
_IRQ_CENTRAL_CONNECT: Incomplete
_IRQ_CENTRAL_DISCONNECT: Incomplete
_ADV_TYPE_FLAGS: Incomplete
_ADV_TYPE_NAME: Incomplete
_ADV_TYPE_UUID16_COMPLETE: Incomplete
_ADV_TYPE_UUID32_COMPLETE: Incomplete
_ADV_TYPE_UUID128_COMPLETE: Incomplete
_ADV_TYPE_UUID16_MORE: Incomplete
_ADV_TYPE_UUID32_MORE: Incomplete
_ADV_TYPE_UUID128_MORE: Incomplete
_ADV_TYPE_APPEARANCE: Incomplete
_ADV_TYPE_MANUFACTURER: Incomplete
_ADV_PAYLOAD_MAX_LEN: Incomplete
_incoming_connection: Incomplete
_connect_event: Incomplete
def _peripheral_irq(event, data) -> None: ...
def _peripheral_shutdown() -> None: ...
def _append(adv_data, resp_data, adv_type, value): ...
async def advertise(
interval_us,
adv_data: Incomplete | None = ...,
resp_data: Incomplete | None = ...,
connectable: bool = ...,
limited_disc: bool = ...,
br_edr: bool = ...,
name: Incomplete | None = ...,
services: Incomplete | None = ...,
appearance: int = ...,
manufacturer: Incomplete | None = ...,
timeout_ms: Incomplete | None = ...,
): ...

26
.vscode/Pico-W-Stub/aioble/security.pyi vendored Normal file

@@ -0,0 +1,26 @@
from .core import ble as ble, log_info as log_info, log_warn as log_warn, register_irq_handler as register_irq_handler
from .device import DeviceConnection as DeviceConnection
from _typeshed import Incomplete
_IRQ_ENCRYPTION_UPDATE: Incomplete
_IRQ_GET_SECRET: Incomplete
_IRQ_SET_SECRET: Incomplete
_IRQ_PASSKEY_ACTION: Incomplete
_IO_CAPABILITY_DISPLAY_ONLY: Incomplete
_IO_CAPABILITY_DISPLAY_YESNO: Incomplete
_IO_CAPABILITY_KEYBOARD_ONLY: Incomplete
_IO_CAPABILITY_NO_INPUT_OUTPUT: Incomplete
_IO_CAPABILITY_KEYBOARD_DISPLAY: Incomplete
_PASSKEY_ACTION_INPUT: Incomplete
_PASSKEY_ACTION_DISP: Incomplete
_PASSKEY_ACTION_NUMCMP: Incomplete
_DEFAULT_PATH: str
_secrets: Incomplete
_modified: bool
_path: Incomplete
def load_secrets(path: Incomplete | None = ...) -> None: ...
def _save_secrets(arg: Incomplete | None = ...) -> None: ...
def _security_irq(event, data): ...
def _security_shutdown() -> None: ...
async def pair(connection, bond: bool = ..., le_secure: bool = ..., mitm: bool = ..., io=..., timeout_ms: int = ...) -> None: ...

100
.vscode/Pico-W-Stub/aioble/server.pyi vendored Normal file

@@ -0,0 +1,100 @@
from .core import (
GattError as GattError,
ble as ble,
ensure_active as ensure_active,
log_error as log_error,
log_info as log_info,
log_warn as log_warn,
register_irq_handler as register_irq_handler,
)
from .device import DeviceConnection as DeviceConnection, DeviceTimeout as DeviceTimeout
from _typeshed import Incomplete
_registered_characteristics: Incomplete
_IRQ_GATTS_WRITE: Incomplete
_IRQ_GATTS_READ_REQUEST: Incomplete
_IRQ_GATTS_INDICATE_DONE: Incomplete
_FLAG_READ: Incomplete
_FLAG_WRITE_NO_RESPONSE: Incomplete
_FLAG_WRITE: Incomplete
_FLAG_NOTIFY: Incomplete
_FLAG_INDICATE: Incomplete
_FLAG_READ_ENCRYPTED: Incomplete
_FLAG_READ_AUTHENTICATED: Incomplete
_FLAG_READ_AUTHORIZED: Incomplete
_FLAG_WRITE_ENCRYPTED: Incomplete
_FLAG_WRITE_AUTHENTICATED: Incomplete
_FLAG_WRITE_AUTHORIZED: Incomplete
_FLAG_WRITE_CAPTURE: Incomplete
_WRITE_CAPTURE_QUEUE_LIMIT: Incomplete
def _server_irq(event, data): ...
def _server_shutdown() -> None: ...
class Service:
uuid: Incomplete
characteristics: Incomplete
def __init__(self, uuid) -> None: ...
def _tuple(self): ...
class BaseCharacteristic:
_value_handle: Incomplete
_initial: Incomplete
def _register(self, value_handle) -> None: ...
def read(self): ...
def write(self, data, send_update: bool = ...) -> None: ...
@staticmethod
def _init_capture() -> None: ...
@staticmethod
async def _run_capture_task() -> None: ...
_write_data: Incomplete
async def written(self, timeout_ms: Incomplete | None = ...): ...
def on_read(self, connection): ...
def _remote_write(conn_handle, value_handle) -> None: ...
def _remote_read(conn_handle, value_handle): ...
class Characteristic(BaseCharacteristic):
descriptors: Incomplete
_write_event: Incomplete
_write_data: Incomplete
_indicate_connection: Incomplete
_indicate_event: Incomplete
_indicate_status: Incomplete
uuid: Incomplete
flags: Incomplete
_value_handle: Incomplete
_initial: Incomplete
def __init__(
self,
service,
uuid,
read: bool = ...,
write: bool = ...,
write_no_response: bool = ...,
notify: bool = ...,
indicate: bool = ...,
initial: Incomplete | None = ...,
capture: bool = ...,
) -> None: ...
def _tuple(self): ...
def notify(self, connection, data: Incomplete | None = ...) -> None: ...
async def indicate(self, connection, data: Incomplete | None = ..., timeout_ms: int = ...) -> None: ...
def _indicate_done(conn_handle, value_handle, status) -> None: ...
class BufferedCharacteristic(Characteristic):
_max_len: Incomplete
_append: Incomplete
def __init__(self, *args, max_len: int = ..., append: bool = ..., **kwargs) -> None: ...
def _register(self, value_handle) -> None: ...
class Descriptor(BaseCharacteristic):
_write_event: Incomplete
_write_data: Incomplete
uuid: Incomplete
flags: Incomplete
_value_handle: Incomplete
_initial: Incomplete
def __init__(self, characteristic, uuid, read: bool = ..., write: bool = ..., initial: Incomplete | None = ...) -> None: ...
def _tuple(self): ...
def register_services(*services) -> None: ...
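
A sketch of the GATT-server side of this API, loosely following the micropython-lib aioble temperature-sensor example; the UUIDs, advertising interval, and device name are assumptions for illustration:

    import asyncio
    import bluetooth
    import aioble

    _ENV_SENSE_UUID = bluetooth.UUID(0x181A)   # Environmental Sensing service
    _TEMP_CHAR_UUID = bluetooth.UUID(0x2A6E)   # Temperature characteristic

    service = aioble.Service(_ENV_SENSE_UUID)
    temp_char = aioble.Characteristic(service, _TEMP_CHAR_UUID, read=True, notify=True)
    aioble.register_services(service)

    async def serve():
        while True:
            connection = await aioble.advertise(250_000, name="pico-w", services=[_ENV_SENSE_UUID])
            print("connected:", connection.device)
            await connection.disconnected(timeout_ms=None)

    asyncio.run(serve())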

33
.vscode/Pico-W-Stub/array.pyi vendored Normal file

@@ -0,0 +1,33 @@
"""
Efficient arrays of numeric data.
MicroPython module: https://docs.micropython.org/en/v1.21.0/library/array.html
CPython module: :mod:`python:array` https://docs.python.org/3/library/array.html .
Supported format codes: ``b``, ``B``, ``h``, ``H``, ``i``, ``I``, ``l``,
``L``, ``q``, ``Q``, ``f``, ``d`` (the latter 2 depending on the
floating-point support).
"""
from _typeshed import Incomplete, Incomplete as Incomplete
from typing import Any, List, Optional
class array:
"""
Create array with elements of given type. Initial contents of the
array are given by *iterable*. If it is not provided, an empty
array is created.
"""
def extend(self, iterable) -> Incomplete:
"""
Append new elements as contained in *iterable* to the end of
array, growing it.
"""
...
def append(self, val) -> Incomplete:
"""
Append new element *val* to the end of array, growing it.
"""
...
def __init__(self, typecode, iterable: Optional[Any] = None) -> None: ...
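
For reference, a small runnable example of the two documented methods; the values are arbitrary:

    from array import array

    samples = array("H", range(8))     # eight unsigned 16-bit values 0..7
    samples.append(1023)               # grow by one element
    samples.extend((512, 256))         # grow by an iterable
    print(len(samples), samples[8])    # 11 1023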

7
.vscode/Pico-W-Stub/asyncio/__init__.pyi vendored Normal file

@@ -0,0 +1,7 @@
from .core import *
from _typeshed import Incomplete
__version__: Incomplete
_attrs: Incomplete
def __getattr__(attr): ...

52
.vscode/Pico-W-Stub/asyncio/core.pyi vendored Normal file

@@ -0,0 +1,52 @@
from .task import Task as Task, TaskQueue as TaskQueue
from _typeshed import Incomplete
class CancelledError(BaseException): ...
class TimeoutError(Exception): ...
_exc_context: Incomplete
class SingletonGenerator:
state: Incomplete
exc: Incomplete
def __init__(self) -> None: ...
def __iter__(self): ...
def __next__(self) -> None: ...
def sleep_ms(t, sgen=...): ...
def sleep(t): ...
class IOQueue:
poller: Incomplete
map: Incomplete
def __init__(self) -> None: ...
def _enqueue(self, s, idx) -> None: ...
def _dequeue(self, s) -> None: ...
def queue_read(self, s) -> None: ...
def queue_write(self, s) -> None: ...
def remove(self, task) -> None: ...
def wait_io_event(self, dt) -> None: ...
def _promote_to_task(aw): ...
def create_task(coro): ...
def run_until_complete(main_task: Incomplete | None = ...): ...
def run(coro): ...
async def _stopper() -> None: ...
_stop_task: Incomplete
class Loop:
_exc_handler: Incomplete
def create_task(coro): ...
def run_forever() -> None: ...
def run_until_complete(aw): ...
def stop() -> None: ...
def close() -> None: ...
def set_exception_handler(handler) -> None: ...
def get_exception_handler(): ...
def default_exception_handler(loop, context) -> None: ...
def call_exception_handler(context) -> None: ...
def get_event_loop(runq_len: int = ..., waitq_len: int = ...): ...
def current_task(): ...
def new_event_loop(): ...
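
A brief sketch of the scheduler API declared above (create_task/run/sleep_ms); the blink coroutine is illustrative, and Task.cancel() comes from the asyncio.task module rather than this stub:

    import asyncio

    async def blink(interval_ms):
        while True:
            print("tick")
            await asyncio.sleep_ms(interval_ms)

    async def main():
        task = asyncio.create_task(blink(500))  # background task
        await asyncio.sleep(3)                  # let it run for about 3 s
        task.cancel()

    asyncio.run(main())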

21
.vscode/Pico-W-Stub/asyncio/event.pyi vendored Normal file

@@ -0,0 +1,21 @@
import io
from . import core as core
from _typeshed import Incomplete
from collections.abc import Generator
class Event:
state: bool
waiting: Incomplete
def __init__(self) -> None: ...
def is_set(self): ...
def set(self) -> None: ...
def clear(self) -> None: ...
def wait(self) -> Generator[None, None, Incomplete]: ...
class ThreadSafeFlag(io.IOBase):
state: int
def __init__(self) -> None: ...
def ioctl(self, req, flags): ...
def set(self) -> None: ...
def clear(self) -> None: ...
async def wait(self) -> Generator[Incomplete, None, None]: ...

13
.vscode/Pico-W-Stub/asyncio/funcs.pyi vendored Normal file

@@ -0,0 +1,13 @@
from . import core as core
from _typeshed import Incomplete
from collections.abc import Generator
async def _run(waiter, aw) -> None: ...
async def wait_for(aw, timeout, sleep=...): ...
def wait_for_ms(aw, timeout): ...
class _Remove:
@staticmethod
def remove(t) -> None: ...
def gather(*aws, return_exceptions: bool = ...) -> Generator[None, None, Incomplete]: ...

13
.vscode/Pico-W-Stub/asyncio/lock.pyi vendored Normal file

@@ -0,0 +1,13 @@
from . import core as core
from _typeshed import Incomplete
from collections.abc import Generator
class Lock:
state: int
waiting: Incomplete
def __init__(self) -> None: ...
def locked(self): ...
def release(self) -> None: ...
def acquire(self) -> Generator[None, None, Incomplete]: ...
async def __aenter__(self): ...
async def __aexit__(self, exc_type, exc, tb): ...

36
.vscode/Pico-W-Stub/asyncio/stream.pyi vendored Normal file

@@ -0,0 +1,36 @@
from . import core as core
from _typeshed import Incomplete
from collections.abc import Generator
class Stream:
s: Incomplete
e: Incomplete
out_buf: bytes
def __init__(self, s, e=...) -> None: ...
def get_extra_info(self, v): ...
async def __aenter__(self): ...
async def __aexit__(self, exc_type, exc, tb) -> None: ...
def close(self) -> None: ...
async def wait_closed(self) -> None: ...
def read(self, n: int = ...) -> Generator[Incomplete, None, Incomplete]: ...
def readinto(self, buf) -> Generator[Incomplete, None, Incomplete]: ...
def readexactly(self, n) -> Generator[Incomplete, None, Incomplete]: ...
def readline(self) -> Generator[Incomplete, None, Incomplete]: ...
def write(self, buf) -> None: ...
def drain(self) -> Generator[Incomplete, None, Incomplete]: ...
StreamReader = Stream
StreamWriter = Stream
def open_connection(host, port) -> Generator[Incomplete, None, Incomplete]: ...
class Server:
async def __aenter__(self): ...
async def __aexit__(self, exc_type, exc, tb) -> None: ...
state: bool
def close(self) -> None: ...
async def wait_closed(self) -> None: ...
async def _serve(self, s, cb) -> Generator[Incomplete, None, None]: ...
async def start_server(cb, host, port, backlog: int = ...): ...
async def stream_awrite(self, buf, off: int = ..., sz: int = ...) -> None: ...

46
.vscode/Pico-W-Stub/binascii.pyi vendored Normal file

@@ -0,0 +1,46 @@
"""
Binary/ASCII conversions.
MicroPython module: https://docs.micropython.org/en/v1.21.0/library/binascii.html
CPython module: :mod:`python:binascii` https://docs.python.org/3/library/binascii.html .
This module implements conversions between binary data and various
encodings of it in ASCII form (in both directions).
"""
from _typeshed import Incomplete, Incomplete as Incomplete
from typing import Any, Optional
def crc32(*args, **kwargs) -> Incomplete: ...
def hexlify(data, sep: Optional[Any] = None) -> bytes:
"""
Convert the bytes in the *data* object to a hexadecimal representation.
Returns a bytes object.
If the additional argument *sep* is supplied it is used as a separator
between hexadecimal values.
"""
...
def unhexlify(data) -> bytes:
"""
Convert hexadecimal data to its binary representation. Returns a bytes
object (i.e. the inverse of hexlify).
"""
...
def b2a_base64(data, *, newline=True) -> bytes:
"""
Encode binary data in base64 format, as in `RFC 3548
<https://tools.ietf.org/html/rfc3548.html>`_. Returns the encoded data
followed by a newline character if newline is true, as a bytes object.
"""
...
def a2b_base64(data) -> bytes:
"""
Decode base64-encoded data, ignoring invalid characters in the input.
Conforms to `RFC 2045 s.6.8 <https://tools.ietf.org/html/rfc2045#section-6.8>`_.
Returns a bytes object.
"""
...
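
A quick sanity check of the conversions described above; expected output is shown in the comments:

    import binascii

    raw = b"\x01\xab\xff"
    print(binascii.hexlify(raw))                    # b'01abff'
    print(binascii.hexlify(raw, b":"))              # b'01:ab:ff'
    print(binascii.unhexlify(b"01abff"))            # b'\x01\xab\xff'
    print(binascii.b2a_base64(raw))                 # b'Aav/\n'
    print(binascii.b2a_base64(raw, newline=False))  # b'Aav/'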

592
.vscode/Pico-W-Stub/bluetooth.pyi vendored Normal file

@@ -0,0 +1,592 @@
"""
Low-level Bluetooth radio functionality.
MicroPython module: https://docs.micropython.org/en/v1.21.0/library/bluetooth.html
This module provides an interface to a Bluetooth controller on a board.
Currently this supports Bluetooth Low Energy (BLE) in Central, Peripheral,
Broadcaster, and Observer roles, as well as GATT Server and Client and L2CAP
connection-oriented-channels. A device may operate in multiple roles
concurrently. Pairing (and bonding) is supported on some ports.
This API is intended to match the low-level Bluetooth protocol and provide
building-blocks for higher-level abstractions such as specific device types.
``Note:`` For most applications, we recommend using the higher-level
`aioble library <https://github.com/micropython/micropython-lib/tree/master/micropython/bluetooth/aioble>`_.
``Note:`` This module is still under development and its classes, functions,
methods and constants are subject to change.
"""
from _typeshed import Incomplete, Incomplete as Incomplete
from typing import Any, Optional, Tuple
FLAG_NOTIFY: int
FLAG_READ: int
FLAG_WRITE: int
FLAG_INDICATE: int
FLAG_WRITE_NO_RESPONSE: int
class UUID:
"""
Creates a UUID instance with the specified **value**.
The **value** can be either:
- A 16-bit integer. e.g. ``0x2908``.
- A 128-bit UUID string. e.g. ``'6E400001-B5A3-F393-E0A9-E50E24DCCA9E'``.
"""
def __init__(self, value, /) -> None: ...
class BLE:
"""
Returns the singleton BLE object.
"""
def gatts_notify(self, conn_handle, value_handle, data=None, /) -> None:
"""
Sends a notification request to a connected client.
If *data* is ``None`` (the default), then the current local value (as set
with :meth:`gatts_write <BLE.gatts_write>`) will be sent.
Otherwise, if *data* is not ``None``, then that value is sent to the client
as part of the notification. The local value will not be modified.
**Note:** The notification will be sent regardless of the subscription
status of the client to this characteristic.
"""
...
def gatts_indicate(self, conn_handle, value_handle, data=None, /) -> None:
"""
Sends an indication request to a connected client.
If *data* is ``None`` (the default), then the current local value (as set
with :meth:`gatts_write <BLE.gatts_write>`) will be sent.
Otherwise, if *data* is not ``None``, then that value is sent to the client
as part of the indication. The local value will not be modified.
On acknowledgment (or failure, e.g. timeout), the
``_IRQ_GATTS_INDICATE_DONE`` event will be raised.
**Note:** The indication will be sent regardless of the subscription
status of the client to this characteristic.
"""
...
def gattc_write(self, conn_handle, value_handle, data, mode=0, /) -> None:
"""
Issue a remote write to a connected server for the specified
characteristic or descriptor handle.
The argument *mode* specifies the write behaviour, with the currently
supported values being:
* ``mode=0`` (default) is a write-without-response: the write will
be sent to the remote server but no confirmation will be
returned, and no event will be raised.
* ``mode=1`` is a write-with-response: the remote server is
requested to send a response/acknowledgement that it received the
data.
If a response is received from the remote server the
``_IRQ_GATTC_WRITE_DONE`` event will be raised.
"""
...
def gattc_read(self, conn_handle, value_handle, /) -> None:
"""
Issue a remote read to a connected server for the specified
characteristic or descriptor handle.
When a value is available, the ``_IRQ_GATTC_READ_RESULT`` event will be
raised. Additionally, the ``_IRQ_GATTC_READ_DONE`` event will be raised.
"""
...
def gattc_exchange_mtu(self, conn_handle, /) -> Incomplete:
"""
Initiate MTU exchange with a connected server, using the preferred MTU
set using ``BLE.config(mtu=value)``.
The ``_IRQ_MTU_EXCHANGED`` event will be raised when MTU exchange
completes.
**Note:** MTU exchange is typically initiated by the central. When using
the BlueKitchen stack in the central role, it does not support a remote
peripheral initiating the MTU exchange. NimBLE works for both roles.
"""
...
def gatts_read(self, value_handle, /) -> Incomplete:
"""
Reads the local value for this handle (which has either been written by
:meth:`gatts_write <BLE.gatts_write>` or by a remote client).
"""
...
def gatts_write(self, value_handle, data, send_update=False, /) -> None:
"""
Writes the local value for this handle, which can be read by a client.
If *send_update* is ``True``, then any subscribed clients will be notified
(or indicated, depending on what they're subscribed to and which operations
the characteristic supports) about this write.
"""
...
def gatts_set_buffer(self, value_handle, len, append=False, /) -> None:
"""
Sets the internal buffer size for a value in bytes. This will limit the
largest possible write that can be received. The default is 20.
Setting *append* to ``True`` will make all remote writes append to, rather
than replace, the current value. At most *len* bytes can be buffered in
this way. When you use :meth:`gatts_read <BLE.gatts_read>`, the value will
be cleared after reading. This feature is useful when implementing something
like the Nordic UART Service.
"""
...
def gatts_register_services(self, services_definition, /) -> Incomplete:
"""
Configures the server with the specified services, replacing any
existing services.
*services_definition* is a list of **services**, where each **service** is a
two-element tuple containing a UUID and a list of **characteristics**.
Each **characteristic** is a two-or-three-element tuple containing a UUID, a
**flags** value, and optionally a list of *descriptors*.
Each **descriptor** is a two-element tuple containing a UUID and a **flags**
value.
The **flags** are a bitwise-OR combination of the flags defined below. These
set both the behaviour of the characteristic (or descriptor) as well as the
security and privacy requirements.
The return value is a list (one element per service) of tuples (each element
is a value handle). Characteristics and descriptor handles are flattened
into the same tuple, in the order that they are defined.
The following example registers two services (Heart Rate, and Nordic UART)::
HR_UUID = bluetooth.UUID(0x180D)
HR_CHAR = (bluetooth.UUID(0x2A37), bluetooth.FLAG_READ | bluetooth.FLAG_NOTIFY,)
HR_SERVICE = (HR_UUID, (HR_CHAR,),)
UART_UUID = bluetooth.UUID('6E400001-B5A3-F393-E0A9-E50E24DCCA9E')
UART_TX = (bluetooth.UUID('6E400003-B5A3-F393-E0A9-E50E24DCCA9E'), bluetooth.FLAG_READ | bluetooth.FLAG_NOTIFY,)
UART_RX = (bluetooth.UUID('6E400002-B5A3-F393-E0A9-E50E24DCCA9E'), bluetooth.FLAG_WRITE,)
UART_SERVICE = (UART_UUID, (UART_TX, UART_RX,),)
SERVICES = (HR_SERVICE, UART_SERVICE,)
( (hr,), (tx, rx,), ) = bt.gatts_register_services(SERVICES)
The three value handles (``hr``, ``tx``, ``rx``) can be used with
:meth:`gatts_read <BLE.gatts_read>`, :meth:`gatts_write <BLE.gatts_write>`, :meth:`gatts_notify <BLE.gatts_notify>`, and
:meth:`gatts_indicate <BLE.gatts_indicate>`.
**Note:** Advertising must be stopped before registering services.
Available flags for characteristics and descriptors are::
from micropython import const
_FLAG_BROADCAST = const(0x0001)
_FLAG_READ = const(0x0002)
_FLAG_WRITE_NO_RESPONSE = const(0x0004)
_FLAG_WRITE = const(0x0008)
_FLAG_NOTIFY = const(0x0010)
_FLAG_INDICATE = const(0x0020)
_FLAG_AUTHENTICATED_SIGNED_WRITE = const(0x0040)
_FLAG_AUX_WRITE = const(0x0100)
_FLAG_READ_ENCRYPTED = const(0x0200)
_FLAG_READ_AUTHENTICATED = const(0x0400)
_FLAG_READ_AUTHORIZED = const(0x0800)
_FLAG_WRITE_ENCRYPTED = const(0x1000)
_FLAG_WRITE_AUTHENTICATED = const(0x2000)
_FLAG_WRITE_AUTHORIZED = const(0x4000)
As for the IRQs above, any required constants should be added to your Python code.
"""
...
def irq(self, handler, /) -> int:
"""
Registers a callback for events from the BLE stack. The *handler* takes two
arguments, ``event`` (which will be one of the codes below) and ``data``
(which is an event-specific tuple of values).
**Note:** As an optimisation to prevent unnecessary allocations, the ``addr``,
``adv_data``, ``char_data``, ``notify_data``, and ``uuid`` entries in the
tuples are read-only memoryview instances pointing to :mod:`bluetooth`'s internal
ringbuffer, and are only valid during the invocation of the IRQ handler
function. If your program needs to save one of these values to access after
the IRQ handler has returned (e.g. by saving it in a class instance or global
variable), then it needs to take a copy of the data, either by using ``bytes()``
or ``bluetooth.UUID()``, like this::
connected_addr = bytes(addr) # equivalently: adv_data, char_data, or notify_data
matched_uuid = bluetooth.UUID(uuid)
For example, the IRQ handler for a scan result might inspect the ``adv_data``
to decide if it's the correct device, and only then copy the address data to be
used elsewhere in the program. And to print data from within the IRQ handler,
``print(bytes(addr))`` will be needed.
An event handler showing all possible events::
def bt_irq(event, data):
if event == _IRQ_CENTRAL_CONNECT:
# A central has connected to this peripheral.
conn_handle, addr_type, addr = data
elif event == _IRQ_CENTRAL_DISCONNECT:
# A central has disconnected from this peripheral.
conn_handle, addr_type, addr = data
elif event == _IRQ_GATTS_WRITE:
# A client has written to this characteristic or descriptor.
conn_handle, attr_handle = data
elif event == _IRQ_GATTS_READ_REQUEST:
# A client has issued a read. Note: this is only supported on STM32.
# Return a non-zero integer to deny the read (see below), or zero (or None)
# to accept the read.
conn_handle, attr_handle = data
elif event == _IRQ_SCAN_RESULT:
# A single scan result.
addr_type, addr, adv_type, rssi, adv_data = data
elif event == _IRQ_SCAN_DONE:
# Scan duration finished or manually stopped.
pass
elif event == _IRQ_PERIPHERAL_CONNECT:
# A successful gap_connect().
conn_handle, addr_type, addr = data
elif event == _IRQ_PERIPHERAL_DISCONNECT:
# Connected peripheral has disconnected.
conn_handle, addr_type, addr = data
elif event == _IRQ_GATTC_SERVICE_RESULT:
# Called for each service found by gattc_discover_services().
conn_handle, start_handle, end_handle, uuid = data
elif event == _IRQ_GATTC_SERVICE_DONE:
# Called once service discovery is complete.
# Note: Status will be zero on success, implementation-specific value otherwise.
conn_handle, status = data
elif event == _IRQ_GATTC_CHARACTERISTIC_RESULT:
# Called for each characteristic found by gattc_discover_characteristics().
conn_handle, end_handle, value_handle, properties, uuid = data
elif event == _IRQ_GATTC_CHARACTERISTIC_DONE:
# Called once characteristic discovery is complete.
# Note: Status will be zero on success, implementation-specific value otherwise.
conn_handle, status = data
elif event == _IRQ_GATTC_DESCRIPTOR_RESULT:
# Called for each descriptor found by gattc_discover_descriptors().
conn_handle, dsc_handle, uuid = data
elif event == _IRQ_GATTC_DESCRIPTOR_DONE:
# Called once descriptor discovery is complete.
# Note: Status will be zero on success, implementation-specific value otherwise.
conn_handle, status = data
elif event == _IRQ_GATTC_READ_RESULT:
# A gattc_read() has completed.
conn_handle, value_handle, char_data = data
elif event == _IRQ_GATTC_READ_DONE:
# A gattc_read() has completed.
# Note: Status will be zero on success, implementation-specific value otherwise.
conn_handle, value_handle, status = data
elif event == _IRQ_GATTC_WRITE_DONE:
# A gattc_write() has completed.
# Note: Status will be zero on success, implementation-specific value otherwise.
conn_handle, value_handle, status = data
elif event == _IRQ_GATTC_NOTIFY:
# A server has sent a notify request.
conn_handle, value_handle, notify_data = data
elif event == _IRQ_GATTC_INDICATE:
# A server has sent an indicate request.
conn_handle, value_handle, notify_data = data
elif event == _IRQ_GATTS_INDICATE_DONE:
# A client has acknowledged the indication.
# Note: Status will be zero on successful acknowledgment, implementation-specific value otherwise.
conn_handle, value_handle, status = data
elif event == _IRQ_MTU_EXCHANGED:
# ATT MTU exchange complete (either initiated by us or the remote device).
conn_handle, mtu = data
elif event == _IRQ_L2CAP_ACCEPT:
# A new channel has been accepted.
# Return a non-zero integer to reject the connection, or zero (or None) to accept.
conn_handle, cid, psm, our_mtu, peer_mtu = data
elif event == _IRQ_L2CAP_CONNECT:
# A new channel is now connected (either as a result of connecting or accepting).
conn_handle, cid, psm, our_mtu, peer_mtu = data
elif event == _IRQ_L2CAP_DISCONNECT:
# Existing channel has disconnected (status is zero), or a connection attempt failed (non-zero status).
conn_handle, cid, psm, status = data
elif event == _IRQ_L2CAP_RECV:
# New data is available on the channel. Use l2cap_recvinto to read.
conn_handle, cid = data
elif event == _IRQ_L2CAP_SEND_READY:
# A previous l2cap_send that returned False has now completed and the channel is ready to send again.
# If status is non-zero, then the transmit buffer overflowed and the application should re-send the data.
conn_handle, cid, status = data
elif event == _IRQ_CONNECTION_UPDATE:
# The remote device has updated connection parameters.
conn_handle, conn_interval, conn_latency, supervision_timeout, status = data
elif event == _IRQ_ENCRYPTION_UPDATE:
# The encryption state has changed (likely as a result of pairing or bonding).
conn_handle, encrypted, authenticated, bonded, key_size = data
elif event == _IRQ_GET_SECRET:
# Return a stored secret.
# If key is None, return the index'th value of this sec_type.
# Otherwise return the corresponding value for this sec_type and key.
sec_type, index, key = data
return value
elif event == _IRQ_SET_SECRET:
# Save a secret to the store for this sec_type and key.
sec_type, key, value = data
return True
elif event == _IRQ_PASSKEY_ACTION:
# Respond to a passkey request during pairing.
# See gap_passkey() for details.
# action will be an action that is compatible with the configured "io" config.
# passkey will be non-zero if action is "numeric comparison".
conn_handle, action, passkey = data
The event codes are::
from micropython import const
_IRQ_CENTRAL_CONNECT = const(1)
_IRQ_CENTRAL_DISCONNECT = const(2)
_IRQ_GATTS_WRITE = const(3)
_IRQ_GATTS_READ_REQUEST = const(4)
_IRQ_SCAN_RESULT = const(5)
_IRQ_SCAN_DONE = const(6)
_IRQ_PERIPHERAL_CONNECT = const(7)
_IRQ_PERIPHERAL_DISCONNECT = const(8)
_IRQ_GATTC_SERVICE_RESULT = const(9)
_IRQ_GATTC_SERVICE_DONE = const(10)
_IRQ_GATTC_CHARACTERISTIC_RESULT = const(11)
_IRQ_GATTC_CHARACTERISTIC_DONE = const(12)
_IRQ_GATTC_DESCRIPTOR_RESULT = const(13)
_IRQ_GATTC_DESCRIPTOR_DONE = const(14)
_IRQ_GATTC_READ_RESULT = const(15)
_IRQ_GATTC_READ_DONE = const(16)
_IRQ_GATTC_WRITE_DONE = const(17)
_IRQ_GATTC_NOTIFY = const(18)
_IRQ_GATTC_INDICATE = const(19)
_IRQ_GATTS_INDICATE_DONE = const(20)
_IRQ_MTU_EXCHANGED = const(21)
_IRQ_L2CAP_ACCEPT = const(22)
_IRQ_L2CAP_CONNECT = const(23)
_IRQ_L2CAP_DISCONNECT = const(24)
_IRQ_L2CAP_RECV = const(25)
_IRQ_L2CAP_SEND_READY = const(26)
_IRQ_CONNECTION_UPDATE = const(27)
_IRQ_ENCRYPTION_UPDATE = const(28)
_IRQ_GET_SECRET = const(29)
_IRQ_SET_SECRET = const(30)
For the ``_IRQ_GATTS_READ_REQUEST`` event, the available return codes are::
_GATTS_NO_ERROR = const(0x00)
_GATTS_ERROR_READ_NOT_PERMITTED = const(0x02)
_GATTS_ERROR_WRITE_NOT_PERMITTED = const(0x03)
_GATTS_ERROR_INSUFFICIENT_AUTHENTICATION = const(0x05)
_GATTS_ERROR_INSUFFICIENT_AUTHORIZATION = const(0x08)
_GATTS_ERROR_INSUFFICIENT_ENCRYPTION = const(0x0f)
For the ``_IRQ_PASSKEY_ACTION`` event, the available actions are::
_PASSKEY_ACTION_NONE = const(0)
_PASSKEY_ACTION_INPUT = const(2)
_PASSKEY_ACTION_DISPLAY = const(3)
_PASSKEY_ACTION_NUMERIC_COMPARISON = const(4)
In order to save space in the firmware, these constants are not included on the
:mod:`bluetooth` module. Add the ones that you need from the list above to your
program.
"""
...
def gap_connect(self, addr_type, addr, scan_duration_ms=2000, min_conn_interval_us=None, max_conn_interval_us=None, /) -> None:
"""
Connect to a peripheral.
See :meth:`gap_scan <BLE.gap_scan>` for details about address types.
To cancel an outstanding connection attempt early, call
``gap_connect(None)``.
On success, the ``_IRQ_PERIPHERAL_CONNECT`` event will be raised. If
cancelling a connection attempt, the ``_IRQ_PERIPHERAL_DISCONNECT`` event
will be raised.
The device will wait up to *scan_duration_ms* to receive an advertising
payload from the device.
The connection interval can be configured in **micro** seconds using either
or both of *min_conn_interval_us* and *max_conn_interval_us*. Otherwise a
default interval will be chosen, typically between 30000 and 50000
microseconds. A shorter interval will increase throughput, at the expense
of power usage.
"""
...
def gap_advertise(self, interval_us, adv_data=None, *, resp_data=None, connectable=True) -> Incomplete:
"""
Starts advertising at the specified interval (in **micro** seconds). This
interval will be rounded down to the nearest 625us. To stop advertising, set
*interval_us* to ``None``.
*adv_data* and *resp_data* can be any type that implements the buffer
protocol (e.g. ``bytes``, ``bytearray``, ``str``). *adv_data* is included
in all broadcasts, and *resp_data* is sent in reply to an active scan.
**Note:** if *adv_data* (or *resp_data*) is ``None``, then the data passed
to the previous call to ``gap_advertise`` will be re-used. This allows a
broadcaster to resume advertising with just ``gap_advertise(interval_us)``.
To clear the advertising payload pass an empty ``bytes``, i.e. ``b''``.
"""
...
def config(self, param, /) -> Tuple:
"""
Get or set configuration values of the BLE interface. To get a value the
parameter name should be quoted as a string, and just one parameter is
queried at a time. To set values use the keyword syntax, and one or more
parameters can be set at a time.
Currently supported values are:
- ``'mac'``: The current address in use, depending on the current address mode.
This returns a tuple of ``(addr_type, addr)``.
See :meth:`gatts_write <BLE.gatts_write>` for details about address type.
This may only be queried while the interface is currently active.
- ``'addr_mode'``: Sets the address mode. Values can be:
* 0x00 - PUBLIC - Use the controller's public address.
* 0x01 - RANDOM - Use a generated static address.
* 0x02 - RPA - Use resolvable private addresses.
* 0x03 - NRPA - Use non-resolvable private addresses.
By default the interface mode will use a PUBLIC address if available, otherwise
it will use a RANDOM address.
- ``'gap_name'``: Get/set the GAP device name used by service 0x1800,
characteristic 0x2a00. This can be set at any time and changed multiple
times.
- ``'rxbuf'``: Get/set the size in bytes of the internal buffer used to store
incoming events. This buffer is global to the entire BLE driver and so
handles incoming data for all events, including all characteristics.
Increasing this allows better handling of bursty incoming data (for
example scan results) and the ability to receive larger characteristic values.
- ``'mtu'``: Get/set the MTU that will be used during an ATT MTU exchange. The
resulting MTU will be the minimum of this and the remote device's MTU.
ATT MTU exchange will not happen automatically (unless the remote device initiates
it), and must be manually initiated with
:meth:`gattc_exchange_mtu<BLE.gattc_exchange_mtu>`.
Use the ``_IRQ_MTU_EXCHANGED`` event to discover the MTU for a given connection.
- ``'bond'``: Sets whether bonding will be enabled during pairing. When
enabled, pairing requests will set the "bond" flag and the keys will be stored
by both devices.
- ``'mitm'``: Sets whether MITM-protection is required for pairing.
- ``'io'``: Sets the I/O capabilities of this device.
Available options are::
_IO_CAPABILITY_DISPLAY_ONLY = const(0)
_IO_CAPABILITY_DISPLAY_YESNO = const(1)
_IO_CAPABILITY_KEYBOARD_ONLY = const(2)
_IO_CAPABILITY_NO_INPUT_OUTPUT = const(3)
_IO_CAPABILITY_KEYBOARD_DISPLAY = const(4)
- ``'le_secure'``: Sets whether "LE Secure" pairing is required. Default is
false (i.e. allow "Legacy Pairing").
"""
...
def active(self, active: Optional[Any] = None, /) -> Incomplete:
"""
Optionally changes the active state of the BLE radio, and returns the
current state.
The radio must be made active before using any other methods on this class.
"""
...
def gattc_discover_services(self, conn_handle, uuid=None, /) -> Incomplete:
"""
Query a connected server for its services.
Optionally specify a service *uuid* to query for that service only.
For each service discovered, the ``_IRQ_GATTC_SERVICE_RESULT`` event will
be raised, followed by ``_IRQ_GATTC_SERVICE_DONE`` on completion.
"""
...
def gap_disconnect(self, conn_handle, /) -> bool:
"""
Disconnect the specified connection handle. This can either be a
central that has connected to this device (if acting as a peripheral)
or a peripheral that was previously connected to by this device (if acting
as a central).
On success, the ``_IRQ_PERIPHERAL_DISCONNECT`` or ``_IRQ_CENTRAL_DISCONNECT``
event will be raised.
Returns ``False`` if the connection handle wasn't connected, and ``True``
otherwise.
"""
...
def gattc_discover_descriptors(self, conn_handle, start_handle, end_handle, /) -> Incomplete:
"""
Query a connected server for descriptors in the specified range.
For each descriptor discovered, the ``_IRQ_GATTC_DESCRIPTOR_RESULT`` event
will be raised, followed by ``_IRQ_GATTC_DESCRIPTOR_DONE`` on completion.
"""
...
def gattc_discover_characteristics(self, conn_handle, start_handle, end_handle, uuid=None, /) -> Incomplete:
"""
Query a connected server for characteristics in the specified range.
Optionally specify a characteristic *uuid* to query for that
characteristic only.
You can use ``start_handle=1``, ``end_handle=0xffff`` to search for a
characteristic in any service.
For each characteristic discovered, the ``_IRQ_GATTC_CHARACTERISTIC_RESULT``
event will be raised, followed by ``_IRQ_GATTC_CHARACTERISTIC_DONE`` on completion.
"""
...
def gap_scan(self, duration_ms, interval_us=1280000, window_us=11250, active=False, /) -> Incomplete:
"""
Run a scan operation lasting for the specified duration (in **milli** seconds).
To scan indefinitely, set *duration_ms* to ``0``.
To stop scanning, set *duration_ms* to ``None``.
Use *interval_us* and *window_us* to optionally configure the duty cycle.
The scanner will run for *window_us* **micro** seconds every *interval_us*
**micro** seconds for a total of *duration_ms* **milli** seconds. The default
interval and window are 1.28 seconds and 11.25 milliseconds respectively
(background scanning).
For each scan result the ``_IRQ_SCAN_RESULT`` event will be raised, with event
data ``(addr_type, addr, adv_type, rssi, adv_data)``.
``addr_type`` values indicate public or random addresses:
* 0x00 - PUBLIC
* 0x01 - RANDOM (either static, RPA, or NRPA, the type is encoded in the address itself)
``adv_type`` values correspond to the Bluetooth Specification:
* 0x00 - ADV_IND - connectable and scannable undirected advertising
* 0x01 - ADV_DIRECT_IND - connectable directed advertising
* 0x02 - ADV_SCAN_IND - scannable undirected advertising
* 0x03 - ADV_NONCONN_IND - non-connectable undirected advertising
* 0x04 - SCAN_RSP - scan response
``active`` can be set ``True`` if you want to receive scan responses in the results.
When scanning is stopped (either due to the duration finishing or when
explicitly stopped), the ``_IRQ_SCAN_DONE`` event will be raised.
"""
...
def __init__(self) -> None: ...
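
To tie the docstrings above together, a minimal connectable advertiser; the advertising payload is assembled by hand here, and the flag byte, name, and interval are illustrative assumptions:

    import bluetooth

    ble = bluetooth.BLE()
    ble.active(True)

    def bt_irq(event, data):
        print("BLE event", event)

    ble.irq(bt_irq)

    # Payload: AD structure 0x01 (flags: general discoverable, BR/EDR unsupported)
    # followed by AD structure 0x09 (complete local name).
    name = b"pico-w"
    adv_data = bytes([2, 0x01, 0x06, len(name) + 1, 0x09]) + name
    ble.gap_advertise(250_000, adv_data)   # interval in microseconds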

72
.vscode/Pico-W-Stub/cmath.pyi vendored Normal file

@@ -0,0 +1,72 @@
"""
Mathematical functions for complex numbers.
MicroPython module: https://docs.micropython.org/en/v1.21.0/library/cmath.html
CPython module: :mod:`python:cmath` https://docs.python.org/3/library/cmath.html .
The ``cmath`` module provides some basic mathematical functions for
working with complex numbers.
Availability: not available on WiPy and ESP8266. Floating point support
required for this module.
"""
from _typeshed import Incomplete, Incomplete as Incomplete
from typing import Tuple
e: float
pi: float
def polar(z) -> Tuple:
"""
Returns, as a tuple, the polar form of ``z``.
"""
...
def sqrt(z) -> Incomplete:
"""
Return the square-root of ``z``.
"""
...
def rect(r, phi) -> float:
"""
Returns the complex number with modulus ``r`` and phase ``phi``.
"""
...
def sin(z) -> float:
"""
Return the sine of ``z``.
"""
...
def exp(z) -> float:
"""
Return the exponential of ``z``.
"""
...
def cos(z) -> float:
"""
Return the cosine of ``z``.
"""
...
def phase(z) -> float:
"""
Returns the phase of the number ``z``, in the range (-pi, +pi].
"""
...
def log(z) -> float:
"""
Return the natural logarithm of ``z``. The branch cut is along the negative real axis.
"""
...
def log10(z) -> float:
"""
Return the base-10 logarithm of ``z``. The branch cut is along the negative real axis.
"""
...
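
A short worked example of polar()/rect() round-tripping a value; results are approximate floating-point:

    import cmath

    z = 1 + 1j
    r, phi = cmath.polar(z)       # r ~ 1.4142, phi ~ 0.7854 (pi/4)
    print(r, phi)
    print(cmath.rect(r, phi))     # ~ (1+1j), up to rounding error
    print(cmath.sqrt(-1 + 0j))    # 1j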

105
.vscode/Pico-W-Stub/collections.pyi vendored Normal file

@@ -0,0 +1,105 @@
"""
Collection and container types.
MicroPython module: https://docs.micropython.org/en/v1.21.0/library/collections.html
CPython module: :mod:`python:collections` https://docs.python.org/3/library/collections.html .
This module implements advanced collection and container types to
hold/accumulate various objects.
"""
from _typeshed import Incomplete, Incomplete as Incomplete
from stdlib.collections import OrderedDict as stdlib_OrderedDict, deque as stdlib_deque
from typing_extensions import NamedTuple as stdlib_NamedTuple
from typing import Any, Optional
def namedtuple(name, fields) -> stdlib_NamedTuple:
"""
This is a factory function to create a new namedtuple type with a specific
name and set of fields. A namedtuple is a subclass of tuple which allows
its fields to be accessed not just by numeric index, but also with an
attribute access syntax using symbolic field names. *fields* is a sequence of
strings specifying the field names. For compatibility with CPython it can also
be a string with space-separated field names (but this is less efficient).
Example of use::
from collections import namedtuple
MyTuple = namedtuple("MyTuple", ("id", "name"))
t1 = MyTuple(1, "foo")
t2 = MyTuple(2, "bar")
print(t1.name)
assert t2.name == t2[1]
"""
...
class OrderedDict(stdlib_OrderedDict):
"""
``dict`` type subclass which remembers and preserves the order of keys
added. When ordered dict is iterated over, keys/items are returned in
the order they were added::
from collections import OrderedDict
# To take advantage of ordered keys, OrderedDict should be initialized
# from a sequence of (key, value) pairs.
d = OrderedDict([("z", 1), ("a", 2)])
# More items can be added as usual
d["w"] = 5
d["b"] = 3
for k, v in d.items():
print(k, v)
Output::
z 1
a 2
w 5
b 3
"""
def popitem(self, *args, **kwargs) -> Incomplete: ...
def pop(self, *args, **kwargs) -> Incomplete: ...
def values(self, *args, **kwargs) -> Incomplete: ...
def setdefault(self, *args, **kwargs) -> Incomplete: ...
def update(self, *args, **kwargs) -> Incomplete: ...
def copy(self, *args, **kwargs) -> Incomplete: ...
def clear(self, *args, **kwargs) -> Incomplete: ...
def keys(self, *args, **kwargs) -> Incomplete: ...
def get(self, *args, **kwargs) -> Incomplete: ...
def items(self, *args, **kwargs) -> Incomplete: ...
@classmethod
def fromkeys(cls, *args, **kwargs) -> Incomplete: ...
def __init__(self, *args, **kwargs) -> None: ...
class deque(stdlib_deque):
"""
Deques (double-ended queues) are a list-like container that support O(1)
appends and pops from either side of the deque. New deques are created
using the following arguments:
- *iterable* must be the empty tuple, and the new deque is created empty.
- *maxlen* must be specified and the deque will be bounded to this
maximum length. Once the deque is full, any new items added will
discard items from the opposite end.
- The optional *flags* can be 1 to check for overflow when adding items.
As well as supporting `bool` and `len`, deque objects have the following
methods:
"""
def popleft(self) -> Incomplete:
"""
Remove and return an item from the left side of the deque.
Raises IndexError if no items are present.
"""
...
def append(self, x) -> Incomplete:
"""
Add *x* to the right side of the deque.
Raises IndexError if overflow checking is enabled and there is no more room left.
"""
...
def __init__(self, iterable, maxlen, flags: Optional[Any] = None) -> None: ...
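
# Usage sketch: MicroPython's deque must be created from an empty tuple with a
# fixed maxlen, unlike CPython's.
from collections import deque

q = deque((), 5)        # empty, bounded to 5 items
q.append(1)
q.append(2)
print(len(q))           # 2
print(q.popleft())      # 1 (FIFO order)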

42
.vscode/Pico-W-Stub/cryptolib.pyi vendored Normal file
View File

@@ -0,0 +1,42 @@
"""
Cryptographic ciphers.
MicroPython module: https://docs.micropython.org/en/v1.21.0/library/cryptolib.html
"""
from _typeshed import Incomplete, Incomplete as Incomplete
from typing import Any, Optional
class aes:
def encrypt(self, in_buf, out_buf: Optional[Any] = None) -> Incomplete:
"""
Encrypt *in_buf*. If no *out_buf* is given result is returned as a
newly allocated `bytes` object. Otherwise, result is written into
mutable buffer *out_buf*. *in_buf* and *out_buf* can also refer
to the same mutable buffer, in which case data is encrypted in-place.
"""
...
def decrypt(self, in_buf, out_buf: Optional[Any] = None) -> Incomplete:
"""
Like `encrypt()`, but for decryption.
"""
...
def __init__(self, key, mode, IV: Optional[Any] = None) -> None:
"""
Initialize a cipher object, suitable for encryption/decryption. Note:
after initialization, a cipher object can be used only for either
encryption or decryption. Running a decrypt() operation after encrypt()
or vice versa is not supported.
Parameters are:
* *key* is an encryption/decryption key (bytes-like).
* *mode* is:
* ``1`` (or ``cryptolib.MODE_ECB`` if it exists) for Electronic Code Book (ECB).
* ``2`` (or ``cryptolib.MODE_CBC`` if it exists) for Cipher Block Chaining (CBC).
* ``6`` (or ``cryptolib.MODE_CTR`` if it exists) for Counter mode (CTR).
* *IV* is an initialization vector for CBC mode.
* For Counter mode, *IV* is the initial value for the counter.
"""
...
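
# Usage sketch (ECB round-trip): key and data are multiples of the 16-byte AES
# block size, and a fresh object is created for decryption as noted above.
from cryptolib import aes

key = b"0123456789abcdef"            # 16-byte key
plain = b"hello pico w!..."          # exactly one 16-byte block

enc = aes(key, 1)                    # mode 1 == ECB
cipher_text = enc.encrypt(plain)

dec = aes(key, 1)
print(dec.decrypt(cipher_text))      # b'hello pico w!...'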

76
.vscode/Pico-W-Stub/deflate.pyi vendored Normal file
View File

@@ -0,0 +1,76 @@
"""
Deflate compression & decompression.
MicroPython module: https://docs.micropython.org/en/v1.21.0/library/deflate.html
This module allows compression and decompression of binary data with the
`DEFLATE algorithm <https://en.wikipedia.org/wiki/DEFLATE>`_
(commonly used in the zlib library and gzip archiver).
**Availability:**
* Added in MicroPython v1.21.
* Decompression: Enabled via the ``MICROPY_PY_DEFLATE`` build option, on by default
on ports with the "extra features" level or higher (which is most boards).
* Compression: Enabled via the ``MICROPY_PY_DEFLATE_COMPRESS`` build option, on
by default on ports with the "full features" level or higher (generally this means
you need to build your own firmware to enable this).
"""
from _typeshed import Incomplete, Incomplete as Incomplete
GZIP: int
RAW: int
ZLIB: int
AUTO: int
class DeflateIO:
"""
This class can be used to wrap a *stream* which is any
:term:`stream-like <stream>` object such as a file, socket, or stream
(including :class:`io.BytesIO`). It is itself a stream and implements the
standard read/readinto/write/close methods.
The *stream* must be a blocking stream. Non-blocking streams are currently
not supported.
The *format* can be set to any of the constants defined below, and defaults
to ``AUTO`` which for decompressing will auto-detect gzip or zlib streams,
and for compressing it will generate a raw stream.
The *wbits* parameter sets the base-2 logarithm of the DEFLATE dictionary
window size. So for example, setting *wbits* to ``10`` sets the window size
to 1024 bytes. Valid values are ``5`` to ``15`` inclusive (corresponding to
window sizes of 32 to 32k bytes).
If *wbits* is set to ``0`` (the default), then for compression a window size
of 256 bytes will be used (as if *wbits* was set to 8). For decompression, it
depends on the format:
* ``RAW`` will use 256 bytes (corresponding to *wbits* set to 8).
* ``ZLIB`` (or ``AUTO`` with zlib detected) will use the value from the zlib
header.
* ``GZIP`` (or ``AUTO`` with gzip detected) will use 32 kilobytes
(corresponding to *wbits* set to 15).
See the :ref:`window size <deflate_wbits>` notes below for more information
about the window size, zlib, and gzip streams.
If *close* is set to ``True`` then the underlying stream will be closed
automatically when the :class:`deflate.DeflateIO` stream is closed. This is
useful if you want to return a :class:`deflate.DeflateIO` stream that wraps
another stream and not have the caller need to know about managing the
underlying stream.
If compression is enabled, a given :class:`deflate.DeflateIO` instance
supports both reading and writing. For example, a bidirectional stream like
a socket can be wrapped, which allows for compression/decompression in both
directions.
"""
def readline(self, *args, **kwargs) -> Incomplete: ...
def readinto(self, *args, **kwargs) -> Incomplete: ...
def read(self, *args, **kwargs) -> Incomplete: ...
def close(self, *args, **kwargs) -> Incomplete: ...
def __init__(self, stream, format=AUTO, wbits=0, close=False, /) -> None: ...
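
# Usage sketch (untested): round-trip a payload through DeflateIO over a
# BytesIO buffer. Compression requires MICROPY_PY_DEFLATE_COMPRESS in the
# firmware build, so this may not work on every port.
import deflate
import io

raw = b"hello hello hello"                  # repetitive data compresses well

buf = io.BytesIO()
w = deflate.DeflateIO(buf, deflate.ZLIB)    # write side: produce a zlib stream
w.write(raw)
w.close()                                   # finalises the compressed stream

buf.seek(0)
r = deflate.DeflateIO(buf, deflate.ZLIB)    # read side: decompress it again
print(r.read())                             # b'hello hello hello'
r.close()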

15
.vscode/Pico-W-Stub/dht.pyi vendored Normal file
View File

@@ -0,0 +1,15 @@
from _typeshed import Incomplete
class DHTBase:
pin: Incomplete
buf: Incomplete
def __init__(self, pin) -> None: ...
def measure(self) -> None: ...
class DHT11(DHTBase):
def humidity(self): ...
def temperature(self): ...
class DHT22(DHTBase):
def humidity(self): ...
def temperature(self): ...
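
# Usage sketch for a DHT22 (GP15 is an arbitrary pin choice): measure(), then
# read the cached temperature/humidity; the sensor needs roughly 2 s between reads.
import dht
import machine
import time

sensor = dht.DHT22(machine.Pin(15))
sensor.measure()
print(sensor.temperature(), "C")
print(sensor.humidity(), "%")
time.sleep(2)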

15
.vscode/Pico-W-Stub/ds18x20.pyi vendored Normal file
View File

@@ -0,0 +1,15 @@
from _typeshed import Incomplete
_CONVERT: Incomplete
_RD_SCRATCH: Incomplete
_WR_SCRATCH: Incomplete
class DS18X20:
ow: Incomplete
buf: Incomplete
def __init__(self, onewire) -> None: ...
def scan(self): ...
def convert_temp(self) -> None: ...
def read_scratch(self, rom): ...
def write_scratch(self, rom, buf) -> None: ...
def read_temp(self, rom): ...
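
# Usage sketch (GP22 is an arbitrary pin choice): scan the 1-Wire bus, start a
# conversion on all sensors, wait for it to finish, then read each ROM.
import machine
import onewire
import ds18x20
import time

ow = onewire.OneWire(machine.Pin(22))
ds = ds18x20.DS18X20(ow)

roms = ds.scan()            # ROM codes of the sensors found on the bus
ds.convert_temp()
time.sleep_ms(750)          # a 12-bit conversion can take up to 750 ms
for rom in roms:
    print(ds.read_temp(rom))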

36
.vscode/Pico-W-Stub/errno.pyi vendored Normal file
View File

@@ -0,0 +1,36 @@
"""
System error codes.
MicroPython module: https://docs.micropython.org/en/v1.21.0/library/errno.html
CPython module: :mod:`python:errno` https://docs.python.org/3/library/errno.html .
This module provides access to symbolic error codes for `OSError` exception.
A particular inventory of codes depends on :term:`MicroPython port`.
"""
from _typeshed import Incomplete, Incomplete as Incomplete
from typing import Dict
ENOBUFS: int
ENODEV: int
ENOENT: int
EISDIR: int
EIO: int
EINVAL: int
EPERM: int
ETIMEDOUT: int
ENOMEM: int
EOPNOTSUPP: int
ENOTCONN: int
errorcode: dict
EAGAIN: int
EALREADY: int
EBADF: int
EADDRINUSE: int
EACCES: int
EINPROGRESS: int
EEXIST: int
EHOSTUNREACH: int
ECONNABORTED: int
ECONNRESET: int
ECONNREFUSED: int
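
# Usage sketch: map a numeric OSError code back to its symbolic name via the
# errorcode dict (MicroPython exposes the code as the first exception argument).
import errno
import os

try:
    os.stat("does_not_exist.txt")
except OSError as exc:
    code = exc.args[0]
    if code == errno.ENOENT:
        print("file not found")
    print(errno.errorcode.get(code, code))   # e.g. 'ENOENT'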

142
.vscode/Pico-W-Stub/framebuf.pyi vendored Normal file
View File

@@ -0,0 +1,142 @@
"""
Frame buffer manipulation.
MicroPython module: https://docs.micropython.org/en/v1.21.0/library/framebuf.html
This module provides a general frame buffer which can be used to create
bitmap images, which can then be sent to a display.
"""
from _typeshed import Incomplete, Incomplete as Incomplete
from typing import Any, Optional
MONO_HMSB: int
MONO_HLSB: int
RGB565: int
MONO_VLSB: int
MVLSB: int
GS2_HMSB: int
GS8: int
GS4_HMSB: int
def FrameBuffer1(*args, **kwargs) -> Incomplete: ...
class FrameBuffer:
"""
Construct a FrameBuffer object. The parameters are:
- *buffer* is an object with a buffer protocol which must be large
enough to contain every pixel defined by the width, height and
format of the FrameBuffer.
- *width* is the width of the FrameBuffer in pixels
- *height* is the height of the FrameBuffer in pixels
- *format* specifies the type of pixel used in the FrameBuffer;
permissible values are listed under Constants below. These set the
number of bits used to encode a color value and the layout of these
bits in *buffer*.
Where a color value c is passed to a method, c is a small integer
with an encoding that is dependent on the format of the FrameBuffer.
- *stride* is the number of pixels between each horizontal line
of pixels in the FrameBuffer. This defaults to *width* but may
need adjustments when implementing a FrameBuffer within another
larger FrameBuffer or screen. The *buffer* size must accommodate
an increased step size.
One must specify valid *buffer*, *width*, *height*, *format* and
optionally *stride*. Invalid *buffer* size or dimensions may lead to
unexpected errors.
"""
def poly(self, x, y, coords, c, f: Optional[Any] = None) -> Incomplete:
"""
Given a list of coordinates, draw an arbitrary (convex or concave) closed
polygon at the given x, y location using the given color.
The *coords* must be specified as a :mod:`array` of integers, e.g.
``array('h', [x0, y0, x1, y1, ... xn, yn])``.
The optional *f* parameter can be set to ``True`` to fill the polygon.
Otherwise just a one pixel outline is drawn.
"""
...
def vline(self, x, y, h, c) -> Incomplete: ...
def pixel(self, x, y, c: Optional[Any] = None) -> Incomplete:
"""
If *c* is not given, get the color value of the specified pixel.
If *c* is given, set the specified pixel to the given color.
"""
...
def text(self, s, x, y, c: Optional[Any] = None) -> None:
"""
Write text to the FrameBuffer using the coordinates as the upper-left
corner of the text. The color of the text can be defined by the optional
argument but is otherwise a default value of 1. All characters have
dimensions of 8x8 pixels and there is currently no way to change the font.
"""
...
def rect(self, x, y, w, h, c, f: Optional[Any] = None) -> None:
"""
Draw a rectangle at the given location, size and color.
The optional *f* parameter can be set to ``True`` to fill the rectangle.
Otherwise just a one pixel outline is drawn.
"""
...
def scroll(self, xstep, ystep) -> Incomplete:
"""
Shift the contents of the FrameBuffer by the given vector. This may
leave a footprint of the previous colors in the FrameBuffer.
"""
...
def ellipse(self, x, y, xr, yr, c, f, m: Optional[Any] = None) -> None:
"""
Draw an ellipse at the given location. Radii *xr* and *yr* define the
geometry; equal values cause a circle to be drawn. The *c* parameter
defines the color.
The optional *f* parameter can be set to ``True`` to fill the ellipse.
Otherwise just a one pixel outline is drawn.
The optional *m* parameter enables drawing to be restricted to certain
quadrants of the ellipse. The LS four bits determine which quadrants are
to be drawn, with bit 0 specifying Q1, b1 Q2, b2 Q3 and b3 Q4. Quadrants
are numbered counterclockwise with Q1 being top right.
"""
...
def line(self, x1, y1, x2, y2, c) -> None:
"""
Draw a line from a set of coordinates using the given color and
a thickness of 1 pixel. The `line` method draws the line up to
a second set of coordinates whereas the `hline` and `vline`
methods draw horizontal and vertical lines respectively up to
a given length.
"""
...
def blit(self, fbuf, x, y, key=-1, palette=None) -> None:
"""
Draw another FrameBuffer on top of the current one at the given coordinates.
If *key* is specified then it should be a color integer and the
corresponding color will be considered transparent: all pixels with that
color value will not be drawn. (If the *palette* is specified then the *key*
is compared to the value from *palette*, not to the value directly from
*fbuf*.)
The *palette* argument enables blitting between FrameBuffers with differing
formats. Typical usage is to render a monochrome or grayscale glyph/icon to
a color display. The *palette* is a FrameBuffer instance whose format is
that of the current FrameBuffer. The *palette* height is one pixel and its
pixel width is the number of colors in the source FrameBuffer. The *palette*
for an N-bit source needs 2**N pixels; the *palette* for a monochrome source
would have 2 pixels representing background and foreground colors. The
application assigns a color to each pixel in the *palette*. The color of the
current pixel will be that of that *palette* pixel whose x position is the
color of the corresponding source pixel.
"""
...
def hline(self, x, y, w, c) -> Incomplete: ...
def fill(self, c) -> None:
"""
Fill the entire FrameBuffer with the specified color.
"""
...
def fill_rect(self, *args, **kwargs) -> Incomplete: ...
def __init__(self, buffer, width, height, format, stride=-1, /) -> None: ...
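
# Usage sketch: draw into a small monochrome buffer in RAM. MONO_HLSB packs
# 8 horizontal pixels per byte, so a 16x8 canvas needs 16 bytes.
import framebuf

buf = bytearray(16 * 8 // 8)
fb = framebuf.FrameBuffer(buf, 16, 8, framebuf.MONO_HLSB)

fb.fill(0)                  # clear to the background colour
fb.text("Hi", 0, 0, 1)      # 8x8 font, so "Hi" exactly fills the canvas
print(fb.pixel(0, 0))       # read back a pixel value
# `buf` now holds the bitmap and could be blitted to a display driver.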

75
.vscode/Pico-W-Stub/gc.pyi vendored Normal file
View File

@@ -0,0 +1,75 @@
"""
Control the garbage collector.
MicroPython module: https://docs.micropython.org/en/v1.21.0/library/gc.html
CPython module: :mod:`python:gc` https://docs.python.org/3/library/gc.html .
"""
from _typeshed import Incomplete, Incomplete as Incomplete
from typing import Any, Optional
def mem_alloc() -> int:
"""
Return the number of bytes of heap RAM that are allocated by Python code.
Difference to CPython
This function is a MicroPython extension.
"""
...
def isenabled(*args, **kwargs) -> Incomplete: ...
def mem_free() -> int:
"""
Return the number of bytes of heap RAM that is available for Python
code to allocate, or -1 if this amount is not known.
Difference to CPython
This function is a MicroPython extension.
"""
...
def threshold(amount: Optional[Any] = None) -> Incomplete:
"""
Set or query the additional GC allocation threshold. Normally, a collection
is triggered only when a new allocation cannot be satisfied, i.e. on an
out-of-memory (OOM) condition. If this function is called, in addition to
OOM, a collection will be triggered each time after *amount* bytes have been
allocated (in total, since the previous time such an amount of bytes
have been allocated). *amount* is usually specified as less than the
full heap size, with the intention to trigger a collection earlier than when the
heap becomes exhausted, and in the hope that an early collection will prevent
excessive memory fragmentation. This is a heuristic measure; its effect,
as well as the optimal value of the *amount* parameter, will vary from
application to application.
Calling the function without argument will return the current value of
the threshold. A value of -1 means a disabled allocation threshold.
Difference to CPython
This function is a MicroPython extension. CPython has a similar
function - ``set_threshold()``, but due to different GC
implementations, its signature and semantics are different.
"""
...
def collect() -> None:
"""
Run a garbage collection.
"""
...
def enable() -> None:
"""
Enable automatic garbage collection.
"""
...
def disable() -> None:
"""
Disable automatic garbage collection. Heap memory can still be allocated,
and garbage collection can still be initiated manually using :meth:`gc.collect`.
"""
...
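
# Usage sketch: force a collection, inspect heap usage, and set an allocation
# threshold (a quarter of the currently free heap is a common heuristic).
import gc

gc.collect()
print("allocated:", gc.mem_alloc(), "free:", gc.mem_free())

gc.threshold(gc.mem_free() // 4 + gc.mem_alloc())
print("threshold:", gc.threshold())   # no argument returns the current value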

45
.vscode/Pico-W-Stub/hashlib.pyi vendored Normal file
View File

@@ -0,0 +1,45 @@
"""
Hashing algorithms.
MicroPython module: https://docs.micropython.org/en/v1.21.0/library/hashlib.html
CPython module: :mod:`python:hashlib` https://docs.python.org/3/library/hashlib.html .
This module implements binary data hashing algorithms. The exact inventory
of available algorithms depends on a board. Among the algorithms which may
be implemented:
* SHA256 - The current generation, modern hashing algorithm (of SHA2 series).
It is suitable for cryptographically-secure purposes. Included in the
MicroPython core and any board is recommended to provide this, unless
it has particular code size constraints.
* SHA1 - A previous generation algorithm. Not recommended for new usages,
but SHA1 is a part of a number of Internet standards and existing
applications, so boards targeting network connectivity and
interoperability will try to provide this.
* MD5 - A legacy algorithm, not considered cryptographically secure. Only
selected boards, targeting interoperability with legacy applications,
will offer this.
"""
from _typeshed import Incomplete, Incomplete as Incomplete
from typing import Any, Optional
class sha256:
"""
Create an SHA256 hasher object and optionally feed ``data`` into it.
"""
def digest(self, *args, **kwargs) -> Incomplete: ...
def update(self, *args, **kwargs) -> Incomplete: ...
def __init__(self, data: Optional[Any] = None) -> None: ...
class sha1:
"""
Create an SHA1 hasher object and optionally feed ``data`` into it.
"""
def digest(self, *args, **kwargs) -> Incomplete: ...
def update(self, *args, **kwargs) -> Incomplete: ...
def __init__(self, data: Optional[Any] = None) -> None: ...
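
# Usage sketch: hash incrementally with update() and hex-encode the digest.
import hashlib
import binascii

h = hashlib.sha256(b"hello ")
h.update(b"pico")
print(binascii.hexlify(h.digest()))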

35
.vscode/Pico-W-Stub/heapq.pyi vendored Normal file
View File

@@ -0,0 +1,35 @@
"""
Heap queue algorithm.
MicroPython module: https://docs.micropython.org/en/v1.21.0/library/heapq.html
CPython module: :mod:`python:heapq` https://docs.python.org/3/library/heapq.html .
This module implements the
`min heap queue algorithm <https://en.wikipedia.org/wiki/Heap_%28data_structure%29>`_.
A heap queue is essentially a list that has its elements stored in such a way
that the first item of the list is always the smallest.
"""
from _typeshed import Incomplete, Incomplete as Incomplete
def heappop(heap) -> Incomplete:
"""
Pop the first item from the ``heap``, and return it. Raise ``IndexError`` if
``heap`` is empty.
The returned item will be the smallest item in the ``heap``.
"""
...
def heappush(heap, item) -> Incomplete:
"""
Push the ``item`` onto the ``heap``.
"""
...
def heapify(x) -> Incomplete:
"""
Convert the list ``x`` into a heap. This is an in-place operation.
"""
...
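
# Usage sketch: heapify a list in place, then pop items in ascending order.
import heapq

heap = [5, 1, 4]
heapq.heapify(heap)              # heap[0] is now the smallest item
heapq.heappush(heap, 2)
while heap:
    print(heapq.heappop(heap))   # 1, 2, 4, 5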

64
.vscode/Pico-W-Stub/io.pyi vendored Normal file
View File

@@ -0,0 +1,64 @@
"""
Input/output streams.
MicroPython module: https://docs.micropython.org/en/v1.21.0/library/io.html
CPython module: :mod:`python:io` https://docs.python.org/3/library/io.html .
This module contains additional types of `stream` (file-like) objects
and helper functions.
"""
from _typeshed import Incomplete, Incomplete as Incomplete
from stdlib.io import *
from typing import Any, IO, Optional
def open(name, mode="r", **kwargs) -> Incomplete:
"""
Open a file. The builtin ``open()`` function is aliased to this function.
All ports (which provide access to a file system) are required to support
the *mode* parameter, but support for other arguments varies by port.
"""
...
class IOBase:
def __init__(self, *argv, **kwargs) -> None: ...
class StringIO(IO):
def write(self, *args, **kwargs) -> Incomplete: ...
def flush(self, *args, **kwargs) -> Incomplete: ...
def getvalue(self, *args, **kwargs) -> Incomplete: ...
def seek(self, *args, **kwargs) -> Incomplete: ...
def tell(self, *args, **kwargs) -> Incomplete: ...
def readline(self, *args, **kwargs) -> Incomplete: ...
def close(self, *args, **kwargs) -> Incomplete: ...
def read(self, *args, **kwargs) -> Incomplete: ...
def readinto(self, *args, **kwargs) -> Incomplete: ...
def __init__(self, string: Optional[Any] = None) -> None: ...
class BytesIO(IO):
"""
In-memory file-like objects for input/output. `StringIO` is used for
text-mode I/O (similar to a normal file opened with "t" modifier).
`BytesIO` is used for binary-mode I/O (similar to a normal file
opened with "b" modifier). The initial contents of these file-like objects
can be specified with the *string* parameter (a normal string for
`StringIO` or a bytes object for `BytesIO`). All the usual file
methods like ``read()``, ``write()``, ``seek()``, ``flush()``,
``close()`` are available on these objects, and additionally, the
following method:
"""
def write(self, *args, **kwargs) -> Incomplete: ...
def flush(self, *args, **kwargs) -> Incomplete: ...
def getvalue(self) -> Incomplete:
"""
Get the current contents of the underlying buffer which holds data.
"""
...
def seek(self, *args, **kwargs) -> Incomplete: ...
def tell(self, *args, **kwargs) -> Incomplete: ...
def readline(self, *args, **kwargs) -> Incomplete: ...
def close(self, *args, **kwargs) -> Incomplete: ...
def read(self, *args, **kwargs) -> Incomplete: ...
def readinto(self, *args, **kwargs) -> Incomplete: ...
def __init__(self, string: Optional[Any] = None) -> None: ...
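
# Usage sketch: BytesIO behaves like a small binary file held in RAM.
import io

buf = io.BytesIO()
buf.write(b"abc")
buf.seek(0)
print(buf.read())       # b'abc'
print(buf.getvalue())   # whole buffer, regardless of the current position
buf.close()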

47
.vscode/Pico-W-Stub/json.pyi vendored Normal file
View File

@@ -0,0 +1,47 @@
"""
JSON encoding and decoding.
MicroPython module: https://docs.micropython.org/en/v1.21.0/library/json.html
CPython module: :mod:`python:json` https://docs.python.org/3/library/json.html .
This module allows conversion between Python objects and the JSON
data format.
"""
from _typeshed import Incomplete, Incomplete as Incomplete
def loads(str) -> Incomplete:
"""
Parse the JSON *str* and return an object. Raises :exc:`ValueError` if the
string is not correctly formed.
"""
...
def load(stream) -> Incomplete:
"""
Parse the given *stream*, interpreting it as a JSON string and
deserialising the data to a Python object. The resulting object is
returned.
Parsing continues until end-of-file is encountered.
A :exc:`ValueError` is raised if the data in *stream* is not correctly formed.
"""
...
def dumps(obj, separators=None) -> str:
"""
Return *obj* represented as a JSON string.
The arguments have the same meaning as in `dump`.
"""
...
def dump(obj, stream, separators=None) -> Incomplete:
"""
Serialise *obj* to a JSON string, writing it to the given *stream*.
If specified, separators should be an ``(item_separator, key_separator)``
tuple. The default is ``(', ', ': ')``. To get the most compact JSON
representation, you should specify ``(',', ':')`` to eliminate whitespace.
"""
...
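
# Usage sketch: default vs. compact encoding via the *separators* argument.
import json

obj = {"id": 1, "name": "pico"}
print(json.dumps(obj))                          # default (', ', ': ') separators
print(json.dumps(obj, separators=(",", ":")))   # most compact form
print(json.loads('{"id": 1}')["id"])            # 1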

39
.vscode/Pico-W-Stub/lwip.pyi vendored Normal file
View File

@@ -0,0 +1,39 @@
from _typeshed import Incomplete as Incomplete
SOCK_STREAM: int
SOCK_RAW: int
SOCK_DGRAM: int
SOL_SOCKET: int
SO_BROADCAST: int
SO_REUSEADDR: int
AF_INET6: int
AF_INET: int
IP_DROP_MEMBERSHIP: int
IPPROTO_IP: int
IP_ADD_MEMBERSHIP: int
def reset(*args, **kwargs) -> Incomplete: ...
def print_pcbs(*args, **kwargs) -> Incomplete: ...
def getaddrinfo(*args, **kwargs) -> Incomplete: ...
def callback(*args, **kwargs) -> Incomplete: ...
class socket:
def recvfrom(self, *args, **kwargs) -> Incomplete: ...
def recv(self, *args, **kwargs) -> Incomplete: ...
def makefile(self, *args, **kwargs) -> Incomplete: ...
def listen(self, *args, **kwargs) -> Incomplete: ...
def settimeout(self, *args, **kwargs) -> Incomplete: ...
def sendall(self, *args, **kwargs) -> Incomplete: ...
def setsockopt(self, *args, **kwargs) -> Incomplete: ...
def setblocking(self, *args, **kwargs) -> Incomplete: ...
def sendto(self, *args, **kwargs) -> Incomplete: ...
def readline(self, *args, **kwargs) -> Incomplete: ...
def readinto(self, *args, **kwargs) -> Incomplete: ...
def read(self, *args, **kwargs) -> Incomplete: ...
def close(self, *args, **kwargs) -> Incomplete: ...
def connect(self, *args, **kwargs) -> Incomplete: ...
def send(self, *args, **kwargs) -> Incomplete: ...
def bind(self, *args, **kwargs) -> Incomplete: ...
def accept(self, *args, **kwargs) -> Incomplete: ...
def write(self, *args, **kwargs) -> Incomplete: ...
def __init__(self, *argv, **kwargs) -> None: ...

1193
.vscode/Pico-W-Stub/machine.pyi vendored Normal file

File diff suppressed because it is too large Load Diff

257
.vscode/Pico-W-Stub/math.pyi vendored Normal file
View File

@@ -0,0 +1,257 @@
"""
Mathematical functions.
MicroPython module: https://docs.micropython.org/en/v1.21.0/library/math.html
CPython module: :mod:`python:math` https://docs.python.org/3/library/math.html .
The ``math`` module provides some basic mathematical functions for
working with floating-point numbers.
*Note:* On the pyboard, floating-point numbers have 32-bit precision.
Availability: not available on WiPy. Floating point support required
for this module.
"""
from _typeshed import Incomplete, Incomplete as Incomplete
from typing import Tuple
inf: float
nan: float
pi: float
e: float
tau: float
def ldexp(x, exp) -> Incomplete:
"""
Return ``x * (2**exp)``.
"""
...
def lgamma(x) -> float:
"""
Return the natural logarithm of the gamma function of ``x``.
"""
...
def trunc(x) -> int:
"""
Return an integer, being ``x`` rounded towards 0.
"""
...
def isclose(*args, **kwargs) -> Incomplete: ...
def gamma(x) -> Incomplete:
"""
Return the gamma function of ``x``.
"""
...
def isnan(x) -> bool:
"""
Return ``True`` if ``x`` is not-a-number.
"""
...
def isfinite(x) -> bool:
"""
Return ``True`` if ``x`` is finite.
"""
...
def isinf(x) -> bool:
"""
Return ``True`` if ``x`` is infinite.
"""
...
def sqrt(x) -> Incomplete:
"""
Return the square root of ``x``.
"""
...
def sinh(x) -> float:
"""
Return the hyperbolic sine of ``x``.
"""
...
def log(x) -> float:
"""
Return the natural logarithm of ``x``.
"""
...
def tan(x) -> float:
"""
Return the tangent of ``x``.
"""
...
def tanh(x) -> float:
"""
Return the hyperbolic tangent of ``x``.
"""
...
def log2(x) -> float:
"""
Return the base-2 logarithm of ``x``.
"""
...
def log10(x) -> float:
"""
Return the base-10 logarithm of ``x``.
"""
...
def sin(x) -> float:
"""
Return the sine of ``x``.
"""
...
def modf(x) -> Tuple:
"""
Return a tuple of two floats, being the fractional and integral parts of
``x``. Both return values have the same sign as ``x``.
"""
...
def radians(x) -> Incomplete:
"""
Return degrees ``x`` converted to radians.
"""
...
def atanh(x) -> float:
"""
Return the inverse hyperbolic tangent of ``x``.
"""
...
def atan2(y, x) -> float:
"""
Return the principal value of the inverse tangent of ``y/x``.
"""
...
def atan(x) -> float:
"""
Return the inverse tangent of ``x``.
"""
...
def ceil(x) -> int:
"""
Return an integer, being ``x`` rounded towards positive infinity.
"""
...
def copysign(x, y) -> Incomplete:
"""
Return ``x`` with the sign of ``y``.
"""
...
def frexp(x) -> Incomplete:
"""
Decomposes a floating-point number into its mantissa and exponent.
The returned value is the tuple ``(m, e)`` such that ``x == m * 2**e``
exactly. If ``x == 0`` then the function returns ``(0.0, 0)``, otherwise
the relation ``0.5 <= abs(m) < 1`` holds.
"""
...
def acos(x) -> float:
"""
Return the inverse cosine of ``x``.
"""
...
def pow(x, y) -> Incomplete:
"""
Returns ``x`` to the power of ``y``.
"""
...
def asinh(x) -> float:
"""
Return the inverse hyperbolic sine of ``x``.
"""
...
def acosh(x) -> float:
"""
Return the inverse hyperbolic cosine of ``x``.
"""
...
def asin(x) -> float:
"""
Return the inverse sine of ``x``.
"""
...
def factorial(*args, **kwargs) -> Incomplete: ...
def fabs(x) -> Incomplete:
"""
Return the absolute value of ``x``.
"""
...
def expm1(x) -> Incomplete:
"""
Return ``exp(x) - 1``.
"""
...
def floor(x) -> int:
"""
Return an integer, being ``x`` rounded towards negative infinity.
"""
...
def fmod(x, y) -> Incomplete:
"""
Return the remainder of ``x/y``.
"""
...
def cos(x) -> float:
"""
Return the cosine of ``x``.
"""
...
def degrees(x) -> Incomplete:
"""
Return radians ``x`` converted to degrees.
"""
...
def cosh(x) -> float:
"""
Return the hyperbolic cosine of ``x``.
"""
...
def exp(x) -> float:
"""
Return the exponential of ``x``.
"""
...
def erf(x) -> Incomplete:
"""
Return the error function of ``x``.
"""
...
def erfc(x) -> Incomplete:
"""
Return the complementary error function of ``x``.
"""
...
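
# Usage sketch: check the frexp()/ldexp() relationship x == m * 2**e.
import math

m, e = math.frexp(12.0)
print(m, e)                 # 0.75 4   (0.5 <= abs(m) < 1)
print(math.ldexp(m, e))     # 12.0
print(math.modf(2.5))       # (0.5, 2.0) fractional and integral parts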

190
.vscode/Pico-W-Stub/micropython.pyi vendored Normal file
View File

@@ -0,0 +1,190 @@
"""
Access and control MicroPython internals.
MicroPython module: https://docs.micropython.org/en/latest/library/micropython.html
"""
# source version: v1_20_0
# origin module:: repos/micropython/docs/library/micropython.rst
from typing import Any, Callable, Optional, Tuple, TypeVar, Union
from _typeshed import Incomplete
Const_T = TypeVar("Const_T", int, float, str, bytes, Tuple) # constant
def const(expr: Const_T) -> Const_T:
"""
Used to declare that the expression is a constant so that the compiler can
optimise it. The use of this function should be as follows::
from micropython import const
CONST_X = const(123)
CONST_Y = const(2 * CONST_X + 1)
Constants declared this way are still accessible as global variables from
outside the module they are declared in. On the other hand, if a constant
begins with an underscore then it is hidden, it is not available as a global
variable, and does not take up any memory during execution.
This `const` function is recognised directly by the MicroPython parser and is
provided as part of the :mod:`micropython` module mainly so that scripts can be
written which run under both CPython and MicroPython, by following the above
pattern.
"""
...
def opt_level(level: Optional[Any] = None) -> Incomplete:
"""
If *level* is given then this function sets the optimisation level for subsequent
compilation of scripts, and returns ``None``. Otherwise it returns the current
optimisation level.
The optimisation level controls the following compilation features:
- Assertions: at level 0 assertion statements are enabled and compiled into the
bytecode; at levels 1 and higher assertions are not compiled.
- Built-in ``__debug__`` variable: at level 0 this variable expands to ``True``;
at levels 1 and higher it expands to ``False``.
- Source-code line numbers: at levels 0, 1 and 2 source-code line numbers are
stored along with the bytecode so that exceptions can report the line number
they occurred at; at levels 3 and higher line numbers are not stored.
The default optimisation level is usually level 0.
"""
...
def alloc_emergency_exception_buf(size) -> Incomplete:
"""
Allocate *size* bytes of RAM for the emergency exception buffer (a good
size is around 100 bytes). The buffer is used to create exceptions in cases
when normal RAM allocation would fail (eg within an interrupt handler) and
therefore give useful traceback information in these situations.
A good way to use this function is to put it at the start of your main script
(eg ``boot.py`` or ``main.py``) and then the emergency exception buffer will be active
for all the code following it.
"""
...
def mem_info(verbose: Optional[Any] = None) -> None:
"""
Print information about currently used memory. If the *verbose* argument
is given then extra information is printed.
The information that is printed is implementation dependent, but currently
includes the amount of stack and heap used. In verbose mode it prints out
the entire heap indicating which blocks are used and which are free.
"""
...
def qstr_info(verbose: Optional[Any] = None) -> None:
"""
Print information about currently interned strings. If the *verbose*
argument is given then extra information is printed.
The information that is printed is implementation dependent, but currently
includes the number of interned strings and the amount of RAM they use. In
verbose mode it prints out the names of all RAM-interned strings.
"""
...
def stack_use() -> int:
"""
Return an integer representing the current amount of stack that is being
used. The absolute value of this is not particularly useful; rather, it
should be used to compute differences in stack usage at different points.
"""
...
def heap_lock() -> int: ...
def heap_unlock() -> int: ...
def heap_locked() -> bool:
"""
Lock or unlock the heap. When locked no memory allocation can occur and a
`MemoryError` will be raised if any heap allocation is attempted.
`heap_locked()` returns a true value if the heap is currently locked.
These functions can be nested, ie `heap_lock()` can be called multiple times
in a row and the lock-depth will increase, and then `heap_unlock()` must be
called the same number of times to make the heap available again.
Both `heap_unlock()` and `heap_locked()` return the current lock depth
(after unlocking for the former) as a non-negative integer, with 0 meaning
the heap is not locked.
If the REPL becomes active with the heap locked then it will be forcefully
unlocked.
Note: `heap_locked()` is not enabled on most ports by default; it
requires ``MICROPY_PY_MICROPYTHON_HEAP_LOCKED``.
"""
...
def kbd_intr(chr) -> None:
"""
Set the character that will raise a `KeyboardInterrupt` exception. By
default this is set to 3 during script execution, corresponding to Ctrl-C.
Passing -1 to this function will disable capture of Ctrl-C, and passing 3
will restore it.
This function can be used to prevent the capturing of Ctrl-C on the
incoming stream of characters that is usually used for the REPL, in case
that stream is used for other purposes.
"""
...
def schedule(func, arg) -> Incomplete:
"""
Schedule the function *func* to be executed "very soon". The function
is passed the value *arg* as its single argument. "Very soon" means that
the MicroPython runtime will do its best to execute the function at the
earliest possible time, given that it is also trying to be efficient, and
that the following conditions hold:
- A scheduled function will never preempt another scheduled function.
- Scheduled functions are always executed "between opcodes" which means
that all fundamental Python operations (such as appending to a list)
are guaranteed to be atomic.
- A given port may define "critical regions" within which scheduled
functions will never be executed. Functions may be scheduled within
a critical region but they will not be executed until that region
is exited. An example of a critical region is a preempting interrupt
handler (an IRQ).
A use for this function is to schedule a callback from a preempting IRQ.
Such an IRQ puts restrictions on the code that runs in the IRQ (for example
the heap may be locked) and scheduling a function to call later will lift
those restrictions.
Note: If `schedule()` is called from a preempting IRQ, when memory
allocation is not allowed and the callback to be passed to `schedule()` is
a bound method, passing this directly will fail. This is because creating a
reference to a bound method causes memory allocation. A solution is to
create a reference to the method in the class constructor and to pass that
reference to `schedule()`. This is discussed in detail here
:ref:`reference documentation <isr_rules>` under "Creation of Python
objects".
There is a finite queue to hold the scheduled functions and `schedule()`
will raise a `RuntimeError` if the queue is full.
"""
...
def viper(func: Callable) -> Callable:
"""
The Viper code emitter is not fully compliant. It supports special Viper native data types in pursuit of performance.
Integer processing is non-compliant because it uses machine words: arithmetic on 32 bit hardware is performed modulo 2**32.
Like the Native emitter, Viper produces machine instructions, but further optimisations are performed, substantially increasing
performance especially for integer arithmetic and bit manipulations.
"""
...
def native(func: Callable) -> Callable:
"""
This causes the MicroPython compiler to emit native CPU opcodes rather than bytecode.
It covers the bulk of the MicroPython functionality, so most functions will require no adaptation.
See: https://docs.micropython.org/en/latest/reference/speed_python.html?highlight=viper#the-native-code-emitter
"""
...
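
# Usage sketch: a hidden compile-time constant, an emergency exception buffer,
# and schedule() deferring work that would normally be queued from an IRQ.
from micropython import const, alloc_emergency_exception_buf, schedule

_FLAG = const(1)                     # leading underscore: hidden constant

alloc_emergency_exception_buf(100)   # better tracebacks inside IRQ handlers

def process(arg):
    # Runs "very soon" on the main context, where heap allocation is allowed.
    print("scheduled with", arg)

# In a real program this call would typically sit inside a hard IRQ handler.
schedule(process, _FLAG)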

View File

@@ -0,0 +1,22 @@
MIT License
Copyright (c) 2022 Jos Verlinde
Permission is hereby granted, free of charge, to any person obtaining a copy
of this software and associated documentation files (the "Software"), to deal
in the Software without restriction, including without limitation the rights
to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
copies of the Software, and to permit persons to whom the Software is
furnished to do so, subject to the following conditions:
The above copyright notice and this permission notice shall be included in all
copies or substantial portions of the Software.
THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE
SOFTWARE.

View File

@@ -0,0 +1,67 @@
Metadata-Version: 2.1
Name: micropython-rp2-rpi_pico_w-stubs
Version: 1.21.0.post1
Summary: MicroPython stubs
Home-page: https://github.com/josverl/micropython-stubs#micropython-stubs
License: MIT
Author: josverl
Author-email: josverl@users.noreply.github.com
Requires-Python: >=3.8,<4.0
Classifier: Development Status :: 4 - Beta
Classifier: License :: OSI Approved :: MIT License
Classifier: Operating System :: OS Independent
Classifier: Programming Language :: Python
Classifier: Programming Language :: Python :: 3
Classifier: Programming Language :: Python :: 3.8
Classifier: Programming Language :: Python :: 3.9
Classifier: Programming Language :: Python :: 3.10
Classifier: Programming Language :: Python :: 3.11
Classifier: Programming Language :: Python :: 3 :: Only
Classifier: Programming Language :: Python :: Implementation :: MicroPython
Classifier: Topic :: Software Development :: Build Tools
Classifier: Topic :: Text Editors :: Integrated Development Environments (IDE)
Classifier: Typing :: Typed
Requires-Dist: micropython-stdlib-stubs (>=0.9.0)
Project-URL: Documentation, https://micropython-stubs.readthedocs.io/
Project-URL: Repository, https://github.com/josverl/micropython-stubs
Description-Content-Type: text/markdown
# micropython-rp2-rpi_pico_w-stubs
This is a stub-only package for MicroPython.
It is intended to be installed in a project's virtual environment to allow static type checkers and intellisense features to be used while writing MicroPython code.
The version of this package is aligned to the version of the MicroPython firmware.
- Major, Minor and Patch levels are aligned to the same version as the firmware.
- The post release level is used to publish new releases of the stubs.
For `Micropython 1.17` the stubs are published as `1.17.post1` ... `1.17.post2`
for `Micropython 1.18` the stubs are published as `1.18.post1` ... `1.18.post2`
To install the latest stubs:
`pip install -I micropython-<port>-stubs` where port is the port of the MicroPython firmware.
To install the stubs for an older version, such as MicroPython 1.17:
`pip install micropython-stm32-stubs==1.17.*` which will install the last post release of the stubs for MicroPython 1.17.
As the creation of the stubs, and the merging of the different types, is still undergoing improvements, the stub packages are marked as Beta.
To upgrade the stubs to the latest post release for a specific version, use `pip install micropython-stm32-stubs==1.17.* --upgrade`
If you have suggestions or find any issues with the stubs, please report them in the [MicroPython-stubs Discussions](https://github.com/Josverl/micropython-stubs/discussions)
For an overview of Micropython Stubs please see: https://micropython-stubs.readthedocs.io/en/main/
* List of all stubs : https://micropython-stubs.readthedocs.io/en/main/firmware_grp.html
Included stubs:
* Merged stubs from `stubs/micropython-v1_21_0-rp2-rpi_pico_w-merged`
* Frozen stubs from `stubs/micropython-v1_21_0-frozen/rp2/RPI_PICO_W`
* Core stubs from `stubs/micropython-core`
origin | Family | Port | Board | Version
-------|--------|------|-------|--------
Documentation | micropython | - | - | v1.21.0
Core | micropython | rp2 | - | v1.21.0

View File

@@ -0,0 +1,99 @@
__builtins__.pyi,sha256=SRa_5xpV_qKoNdAgXY1G1TGP59HAYvZxp-ew4M2s3dY,1068
_asyncio.pyi,sha256=oJgEZ1Axm6cN0eO1Ke7oj5YckMoicmwKfUO9vL_Y810,416
_boot.pyi,sha256=sE1k2jzwUqn1o5YjnKlavj6468D8XQNhJNJ69gVrdtg,71
_boot_fat.pyi,sha256=sE1k2jzwUqn1o5YjnKlavj6468D8XQNhJNJ69gVrdtg,71
_onewire.pyi,sha256=_BXH4KbavKcoxnFA9OF24eERDydSk7EfXlx6N2mlBw8,343
_rp2.pyi,sha256=MCGcgPRjp_mQlh3SN7TWalfzSXP6GAC2ojkCFPpSQj4,1556
_thread.pyi,sha256=8qtf48y90MbHs4wEY_eSDeZ7QP__qf5DnYSJa7r18Ps,953
aioble/__init__.pyi,sha256=Wjhrff1BWzqTqQh5pMss5q9gT51FhkzMF3tN_QV1cGc,600
aioble/central.pyi,sha256=hZmbJnu3ccyO9Z9zjd29KiW6CGTvBmsNJx2Nv1nzLzo,2448
aioble/client.pyi,sha256=KkXt3661i_eKYozYTW1BaZkZjbpOhrhfK1SBxKpC0eo,4180
aioble/core.pyi,sha256=juai87amOQhoM_Vum3OTUcbkNiQVhXT3mYNyGzLLhe0,520
aioble/device.pyi,sha256=7lzYKge6yfFNWwmwyViUQgEClY6lVXWKPneR4oCtifc,2311
aioble/l2cap.pyi,sha256=k4NiXgzbvI25TqyfPbuWfu_v0KmF2dVXGtt3FuaicAs,1509
aioble/peripheral.pyi,sha256=Rz6k4Jpk-_h6r_BAXp6-rwfnPMRcNJ8KT1uhiujugwM,1425
aioble/security.pyi,sha256=-POdQrFOH67f9vtr2vbrf5U4TdZzipfx_qzRWDo6wEM,1071
aioble/server.pyi,sha256=Wd4ESEM63-A-7q3sPS3ed6Pl19j4DVh112C2WqUCaxM,3364
array.pyi,sha256=ZPtcObYk-XaI4AknOYrfMOJPXOS2ho0p35xdCgYcuVQ,1090
asyncio/__init__.pyi,sha256=fa4aomSb_KMbJVCimrP6IfegajK_KSN8muiH_lbqc7k,132
asyncio/core.pyi,sha256=xzNDXF3b6zq-OGz22ZPoPIJ_m5TVxgSg0YfUmG_CDzY,1530
asyncio/event.pyi,sha256=fFBZxUa_PzdiiE8I14F9IZeqg9lJgIAH8s--6SBd-9E,623
asyncio/funcs.pyi,sha256=3uEqPbVQPEqsaids5pFDkvmYUpufmkw4XuoyjwrouvI,390
asyncio/lock.pyi,sha256=QF9HAL_ayvACAPYG9Rd2kB0-WeUFYPZazG-MFFVSvtE,414
asyncio/stream.pyi,sha256=Uih1xMqHeZY4RwBQ4g-ut3_NauUjF10yxjGvh4Z3zeQ,1427
binascii.pyi,sha256=kOai4wTFKZ1BQkyHe5WO2fkRDGST6eEetLS3KdtQ388,1488
bluetooth.pyi,sha256=eKIXx2TJHUUHtKsSL4NawmGdnp02pCLbURKMQzSPvv0,30033
cmath.pyi,sha256=Hvtu5G3iSwPeuJHZLWliHC3_g07MtMBEVErlOCRXqO0,1529
collections.pyi,sha256=veeBTws6XDnpGS2N6sh0lKECaVflUC61SkJXbeakdeY,3966
cryptolib.pyi,sha256=pV8vbhqweB83T5J70RwDAFpn9rNJSkHH5YZbS23EJ98,1739
deflate.pyi,sha256=xraLamaDJ2rDZtVYjXLKlIM-F-E1xf8x4oA3qEgqeDM,3311
dht.pyi,sha256=zamvhZo46pXwloiPKKlldLlMYmb7waNyZE1aeUnz-vA,344
ds18x20.pyi,sha256=-BUsQj1Y155ooBHsobP6SzzNqWrw3SO8wnH3EpI9Z0A,404
errno.pyi,sha256=fT9TQhrfWRbRou7wVcyJQWPysPh372koMw_GNZCC7SU,777
framebuf.pyi,sha256=AlWAUXju3GDe8cD5SUFMb3iz90uLkbFet0BS64eDKUg,6474
gc.pyi,sha256=rh6cpwqew57KbQm6aD92AY0cb0mSPM8KFrqxjQpLX6I,2524
hashlib.pyi,sha256=9b65Uc6P92RHUGIgf00qCHOb2KSytlkRUnSNEj8q4r8,1743
heapq.pyi,sha256=JE1S9UQ38DvuchjejRgb65S_tAYvK8CYYpQT4vEl4JA,1000
io.pyi,sha256=ltx99WnM7_72vpSIRbTiiSh9Z5D90SbCeKaNrGISsOs,2702
json.pyi,sha256=mP4C0XMgUt1ZcT7AQSteCY-n0rd0bREU_dnSQnjO5d0,1454
lwip.pyi,sha256=0vIYgPwL5EnAbcoS1U2p_8e1SgPWiIYNevZDxMrO0kk,1582
machine.pyi,sha256=HVtm4Fkv-HfqGxcE0koQVerU7acXqGkYBf8SKCKMPoM,50379
math.pyi,sha256=KgjOuv3rpLNWcKWe7fVkB0PQ0Spv2VsBeE-pPhWNCWs,4827
micropython.pyi,sha256=a72FE6dfhvby6MIVPeTOPcgh46wsJUyRJr4W4TJsGKA,8423
micropython_rp2_rpi_pico_w_stubs-1.21.0.post1.dist-info/INSTALLER,sha256=zuuue4knoyJ-UwPPXg8fezS7VCrXJQrAP7zeNuwvFQg,4
micropython_rp2_rpi_pico_w_stubs-1.21.0.post1.dist-info/LICENSE.md,sha256=EerY3Evf4ZqwZBZMGIokOHR70txtsAlS_GGHuxmxUJY,1092
micropython_rp2_rpi_pico_w_stubs-1.21.0.post1.dist-info/METADATA,sha256=i_ZZ0I4ExOQn4KCrRnXpajKX5Rdco9aYtYIPJdqAcds,3225
micropython_rp2_rpi_pico_w_stubs-1.21.0.post1.dist-info/RECORD,,
micropython_rp2_rpi_pico_w_stubs-1.21.0.post1.dist-info/REQUESTED,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0
micropython_rp2_rpi_pico_w_stubs-1.21.0.post1.dist-info/WHEEL,sha256=d2fvjOD7sXsVzChCqf0Ty0JbHKBaLYwDbGQDwQTnJ50,88
mip/__init__.pyi,sha256=Zd0De2fcvpCue3E5iPFUmmXdpf_IeciR9Pty29Id_fk,598
neopixel.pyi,sha256=jOhE-kR8bRy0dEwqFLLY3Z7v7dp0n2XFwXTCwty0TuY,448
network.pyi,sha256=OQjwBgmxsx5ICq3GCOL3vzqIfUBb4X_gUTEPJ9PWIQQ,7633
ntptime.pyi,sha256=CmHJaGQFWjMig6JCTnWF0tY0KIw6jFWN362TnJWaWZQ,72
onewire.pyi,sha256=DgLSo0kBX1ox2Qm0x76XY00m2x17GM0uh8J4GWU1Tgs,617
os.pyi,sha256=BNYJHi5PymNeepTYUD27GoUDmXWrAreN3bIiU8-sB64,9677
platform.pyi,sha256=3xuJleRh1wBIkS51WeiUkfQtwgYK1CLjDqXteBQIWaY,1463
random.pyi,sha256=GDIgpkmplAsckapDEHlkkRdsRq8fCS_hBBRCS5gFmZI,2687
requests.pyi,sha256=Mk3u-Y3RUz3YXbe8bkC5gzNE12dF8Z7SGEp0-lhckLQ,609
requests/__init__.pyi,sha256=yNYrzq9TMRID3tAXLhHRRdzaiku2o6OfjDXjaF2bhYA,739
rp2.pyi,sha256=ghYwOcTINmxwfGdaL3tE1z5soZvskgoBbjvSLiuOb2E,1918
select.pyi,sha256=LKJ75d0F4BJg_7VAH3L07H1qqG3IkLuVEc82j-xMFWM,4114
socket.pyi,sha256=YUTOaiosablCnk-corhGdTRNpQScrllJxdw9pZyWrYo,11622
ssl.pyi,sha256=u94PkXN_NoaRnj2bBdMFq2x7JUHVmjeJcA8tvL1wMyI,3758
struct.pyi,sha256=4Mf6SQIchLMnVNPnFG-iNgeIqqpaAYx89xvz1FAKJQA,4316
sys.pyi,sha256=aL8EhWS78hy24Ir-y4QWB6QYdj5hYVooEiNQZ-MxMK0,1442
time.pyi,sha256=Df5LIT2n7WwXRXjLKBlqQ7g0ZHsQe1mnkTdyKyAtYno,13313
uarray.pyi,sha256=ZPtcObYk-XaI4AknOYrfMOJPXOS2ho0p35xdCgYcuVQ,1090
uasyncio.pyi,sha256=eu4a7KxASOh_jGsRn7zmGtUt2rJYtOpjVuGYB4ty4fc,28
uasyncio/__init__.pyi,sha256=bmpai6ZIJXDZ00aZz11BZZ92VGbfdQwqUL1jmUz3ZHU,2015
uasyncio/core.pyi,sha256=6mmEJjdYJhiYRHY4c4Hs47AYuY2zEq_ZJXitr4XSlR8,1029
uasyncio/event.pyi,sha256=OGzLIKk8AkbwIMK5l5CPy4QZTNc0uFQJzUftEgeUOzQ,580
uasyncio/funcs.pyi,sha256=rHBK8jMGD6X6rvpy7jYMCukHJ1SxFg7GfAxlXug_XX0,147
uasyncio/lock.pyi,sha256=LQ6j1whw6Oe2cRGT_gsQXkd16UrnsB5p33q8X73l220,259
uasyncio/stream.pyi,sha256=89XP2eqkvEmo3VZTNC1V0IAne8b_xEkPJQfkiN0i6Mg,1746
ubinascii.pyi,sha256=kOai4wTFKZ1BQkyHe5WO2fkRDGST6eEetLS3KdtQ388,1488
ubluetooth.pyi,sha256=eKIXx2TJHUUHtKsSL4NawmGdnp02pCLbURKMQzSPvv0,30033
ucollections.pyi,sha256=veeBTws6XDnpGS2N6sh0lKECaVflUC61SkJXbeakdeY,3966
ucryptolib.pyi,sha256=pV8vbhqweB83T5J70RwDAFpn9rNJSkHH5YZbS23EJ98,1739
uctypes.pyi,sha256=tUGuvBHmFbcLBOZCHZ--OKBe6dZdee-8B8ALYbTuV2Q,2417
uerrno.pyi,sha256=fT9TQhrfWRbRou7wVcyJQWPysPh372koMw_GNZCC7SU,777
uhashlib.pyi,sha256=9b65Uc6P92RHUGIgf00qCHOb2KSytlkRUnSNEj8q4r8,1743
uheapq.pyi,sha256=JE1S9UQ38DvuchjejRgb65S_tAYvK8CYYpQT4vEl4JA,1000
uio.pyi,sha256=ltx99WnM7_72vpSIRbTiiSh9Z5D90SbCeKaNrGISsOs,2702
ujson.pyi,sha256=mP4C0XMgUt1ZcT7AQSteCY-n0rd0bREU_dnSQnjO5d0,1454
umachine.pyi,sha256=HVtm4Fkv-HfqGxcE0koQVerU7acXqGkYBf8SKCKMPoM,50379
uos.pyi,sha256=BNYJHi5PymNeepTYUD27GoUDmXWrAreN3bIiU8-sB64,9677
uplatform.pyi,sha256=3xuJleRh1wBIkS51WeiUkfQtwgYK1CLjDqXteBQIWaY,1463
urandom.pyi,sha256=GDIgpkmplAsckapDEHlkkRdsRq8fCS_hBBRCS5gFmZI,2687
ure.pyi,sha256=bLeXSxERwfWOsjH_TCaRE4bcguKddfOgSAf2Bw9Fu7o,239
urequests.pyi,sha256=eu4a7KxASOh_jGsRn7zmGtUt2rJYtOpjVuGYB4ty4fc,28
uselect.pyi,sha256=LKJ75d0F4BJg_7VAH3L07H1qqG3IkLuVEc82j-xMFWM,4114
usocket.pyi,sha256=YUTOaiosablCnk-corhGdTRNpQScrllJxdw9pZyWrYo,11622
ussl.pyi,sha256=u94PkXN_NoaRnj2bBdMFq2x7JUHVmjeJcA8tvL1wMyI,3758
ustruct.pyi,sha256=4Mf6SQIchLMnVNPnFG-iNgeIqqpaAYx89xvz1FAKJQA,4316
usys.pyi,sha256=aL8EhWS78hy24Ir-y4QWB6QYdj5hYVooEiNQZ-MxMK0,1442
utime.pyi,sha256=Df5LIT2n7WwXRXjLKBlqQ7g0ZHsQe1mnkTdyKyAtYno,13313
uwebsocket.pyi,sha256=1wiEl4cRkoZE4jwWC-38w-Aowz3W5JZuv4KXHDYhsr8,469
webrepl.pyi,sha256=Du-Qx0WvAvNFp5E6NG7a2lJv7m5z7KEWpUNRTCZmVO4,513
webrepl_setup.pyi,sha256=3AjgA3EbRBgj6rUkxc_isYHihM-pCGnVANegahw1jfE,232
websocket.pyi,sha256=1wiEl4cRkoZE4jwWC-38w-Aowz3W5JZuv4KXHDYhsr8,469

View File

@@ -0,0 +1,4 @@
Wheel-Version: 1.0
Generator: poetry-core 1.7.0
Root-Is-Purelib: true
Tag: py3-none-any

View File

@@ -0,0 +1 @@
pip

View File

@@ -0,0 +1,239 @@
MIT License
Copyright (c) 2023 Jos Verlinde
Permission is hereby granted, free of charge, to any person obtaining a copy
of this software and associated documentation files (the "Software"), to deal
in the Software without restriction, including without limitation the rights
to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
copies of the Software, and to permit persons to whom the Software is
furnished to do so, subject to the following conditions:
The above copyright notice and this permission notice shall be included in all
copies or substantial portions of the Software.
THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE
SOFTWARE.
-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-
Parts of this package are licensed under different licenses, reproduced below.
-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-
The "typeshed" project is licensed under the terms of the Apache license, as
reproduced below.
= = = = =
Apache License
Version 2.0, January 2004
http://www.apache.org/licenses/
TERMS AND CONDITIONS FOR USE, REPRODUCTION, AND DISTRIBUTION
1. Definitions.
"License" shall mean the terms and conditions for use, reproduction,
and distribution as defined by Sections 1 through 9 of this document.
"Licensor" shall mean the copyright owner or entity authorized by
the copyright owner that is granting the License.
"Legal Entity" shall mean the union of the acting entity and all
other entities that control, are controlled by, or are under common
control with that entity. For the purposes of this definition,
"control" means (i) the power, direct or indirect, to cause the
direction or management of such entity, whether by contract or
otherwise, or (ii) ownership of fifty percent (50%) or more of the
outstanding shares, or (iii) beneficial ownership of such entity.
"You" (or "Your") shall mean an individual or Legal Entity
exercising permissions granted by this License.
"Source" form shall mean the preferred form for making modifications,
including but not limited to software source code, documentation
source, and configuration files.
"Object" form shall mean any form resulting from mechanical
transformation or translation of a Source form, including but
not limited to compiled object code, generated documentation,
and conversions to other media types.
"Work" shall mean the work of authorship, whether in Source or
Object form, made available under the License, as indicated by a
copyright notice that is included in or attached to the work
(an example is provided in the Appendix below).
"Derivative Works" shall mean any work, whether in Source or Object
form, that is based on (or derived from) the Work and for which the
editorial revisions, annotations, elaborations, or other modifications
represent, as a whole, an original work of authorship. For the purposes
of this License, Derivative Works shall not include works that remain
separable from, or merely link (or bind by name) to the interfaces of,
the Work and Derivative Works thereof.
"Contribution" shall mean any work of authorship, including
the original version of the Work and any modifications or additions
to that Work or Derivative Works thereof, that is intentionally
submitted to Licensor for inclusion in the Work by the copyright owner
or by an individual or Legal Entity authorized to submit on behalf of
the copyright owner. For the purposes of this definition, "submitted"
means any form of electronic, verbal, or written communication sent
to the Licensor or its representatives, including but not limited to
communication on electronic mailing lists, source code control systems,
and issue tracking systems that are managed by, or on behalf of, the
Licensor for the purpose of discussing and improving the Work, but
excluding communication that is conspicuously marked or otherwise
designated in writing by the copyright owner as "Not a Contribution."
"Contributor" shall mean Licensor and any individual or Legal Entity
on behalf of whom a Contribution has been received by Licensor and
subsequently incorporated within the Work.
2. Grant of Copyright License. Subject to the terms and conditions of
this License, each Contributor hereby grants to You a perpetual,
worldwide, non-exclusive, no-charge, royalty-free, irrevocable
copyright license to reproduce, prepare Derivative Works of,
publicly display, publicly perform, sublicense, and distribute the
Work and such Derivative Works in Source or Object form.
3. Grant of Patent License. Subject to the terms and conditions of
this License, each Contributor hereby grants to You a perpetual,
worldwide, non-exclusive, no-charge, royalty-free, irrevocable
(except as stated in this section) patent license to make, have made,
use, offer to sell, sell, import, and otherwise transfer the Work,
where such license applies only to those patent claims licensable
by such Contributor that are necessarily infringed by their
Contribution(s) alone or by combination of their Contribution(s)
with the Work to which such Contribution(s) was submitted. If You
institute patent litigation against any entity (including a
cross-claim or counterclaim in a lawsuit) alleging that the Work
or a Contribution incorporated within the Work constitutes direct
or contributory patent infringement, then any patent licenses
granted to You under this License for that Work shall terminate
as of the date such litigation is filed.
4. Redistribution. You may reproduce and distribute copies of the
Work or Derivative Works thereof in any medium, with or without
modifications, and in Source or Object form, provided that You
meet the following conditions:
(a) You must give any other recipients of the Work or
Derivative Works a copy of this License; and
(b) You must cause any modified files to carry prominent notices
stating that You changed the files; and
(c) You must retain, in the Source form of any Derivative Works
that You distribute, all copyright, patent, trademark, and
attribution notices from the Source form of the Work,
excluding those notices that do not pertain to any part of
the Derivative Works; and
(d) If the Work includes a "NOTICE" text file as part of its
distribution, then any Derivative Works that You distribute must
include a readable copy of the attribution notices contained
within such NOTICE file, excluding those notices that do not
pertain to any part of the Derivative Works, in at least one
of the following places: within a NOTICE text file distributed
as part of the Derivative Works; within the Source form or
documentation, if provided along with the Derivative Works; or,
within a display generated by the Derivative Works, if and
wherever such third-party notices normally appear. The contents
of the NOTICE file are for informational purposes only and
do not modify the License. You may add Your own attribution
notices within Derivative Works that You distribute, alongside
or as an addendum to the NOTICE text from the Work, provided
that such additional attribution notices cannot be construed
as modifying the License.
You may add Your own copyright statement to Your modifications and
may provide additional or different license terms and conditions
for use, reproduction, or distribution of Your modifications, or
for any such Derivative Works as a whole, provided Your use,
reproduction, and distribution of the Work otherwise complies with
the conditions stated in this License.
5. Submission of Contributions. Unless You explicitly state otherwise,
any Contribution intentionally submitted for inclusion in the Work
by You to the Licensor shall be under the terms and conditions of
this License, without any additional terms or conditions.
Notwithstanding the above, nothing herein shall supersede or modify
the terms of any separate license agreement you may have executed
with Licensor regarding such Contributions.
6. Trademarks. This License does not grant permission to use the trade
names, trademarks, service marks, or product names of the Licensor,
except as required for reasonable and customary use in describing the
origin of the Work and reproducing the content of the NOTICE file.
7. Disclaimer of Warranty. Unless required by applicable law or
agreed to in writing, Licensor provides the Work (and each
Contributor provides its Contributions) on an "AS IS" BASIS,
WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or
implied, including, without limitation, any warranties or conditions
of TITLE, NON-INFRINGEMENT, MERCHANTABILITY, or FITNESS FOR A
PARTICULAR PURPOSE. You are solely responsible for determining the
appropriateness of using or redistributing the Work and assume any
risks associated with Your exercise of permissions under this License.
8. Limitation of Liability. In no event and under no legal theory,
whether in tort (including negligence), contract, or otherwise,
unless required by applicable law (such as deliberate and grossly
negligent acts) or agreed to in writing, shall any Contributor be
liable to You for damages, including any direct, indirect, special,
incidental, or consequential damages of any character arising as a
result of this License or out of the use or inability to use the
Work (including but not limited to damages for loss of goodwill,
work stoppage, computer failure or malfunction, or any and all
other commercial damages or losses), even if such Contributor
has been advised of the possibility of such damages.
9. Accepting Warranty or Additional Liability. While redistributing
the Work or Derivative Works thereof, You may choose to offer,
and charge a fee for, acceptance of support, warranty, indemnity,
or other liability obligations and/or rights consistent with this
License. However, in accepting such obligations, You may act only
on Your own behalf and on Your sole responsibility, not on behalf
of any other Contributor, and only if You agree to indemnify,
defend, and hold each Contributor harmless for any liability
incurred by, or claims asserted against, such Contributor by reason
of your accepting any such warranty or additional liability.
END OF TERMS AND CONDITIONS
= = = = =
Parts of typeshed are licensed under different licenses (like the MIT
license), reproduced below.
= = = = =
The MIT License
Copyright (c) 2015 Jukka Lehtosalo and contributors
Permission is hereby granted, free of charge, to any person obtaining a
copy of this software and associated documentation files (the "Software"),
to deal in the Software without restriction, including without limitation
the rights to use, copy, modify, merge, publish, distribute, sublicense,
and/or sell copies of the Software, and to permit persons to whom the
Software is furnished to do so, subject to the following conditions:
The above copyright notice and this permission notice shall be included in
all copies or substantial portions of the Software.
THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING
FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER
DEALINGS IN THE SOFTWARE.
= = = = =

View File

@@ -0,0 +1,39 @@
Metadata-Version: 2.1
Name: micropython-stdlib-stubs
Version: 1.0.0
Summary: Micropython stdlib is a reduced and augmented copy of typeshed's stdlib for use by MicroPython stub packages
Home-page: https://github.com/josverl/micropython-stubs#micropython-stubs
License: MIT
Author: josverl
Author-email: josverl@users.noreply.github.com
Requires-Python: >=3.8,<4.0
Classifier: Development Status :: 4 - Beta
Classifier: License :: OSI Approved :: MIT License
Classifier: Operating System :: OS Independent
Classifier: Programming Language :: Python
Classifier: Programming Language :: Python :: 3
Classifier: Programming Language :: Python :: 3.8
Classifier: Programming Language :: Python :: 3.9
Classifier: Programming Language :: Python :: 3.10
Classifier: Programming Language :: Python :: 3.11
Classifier: Programming Language :: Python :: 3 :: Only
Classifier: Programming Language :: Python :: Implementation :: MicroPython
Classifier: Topic :: Software Development :: Build Tools
Classifier: Topic :: Text Editors :: Integrated Development Environments (IDE)
Classifier: Typing :: Typed
Project-URL: Documentation, https://micropython-stubs.readthedocs.io/
Project-URL: Repository, https://github.com/josverl/micropython-stubs
Description-Content-Type: text/markdown
A limited-size copy of typeshed's stdlib directory.
https://github.com/python/typeshed/tree/main/stdlib
This is used as a dependency in the micropython-*-stub packages to allow overriding some of the stdlib modules with MicroPython-specific implementations.
If you have suggestions or find any issues with the stubs, please report them in the [MicroPython-stubs Discussions](https://github.com/Josverl/micropython-stubs/discussions)
For an overview of MicroPython stubs, please see: https://micropython-stubs.readthedocs.io/en/main/
* List of all stubs : https://micropython-stubs.readthedocs.io/en/main/firmware_grp.html

View File

@@ -0,0 +1,68 @@
micropython_stdlib_stubs-1.0.0.dist-info/INSTALLER,sha256=zuuue4knoyJ-UwPPXg8fezS7VCrXJQrAP7zeNuwvFQg,4
micropython_stdlib_stubs-1.0.0.dist-info/LICENSE.md,sha256=XnIlPftszZeoPSWf1jwR9a1w2zp3zOL_-oC0qRi-gbE,13067
micropython_stdlib_stubs-1.0.0.dist-info/METADATA,sha256=KNci2h2_liQGHy_8lwV89mZaWIbUFW0pxIcrvP8mJ1Y,1908
micropython_stdlib_stubs-1.0.0.dist-info/RECORD,,
micropython_stdlib_stubs-1.0.0.dist-info/WHEEL,sha256=d2fvjOD7sXsVzChCqf0Ty0JbHKBaLYwDbGQDwQTnJ50,88
stdlib/__future__.pyi,sha256=s-Y8IJP5L0EswG_QSR_xENtt9dEC0ItL_aslaA1UmME,951
stdlib/_ast.pyi,sha256=ba_Kp76gZrGxVzrJDXs48Mqx6zYCJ-TFw23Ef0hwDSs,15249
stdlib/_codecs.pyi,sha256=wSOMpfy-2VFBVy9uqzVqMvLP1rfFv94z4GOCtONwdsc,7283
stdlib/_collections_abc.pyi,sha256=_KGWOxzKJOU2gQu4vQ66zyoknNHWOHg5VElIrB-GFpY,2204
stdlib/_decimal.pyi,sha256=_kl9Zl3aws7sXZH8n5Nw885SYCrYhNNThB9IJ939lGs,14057
stdlib/_typeshed/__init__.pyi,sha256=dxOvbssi4kLrgRJ-tlhZki85eDQ4vVJ5z3DZ7-YH3KM,10964
stdlib/_typeshed/dbapi.pyi,sha256=BJz51q_GDfs3yYJa18jemTilsBKs-m7jV-5ZfM_7owA,1675
stdlib/_typeshed/wsgi.pyi,sha256=iExnjwtr89qEPnoCVG48EtM8OeCJ70WGXyAUJwmu0O4,1677
stdlib/_typeshed/xml.pyi,sha256=Ffv4MfMivR0nrzVZxli93eddTSRcZR8itwNKdc0ZZnQ,499
stdlib/abc.pyi,sha256=NCUDS1byk9ykHiqN1WRB0Ofyewm25MFOj12k-uiGugo,1997
stdlib/asyncio/__init__.pyi,sha256=Rha03nC28gVISpLc2COjbPX--6UF7Sq4qIa-G6pkjRw,1266
stdlib/asyncio/base_events.pyi,sha256=62qVPV4W7902jhyalPHXk1Zd2WTHVKgoHpN4r3sCW9A,19890
stdlib/asyncio/base_futures.pyi,sha256=cHkQsNjGfzBjHZgbyPnAvlsvFYCxOCocJ_mHQVx0HZ4,768
stdlib/asyncio/base_tasks.pyi,sha256=PYv3qwMz2WIqDs3GGdLTaJfiJBTuUwIK0PUEKHIl9Uc,413
stdlib/asyncio/constants.pyi,sha256=6QF1yNVLZmHDjtnuXStdw-8SWnxzQX8SXn5SnAqKvDs,599
stdlib/asyncio/coroutines.pyi,sha256=hRcymXIP0CVXqBpRoDzGtFdLHuTQXbO2y0Kap0d3ls0,1120
stdlib/asyncio/events.pyi,sha256=vPoqrxAVQwVfzv18BMjD71LpX2riA7J1IYj9t5U3WWQ,24512
stdlib/asyncio/exceptions.pyi,sha256=Sp-bv5S0sVEb11niAgCtX4D2-zoECgJL-EHiGxC0ke4,1039
stdlib/asyncio/format_helpers.pyi,sha256=7bHuuROgjnZAgQxjgG1IJrB_RH_PQymZ4c7BUCNt9YY,907
stdlib/asyncio/futures.pyi,sha256=L27XOkdyEAmEFHPimch3HWY6bzsWSvViUxmVXr1rJIE,2717
stdlib/asyncio/locks.pyi,sha256=ZmCUTwT0Urzu0X8kP691Dva3t42fdiG62gwMDpyyQBU,4203
stdlib/asyncio/log.pyi,sha256=--UJmDmbuqm1EdrcBW5c94k3pzoNwlZKjsqn-ckSpPA,42
stdlib/asyncio/mixins.pyi,sha256=JUtyAosLnudv46MZaTZvigzn5eoLZnVHajWTSOJ_CQY,242
stdlib/asyncio/proactor_events.pyi,sha256=q0tMiV-y81mJh16xcXa21hmlidXyULUn2C9FY6_uQ3k,3009
stdlib/asyncio/protocols.pyi,sha256=3ooDCGHhxxYPcM5o20stbqOPNd-RBbiIDNk4ungvJqU,1665
stdlib/asyncio/queues.pyi,sha256=TntiRYcbhHQ8pFYG9tmD6p9WUbE-s6kBPlYEzu_oVc8,1310
stdlib/asyncio/runners.pyi,sha256=BNNnBWEveWcf-Qe1QoAe99LD4vmWKHdoMSXe2Rph098,1288
stdlib/asyncio/selector_events.pyi,sha256=-40IJS-J9MsqcwKb9SkSsO-5-679O_7ocPwOZQZ44yA,231
stdlib/asyncio/sslproto.pyi,sha256=XT4zcTMZYTlRh8K2WSxHDz5Xw_k-otF5ShKYUDXI-30,6595
stdlib/asyncio/staggered.pyi,sha256=Qwgygm1Wd5ydD1Q9Iwu1lCq5uHRl0q_p5wca_HD7ask,351
stdlib/asyncio/streams.pyi,sha256=ZlYMdF5DydffhfXWrHAqky3DtiNxO31OzMRN9BhTf8I,6759
stdlib/asyncio/taskgroups.pyi,sha256=l399dBgZNG3_fVZ6Cd3YrmrLT6BVoCFdvkqoyGnQdbQ,646
stdlib/asyncio/tasks.pyi,sha256=6aKXS_Oh0s4buOTZkBlmIrNIR-6BNhLFjJoEZSBUOUs,14465
stdlib/asyncio/threads.pyi,sha256=2luO0lvlHbVhlIaZqQfFeVdr8xHn5hlmhl2QBP9YLlU,274
stdlib/asyncio/timeouts.pyi,sha256=brKulE6iRhpq7_8kdJYDdw-QyVTkrzbK_yYiPycIiyA,653
stdlib/asyncio/transports.pyi,sha256=lZFZ2sBsXpa1TGFnIWIBUMMWpqgQMIHjPuPHjJRHCLw,2094
stdlib/asyncio/trsock.pyi,sha256=QFrlTSefkKqkm9CvkQE-DrbJ-O0cbj3NN9MjUFBE250,5161
stdlib/asyncio/unix_events.pyi,sha256=KtpYuURjnn9lgipPv5FW7_S0yrZUKkBpxqLv1QjoZwo,5847
stdlib/builtins.pyi,sha256=slGhgVHGPZM_S2nDlAzR2s0pACuGWadcpohAvVdVDz4,85034
stdlib/codecs.pyi,sha256=yvQyUT4IcMHUB_5N6CpOX5dz7IC0UasuoQtS2zpFlNo,11867
stdlib/collections/__init__.pyi,sha256=K_CAL3sFw6sYSlV7dExVisyKVltE8wdl5_Ol-VVmXi8,21370
stdlib/collections/abc.pyi,sha256=7E24ytmwcwkHwpzvttFRMn9nJfpNDW6YYulI0H9nxxI,81
stdlib/contextlib.pyi,sha256=SotLFn_0pEX3aqSGKF1EzuQLUflAbNSkfG-fkGqonpY,9193
stdlib/contextvars.pyi,sha256=e1LEavWqiBfSZOscHUnLsazlho_3a1KhMfzvWGD-F7s,2472
stdlib/dataclasses.pyi,sha256=OU1-LN29Qf4TWDS_wJEX-cb-M2JH201gQqgWNxpLdLc,9719
stdlib/decimal.pyi,sha256=SsrITV7HUaFdSbalRQapbaA726XRSduQCtEBU8Ev9U4,119
stdlib/enum.pyi,sha256=qV1OqUsRtz8l81Q-Hfsc0sa6O9fbg4nD6ALmK7eRQdI,10984
stdlib/fractions.pyi,sha256=ZDMRzxCoqgPkPGPiQR4AVOYjx1Gbo5_0IDjo61ncBuA,5754
stdlib/functools.pyi,sha256=PB8UjNUNW8bsdOfrmReFzdrQrtzIB4r1e_5YQxCzsDY,8850
stdlib/io.pyi,sha256=Fx1e2zuobNnVGjlnFm6swi2H3lVMly8LrOw7SbNzJvc,7631
stdlib/numbers.pyi,sha256=0ze1DSG3UBfMDIjPFr-htK4FCF4R6FGY69KvSHxSkL8,4043
stdlib/os/__init__.pyi,sha256=59wnHVcQ5OD22a1GMNU4l-jmI703jWH5fWL5U17cohg,38450
stdlib/queue.pyi,sha256=k88XJePCEN3Vw-hTgj1-Vod8AzZ81E6hztLcmRnbM90,2131
stdlib/re.pyi,sha256=ioHD_Nvlx8LpB63K0A3fmGIMzKUuDYDG8j3GmDTfaDI,10221
stdlib/selectors.pyi,sha256=zCq3OZKYThiRl3NTC1pHXdwB2D5c4Nz3Y1lr4Ek9EwQ,3802
stdlib/socket.pyi,sha256=PhKukvc_esHzO3xsOdrfncyS4aI0Tkt0nOFI3WNeTFs,30384
stdlib/sre_compile.pyi,sha256=UPXp7Mfy8VRdGCUgDWu_-msiRGad2FEDtUpWwA5CIxA,343
stdlib/sre_constants.pyi,sha256=qZqXCU0gEevo7PDRM3j8iRTL3MdyehjzuA95SANnawk,4116
stdlib/sre_parse.pyi,sha256=PYPVZLt8485Ob9JYMKAf-ximVjZPTIlrKmvSFk-VtaI,4610
stdlib/sys.pyi,sha256=o_0z8ab0hvt9YjWDmA78jNmES6CfG8R2L-sRQJtdRmQ,12231
stdlib/types.pyi,sha256=LyfVBbkqqSn__17l6lkx1VFTQnYeOgt9eyZ48WuC_Ug,21369
stdlib/typing.pyi,sha256=YfF43PG2kly39ygU84RsxYGDq4mjf8IiSgdK9IBNNVo,30253
stdlib/typing_extensions.pyi,sha256=iSpzmXucn4MDSdhXLQRHI1smqjE9bEj_-Zx6C_tM_WY,15278

View File

@@ -0,0 +1,4 @@
Wheel-Version: 1.0
Generator: poetry-core 1.7.0
Root-Is-Purelib: true
Tag: py3-none-any

15
.vscode/Pico-W-Stub/mip/__init__.pyi vendored Normal file
View File

@@ -0,0 +1,15 @@
from _typeshed import Incomplete
_PACKAGE_INDEX: Incomplete
_CHUNK_SIZE: int
def _ensure_path_exists(path) -> None: ...
def _chunk(src, dest) -> None: ...
def _check_exists(path, short_hash): ...
def _rewrite_url(url, branch: Incomplete | None = ...): ...
def _download_file(url, dest): ...
def _install_json(package_json_url, index, target, version, mpy): ...
def _install_package(package, index, target, version, mpy): ...
def install(
package, index: Incomplete | None = ..., target: Incomplete | None = ..., version: Incomplete | None = ..., mpy: bool = ...
) -> None: ...
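The mip stub above mirrors MicroPython's on-device package installer. A minimal sketch, assuming the board is already connected to a network and that "umqtt.simple" is just an example package name:

import mip

# Install a package from micropython-lib into the default location (/lib).
mip.install("umqtt.simple")

# Optionally pin a version and choose an explicit target directory.
mip.install("umqtt.simple", version="latest", target="/lib")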

15
.vscode/Pico-W-Stub/neopixel.pyi vendored Normal file
View File

@@ -0,0 +1,15 @@
from _typeshed import Incomplete
class NeoPixel:
ORDER: Incomplete
pin: Incomplete
n: Incomplete
bpp: Incomplete
buf: Incomplete
timing: Incomplete
def __init__(self, pin, n, bpp: int = ..., timing: int = ...) -> None: ...
def __len__(self) -> int: ...
def __setitem__(self, i, v) -> None: ...
def __getitem__(self, i): ...
def fill(self, v) -> None: ...
def write(self) -> None: ...
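A minimal sketch of driving a strip through this NeoPixel interface; the pin number and pixel count are placeholders for your wiring:

from machine import Pin
import neopixel

np = neopixel.NeoPixel(Pin(28), 8)   # 8 pixels, data on GP28 (example wiring)
np.fill((0, 0, 16))                  # dim blue on every pixel
np[0] = (255, 0, 0)                  # first pixel red
np.write()                           # push the buffer out to the strip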

184
.vscode/Pico-W-Stub/network.pyi vendored Normal file
View File

@@ -0,0 +1,184 @@
"""
Network configuration.
MicroPython module: https://docs.micropython.org/en/v1.21.0/library/network.html
This module provides network drivers and routing configuration. To use this
module, a MicroPython variant/build with network capabilities must be installed.
Network drivers for specific hardware are available within this module and are
used to configure hardware network interface(s). Network services provided
by configured interfaces are then available for use via the :mod:`socket`
module.
For example::
# connect to / show the IP config of a specific network interface
# see below for examples of specific drivers
import network
import time
nic = network.Driver(...)
if not nic.isconnected():
nic.connect()
print("Waiting for connection...")
while not nic.isconnected():
time.sleep(1)
print(nic.ifconfig())
# now use socket as usual
import socket
addr = socket.getaddrinfo('micropython.org', 80)[0][-1]
s = socket.socket()
s.connect(addr)
s.send(b'GET / HTTP/1.1\r\nHost: micropython.org\r\n\r\n')
data = s.recv(1000)
s.close()
"""
from _typeshed import Incomplete, Incomplete as Incomplete
from typing import Any, List, Optional, Tuple, Union
STA_IF: int
STAT_IDLE: int
STAT_NO_AP_FOUND: int
STAT_WRONG_PASSWORD: int
STAT_GOT_IP: int
AP_IF: int
STAT_CONNECTING: int
STAT_CONNECT_FAIL: int
def route(*args, **kwargs) -> Incomplete: ...
def hostname(*args, **kwargs) -> Incomplete: ...
def country(*args, **kwargs) -> Incomplete: ...
class WLAN:
"""
Create a WLAN network interface object. Supported interfaces are
``network.STA_IF`` (station aka client, connects to upstream WiFi access
points) and ``network.AP_IF`` (access point, allows other WiFi clients to
connect). Availability of the methods below depends on interface type.
For example, only the STA interface may `WLAN.connect()` to an access point.
"""
PM_PERFORMANCE: int
PM_POWERSAVE: int
PM_NONE: int
def isconnected(self) -> bool:
"""
In case of STA mode, returns ``True`` if connected to a WiFi access
point and has a valid IP address. In AP mode returns ``True`` when a
station is connected. Returns ``False`` otherwise.
"""
...
def ioctl(self, *args, **kwargs) -> Incomplete: ...
def ifconfig(self, configtuple: Optional[Any] = None) -> Tuple:
"""
Get/set IP-level network interface parameters: IP address, subnet mask,
gateway and DNS server. When called with no arguments, this method returns
a 4-tuple with the above information. To set the above values, pass a
4-tuple with the required information. For example::
nic.ifconfig(('192.168.0.4', '255.255.255.0', '192.168.0.1', '8.8.8.8'))
"""
...
def scan(self) -> List[Tuple]:
"""
Scan for the available wireless networks.
Hidden networks -- where the SSID is not broadcast -- will also be scanned
if the WLAN interface allows it.
Scanning is only possible on the STA interface. Returns a list of tuples with
the information about WiFi access points:
(ssid, bssid, channel, RSSI, security, hidden)
*bssid* is hardware address of an access point, in binary form, returned as
bytes object. You can use `binascii.hexlify()` to convert it to ASCII form.
There are five values for security:
* 0 -- open
* 1 -- WEP
* 2 -- WPA-PSK
* 3 -- WPA2-PSK
* 4 -- WPA/WPA2-PSK
and two for hidden:
* 0 -- visible
* 1 -- hidden
"""
...
def send_ethernet(self, *args, **kwargs) -> Incomplete: ...
def status(self, param: Optional[Any] = None) -> Incomplete:
"""
Return the current status of the wireless connection.
When called with no argument the return value describes the network link status.
The possible statuses are defined as constants:
* ``STAT_IDLE`` -- no connection and no activity,
* ``STAT_CONNECTING`` -- connecting in progress,
* ``STAT_WRONG_PASSWORD`` -- failed due to incorrect password,
* ``STAT_NO_AP_FOUND`` -- failed because no access point replied,
* ``STAT_CONNECT_FAIL`` -- failed due to other problems,
* ``STAT_GOT_IP`` -- connection successful.
When called with one argument, *param* should be a string naming the status
parameter to retrieve. Supported parameters in WiFi STA mode are: ``'rssi'``.
"""
...
def config(self, *args, **kwargs) -> Incomplete:
"""
Get or set general network interface parameters. This method allows working
with additional parameters beyond the standard IP configuration (as dealt with by
`WLAN.ifconfig()`). These include network-specific and hardware-specific
parameters. For setting parameters, keyword argument syntax should be used, and
multiple parameters can be set at once. For querying, the parameter name should
be quoted as a string, and only one parameter can be queried at a time::
# Set WiFi access point name (formally known as SSID) and WiFi channel
ap.config(ssid='My AP', channel=11)
# Query params one by one
print(ap.config('ssid'))
print(ap.config('channel'))
The following are commonly supported parameters (availability of a specific parameter
depends on network technology type, driver, and :term:`MicroPython port`).
============= ===========
Parameter Description
============= ===========
mac MAC address (bytes)
ssid WiFi access point name (string)
channel WiFi channel (integer)
hidden Whether SSID is hidden (boolean)
security Security protocol supported (enumeration, see module constants)
key Access key (string)
hostname The hostname that will be sent to DHCP (STA interfaces) and mDNS (if supported, both STA and AP). (Deprecated, use :func:`network.hostname` instead)
reconnects Number of reconnect attempts to make (integer, 0=none, -1=unlimited)
txpower Maximum transmit power in dBm (integer or float)
pm WiFi Power Management setting (see below for allowed values)
============= ===========
"""
...
def active(self, is_active: Optional[Any] = None) -> None:
"""
Activate ("up") or deactivate ("down") network interface, if boolean
argument is passed. Otherwise, query current state if no argument is
provided. Most other methods require active interface.
"""
...
def disconnect(self) -> None:
"""
Disconnect from the currently connected wireless network.
"""
...
def connect(self, ssid=None, key=None, *, bssid=None) -> None:
"""
Connect to the specified wireless network, using the specified key.
If *bssid* is given then the connection will be restricted to the
access-point with that MAC address (the *ssid* must also be specified
in this case).
"""
...
def deinit(self, *args, **kwargs) -> Incomplete: ...
def __init__(self, interface_id) -> None: ...
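Putting the WLAN methods above together, a typical station-mode connect sequence might look like this sketch (SSID and password are placeholders):

import network
import time

wlan = network.WLAN(network.STA_IF)
wlan.active(True)
wlan.connect("my-ssid", "my-password")      # placeholder credentials

while not wlan.isconnected():
    if wlan.status() == network.STAT_WRONG_PASSWORD:
        raise RuntimeError("bad WiFi credentials")
    time.sleep(1)

print("IP config:", wlan.ifconfig())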

5
.vscode/Pico-W-Stub/ntptime.pyi vendored Normal file
View File

@@ -0,0 +1,5 @@
host: str
timeout: int
def time(): ...
def settime() -> None: ...
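ntptime is small enough that a short sketch shows it all, assuming the board is already online (the host assignment is only there to illustrate the module attribute):

import ntptime
import time

ntptime.host = "pool.ntp.org"   # optional: override the default NTP server
ntptime.settime()               # set the RTC from NTP, in UTC
print(time.gmtime())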

21
.vscode/Pico-W-Stub/onewire.pyi vendored Normal file
View File

@@ -0,0 +1,21 @@
from _typeshed import Incomplete
class OneWireError(Exception): ...
class OneWire:
SEARCH_ROM: int
MATCH_ROM: int
SKIP_ROM: int
pin: Incomplete
def __init__(self, pin) -> None: ...
def reset(self, required: bool = ...): ...
def readbit(self): ...
def readbyte(self): ...
def readinto(self, buf) -> None: ...
def writebit(self, value): ...
def writebyte(self, value): ...
def write(self, buf) -> None: ...
def select_rom(self, rom) -> None: ...
def scan(self): ...
def _search_rom(self, l_rom, diff): ...
def crc8(self, data): ...
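A minimal bus scan with the OneWire driver above; the data pin is a placeholder and a pull-up resistor on the bus is assumed:

from machine import Pin
import onewire

ow = onewire.OneWire(Pin(22))         # example data pin
roms = ow.scan()                      # ROM ids of all devices on the bus
print("found", len(roms), "device(s):", [bytes(r) for r in roms])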

253
.vscode/Pico-W-Stub/os.pyi vendored Normal file
View File

@@ -0,0 +1,253 @@
"""
Basic "operating system" services.
MicroPython module: https://docs.micropython.org/en/v1.21.0/library/os.html
CPython module: :mod:`python:os` https://docs.python.org/3/library/os.html .
The ``os`` module contains functions for filesystem access and mounting,
terminal redirection and duplication, and the ``uname`` and ``urandom``
functions.
"""
from _typeshed import Incomplete, Incomplete as Incomplete
from stdlib.os import *
from typing import Any, IO, Iterator, Optional, Tuple
def statvfs(path) -> Tuple:
"""
Get the status of a filesystem.
Returns a tuple with the filesystem information in the following order:
* ``f_bsize`` -- file system block size
* ``f_frsize`` -- fragment size
* ``f_blocks`` -- size of fs in f_frsize units
* ``f_bfree`` -- number of free blocks
* ``f_bavail`` -- number of free blocks for unprivileged users
* ``f_files`` -- number of inodes
* ``f_ffree`` -- number of free inodes
* ``f_favail`` -- number of free inodes for unprivileged users
* ``f_flag`` -- mount flags
* ``f_namemax`` -- maximum filename length
The parameters related to inodes (``f_files``, ``f_ffree``, ``f_favail``)
and the ``f_flag`` parameter may return ``0``, as they can be unavailable
in a port-specific implementation.
"""
...
def stat(path) -> Incomplete:
"""
Get the status of a file or directory.
"""
...
def rmdir(path) -> None:
"""
Remove a directory.
"""
...
def rename(old_path, new_path) -> None:
"""
Rename a file.
"""
...
def mount(fsobj, mount_point, *, readonly=False) -> Incomplete:
"""
Mount the filesystem object *fsobj* at the location in the VFS given by the
*mount_point* string. *fsobj* can be a VFS object that has a ``mount()``
method, or a block device. If it's a block device then the filesystem type
is automatically detected (an exception is raised if no filesystem was
recognised). *mount_point* may be ``'/'`` to mount *fsobj* at the root,
or ``'/<name>'`` to mount it at a subdirectory under the root.
If *readonly* is ``True`` then the filesystem is mounted read-only.
During the mount process the method ``mount()`` is called on the filesystem
object.
Will raise ``OSError(EPERM)`` if *mount_point* is already mounted.
"""
...
def sync() -> None:
"""
Sync all filesystems.
"""
...
def unlink(*args, **kwargs) -> Incomplete: ...
def uname() -> uname_result:
"""
Return a tuple (possibly a named tuple) containing information about the
underlying machine and/or its operating system. The tuple has five fields
in the following order, each of them being a string:
* ``sysname`` -- the name of the underlying system
* ``nodename`` -- the network name (can be the same as ``sysname``)
* ``release`` -- the version of the underlying system
* ``version`` -- the MicroPython version and build date
* ``machine`` -- an identifier for the underlying hardware (eg board, CPU)
"""
...
def umount(mount_point) -> Incomplete:
"""
Unmount a filesystem. *mount_point* can be a string naming the mount location,
or a previously-mounted filesystem object. During the unmount process the
method ``umount()`` is called on the filesystem object.
Will raise ``OSError(EINVAL)`` if *mount_point* is not found.
"""
...
def urandom(n) -> bytes:
"""
Return a bytes object with *n* random bytes. Whenever possible, it is
generated by the hardware random number generator.
"""
...
def chdir(path) -> Incomplete:
"""
Change current directory.
"""
...
def dupterm(stream_object, index=0, /) -> IO:
"""
Duplicate or switch the MicroPython terminal (the REPL) on the given `stream`-like
object. The *stream_object* argument must be a native stream object, or derive
from ``io.IOBase`` and implement the ``readinto()`` and
``write()`` methods. The stream should be in non-blocking mode and
``readinto()`` should return ``None`` if there is no data available for reading.
After calling this function all terminal output is repeated on this stream,
and any input that is available on the stream is passed on to the terminal input.
The *index* parameter should be a non-negative integer and specifies which
duplication slot is set. A given port may implement more than one slot (slot 0
will always be available) and in that case terminal input and output is
duplicated on all the slots that are set.
If ``None`` is passed as the *stream_object* then duplication is cancelled on
the slot given by *index*.
The function returns the previous stream-like object in the given slot.
"""
...
def remove(path) -> None:
"""
Remove a file.
"""
...
def mkdir(path) -> Incomplete:
"""
Create a new directory.
"""
...
def getcwd() -> Incomplete:
"""
Get the current directory.
"""
...
def listdir(dir: Optional[Any] = None) -> Incomplete:
"""
With no argument, list the current directory. Otherwise list the given directory.
"""
...
def ilistdir(dir: Optional[Any] = None) -> Iterator[Tuple]:
"""
This function returns an iterator which then yields tuples corresponding to
the entries in the directory that it is listing. With no argument it lists the
current directory, otherwise it lists the directory given by *dir*.
The tuples have the form *(name, type, inode[, size])*:
- *name* is a string (or bytes if *dir* is a bytes object) and is the name of
the entry;
- *type* is an integer that specifies the type of the entry, with 0x4000 for
directories and 0x8000 for regular files;
- *inode* is an integer corresponding to the inode of the file, and may be 0
for filesystems that don't have such a notion.
- Some platforms may return a 4-tuple that includes the entry's *size*. For
file entries, *size* is an integer representing the size of the file
or -1 if unknown. Its meaning is currently undefined for directory
entries.
"""
...
class VfsLfs2:
"""
Create a filesystem object that uses the `littlefs v2 filesystem format`_.
Storage of the littlefs filesystem is provided by *block_dev*, which must
support the :ref:`extended interface <block-device-interface>`.
Objects created by this constructor can be mounted using :func:`mount`.
The *mtime* argument enables modification timestamps for files, stored using
littlefs attributes. This option can be enabled or disabled on each mount, and
timestamps will only be added or updated while *mtime* is enabled; otherwise
the timestamps will remain untouched. Littlefs v2 filesystems without
timestamps will work without reformatting and timestamps will be added
transparently to existing files once they are opened for writing. When *mtime*
is enabled `os.stat` on files without timestamps will return 0 for the timestamp.
See :ref:`filesystem` for more information.
"""
def rename(self, *args, **kwargs) -> Incomplete: ...
@staticmethod
def mkfs(block_dev, readsize=32, progsize=32, lookahead=32) -> None:
"""
Build a Lfs2 filesystem on *block_dev*.
``Note:`` There are reports of littlefs v2 failing in certain situations,
for details see `littlefs issue 295`_.
"""
...
def mount(self, *args, **kwargs) -> Incomplete: ...
def statvfs(self, *args, **kwargs) -> Incomplete: ...
def rmdir(self, *args, **kwargs) -> Incomplete: ...
def stat(self, *args, **kwargs) -> Incomplete: ...
def umount(self, *args, **kwargs) -> Incomplete: ...
def remove(self, *args, **kwargs) -> Incomplete: ...
def mkdir(self, *args, **kwargs) -> Incomplete: ...
def open(self, *args, **kwargs) -> Incomplete: ...
def ilistdir(self, *args, **kwargs) -> Incomplete: ...
def chdir(self, *args, **kwargs) -> Incomplete: ...
def getcwd(self, *args, **kwargs) -> Incomplete: ...
def __init__(self, block_dev, readsize=32, progsize=32, lookahead=32, mtime=True) -> None: ...
class VfsFat:
"""
Create a filesystem object that uses the FAT filesystem format. Storage of
the FAT filesystem is provided by *block_dev*.
Objects created by this constructor can be mounted using :func:`mount`.
"""
def rename(self, *args, **kwargs) -> Incomplete: ...
@staticmethod
def mkfs(block_dev) -> None:
"""
Build a FAT filesystem on *block_dev*.
"""
...
def mount(self, *args, **kwargs) -> Incomplete: ...
def statvfs(self, *args, **kwargs) -> Incomplete: ...
def rmdir(self, *args, **kwargs) -> Incomplete: ...
def stat(self, *args, **kwargs) -> Incomplete: ...
def umount(self, *args, **kwargs) -> Incomplete: ...
def remove(self, *args, **kwargs) -> Incomplete: ...
def mkdir(self, *args, **kwargs) -> Incomplete: ...
def open(self, *args, **kwargs) -> Incomplete: ...
def ilistdir(self, *args, **kwargs) -> Incomplete: ...
def chdir(self, *args, **kwargs) -> Incomplete: ...
def getcwd(self, *args, **kwargs) -> Incomplete: ...
def __init__(self, block_dev) -> None: ...
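As a quick illustration of statvfs() and ilistdir() above, this sketch reports free flash space and lists the filesystem root (the 0x4000 directory flag comes from the ilistdir() description):

import os

st = os.statvfs("/")
free_kib = st[0] * st[3] // 1024      # f_bsize * f_bfree, in KiB
print("free flash:", free_kib, "KiB")

for entry in os.ilistdir("/"):        # entries may be 3- or 4-tuples
    name, kind = entry[0], entry[1]
    print(name, "<dir>" if kind == 0x4000 else "<file>")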

43
.vscode/Pico-W-Stub/platform.pyi vendored Normal file
View File

@@ -0,0 +1,43 @@
"""
Access to underlying platforms identifying data.
MicroPython module: https://docs.micropython.org/en/v1.21.0/library/platform.html
CPython module: :mod:`python:platform` https://docs.python.org/3/library/platform.html .
This module tries to retrieve as much platform-identifying data as possible. It
makes this information available via function APIs.
"""
from _typeshed import Incomplete, Incomplete as Incomplete
from typing import Tuple
def platform() -> str:
"""
Returns a string identifying the underlying platform. This string is composed
of several substrings in the following order, delimited by dashes (``-``):
- the name of the platform system (e.g. Unix, Windows or MicroPython)
- the MicroPython version
- the architecture of the platform
- the version of the underlying platform
- the concatenation of the name of the libc that MicroPython is linked to
and its corresponding version.
For example, this could be
``"MicroPython-1.20.0-xtensa-IDFv4.2.4-with-newlib3.0.0"``.
"""
...
def python_compiler() -> str:
"""
Returns a string identifying the compiler used for compiling MicroPython.
"""
...
def libc_ver() -> Tuple:
"""
Returns a tuple of strings *(lib, version)*, where *lib* is the name of the
libc that MicroPython is linked to, and *version* the corresponding version
of this libc.
"""
...
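The three functions above can be exercised directly at the REPL; the exact strings vary by port and build:

import platform

print(platform.platform())           # e.g. "MicroPython-1.21.0-..."
print(platform.python_compiler())    # compiler used to build the firmware
print(platform.libc_ver())           # (lib, version) tuple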

84
.vscode/Pico-W-Stub/random.pyi vendored Normal file
View File

@@ -0,0 +1,84 @@
"""
Random numbers.
MicroPython module: https://docs.micropython.org/en/v1.21.0/library/random.html
This module implements a pseudo-random number generator (PRNG).
CPython module: :mod:`python:random` https://docs.python.org/3/library/random.html . .
.. note::
The following notation is used for intervals:
- () are open interval brackets and do not include their endpoints.
For example, (0, 1) means greater than 0 and less than 1.
In set notation: (0, 1) = {x | 0 < x < 1}.
- [] are closed interval brackets which include all their limit points.
For example, [0, 1] means greater than or equal to 0 and less than
or equal to 1.
In set notation: [0, 1] = {x | 0 <= x <= 1}.
.. note::
The :func:`randrange`, :func:`randint` and :func:`choice` functions are only
available if the ``MICROPY_PY_RANDOM_EXTRA_FUNCS`` configuration option is
enabled.
"""
from _typeshed import Incomplete, Incomplete as Incomplete
from typing import Any, Optional
def randrange(start, stop, step: Optional[Any] = None) -> int:
"""
The first form returns a random integer from the range [0, *stop*).
The second form returns a random integer from the range [*start*, *stop*).
The third form returns a random integer from the range [*start*, *stop*) in
steps of *step*. For instance, calling ``randrange(1, 10, 2)`` will
return odd numbers between 1 and 9 inclusive.
"""
...
def random() -> float:
"""
Return a random floating point number in the range [0.0, 1.0).
"""
...
def seed(n=None, /) -> None:
"""
Initialise the random number generator module with the seed *n* which should
be an integer. When no argument (or ``None``) is passed in it will (if
supported by the port) initialise the PRNG with a true random number
(usually a hardware generated random number).
The ``None`` case only works if ``MICROPY_PY_RANDOM_SEED_INIT_FUNC`` is
enabled by the port, otherwise it raises ``ValueError``.
"""
...
def uniform(a, b) -> float:
"""
Return a random floating point number N such that *a* <= N <= *b* for *a* <= *b*,
and *b* <= N <= *a* for *b* < *a*.
"""
...
def choice(sequence) -> Incomplete:
"""
Chooses and returns one item at random from *sequence* (tuple, list or
any object that supports the subscript operation).
"""
...
def randint(a, b) -> int:
"""
Return a random integer in the range [*a*, *b*].
"""
...
def getrandbits(n) -> int:
"""
Return an integer with *n* random bits (0 <= n <= 32).
"""
...
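A short tour of the functions above; note that randint(), randrange() and choice() are only present when the port enables MICROPY_PY_RANDOM_EXTRA_FUNCS, as the module docstring says:

import random

random.seed(1234)                    # deterministic sequence for the example
print(random.getrandbits(8))         # integer with 8 random bits (0..255)
print(random.randint(1, 6))          # dice roll, both ends inclusive
print(random.uniform(0.0, 10.0))     # float between the two bounds
print(random.choice(["red", "green", "blue"]))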

16
.vscode/Pico-W-Stub/requests.pyi vendored Normal file
View File

@@ -0,0 +1,16 @@
from _typeshed import Incomplete as Incomplete
def request(*args, **kwargs) -> Incomplete: ...
def head(*args, **kwargs) -> Incomplete: ...
def post(*args, **kwargs) -> Incomplete: ...
def patch(*args, **kwargs) -> Incomplete: ...
def delete(*args, **kwargs) -> Incomplete: ...
def put(*args, **kwargs) -> Incomplete: ...
def get(*args, **kwargs) -> Incomplete: ...
class Response:
def json(self, *args, **kwargs) -> Incomplete: ...
def close(self, *args, **kwargs) -> Incomplete: ...
content: Incomplete
text: Incomplete
def __init__(self, *argv, **kwargs) -> None: ...

View File

@@ -0,0 +1,31 @@
from _typeshed import Incomplete
class Response:
raw: Incomplete
encoding: str
_cached: Incomplete
def __init__(self, f) -> None: ...
def close(self) -> None: ...
@property
def content(self): ...
@property
def text(self): ...
def json(self): ...
def request(
method,
url,
data: Incomplete | None = ...,
json: Incomplete | None = ...,
headers=...,
stream: Incomplete | None = ...,
auth: Incomplete | None = ...,
timeout: Incomplete | None = ...,
parse_headers: bool = ...,
): ...
def head(url, **kw): ...
def get(url, **kw): ...
def post(url, **kw): ...
def put(url, **kw): ...
def patch(url, **kw): ...
def delete(url, **kw): ...
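Both request stubs above describe the same small HTTP client. A hedged sketch of a GET (the URL is the MicroPython test page, and the board must already be online):

import requests

r = requests.get("http://micropython.org/ks/test.html")
print(r.text[:80])                   # first part of the response body
r.close()                            # always close to free the socket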

62
.vscode/Pico-W-Stub/rp2.pyi vendored Normal file
View File

@@ -0,0 +1,62 @@
from _rp2 import *
from _typeshed import Incomplete
_PROG_DATA: Incomplete
_PROG_OFFSET_PIO0: Incomplete
_PROG_OFFSET_PIO1: Incomplete
_PROG_EXECCTRL: Incomplete
_PROG_SHIFTCTRL: Incomplete
_PROG_OUT_PINS: Incomplete
_PROG_SET_PINS: Incomplete
_PROG_SIDESET_PINS: Incomplete
_PROG_MAX_FIELDS: Incomplete
class PIOASMError(Exception): ...
class PIOASMEmit:
labels: Incomplete
prog: Incomplete
wrap_used: bool
sideset_count: int
def __init__(
self,
*,
out_init: Incomplete | None = ...,
set_init: Incomplete | None = ...,
sideset_init: Incomplete | None = ...,
in_shiftdir: int = ...,
out_shiftdir: int = ...,
autopush: bool = ...,
autopull: bool = ...,
push_thresh: int = ...,
pull_thresh: int = ...,
fifo_join: int = ...,
) -> None: ...
delay_max: int
sideset_opt: Incomplete
pass_: Incomplete
num_instr: int
num_sideset: int
def start_pass(self, pass_) -> None: ...
def __getitem__(self, key): ...
def delay(self, delay): ...
def side(self, value): ...
def wrap_target(self) -> None: ...
def wrap(self) -> None: ...
def label(self, label) -> None: ...
def word(self, instr, label: Incomplete | None = ...): ...
def nop(self): ...
def jmp(self, cond, label: Incomplete | None = ...): ...
def wait(self, polarity, src, index): ...
def in_(self, src, data): ...
def out(self, dest, data): ...
def push(self, value: int = ..., value2: int = ...): ...
def pull(self, value: int = ..., value2: int = ...): ...
def mov(self, dest, src): ...
def irq(self, mod, index: Incomplete | None = ...): ...
def set(self, dest, data): ...
_pio_funcs: Incomplete
def asm_pio(**kw): ...
def asm_pio_encode(instr, sideset_count, sideset_opt: bool = ...): ...
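asm_pio() above is a decorator that assembles a PIO program. A sketch of the classic LED-blink program, closely following the MicroPython rp2 examples (the pin and frequency are illustrative):

import rp2
from machine import Pin

@rp2.asm_pio(set_init=rp2.PIO.OUT_LOW)
def blink():
    wrap_target()
    set(pins, 1) [31]    # drive the pin high, then delay 31 cycles
    nop()        [31]
    set(pins, 0) [31]    # drive the pin low, then delay 31 cycles
    nop()        [31]
    wrap()

# State machine 0, 2 kHz clock, "set" mapped to the on-board LED pin.
sm = rp2.StateMachine(0, blink, freq=2000, set_base=Pin("LED"))
sm.active(1)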

103
.vscode/Pico-W-Stub/select.pyi vendored Normal file
View File

@@ -0,0 +1,103 @@
"""
Wait for events on a set of streams.
MicroPython module: https://docs.micropython.org/en/v1.21.0/library/select.html
CPython module: :mod:`python:select` https://docs.python.org/3/library/select.html .
This module provides functions to efficiently wait for events on multiple
`streams <stream>` (select streams which are ready for operations).
"""
from _typeshed import Incomplete, Incomplete as Incomplete
from typing import Any, Iterator, List, Optional, Tuple
POLLOUT: int
POLLIN: int
POLLHUP: int
POLLERR: int
def select(rlist, wlist, xlist, timeout: Optional[Any] = None) -> Tuple[List, List, List]:
"""
Wait for activity on a set of objects.
This function is provided by some MicroPython ports for compatibility
and is not efficient. Usage of :class:`Poll` is recommended instead.
"""
...
class poll:
"""
Create an instance of the Poll class.
"""
def __init__(self) -> None: ...
def register(self, obj, eventmask: Optional[Any] = None) -> None:
"""
Register `stream` *obj* for polling. *eventmask* is logical OR of:
* ``select.POLLIN`` - data available for reading
* ``select.POLLOUT`` - more data can be written
Note that flags like ``select.POLLHUP`` and ``select.POLLERR`` are
*not* valid as input eventmask (these are unsolicited events which
will be returned from `poll()` regardless of whether they are asked
for). These semantics follow POSIX.
*eventmask* defaults to ``select.POLLIN | select.POLLOUT``.
It is OK to call this function multiple times for the same *obj*.
Successive calls will update *obj*'s eventmask to the value of
*eventmask* (i.e. will behave as `modify()`).
"""
...
def unregister(self, obj) -> Incomplete:
"""
Unregister *obj* from polling.
"""
...
def modify(self, obj, eventmask) -> None:
"""
Modify the *eventmask* for *obj*. If *obj* is not registered, `OSError`
is raised with error of ENOENT.
"""
...
def poll(self, timeout=-1, /) -> List:
"""
Wait for at least one of the registered objects to become ready or have an
exceptional condition, with optional timeout in milliseconds (if *timeout*
arg is not specified or -1, there is no timeout).
Returns a list of (``obj``, ``event``, ...) tuples. There may be other elements in
the tuple, depending on the platform and version, so don't assume that its size is 2.
The ``event`` element specifies which events happened with a stream and
is a combination of ``select.POLL*`` constants described above. Note that
flags ``select.POLLHUP`` and ``select.POLLERR`` can be returned at any time
(even if they were not asked for), and must be acted on accordingly (the
corresponding stream should be unregistered from poll and likely closed), because
otherwise all further invocations of `poll()` may return immediately with
these flags set for this stream again.
In case of timeout, an empty list is returned.
Difference to CPython
Tuples returned may contain more than 2 elements as described above.
"""
...
def ipoll(self, timeout=-1, flags=0, /) -> Iterator[Tuple]:
"""
Like :meth:`poll.poll`, but instead returns an iterator which yields a
`callee-owned tuple`. This function provides an efficient, allocation-free
way to poll on streams.
If *flags* is 1, one-shot behaviour for events is employed: streams for
which events happened will have their event masks automatically reset
(equivalent to ``poll.modify(obj, 0)``), so new events for such a stream
won't be processed until a new mask is set with `poll.modify()`. This
behaviour is useful for asynchronous I/O schedulers.
Difference to CPython
This function is a MicroPython extension.
"""
...
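The poll class above is the portable way to wait for socket readiness, as the settimeout() notes in socket.pyi also recommend. A minimal sketch, with no error handling:

import select
import socket

s = socket.socket()
s.connect(socket.getaddrinfo("micropython.org", 80)[0][-1])
s.send(b"GET / HTTP/1.1\r\nHost: micropython.org\r\n\r\n")

poller = select.poll()
poller.register(s, select.POLLIN)

events = poller.poll(5000)                    # wait up to 5 s for the response
if not events:
    print("timed out")
else:
    obj, event = events[0][0], events[0][1]   # tuples may carry extra elements
    if event & select.POLLIN:
        print(obj.recv(256))
s.close()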

271
.vscode/Pico-W-Stub/socket.pyi vendored Normal file
View File

@@ -0,0 +1,271 @@
"""
Socket module.
MicroPython module: https://docs.micropython.org/en/v1.21.0/library/socket.html
CPython module: :mod:`python:socket` https://docs.python.org/3/library/socket.html .
This module provides access to the BSD socket interface.
Difference to CPython
For efficiency and consistency, socket objects in MicroPython implement a `stream`
(file-like) interface directly. In CPython, you need to convert a socket to
a file-like object using `makefile()` method. This method is still supported
by MicroPython (but is a no-op), so where compatibility with CPython matters,
be sure to use it.
"""
from _typeshed import Incomplete, Incomplete as Incomplete
from stdlib.socket import *
from typing import Any, IO, Optional, Tuple
SOCK_STREAM: int
SOCK_RAW: int
SOCK_DGRAM: int
SOL_SOCKET: int
SO_BROADCAST: int
SO_REUSEADDR: int
AF_INET6: int
AF_INET: int
IP_DROP_MEMBERSHIP: int
IPPROTO_IP: int
IP_ADD_MEMBERSHIP: int
def reset(*args, **kwargs) -> Incomplete: ...
def print_pcbs(*args, **kwargs) -> Incomplete: ...
def getaddrinfo(host, port, af=0, type=0, proto=0, flags=0, /) -> Incomplete:
"""
Translate the host/port argument into a sequence of 5-tuples that contain all the
necessary arguments for creating a socket connected to that service. Arguments
*af*, *type*, and *proto* (which have the same meaning as for the `socket()` function)
can be used to filter which kind of addresses are returned. If a parameter is not
specified or zero, all combinations of addresses can be returned (requiring
filtering on the user side).
The resulting list of 5-tuples has the following structure::
(family, type, proto, canonname, sockaddr)
The following example shows how to connect to a given url::
s = socket.socket()
# This assumes that if "type" is not specified, an address for
# SOCK_STREAM will be returned, which may be not true
s.connect(socket.getaddrinfo('www.micropython.org', 80)[0][-1])
Recommended use of filtering params::
s = socket.socket()
# Guaranteed to return an address which can be connect'ed to for
# stream operation.
s.connect(socket.getaddrinfo('www.micropython.org', 80, 0, SOCK_STREAM)[0][-1])
Difference to CPython
CPython raises a ``socket.gaierror`` exception (`OSError` subclass) in case
of error in this function. MicroPython doesn't have ``socket.gaierror``
and raises OSError directly. Note that error numbers of `getaddrinfo()`
form a separate namespace and may not match error numbers from
the :mod:`errno` module. To distinguish `getaddrinfo()` errors, they are
represented by negative numbers, whereas standard system errors are
positive numbers (error numbers are accessible using ``e.args[0]`` property
from an exception object). The use of negative values is a provisional
detail which may change in the future.
"""
...
def callback(*args, **kwargs) -> Incomplete: ...
class socket:
"""
Create a new socket using the given address family, socket type and
protocol number. Note that specifying *proto* in most cases is not
required (and not recommended, as some MicroPython ports may omit
``IPPROTO_*`` constants). Instead, *type* argument will select needed
protocol automatically::
# Create STREAM TCP socket
socket(AF_INET, SOCK_STREAM)
# Create DGRAM UDP socket
socket(AF_INET, SOCK_DGRAM)
"""
def recvfrom(self, bufsize) -> Tuple:
"""
Receive data from the socket. The return value is a pair *(bytes, address)* where *bytes* is a
bytes object representing the data received and *address* is the address of the socket sending
the data.
"""
...
def recv(self, bufsize) -> bytes:
"""
Receive data from the socket. The return value is a bytes object representing the data
received. The maximum amount of data to be received at once is specified by bufsize.
"""
...
def makefile(self, mode="rb", buffering=0, /) -> IO:
"""
Return a file object associated with the socket. The exact returned type depends on the arguments
given to makefile(). The support is limited to binary modes only ('rb', 'wb', and 'rwb').
CPython's arguments: *encoding*, *errors* and *newline* are not supported.
Difference to CPython
As MicroPython doesn't support buffered streams, the value of the *buffering*
parameter is ignored and treated as if it were 0 (unbuffered).
Difference to CPython
Closing the file object returned by makefile() WILL close the
original socket as well.
"""
...
def listen(self, backlog: Optional[Any] = None) -> None:
"""
Enable a server to accept connections. If *backlog* is specified, it must be at least 0
(if it's lower, it will be set to 0); and specifies the number of unaccepted connections
that the system will allow before refusing new connections. If not specified, a
reasonable default value is chosen.
"""
...
def settimeout(self, value) -> Incomplete:
"""
**Note**: Not every port supports this method, see below.
Set a timeout on blocking socket operations. The value argument can be a nonnegative floating
point number expressing seconds, or None. If a non-zero value is given, subsequent socket operations
will raise an `OSError` exception if the timeout period value has elapsed before the operation has
completed. If zero is given, the socket is put in non-blocking mode. If None is given, the socket
is put in blocking mode.
Not every :term:`MicroPython port` supports this method. A more portable and
generic solution is to use a `select.poll` object. This allows waiting on
multiple objects at the same time (and not just on sockets, but on generic
`stream` objects which support polling). Example::
# Instead of:
s.settimeout(1.0) # time in seconds
s.read(10) # may timeout
# Use:
poller = select.poll()
poller.register(s, select.POLLIN)
res = poller.poll(1000) # time in milliseconds
if not res:
# s is still not ready for input, i.e. operation timed out
Difference to CPython
CPython raises a ``socket.timeout`` exception in case of timeout,
which is an `OSError` subclass. MicroPython raises an OSError directly
instead. If you use ``except OSError:`` to catch the exception,
your code will work both in MicroPython and CPython.
"""
...
def sendall(self, bytes) -> int:
"""
Send all data to the socket. The socket must be connected to a remote socket.
Unlike `send()`, this method will try to send all of the data, by sending it
chunk by chunk consecutively.
The behaviour of this method on non-blocking sockets is undefined. Due to this,
on MicroPython, it's recommended to use the `write()` method instead, which
has the same "no short writes" policy for blocking sockets, and will return
the number of bytes sent on non-blocking sockets.
"""
...
def setsockopt(self, level, optname, value) -> None:
"""
Set the value of the given socket option. The needed symbolic constants are defined in the
socket module (SO_* etc.). The *value* can be an integer or a bytes-like object representing
a buffer.
"""
...
def setblocking(self, flag) -> Incomplete:
"""
Set blocking or non-blocking mode of the socket: if flag is false, the socket is set to non-blocking,
else to blocking mode.
This method is a shorthand for certain `settimeout()` calls:
* ``sock.setblocking(True)`` is equivalent to ``sock.settimeout(None)``
* ``sock.setblocking(False)`` is equivalent to ``sock.settimeout(0)``
"""
...
def sendto(self, bytes, address) -> None:
"""
Send data to the socket. The socket should not be connected to a remote socket, since the
destination socket is specified by *address*.
"""
...
def readline(self) -> Incomplete:
"""
Read a line, ending in a newline character.
Return value: the line read.
"""
...
def readinto(self, buf, nbytes: Optional[Any] = None) -> int:
"""
Read bytes into the *buf*. If *nbytes* is specified then read at most
that many bytes. Otherwise, read at most *len(buf)* bytes. Just as
`read()`, this method follows "no short reads" policy.
Return value: number of bytes read and stored into *buf*.
"""
...
def read(self, size: Optional[Any] = None) -> bytes:
"""
Read up to size bytes from the socket. Return a bytes object. If *size* is not given, it
reads all data available from the socket until EOF; as such the method will not return until
the socket is closed. This function tries to read as much data as
requested (no "short reads"). This may be not possible with
non-blocking socket though, and then less data will be returned.
"""
...
def close(self) -> Incomplete:
"""
Mark the socket closed and release all resources. Once that happens, all future operations
on the socket object will fail. The remote end will receive EOF indication if
supported by protocol.
Sockets are automatically closed when they are garbage-collected, but it is recommended
to `close()` them explicitly as soon you finished working with them.
"""
...
def connect(self, address) -> None:
"""
Connect to a remote socket at *address*.
"""
...
def send(self, bytes) -> int:
"""
Send data to the socket. The socket must be connected to a remote socket.
Returns number of bytes sent, which may be smaller than the length of data
("short write").
"""
...
def bind(self, address) -> Incomplete:
"""
Bind the socket to *address*. The socket must not already be bound.
"""
...
def accept(self) -> Tuple:
"""
Accept a connection. The socket must be bound to an address and listening for connections.
The return value is a pair (conn, address) where conn is a new socket object usable to send
and receive data on the connection, and address is the address bound to the socket on the
other end of the connection.
"""
...
def write(self, buf) -> int:
"""
Write the buffer of bytes to the socket. This function will try to
write all data to a socket (no "short writes"). This may be not possible
with a non-blocking socket though, and returned value will be less than
the length of *buf*.
Return value: number of bytes written.
"""
...
def __init__(self, af=AF_INET, type=SOCK_STREAM, proto=IPPROTO_TCP, /) -> None: ...
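The module docstring's getaddrinfo() example can be rounded out into a complete plain-HTTP fetch; a sketch only, with no error handling:

import socket

addr = socket.getaddrinfo("micropython.org", 80, 0, socket.SOCK_STREAM)[0][-1]
s = socket.socket(socket.AF_INET, socket.SOCK_STREAM)
s.connect(addr)
s.send(b"GET / HTTP/1.1\r\nHost: micropython.org\r\nConnection: close\r\n\r\n")
while True:
    chunk = s.recv(256)
    if not chunk:
        break
    print(chunk)
s.close()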

74
.vscode/Pico-W-Stub/ssl.pyi vendored Normal file
View File

@@ -0,0 +1,74 @@
"""
TLS/SSL wrapper for socket objects.
MicroPython module: https://docs.micropython.org/en/v1.21.0/library/ssl.html
CPython module: :mod:`python:ssl` https://docs.python.org/3/library/ssl.html .
This module provides access to Transport Layer Security (previously and
widely known as “Secure Sockets Layer”) encryption and peer authentication
facilities for network sockets, both client-side and server-side.
"""
from _typeshed import Incomplete, Incomplete as Incomplete
from stdlib.ssl import *
from typing import IO
CERT_REQUIRED: int
PROTOCOL_TLS_CLIENT: int
PROTOCOL_TLS_SERVER: int
CERT_OPTIONAL: int
CERT_NONE: int
def wrap_socket(
sock, server_side=False, keyfile=None, certfile=None, cert_reqs=None, cadata=None, server_hostname=None, do_handshake=True
) -> IO:
"""
Wrap the given *sock* and return a new wrapped-socket object. The implementation
of this function is to first create an `SSLContext` and then call the `SSLContext.wrap_socket`
method on that context object. The arguments *sock*, *server_side* and *server_hostname* are
passed through unchanged to the method call. The argument *do_handshake* is passed through as
*do_handshake_on_connect*. The remaining arguments have the following behaviour:
- *cert_reqs* determines whether the peer (server or client) must present a valid certificate.
Note that for mbedtls based ports, ``ssl.CERT_NONE`` and ``ssl.CERT_OPTIONAL`` will not
validate any certificate, only ``ssl.CERT_REQUIRED`` will.
- *cadata* is a bytes object containing the CA certificate chain (in DER format) that will
validate the peer's certificate. Currently only a single DER-encoded certificate is supported.
Depending on the underlying module implementation in a particular
:term:`MicroPython port`, some or all keyword arguments above may be not supported.
"""
...
class SSLContext:
"""
Create a new SSLContext instance. The *protocol* argument must be one of the ``PROTOCOL_*``
constants.
"""
def wrap_socket(self, sock, *, server_side=False, do_handshake_on_connect=True, server_hostname=None) -> Incomplete:
"""
Takes a `stream` *sock* (usually socket.socket instance of ``SOCK_STREAM`` type),
and returns an instance of ssl.SSLSocket, wrapping the underlying stream.
The returned object has the usual `stream` interface methods like
``read()``, ``write()``, etc.
- *server_side* selects whether the wrapped socket is on the server or client side.
A server-side SSL socket should be created from a normal socket returned from
:meth:`~socket.socket.accept()` on a non-SSL listening server socket.
- *do_handshake_on_connect* determines whether the handshake is done as part of the ``wrap_socket``
or whether it is deferred to be done as part of the initial reads or writes.
For blocking sockets, doing the handshake immediately is standard. For non-blocking
sockets (i.e. when the *sock* passed into ``wrap_socket`` is in non-blocking mode)
the handshake should generally be deferred because otherwise ``wrap_socket`` blocks
until it completes. Note that in AXTLS the handshake can be deferred until the first
read or write but it then blocks until completion.
- *server_hostname* is for use as a client, and sets the hostname to check against the received
server certificate. It also sets the name for Server Name Indication (SNI), allowing the server
to present the proper certificate.
"""
...
def __init__(self, protocol, /) -> None: ...
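To extend the plain-socket example to TLS, the module-level wrap_socket() above is enough for a sketch; note that with the default cert_reqs no certificate validation is performed, so pass cadata/cert_reqs for anything security-sensitive:

import socket
import ssl

addr = socket.getaddrinfo("micropython.org", 443)[0][-1]
s = socket.socket()
s.connect(addr)

tls = ssl.wrap_socket(s, server_hostname="micropython.org")
tls.write(b"GET / HTTP/1.1\r\nHost: micropython.org\r\nConnection: close\r\n\r\n")
print(tls.read(256))
tls.close()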

View File

@@ -0,0 +1,36 @@
from typing_extensions import TypeAlias
_VersionInfo: TypeAlias = tuple[int, int, int, str, int]
class _Feature:
def __init__(self, optionalRelease: _VersionInfo, mandatoryRelease: _VersionInfo | None, compiler_flag: int) -> None: ...
def getOptionalRelease(self) -> _VersionInfo: ...
def getMandatoryRelease(self) -> _VersionInfo | None: ...
compiler_flag: int
absolute_import: _Feature
division: _Feature
generators: _Feature
nested_scopes: _Feature
print_function: _Feature
unicode_literals: _Feature
with_statement: _Feature
barry_as_FLUFL: _Feature
generator_stop: _Feature
annotations: _Feature
all_feature_names: list[str] # undocumented
__all__ = [
"all_feature_names",
"absolute_import",
"division",
"generators",
"nested_scopes",
"print_function",
"unicode_literals",
"with_statement",
"barry_as_FLUFL",
"generator_stop",
"annotations",
]

573
.vscode/Pico-W-Stub/stdlib/_ast.pyi vendored Normal file
View File

@@ -0,0 +1,573 @@
import sys
from typing import Any, ClassVar
from typing_extensions import Literal, TypeAlias
PyCF_ONLY_AST: Literal[1024]
if sys.version_info >= (3, 8):
PyCF_TYPE_COMMENTS: Literal[4096]
PyCF_ALLOW_TOP_LEVEL_AWAIT: Literal[8192]
_Identifier: TypeAlias = str
class AST:
if sys.version_info >= (3, 10):
__match_args__ = ()
_attributes: ClassVar[tuple[str, ...]]
_fields: ClassVar[tuple[str, ...]]
def __init__(self, *args: Any, **kwargs: Any) -> None: ...
# TODO: Not all nodes have all of the following attributes
lineno: int
col_offset: int
if sys.version_info >= (3, 8):
end_lineno: int | None
end_col_offset: int | None
type_comment: str | None
class mod(AST): ...
if sys.version_info >= (3, 8):
class type_ignore(AST): ...
class TypeIgnore(type_ignore):
if sys.version_info >= (3, 10):
__match_args__ = ("lineno", "tag")
tag: str
class FunctionType(mod):
if sys.version_info >= (3, 10):
__match_args__ = ("argtypes", "returns")
argtypes: list[expr]
returns: expr
class Module(mod):
if sys.version_info >= (3, 10):
__match_args__ = ("body", "type_ignores")
body: list[stmt]
if sys.version_info >= (3, 8):
type_ignores: list[TypeIgnore]
class Interactive(mod):
if sys.version_info >= (3, 10):
__match_args__ = ("body",)
body: list[stmt]
class Expression(mod):
if sys.version_info >= (3, 10):
__match_args__ = ("body",)
body: expr
class stmt(AST): ...
class FunctionDef(stmt):
if sys.version_info >= (3, 10):
__match_args__ = ("name", "args", "body", "decorator_list", "returns", "type_comment")
name: _Identifier
args: arguments
body: list[stmt]
decorator_list: list[expr]
returns: expr | None
class AsyncFunctionDef(stmt):
if sys.version_info >= (3, 10):
__match_args__ = ("name", "args", "body", "decorator_list", "returns", "type_comment")
name: _Identifier
args: arguments
body: list[stmt]
decorator_list: list[expr]
returns: expr | None
class ClassDef(stmt):
if sys.version_info >= (3, 10):
__match_args__ = ("name", "bases", "keywords", "body", "decorator_list")
name: _Identifier
bases: list[expr]
keywords: list[keyword]
body: list[stmt]
decorator_list: list[expr]
class Return(stmt):
if sys.version_info >= (3, 10):
__match_args__ = ("value",)
value: expr | None
class Delete(stmt):
if sys.version_info >= (3, 10):
__match_args__ = ("targets",)
targets: list[expr]
class Assign(stmt):
if sys.version_info >= (3, 10):
__match_args__ = ("targets", "value", "type_comment")
targets: list[expr]
value: expr
class AugAssign(stmt):
if sys.version_info >= (3, 10):
__match_args__ = ("target", "op", "value")
target: expr
op: operator
value: expr
class AnnAssign(stmt):
if sys.version_info >= (3, 10):
__match_args__ = ("target", "annotation", "value", "simple")
target: expr
annotation: expr
value: expr | None
simple: int
class For(stmt):
if sys.version_info >= (3, 10):
__match_args__ = ("target", "iter", "body", "orelse", "type_comment")
target: expr
iter: expr
body: list[stmt]
orelse: list[stmt]
class AsyncFor(stmt):
if sys.version_info >= (3, 10):
__match_args__ = ("target", "iter", "body", "orelse", "type_comment")
target: expr
iter: expr
body: list[stmt]
orelse: list[stmt]
class While(stmt):
if sys.version_info >= (3, 10):
__match_args__ = ("test", "body", "orelse")
test: expr
body: list[stmt]
orelse: list[stmt]
class If(stmt):
if sys.version_info >= (3, 10):
__match_args__ = ("test", "body", "orelse")
test: expr
body: list[stmt]
orelse: list[stmt]
class With(stmt):
if sys.version_info >= (3, 10):
__match_args__ = ("items", "body", "type_comment")
items: list[withitem]
body: list[stmt]
class AsyncWith(stmt):
if sys.version_info >= (3, 10):
__match_args__ = ("items", "body", "type_comment")
items: list[withitem]
body: list[stmt]
class Raise(stmt):
if sys.version_info >= (3, 10):
__match_args__ = ("exc", "cause")
exc: expr | None
cause: expr | None
class Try(stmt):
if sys.version_info >= (3, 10):
__match_args__ = ("body", "handlers", "orelse", "finalbody")
body: list[stmt]
handlers: list[ExceptHandler]
orelse: list[stmt]
finalbody: list[stmt]
if sys.version_info >= (3, 11):
class TryStar(stmt):
__match_args__ = ("body", "handlers", "orelse", "finalbody")
body: list[stmt]
handlers: list[ExceptHandler]
orelse: list[stmt]
finalbody: list[stmt]
class Assert(stmt):
if sys.version_info >= (3, 10):
__match_args__ = ("test", "msg")
test: expr
msg: expr | None
class Import(stmt):
if sys.version_info >= (3, 10):
__match_args__ = ("names",)
names: list[alias]
class ImportFrom(stmt):
if sys.version_info >= (3, 10):
__match_args__ = ("module", "names", "level")
module: str | None
names: list[alias]
level: int
class Global(stmt):
if sys.version_info >= (3, 10):
__match_args__ = ("names",)
names: list[_Identifier]
class Nonlocal(stmt):
if sys.version_info >= (3, 10):
__match_args__ = ("names",)
names: list[_Identifier]
class Expr(stmt):
if sys.version_info >= (3, 10):
__match_args__ = ("value",)
value: expr
class Pass(stmt): ...
class Break(stmt): ...
class Continue(stmt): ...
class expr(AST): ...
class BoolOp(expr):
if sys.version_info >= (3, 10):
__match_args__ = ("op", "values")
op: boolop
values: list[expr]
class BinOp(expr):
if sys.version_info >= (3, 10):
__match_args__ = ("left", "op", "right")
left: expr
op: operator
right: expr
class UnaryOp(expr):
if sys.version_info >= (3, 10):
__match_args__ = ("op", "operand")
op: unaryop
operand: expr
class Lambda(expr):
if sys.version_info >= (3, 10):
__match_args__ = ("args", "body")
args: arguments
body: expr
class IfExp(expr):
if sys.version_info >= (3, 10):
__match_args__ = ("test", "body", "orelse")
test: expr
body: expr
orelse: expr
class Dict(expr):
if sys.version_info >= (3, 10):
__match_args__ = ("keys", "values")
keys: list[expr | None]
values: list[expr]
class Set(expr):
if sys.version_info >= (3, 10):
__match_args__ = ("elts",)
elts: list[expr]
class ListComp(expr):
if sys.version_info >= (3, 10):
__match_args__ = ("elt", "generators")
elt: expr
generators: list[comprehension]
class SetComp(expr):
if sys.version_info >= (3, 10):
__match_args__ = ("elt", "generators")
elt: expr
generators: list[comprehension]
class DictComp(expr):
if sys.version_info >= (3, 10):
__match_args__ = ("key", "value", "generators")
key: expr
value: expr
generators: list[comprehension]
class GeneratorExp(expr):
if sys.version_info >= (3, 10):
__match_args__ = ("elt", "generators")
elt: expr
generators: list[comprehension]
class Await(expr):
if sys.version_info >= (3, 10):
__match_args__ = ("value",)
value: expr
class Yield(expr):
if sys.version_info >= (3, 10):
__match_args__ = ("value",)
value: expr | None
class YieldFrom(expr):
if sys.version_info >= (3, 10):
__match_args__ = ("value",)
value: expr
class Compare(expr):
if sys.version_info >= (3, 10):
__match_args__ = ("left", "ops", "comparators")
left: expr
ops: list[cmpop]
comparators: list[expr]
class Call(expr):
if sys.version_info >= (3, 10):
__match_args__ = ("func", "args", "keywords")
func: expr
args: list[expr]
keywords: list[keyword]
class FormattedValue(expr):
if sys.version_info >= (3, 10):
__match_args__ = ("value", "conversion", "format_spec")
value: expr
conversion: int
format_spec: expr | None
class JoinedStr(expr):
if sys.version_info >= (3, 10):
__match_args__ = ("values",)
values: list[expr]
if sys.version_info < (3, 8):
class Num(expr): # Deprecated in 3.8; use Constant
n: complex
class Str(expr): # Deprecated in 3.8; use Constant
s: str
class Bytes(expr): # Deprecated in 3.8; use Constant
s: bytes
class NameConstant(expr): # Deprecated in 3.8; use Constant
value: Any
class Ellipsis(expr): ... # Deprecated in 3.8; use Constant
class Constant(expr):
if sys.version_info >= (3, 10):
__match_args__ = ("value", "kind")
value: Any # None, str, bytes, bool, int, float, complex, Ellipsis
kind: str | None
# Aliases for value, for backwards compatibility
s: Any
n: complex
if sys.version_info >= (3, 8):
class NamedExpr(expr):
if sys.version_info >= (3, 10):
__match_args__ = ("target", "value")
target: expr
value: expr
class Attribute(expr):
if sys.version_info >= (3, 10):
__match_args__ = ("value", "attr", "ctx")
value: expr
attr: _Identifier
ctx: expr_context
if sys.version_info >= (3, 9):
_Slice: TypeAlias = expr
else:
class slice(AST): ...
_Slice: TypeAlias = slice
class Slice(_Slice):
if sys.version_info >= (3, 10):
__match_args__ = ("lower", "upper", "step")
lower: expr | None
upper: expr | None
step: expr | None
if sys.version_info < (3, 9):
class ExtSlice(slice):
dims: list[slice]
class Index(slice):
value: expr
class Subscript(expr):
if sys.version_info >= (3, 10):
__match_args__ = ("value", "slice", "ctx")
value: expr
slice: _Slice
ctx: expr_context
class Starred(expr):
if sys.version_info >= (3, 10):
__match_args__ = ("value", "ctx")
value: expr
ctx: expr_context
class Name(expr):
if sys.version_info >= (3, 10):
__match_args__ = ("id", "ctx")
id: _Identifier
ctx: expr_context
class List(expr):
if sys.version_info >= (3, 10):
__match_args__ = ("elts", "ctx")
elts: list[expr]
ctx: expr_context
class Tuple(expr):
if sys.version_info >= (3, 10):
__match_args__ = ("elts", "ctx")
elts: list[expr]
ctx: expr_context
if sys.version_info >= (3, 9):
dims: list[expr]
class expr_context(AST): ...
if sys.version_info < (3, 9):
class AugLoad(expr_context): ...
class AugStore(expr_context): ...
class Param(expr_context): ...
class Suite(mod):
body: list[stmt]
class Del(expr_context): ...
class Load(expr_context): ...
class Store(expr_context): ...
class boolop(AST): ...
class And(boolop): ...
class Or(boolop): ...
class operator(AST): ...
class Add(operator): ...
class BitAnd(operator): ...
class BitOr(operator): ...
class BitXor(operator): ...
class Div(operator): ...
class FloorDiv(operator): ...
class LShift(operator): ...
class Mod(operator): ...
class Mult(operator): ...
class MatMult(operator): ...
class Pow(operator): ...
class RShift(operator): ...
class Sub(operator): ...
class unaryop(AST): ...
class Invert(unaryop): ...
class Not(unaryop): ...
class UAdd(unaryop): ...
class USub(unaryop): ...
class cmpop(AST): ...
class Eq(cmpop): ...
class Gt(cmpop): ...
class GtE(cmpop): ...
class In(cmpop): ...
class Is(cmpop): ...
class IsNot(cmpop): ...
class Lt(cmpop): ...
class LtE(cmpop): ...
class NotEq(cmpop): ...
class NotIn(cmpop): ...
class comprehension(AST):
if sys.version_info >= (3, 10):
__match_args__ = ("target", "iter", "ifs", "is_async")
target: expr
iter: expr
ifs: list[expr]
is_async: int
class excepthandler(AST): ...
class ExceptHandler(excepthandler):
if sys.version_info >= (3, 10):
__match_args__ = ("type", "name", "body")
type: expr | None
name: _Identifier | None
body: list[stmt]
class arguments(AST):
if sys.version_info >= (3, 10):
__match_args__ = ("posonlyargs", "args", "vararg", "kwonlyargs", "kw_defaults", "kwarg", "defaults")
if sys.version_info >= (3, 8):
posonlyargs: list[arg]
args: list[arg]
vararg: arg | None
kwonlyargs: list[arg]
kw_defaults: list[expr | None]
kwarg: arg | None
defaults: list[expr]
class arg(AST):
if sys.version_info >= (3, 10):
__match_args__ = ("arg", "annotation", "type_comment")
arg: _Identifier
annotation: expr | None
class keyword(AST):
if sys.version_info >= (3, 10):
__match_args__ = ("arg", "value")
arg: _Identifier | None
value: expr
class alias(AST):
if sys.version_info >= (3, 10):
__match_args__ = ("name", "asname")
name: _Identifier
asname: _Identifier | None
class withitem(AST):
if sys.version_info >= (3, 10):
__match_args__ = ("context_expr", "optional_vars")
context_expr: expr
optional_vars: expr | None
if sys.version_info >= (3, 10):
class Match(stmt):
__match_args__ = ("subject", "cases")
subject: expr
cases: list[match_case]
class pattern(AST): ...
# Without the alias, Pyright complains variables named pattern are recursively defined
_Pattern: TypeAlias = pattern
class match_case(AST):
__match_args__ = ("pattern", "guard", "body")
pattern: _Pattern
guard: expr | None
body: list[stmt]
class MatchValue(pattern):
__match_args__ = ("value",)
value: expr
class MatchSingleton(pattern):
__match_args__ = ("value",)
value: Literal[True, False, None]
class MatchSequence(pattern):
__match_args__ = ("patterns",)
patterns: list[pattern]
class MatchStar(pattern):
__match_args__ = ("name",)
name: _Identifier | None
class MatchMapping(pattern):
__match_args__ = ("keys", "patterns", "rest")
keys: list[expr]
patterns: list[pattern]
rest: _Identifier | None
class MatchClass(pattern):
__match_args__ = ("cls", "patterns", "kwd_attrs", "kwd_patterns")
cls: expr
patterns: list[pattern]
kwd_attrs: list[_Identifier]
kwd_patterns: list[pattern]
class MatchAs(pattern):
__match_args__ = ("pattern", "name")
pattern: _Pattern | None
name: _Identifier | None
class MatchOr(pattern):
__match_args__ = ("patterns",)
patterns: list[pattern]
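# --- Illustrative usage sketch (editor-added, not part of the vendored stub) ---
# On Python 3.10+, the __match_args__ declared above let AST nodes be
# destructured positionally in match statements; everything below is the
# standard `ast` API and needs 3.10+ to run.
import ast
tree = ast.parse("total = price * 2")
for node in ast.walk(tree):
    match node:
        case ast.Assign(targets, value):
            print("assign:", [ast.dump(t) for t in targets], "=", ast.dump(value))
        case ast.BinOp(left, op, right):
            print("binop:", type(op).__name__)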

136
.vscode/Pico-W-Stub/stdlib/_codecs.pyi vendored Normal file
View File

@@ -0,0 +1,136 @@
import codecs
import sys
from _typeshed import ReadableBuffer
from collections.abc import Callable
from typing import overload
from typing_extensions import Literal, TypeAlias
# This type is not exposed; it is defined in unicodeobject.c
class _EncodingMap:
def size(self) -> int: ...
_CharMap: TypeAlias = dict[int, int] | _EncodingMap
_Handler: TypeAlias = Callable[[UnicodeError], tuple[str | bytes, int]]
_SearchFunction: TypeAlias = Callable[[str], codecs.CodecInfo | None]
def register(__search_function: _SearchFunction) -> None: ...
if sys.version_info >= (3, 10):
def unregister(__search_function: _SearchFunction) -> None: ...
def register_error(__errors: str, __handler: _Handler) -> None: ...
def lookup_error(__name: str) -> _Handler: ...
# The type ignore on `encode` and `decode` is to avoid issues with overlapping overloads, for more details, see #300
# https://docs.python.org/3/library/codecs.html#binary-transforms
_BytesToBytesEncoding: TypeAlias = Literal[
"base64",
"base_64",
"base64_codec",
"bz2",
"bz2_codec",
"hex",
"hex_codec",
"quopri",
"quotedprintable",
"quoted_printable",
"quopri_codec",
"uu",
"uu_codec",
"zip",
"zlib",
"zlib_codec",
]
# https://docs.python.org/3/library/codecs.html#text-transforms
_StrToStrEncoding: TypeAlias = Literal["rot13", "rot_13"]
@overload
def encode(obj: ReadableBuffer, encoding: _BytesToBytesEncoding, errors: str = ...) -> bytes: ...
@overload
def encode(obj: str, encoding: _StrToStrEncoding, errors: str = ...) -> str: ... # type: ignore[misc]
@overload
def encode(obj: str, encoding: str = ..., errors: str = ...) -> bytes: ...
@overload
def decode(obj: ReadableBuffer, encoding: _BytesToBytesEncoding, errors: str = ...) -> bytes: ... # type: ignore[misc]
@overload
def decode(obj: str, encoding: _StrToStrEncoding, errors: str = ...) -> str: ...
# these are documented as text encodings but in practice they also accept str as input
@overload
def decode(
obj: str, encoding: Literal["unicode_escape", "unicode-escape", "raw_unicode_escape", "raw-unicode-escape"], errors: str = ...
) -> str: ...
# hex is officially documented as a bytes to bytes encoding, but it appears to also work with str
@overload
def decode(obj: str, encoding: Literal["hex", "hex_codec"], errors: str = ...) -> bytes: ...
@overload
def decode(obj: ReadableBuffer, encoding: str = ..., errors: str = ...) -> str: ...
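# Illustrative usage sketch (editor-added, not part of the stub): on CPython the
# public codecs.encode()/codecs.decode() are re-exports of these functions, so
# the overloads above describe calls like the ones below.
import codecs
assert codecs.encode(b"hi", "hex") == b"6869"            # bytes-to-bytes transform
assert codecs.decode("6869", "hex") == b"hi"             # str input also accepted, as noted above
assert codecs.encode("abc", "rot13") == "nop"            # str-to-str transform
assert codecs.decode(b"caf\xc3\xa9", "utf-8") == "café"  # ordinary text decoding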
def lookup(__encoding: str) -> codecs.CodecInfo: ...
def charmap_build(__map: str) -> _CharMap: ...
def ascii_decode(__data: ReadableBuffer, __errors: str | None = ...) -> tuple[str, int]: ...
def ascii_encode(__str: str, __errors: str | None = ...) -> tuple[bytes, int]: ...
def charmap_decode(__data: ReadableBuffer, __errors: str | None = ..., __mapping: _CharMap | None = ...) -> tuple[str, int]: ...
def charmap_encode(__str: str, __errors: str | None = ..., __mapping: _CharMap | None = ...) -> tuple[bytes, int]: ...
def escape_decode(__data: str | ReadableBuffer, __errors: str | None = ...) -> tuple[str, int]: ...
def escape_encode(__data: bytes, __errors: str | None = ...) -> tuple[bytes, int]: ...
def latin_1_decode(__data: ReadableBuffer, __errors: str | None = ...) -> tuple[str, int]: ...
def latin_1_encode(__str: str, __errors: str | None = ...) -> tuple[bytes, int]: ...
if sys.version_info >= (3, 9):
def raw_unicode_escape_decode(
__data: str | ReadableBuffer, __errors: str | None = ..., __final: bool = ...
) -> tuple[str, int]: ...
else:
def raw_unicode_escape_decode(__data: str | ReadableBuffer, __errors: str | None = ...) -> tuple[str, int]: ...
def raw_unicode_escape_encode(__str: str, __errors: str | None = ...) -> tuple[bytes, int]: ...
def readbuffer_encode(__data: str | ReadableBuffer, __errors: str | None = ...) -> tuple[bytes, int]: ...
if sys.version_info >= (3, 9):
def unicode_escape_decode(
__data: str | ReadableBuffer, __errors: str | None = ..., __final: bool = ...
) -> tuple[str, int]: ...
else:
def unicode_escape_decode(__data: str | ReadableBuffer, __errors: str | None = ...) -> tuple[str, int]: ...
def unicode_escape_encode(__str: str, __errors: str | None = ...) -> tuple[bytes, int]: ...
if sys.version_info < (3, 8):
def unicode_internal_decode(__obj: str | ReadableBuffer, __errors: str | None = ...) -> tuple[str, int]: ...
def unicode_internal_encode(__obj: str | ReadableBuffer, __errors: str | None = ...) -> tuple[bytes, int]: ...
def utf_16_be_decode(__data: ReadableBuffer, __errors: str | None = ..., __final: int = ...) -> tuple[str, int]: ...
def utf_16_be_encode(__str: str, __errors: str | None = ...) -> tuple[bytes, int]: ...
def utf_16_decode(__data: ReadableBuffer, __errors: str | None = ..., __final: int = ...) -> tuple[str, int]: ...
def utf_16_encode(__str: str, __errors: str | None = ..., __byteorder: int = ...) -> tuple[bytes, int]: ...
def utf_16_ex_decode(
__data: ReadableBuffer, __errors: str | None = ..., __byteorder: int = ..., __final: int = ...
) -> tuple[str, int, int]: ...
def utf_16_le_decode(__data: ReadableBuffer, __errors: str | None = ..., __final: int = ...) -> tuple[str, int]: ...
def utf_16_le_encode(__str: str, __errors: str | None = ...) -> tuple[bytes, int]: ...
def utf_32_be_decode(__data: ReadableBuffer, __errors: str | None = ..., __final: int = ...) -> tuple[str, int]: ...
def utf_32_be_encode(__str: str, __errors: str | None = ...) -> tuple[bytes, int]: ...
def utf_32_decode(__data: ReadableBuffer, __errors: str | None = ..., __final: int = ...) -> tuple[str, int]: ...
def utf_32_encode(__str: str, __errors: str | None = ..., __byteorder: int = ...) -> tuple[bytes, int]: ...
def utf_32_ex_decode(
__data: ReadableBuffer, __errors: str | None = ..., __byteorder: int = ..., __final: int = ...
) -> tuple[str, int, int]: ...
def utf_32_le_decode(__data: ReadableBuffer, __errors: str | None = ..., __final: int = ...) -> tuple[str, int]: ...
def utf_32_le_encode(__str: str, __errors: str | None = ...) -> tuple[bytes, int]: ...
def utf_7_decode(__data: ReadableBuffer, __errors: str | None = ..., __final: int = ...) -> tuple[str, int]: ...
def utf_7_encode(__str: str, __errors: str | None = ...) -> tuple[bytes, int]: ...
def utf_8_decode(__data: ReadableBuffer, __errors: str | None = ..., __final: int = ...) -> tuple[str, int]: ...
def utf_8_encode(__str: str, __errors: str | None = ...) -> tuple[bytes, int]: ...
if sys.platform == "win32":
def mbcs_decode(__data: ReadableBuffer, __errors: str | None = ..., __final: int = ...) -> tuple[str, int]: ...
def mbcs_encode(__str: str, __errors: str | None = ...) -> tuple[bytes, int]: ...
def code_page_decode(
__codepage: int, __data: ReadableBuffer, __errors: str | None = ..., __final: int = ...
) -> tuple[str, int]: ...
def code_page_encode(__code_page: int, __str: str, __errors: str | None = ...) -> tuple[bytes, int]: ...
def oem_decode(__data: ReadableBuffer, __errors: str | None = ..., __final: int = ...) -> tuple[str, int]: ...
def oem_encode(__str: str, __errors: str | None = ...) -> tuple[bytes, int]: ...

View File

@@ -0,0 +1,81 @@
import sys
from types import MappingProxyType
from typing import ( # noqa: Y027,Y038
AbstractSet as Set,
AsyncGenerator as AsyncGenerator,
AsyncIterable as AsyncIterable,
AsyncIterator as AsyncIterator,
Awaitable as Awaitable,
ByteString as ByteString,
Callable as Callable,
Collection as Collection,
Container as Container,
Coroutine as Coroutine,
Generator as Generator,
Generic,
Hashable as Hashable,
ItemsView as ItemsView,
Iterable as Iterable,
Iterator as Iterator,
KeysView as KeysView,
Mapping as Mapping,
MappingView as MappingView,
MutableMapping as MutableMapping,
MutableSequence as MutableSequence,
MutableSet as MutableSet,
Reversible as Reversible,
Sequence as Sequence,
Sized as Sized,
TypeVar,
ValuesView as ValuesView,
)
from typing_extensions import final
__all__ = [
"Awaitable",
"Coroutine",
"AsyncIterable",
"AsyncIterator",
"AsyncGenerator",
"Hashable",
"Iterable",
"Iterator",
"Generator",
"Reversible",
"Sized",
"Container",
"Callable",
"Collection",
"Set",
"MutableSet",
"Mapping",
"MutableMapping",
"MappingView",
"KeysView",
"ItemsView",
"ValuesView",
"Sequence",
"MutableSequence",
"ByteString",
]
_KT_co = TypeVar("_KT_co", covariant=True) # Key type covariant containers.
_VT_co = TypeVar("_VT_co", covariant=True) # Value type covariant containers.
@final
class dict_keys(KeysView[_KT_co], Generic[_KT_co, _VT_co]): # undocumented
if sys.version_info >= (3, 10):
@property
def mapping(self) -> MappingProxyType[_KT_co, _VT_co]: ...
@final
class dict_values(ValuesView[_VT_co], Generic[_KT_co, _VT_co]): # undocumented
if sys.version_info >= (3, 10):
@property
def mapping(self) -> MappingProxyType[_KT_co, _VT_co]: ...
@final
class dict_items(ItemsView[_KT_co, _VT_co], Generic[_KT_co, _VT_co]): # undocumented
if sys.version_info >= (3, 10):
@property
def mapping(self) -> MappingProxyType[_KT_co, _VT_co]: ...
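# Illustrative sketch (editor-added, not part of the stub): the classes above
# describe the concrete dict view objects; `.mapping` exists only on Python 3.10+.
d = {"a": 1, "b": 2}
keys = d.keys()                       # a dict_keys[str, int] instance
assert list(keys) == ["a", "b"]
assert keys.mapping["b"] == 2         # read-only MappingProxyType view (3.10+)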

281
.vscode/Pico-W-Stub/stdlib/_decimal.pyi vendored Normal file
View File

@@ -0,0 +1,281 @@
import numbers
import sys
from collections.abc import Container, Sequence
from types import TracebackType
from typing import Any, ClassVar, NamedTuple, overload
from typing_extensions import Final, Literal, Self, TypeAlias
_Decimal: TypeAlias = Decimal | int
_DecimalNew: TypeAlias = Decimal | float | str | tuple[int, Sequence[int], int]
_ComparableNum: TypeAlias = Decimal | float | numbers.Rational
__version__: Final[str]
__libmpdec_version__: Final[str]
class DecimalTuple(NamedTuple):
sign: int
digits: tuple[int, ...]
exponent: int | Literal["n", "N", "F"]
ROUND_DOWN: str
ROUND_HALF_UP: str
ROUND_HALF_EVEN: str
ROUND_CEILING: str
ROUND_FLOOR: str
ROUND_UP: str
ROUND_HALF_DOWN: str
ROUND_05UP: str
HAVE_CONTEXTVAR: bool
HAVE_THREADS: bool
MAX_EMAX: int
MAX_PREC: int
MIN_EMIN: int
MIN_ETINY: int
class DecimalException(ArithmeticError): ...
class Clamped(DecimalException): ...
class InvalidOperation(DecimalException): ...
class ConversionSyntax(InvalidOperation): ...
class DivisionByZero(DecimalException, ZeroDivisionError): ...
class DivisionImpossible(InvalidOperation): ...
class DivisionUndefined(InvalidOperation, ZeroDivisionError): ...
class Inexact(DecimalException): ...
class InvalidContext(InvalidOperation): ...
class Rounded(DecimalException): ...
class Subnormal(DecimalException): ...
class Overflow(Inexact, Rounded): ...
class Underflow(Inexact, Rounded, Subnormal): ...
class FloatOperation(DecimalException, TypeError): ...
def setcontext(__context: Context) -> None: ...
def getcontext() -> Context: ...
if sys.version_info >= (3, 11):
def localcontext(
ctx: Context | None = None,
*,
prec: int | None = ...,
rounding: str | None = ...,
Emin: int | None = ...,
Emax: int | None = ...,
capitals: int | None = ...,
clamp: int | None = ...,
traps: dict[_TrapType, bool] | None = ...,
flags: dict[_TrapType, bool] | None = ...,
) -> _ContextManager: ...
else:
def localcontext(ctx: Context | None = None) -> _ContextManager: ...
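# Illustrative usage sketch (editor-added): standard `decimal` behaviour; on
# CPython this module, _decimal, is the C accelerator behind the decimal module.
from decimal import Decimal, localcontext
with localcontext() as ctx:           # temporary copy of the current context
    ctx.prec = 6
    assert Decimal(1) / Decimal(7) == Decimal("0.142857")
# outside the block the previous precision (28 by default) applies again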
class Decimal:
def __new__(cls, value: _DecimalNew = ..., context: Context | None = ...) -> Self: ...
@classmethod
def from_float(cls, __f: float) -> Self: ...
def __bool__(self) -> bool: ...
def compare(self, other: _Decimal, context: Context | None = None) -> Decimal: ...
def __hash__(self) -> int: ...
def as_tuple(self) -> DecimalTuple: ...
def as_integer_ratio(self) -> tuple[int, int]: ...
def to_eng_string(self, context: Context | None = None) -> str: ...
def __abs__(self) -> Decimal: ...
def __add__(self, __value: _Decimal) -> Decimal: ...
def __divmod__(self, __value: _Decimal) -> tuple[Decimal, Decimal]: ...
def __eq__(self, __value: object) -> bool: ...
def __floordiv__(self, __value: _Decimal) -> Decimal: ...
def __ge__(self, __value: _ComparableNum) -> bool: ...
def __gt__(self, __value: _ComparableNum) -> bool: ...
def __le__(self, __value: _ComparableNum) -> bool: ...
def __lt__(self, __value: _ComparableNum) -> bool: ...
def __mod__(self, __value: _Decimal) -> Decimal: ...
def __mul__(self, __value: _Decimal) -> Decimal: ...
def __neg__(self) -> Decimal: ...
def __pos__(self) -> Decimal: ...
def __pow__(self, __value: _Decimal, __mod: _Decimal | None = None) -> Decimal: ...
def __radd__(self, __value: _Decimal) -> Decimal: ...
def __rdivmod__(self, __value: _Decimal) -> tuple[Decimal, Decimal]: ...
def __rfloordiv__(self, __value: _Decimal) -> Decimal: ...
def __rmod__(self, __value: _Decimal) -> Decimal: ...
def __rmul__(self, __value: _Decimal) -> Decimal: ...
def __rsub__(self, __value: _Decimal) -> Decimal: ...
def __rtruediv__(self, __value: _Decimal) -> Decimal: ...
def __sub__(self, __value: _Decimal) -> Decimal: ...
def __truediv__(self, __value: _Decimal) -> Decimal: ...
def remainder_near(self, other: _Decimal, context: Context | None = None) -> Decimal: ...
def __float__(self) -> float: ...
def __int__(self) -> int: ...
def __trunc__(self) -> int: ...
@property
def real(self) -> Decimal: ...
@property
def imag(self) -> Decimal: ...
def conjugate(self) -> Decimal: ...
def __complex__(self) -> complex: ...
@overload
def __round__(self) -> int: ...
@overload
def __round__(self, __ndigits: int) -> Decimal: ...
def __floor__(self) -> int: ...
def __ceil__(self) -> int: ...
def fma(self, other: _Decimal, third: _Decimal, context: Context | None = None) -> Decimal: ...
def __rpow__(self, __value: _Decimal, __mod: Context | None = None) -> Decimal: ...
def normalize(self, context: Context | None = None) -> Decimal: ...
def quantize(self, exp: _Decimal, rounding: str | None = None, context: Context | None = None) -> Decimal: ...
def same_quantum(self, other: _Decimal, context: Context | None = None) -> bool: ...
def to_integral_exact(self, rounding: str | None = None, context: Context | None = None) -> Decimal: ...
def to_integral_value(self, rounding: str | None = None, context: Context | None = None) -> Decimal: ...
def to_integral(self, rounding: str | None = None, context: Context | None = None) -> Decimal: ...
def sqrt(self, context: Context | None = None) -> Decimal: ...
def max(self, other: _Decimal, context: Context | None = None) -> Decimal: ...
def min(self, other: _Decimal, context: Context | None = None) -> Decimal: ...
def adjusted(self) -> int: ...
def canonical(self) -> Decimal: ...
def compare_signal(self, other: _Decimal, context: Context | None = None) -> Decimal: ...
def compare_total(self, other: _Decimal, context: Context | None = None) -> Decimal: ...
def compare_total_mag(self, other: _Decimal, context: Context | None = None) -> Decimal: ...
def copy_abs(self) -> Decimal: ...
def copy_negate(self) -> Decimal: ...
def copy_sign(self, other: _Decimal, context: Context | None = None) -> Decimal: ...
def exp(self, context: Context | None = None) -> Decimal: ...
def is_canonical(self) -> bool: ...
def is_finite(self) -> bool: ...
def is_infinite(self) -> bool: ...
def is_nan(self) -> bool: ...
def is_normal(self, context: Context | None = None) -> bool: ...
def is_qnan(self) -> bool: ...
def is_signed(self) -> bool: ...
def is_snan(self) -> bool: ...
def is_subnormal(self, context: Context | None = None) -> bool: ...
def is_zero(self) -> bool: ...
def ln(self, context: Context | None = None) -> Decimal: ...
def log10(self, context: Context | None = None) -> Decimal: ...
def logb(self, context: Context | None = None) -> Decimal: ...
def logical_and(self, other: _Decimal, context: Context | None = None) -> Decimal: ...
def logical_invert(self, context: Context | None = None) -> Decimal: ...
def logical_or(self, other: _Decimal, context: Context | None = None) -> Decimal: ...
def logical_xor(self, other: _Decimal, context: Context | None = None) -> Decimal: ...
def max_mag(self, other: _Decimal, context: Context | None = None) -> Decimal: ...
def min_mag(self, other: _Decimal, context: Context | None = None) -> Decimal: ...
def next_minus(self, context: Context | None = None) -> Decimal: ...
def next_plus(self, context: Context | None = None) -> Decimal: ...
def next_toward(self, other: _Decimal, context: Context | None = None) -> Decimal: ...
def number_class(self, context: Context | None = None) -> str: ...
def radix(self) -> Decimal: ...
def rotate(self, other: _Decimal, context: Context | None = None) -> Decimal: ...
def scaleb(self, other: _Decimal, context: Context | None = None) -> Decimal: ...
def shift(self, other: _Decimal, context: Context | None = None) -> Decimal: ...
def __reduce__(self) -> tuple[type[Self], tuple[str]]: ...
def __copy__(self) -> Self: ...
def __deepcopy__(self, __memo: Any) -> Self: ...
def __format__(self, __specifier: str, __context: Context | None = ...) -> str: ...
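# Illustrative sketch (editor-added): quantize() rounds to the exponent of its
# argument and as_tuple() returns the DecimalTuple declared near the top of
# this stub.
from decimal import ROUND_HALF_UP, Decimal
price = Decimal("2.675").quantize(Decimal("0.01"), rounding=ROUND_HALF_UP)
assert price == Decimal("2.68")
assert price.as_tuple() == (0, (2, 6, 8), -2)   # (sign, digits, exponent)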
class _ContextManager:
new_context: Context
saved_context: Context
def __init__(self, new_context: Context) -> None: ...
def __enter__(self) -> Context: ...
def __exit__(self, t: type[BaseException] | None, v: BaseException | None, tb: TracebackType | None) -> None: ...
_TrapType: TypeAlias = type[DecimalException]
class Context:
# TODO: Context doesn't allow you to delete *any* attributes from instances of the class at runtime,
# even settable attributes like `prec` and `rounding`,
    # but that's inexpressible in the stub.
# Type checkers either ignore it or misinterpret it
# if you add a `def __delattr__(self, __name: str) -> NoReturn` method to the stub
prec: int
rounding: str
Emin: int
Emax: int
capitals: int
clamp: int
traps: dict[_TrapType, bool]
flags: dict[_TrapType, bool]
def __init__(
self,
prec: int | None = ...,
rounding: str | None = ...,
Emin: int | None = ...,
Emax: int | None = ...,
capitals: int | None = ...,
clamp: int | None = ...,
flags: None | dict[_TrapType, bool] | Container[_TrapType] = ...,
traps: None | dict[_TrapType, bool] | Container[_TrapType] = ...,
_ignored_flags: list[_TrapType] | None = ...,
) -> None: ...
def __reduce__(self) -> tuple[type[Self], tuple[Any, ...]]: ...
def clear_flags(self) -> None: ...
def clear_traps(self) -> None: ...
def copy(self) -> Context: ...
def __copy__(self) -> Context: ...
# see https://github.com/python/cpython/issues/94107
__hash__: ClassVar[None] # type: ignore[assignment]
def Etiny(self) -> int: ...
def Etop(self) -> int: ...
def create_decimal(self, __num: _DecimalNew = "0") -> Decimal: ...
def create_decimal_from_float(self, __f: float) -> Decimal: ...
def abs(self, __x: _Decimal) -> Decimal: ...
def add(self, __x: _Decimal, __y: _Decimal) -> Decimal: ...
def canonical(self, __x: Decimal) -> Decimal: ...
def compare(self, __x: _Decimal, __y: _Decimal) -> Decimal: ...
def compare_signal(self, __x: _Decimal, __y: _Decimal) -> Decimal: ...
def compare_total(self, __x: _Decimal, __y: _Decimal) -> Decimal: ...
def compare_total_mag(self, __x: _Decimal, __y: _Decimal) -> Decimal: ...
def copy_abs(self, __x: _Decimal) -> Decimal: ...
def copy_decimal(self, __x: _Decimal) -> Decimal: ...
def copy_negate(self, __x: _Decimal) -> Decimal: ...
def copy_sign(self, __x: _Decimal, __y: _Decimal) -> Decimal: ...
def divide(self, __x: _Decimal, __y: _Decimal) -> Decimal: ...
def divide_int(self, __x: _Decimal, __y: _Decimal) -> Decimal: ...
def divmod(self, __x: _Decimal, __y: _Decimal) -> tuple[Decimal, Decimal]: ...
def exp(self, __x: _Decimal) -> Decimal: ...
def fma(self, __x: _Decimal, __y: _Decimal, __z: _Decimal) -> Decimal: ...
def is_canonical(self, __x: _Decimal) -> bool: ...
def is_finite(self, __x: _Decimal) -> bool: ...
def is_infinite(self, __x: _Decimal) -> bool: ...
def is_nan(self, __x: _Decimal) -> bool: ...
def is_normal(self, __x: _Decimal) -> bool: ...
def is_qnan(self, __x: _Decimal) -> bool: ...
def is_signed(self, __x: _Decimal) -> bool: ...
def is_snan(self, __x: _Decimal) -> bool: ...
def is_subnormal(self, __x: _Decimal) -> bool: ...
def is_zero(self, __x: _Decimal) -> bool: ...
def ln(self, __x: _Decimal) -> Decimal: ...
def log10(self, __x: _Decimal) -> Decimal: ...
def logb(self, __x: _Decimal) -> Decimal: ...
def logical_and(self, __x: _Decimal, __y: _Decimal) -> Decimal: ...
def logical_invert(self, __x: _Decimal) -> Decimal: ...
def logical_or(self, __x: _Decimal, __y: _Decimal) -> Decimal: ...
def logical_xor(self, __x: _Decimal, __y: _Decimal) -> Decimal: ...
def max(self, __x: _Decimal, __y: _Decimal) -> Decimal: ...
def max_mag(self, __x: _Decimal, __y: _Decimal) -> Decimal: ...
def min(self, __x: _Decimal, __y: _Decimal) -> Decimal: ...
def min_mag(self, __x: _Decimal, __y: _Decimal) -> Decimal: ...
def minus(self, __x: _Decimal) -> Decimal: ...
def multiply(self, __x: _Decimal, __y: _Decimal) -> Decimal: ...
def next_minus(self, __x: _Decimal) -> Decimal: ...
def next_plus(self, __x: _Decimal) -> Decimal: ...
def next_toward(self, __x: _Decimal, __y: _Decimal) -> Decimal: ...
def normalize(self, __x: _Decimal) -> Decimal: ...
def number_class(self, __x: _Decimal) -> str: ...
def plus(self, __x: _Decimal) -> Decimal: ...
def power(self, a: _Decimal, b: _Decimal, modulo: _Decimal | None = None) -> Decimal: ...
def quantize(self, __x: _Decimal, __y: _Decimal) -> Decimal: ...
def radix(self) -> Decimal: ...
def remainder(self, __x: _Decimal, __y: _Decimal) -> Decimal: ...
def remainder_near(self, __x: _Decimal, __y: _Decimal) -> Decimal: ...
def rotate(self, __x: _Decimal, __y: _Decimal) -> Decimal: ...
def same_quantum(self, __x: _Decimal, __y: _Decimal) -> bool: ...
def scaleb(self, __x: _Decimal, __y: _Decimal) -> Decimal: ...
def shift(self, __x: _Decimal, __y: _Decimal) -> Decimal: ...
def sqrt(self, __x: _Decimal) -> Decimal: ...
def subtract(self, __x: _Decimal, __y: _Decimal) -> Decimal: ...
def to_eng_string(self, __x: _Decimal) -> str: ...
def to_sci_string(self, __x: _Decimal) -> str: ...
def to_integral_exact(self, __x: _Decimal) -> Decimal: ...
def to_integral_value(self, __x: _Decimal) -> Decimal: ...
def to_integral(self, __x: _Decimal) -> Decimal: ...
DefaultContext: Context
BasicContext: Context
ExtendedContext: Context

View File

@@ -0,0 +1,318 @@
# Utility types for typeshed
#
# See the README.md file in this directory for more information.
import sys
from collections.abc import Awaitable, Callable, Iterable, Sequence
from collections.abc import Set as AbstractSet
from collections.abc import Sized
from dataclasses import Field
from os import PathLike
from types import FrameType, TracebackType
from typing import Any, AnyStr, ClassVar, Generic, Protocol, TypeVar, overload
from typing_extensions import Buffer, Final, Literal, LiteralString, TypeAlias, final
_KT = TypeVar("_KT")
_KT_co = TypeVar("_KT_co", covariant=True)
_KT_contra = TypeVar("_KT_contra", contravariant=True)
_VT = TypeVar("_VT")
_VT_co = TypeVar("_VT_co", covariant=True)
_T = TypeVar("_T")
_T_co = TypeVar("_T_co", covariant=True)
_T_contra = TypeVar("_T_contra", contravariant=True)
# Use for "self" annotations:
# def __enter__(self: Self) -> Self: ...
Self = TypeVar("Self") # noqa: Y001
# covariant version of typing.AnyStr, useful for protocols
AnyStr_co = TypeVar("AnyStr_co", str, bytes, covariant=True) # noqa: Y001
# For partially known annotations. Usually, fields where type annotations
# haven't been added are left unannotated, but in some situations this
# isn't possible or a type is already partially known. In cases like these,
# use Incomplete instead of Any as a marker. For example, use
# "Incomplete | None" instead of "Any | None".
Incomplete: TypeAlias = Any
# To describe a function parameter that is unused and will work with anything.
Unused: TypeAlias = object
# Used to mark arguments that default to a sentinel value. This prevents
# stubtest from complaining about the default value not matching.
#
# def foo(x: int | None = sentinel) -> None: ...
#
# In cases where the sentinel object is exported and can be used by user code,
# a construct like this is better:
#
# _SentinelType = NewType("_SentinelType", object)
# sentinel: _SentinelType
# def foo(x: int | None | _SentinelType = ...) -> None: ...
sentinel = Any # noqa: Y026
# stable
class IdentityFunction(Protocol):
def __call__(self, __x: _T) -> _T: ...
# stable
class SupportsNext(Protocol[_T_co]):
def __next__(self) -> _T_co: ...
# stable
class SupportsAnext(Protocol[_T_co]):
def __anext__(self) -> Awaitable[_T_co]: ...
# Comparison protocols
class SupportsDunderLT(Protocol[_T_contra]):
def __lt__(self, __other: _T_contra) -> bool: ...
class SupportsDunderGT(Protocol[_T_contra]):
def __gt__(self, __other: _T_contra) -> bool: ...
class SupportsDunderLE(Protocol[_T_contra]):
def __le__(self, __other: _T_contra) -> bool: ...
class SupportsDunderGE(Protocol[_T_contra]):
def __ge__(self, __other: _T_contra) -> bool: ...
class SupportsAllComparisons(
SupportsDunderLT[Any], SupportsDunderGT[Any], SupportsDunderLE[Any], SupportsDunderGE[Any], Protocol
): ...
SupportsRichComparison: TypeAlias = SupportsDunderLT[Any] | SupportsDunderGT[Any]
SupportsRichComparisonT = TypeVar("SupportsRichComparisonT", bound=SupportsRichComparison) # noqa: Y001
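# Illustrative sketch (editor-added; the Job class below is invented for the
# example): anything defining __lt__ or __gt__ satisfies SupportsRichComparison,
# which is the bound typeshed uses for sort keys in sorted() and list.sort().
from dataclasses import dataclass, field
@dataclass(order=True)
class Job:
    priority: int
    name: str = field(compare=False)
assert [j.name for j in sorted([Job(2, "b"), Job(1, "a")])] == ["a", "b"]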
# Dunder protocols
class SupportsAdd(Protocol[_T_contra, _T_co]):
def __add__(self, __x: _T_contra) -> _T_co: ...
class SupportsRAdd(Protocol[_T_contra, _T_co]):
def __radd__(self, __x: _T_contra) -> _T_co: ...
class SupportsSub(Protocol[_T_contra, _T_co]):
def __sub__(self, __x: _T_contra) -> _T_co: ...
class SupportsRSub(Protocol[_T_contra, _T_co]):
def __rsub__(self, __x: _T_contra) -> _T_co: ...
class SupportsDivMod(Protocol[_T_contra, _T_co]):
def __divmod__(self, __other: _T_contra) -> _T_co: ...
class SupportsRDivMod(Protocol[_T_contra, _T_co]):
def __rdivmod__(self, __other: _T_contra) -> _T_co: ...
# This protocol is generic over the iterator type, while Iterable is
# generic over the type that is iterated over.
class SupportsIter(Protocol[_T_co]):
def __iter__(self) -> _T_co: ...
# This protocol is generic over the iterator type, while AsyncIterable is
# generic over the type that is iterated over.
class SupportsAiter(Protocol[_T_co]):
def __aiter__(self) -> _T_co: ...
class SupportsLenAndGetItem(Protocol[_T_co]):
def __len__(self) -> int: ...
def __getitem__(self, __k: int) -> _T_co: ...
class SupportsTrunc(Protocol):
def __trunc__(self) -> int: ...
# Mapping-like protocols
# stable
class SupportsItems(Protocol[_KT_co, _VT_co]):
def items(self) -> AbstractSet[tuple[_KT_co, _VT_co]]: ...
# stable
class SupportsKeysAndGetItem(Protocol[_KT, _VT_co]):
def keys(self) -> Iterable[_KT]: ...
def __getitem__(self, __key: _KT) -> _VT_co: ...
# stable
class SupportsGetItem(Protocol[_KT_contra, _VT_co]):
def __contains__(self, __x: Any) -> bool: ...
def __getitem__(self, __key: _KT_contra) -> _VT_co: ...
# stable
class SupportsItemAccess(SupportsGetItem[_KT_contra, _VT], Protocol[_KT_contra, _VT]):
def __setitem__(self, __key: _KT_contra, __value: _VT) -> None: ...
def __delitem__(self, __key: _KT_contra) -> None: ...
StrPath: TypeAlias = str | PathLike[str] # stable
BytesPath: TypeAlias = bytes | PathLike[bytes] # stable
GenericPath: TypeAlias = AnyStr | PathLike[AnyStr]
StrOrBytesPath: TypeAlias = str | bytes | PathLike[str] | PathLike[bytes] # stable
OpenTextModeUpdating: TypeAlias = Literal[
"r+",
"+r",
"rt+",
"r+t",
"+rt",
"tr+",
"t+r",
"+tr",
"w+",
"+w",
"wt+",
"w+t",
"+wt",
"tw+",
"t+w",
"+tw",
"a+",
"+a",
"at+",
"a+t",
"+at",
"ta+",
"t+a",
"+ta",
"x+",
"+x",
"xt+",
"x+t",
"+xt",
"tx+",
"t+x",
"+tx",
]
OpenTextModeWriting: TypeAlias = Literal["w", "wt", "tw", "a", "at", "ta", "x", "xt", "tx"]
OpenTextModeReading: TypeAlias = Literal["r", "rt", "tr", "U", "rU", "Ur", "rtU", "rUt", "Urt", "trU", "tUr", "Utr"]
OpenTextMode: TypeAlias = OpenTextModeUpdating | OpenTextModeWriting | OpenTextModeReading
OpenBinaryModeUpdating: TypeAlias = Literal[
"rb+",
"r+b",
"+rb",
"br+",
"b+r",
"+br",
"wb+",
"w+b",
"+wb",
"bw+",
"b+w",
"+bw",
"ab+",
"a+b",
"+ab",
"ba+",
"b+a",
"+ba",
"xb+",
"x+b",
"+xb",
"bx+",
"b+x",
"+bx",
]
OpenBinaryModeWriting: TypeAlias = Literal["wb", "bw", "ab", "ba", "xb", "bx"]
OpenBinaryModeReading: TypeAlias = Literal["rb", "br", "rbU", "rUb", "Urb", "brU", "bUr", "Ubr"]
OpenBinaryMode: TypeAlias = OpenBinaryModeUpdating | OpenBinaryModeReading | OpenBinaryModeWriting
# stable
class HasFileno(Protocol):
def fileno(self) -> int: ...
FileDescriptor: TypeAlias = int # stable
FileDescriptorLike: TypeAlias = int | HasFileno # stable
FileDescriptorOrPath: TypeAlias = int | StrOrBytesPath
# stable
class SupportsRead(Protocol[_T_co]):
def read(self, __length: int = ...) -> _T_co: ...
# stable
class SupportsReadline(Protocol[_T_co]):
def readline(self, __length: int = ...) -> _T_co: ...
# stable
class SupportsNoArgReadline(Protocol[_T_co]):
def readline(self) -> _T_co: ...
# stable
class SupportsWrite(Protocol[_T_contra]):
def write(self, __s: _T_contra) -> object: ...
# Unfortunately PEP 688 does not allow us to distinguish read-only
# from writable buffers. We use these aliases for readability for now.
# Perhaps a future extension of the buffer protocol will allow us to
# distinguish these cases in the type system.
ReadOnlyBuffer: TypeAlias = Buffer # stable
# Anything that implements the read-write buffer interface.
WriteableBuffer: TypeAlias = Buffer
# Same as WriteableBuffer, but also includes read-only buffer types (like bytes).
ReadableBuffer: TypeAlias = Buffer # stable
class SliceableBuffer(Buffer, Protocol):
def __getitem__(self, __slice: slice) -> Sequence[int]: ...
class IndexableBuffer(Buffer, Protocol):
def __getitem__(self, __i: int) -> int: ...
class SupportsGetItemBuffer(SliceableBuffer, IndexableBuffer, Protocol):
def __contains__(self, __x: Any) -> bool: ...
@overload
def __getitem__(self, __slice: slice) -> Sequence[int]: ...
@overload
def __getitem__(self, __i: int) -> int: ...
class SizedBuffer(Sized, Buffer, Protocol): ...
# for compatibility with third-party stubs that may use this
_BufferWithLen: TypeAlias = SizedBuffer # not stable # noqa: Y047
ExcInfo: TypeAlias = tuple[type[BaseException], BaseException, TracebackType]
OptExcInfo: TypeAlias = ExcInfo | tuple[None, None, None]
# stable
if sys.version_info >= (3, 10):
from types import NoneType as NoneType
else:
# Used by type checkers for checks involving None (does not exist at runtime)
@final
class NoneType:
def __bool__(self) -> Literal[False]: ...
# This is an internal CPython type that is like, but subtly different from, a NamedTuple
# Subclasses of this type are found in multiple modules.
# In typeshed, `structseq` is only ever used as a mixin in combination with a fixed-length `Tuple`
# See discussion at #6546 & #6560
# `structseq` classes are unsubclassable, so are all decorated with `@final`.
class structseq(Generic[_T_co]):
n_fields: Final[int]
n_unnamed_fields: Final[int]
n_sequence_fields: Final[int]
# The first parameter will generally only take an iterable of a specific length.
# E.g. `os.uname_result` takes any iterable of length exactly 5.
#
# The second parameter will accept a dict of any kind without raising an exception,
# but only has any meaning if you supply it a dict where the keys are strings.
# https://github.com/python/typeshed/pull/6560#discussion_r767149830
def __new__(cls: type[Self], sequence: Iterable[_T_co], dict: dict[str, Any] = ...) -> Self: ...
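# Illustrative sketch (editor-added): os.stat_result is one of the CPython
# structseq types this mixin models, i.e. a fixed-length tuple whose leading
# items are also exposed as named fields.
import os
st = os.stat(".")
assert st[6] == st.st_size                           # index access and named access agree
assert isinstance(type(st).n_sequence_fields, int)   # class-level structseq metadata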
# Superset of typing.AnyStr that also includes LiteralString
AnyOrLiteralStr = TypeVar("AnyOrLiteralStr", str, bytes, LiteralString) # noqa: Y001
# Represents when str or LiteralString is acceptable. Useful for string processing
# APIs where literalness of return value depends on literalness of inputs
StrOrLiteralStr = TypeVar("StrOrLiteralStr", LiteralString, str) # noqa: Y001
# Objects suitable to be passed to sys.setprofile, threading.setprofile, and similar
ProfileFunction: TypeAlias = Callable[[FrameType, str, Any], object]
# Objects suitable to be passed to sys.settrace, threading.settrace, and similar
TraceFunction: TypeAlias = Callable[[FrameType, str, Any], TraceFunction | None]
# experimental
# Might not work as expected for pyright, see
# https://github.com/python/typeshed/pull/9362
# https://github.com/microsoft/pyright/issues/4339
class DataclassInstance(Protocol):
__dataclass_fields__: ClassVar[dict[str, Field[Any]]]

View File

@@ -0,0 +1,37 @@
# PEP 249 Database API 2.0 Types
# https://www.python.org/dev/peps/pep-0249/
from collections.abc import Mapping, Sequence
from typing import Any, Protocol
from typing_extensions import TypeAlias
DBAPITypeCode: TypeAlias = Any | None
# Strictly speaking, this should be a Sequence, but the type system does
# not support fixed-length sequences.
DBAPIColumnDescription: TypeAlias = tuple[str, DBAPITypeCode, int | None, int | None, int | None, int | None, bool | None]
class DBAPIConnection(Protocol):
def close(self) -> object: ...
def commit(self) -> object: ...
# optional:
# def rollback(self) -> Any: ...
def cursor(self) -> DBAPICursor: ...
class DBAPICursor(Protocol):
@property
def description(self) -> Sequence[DBAPIColumnDescription] | None: ...
@property
def rowcount(self) -> int: ...
# optional:
# def callproc(self, __procname: str, __parameters: Sequence[Any] = ...) -> Sequence[Any]: ...
def close(self) -> object: ...
def execute(self, __operation: str, __parameters: Sequence[Any] | Mapping[str, Any] = ...) -> object: ...
def executemany(self, __operation: str, __seq_of_parameters: Sequence[Sequence[Any]]) -> object: ...
def fetchone(self) -> Sequence[Any] | None: ...
def fetchmany(self, __size: int = ...) -> Sequence[Sequence[Any]]: ...
def fetchall(self) -> Sequence[Sequence[Any]]: ...
# optional:
# def nextset(self) -> None | Literal[True]: ...
arraysize: int
def setinputsizes(self, __sizes: Sequence[DBAPITypeCode | int | None]) -> object: ...
def setoutputsize(self, __size: int, __column: int = ...) -> object: ...
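# Illustrative sketch (editor-added): sqlite3 satisfies these protocols
# structurally, so code annotated with them accepts sqlite3 connections and
# cursors. _typeshed does not exist at runtime, so keep the import behind
# TYPE_CHECKING.
import sqlite3
from typing import TYPE_CHECKING, Any
if TYPE_CHECKING:
    from _typeshed.dbapi import DBAPIConnection
def first_row(conn: "DBAPIConnection") -> Any:
    cur = conn.cursor()
    cur.execute("SELECT 1")
    return cur.fetchone()
assert first_row(sqlite3.connect(":memory:")) == (1,)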

View File

@@ -0,0 +1,44 @@
# Types to support PEP 3333 (WSGI)
#
# Obsolete since Python 3.11: Use wsgiref.types instead.
#
# See the README.md file in this directory for more information.
import sys
from _typeshed import OptExcInfo
from collections.abc import Callable, Iterable, Iterator
from typing import Any, Protocol
from typing_extensions import TypeAlias
class _Readable(Protocol):
def read(self, size: int = ...) -> bytes: ...
# Optional: def close(self) -> object: ...
if sys.version_info >= (3, 11):
from wsgiref.types import *
else:
# stable
class StartResponse(Protocol):
def __call__(
self, __status: str, __headers: list[tuple[str, str]], __exc_info: OptExcInfo | None = ...
) -> Callable[[bytes], object]: ...
WSGIEnvironment: TypeAlias = dict[str, Any] # stable
WSGIApplication: TypeAlias = Callable[[WSGIEnvironment, StartResponse], Iterable[bytes]] # stable
# WSGI input streams per PEP 3333, stable
class InputStream(Protocol):
def read(self, __size: int = ...) -> bytes: ...
def readline(self, __size: int = ...) -> bytes: ...
def readlines(self, __hint: int = ...) -> list[bytes]: ...
def __iter__(self) -> Iterator[bytes]: ...
# WSGI error streams per PEP 3333, stable
class ErrorStream(Protocol):
def flush(self) -> object: ...
def write(self, __s: str) -> object: ...
def writelines(self, __seq: list[str]) -> object: ...
# Optional file wrapper in wsgi.file_wrapper
class FileWrapper(Protocol):
def __call__(self, __file: _Readable, __block_size: int = ...) -> Iterable[bytes]: ...
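# Illustrative sketch (editor-added): a minimal application matching the
# WSGIApplication alias above; the protocol names are only importable by type
# checkers, hence the TYPE_CHECKING guard.
from typing import TYPE_CHECKING, Iterable
if TYPE_CHECKING:
    from _typeshed.wsgi import StartResponse, WSGIEnvironment
def app(environ: "WSGIEnvironment", start_response: "StartResponse") -> Iterable[bytes]:
    start_response("200 OK", [("Content-Type", "text/plain")])
    return [b"hello from a WSGI app\n"]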

View File

@@ -0,0 +1,9 @@
# See the README.md file in this directory for more information.
from typing import Any, Protocol
# As defined https://docs.python.org/3/library/xml.dom.html#domimplementation-objects
class DOMImplementation(Protocol):
def hasFeature(self, feature: str, version: str | None) -> bool: ...
def createDocument(self, namespaceUri: str, qualifiedName: str, doctype: Any | None) -> Any: ...
def createDocumentType(self, qualifiedName: str, publicId: str, systemId: str) -> Any: ...

57
.vscode/Pico-W-Stub/stdlib/abc.pyi vendored Normal file
View File

@@ -0,0 +1,57 @@
import sys
from collections.abc import Callable
from typing import Any, TypeVar
import _typeshed
from _typeshed import SupportsWrite
from typing_extensions import Concatenate, Literal, ParamSpec
_T = TypeVar("_T")
_R_co = TypeVar("_R_co", covariant=True)
_FuncT = TypeVar("_FuncT", bound=Callable[..., Any])
_P = ParamSpec("_P")
# These definitions have special processing in mypy
class ABCMeta(type):
__abstractmethods__: frozenset[str]
if sys.version_info >= (3, 11):
def __new__(
__mcls: type[_typeshed.Self],
__name: str,
__bases: tuple[type, ...],
__namespace: dict[str, Any],
**kwargs: Any
) -> _typeshed.Self: ...
else:
def __new__(
mcls: type[_typeshed.Self],
name: str,
bases: tuple[type, ...],
namespace: dict[str, Any],
**kwargs: Any
) -> _typeshed.Self: ...
def __instancecheck__(cls: ABCMeta, instance: Any) -> bool: ...
def __subclasscheck__(cls: ABCMeta, subclass: type) -> bool: ...
def _dump_registry(cls: ABCMeta, file: SupportsWrite[str] | None = None) -> None: ...
def register(cls: ABCMeta, subclass: type[_T]) -> type[_T]: ...
def abstractmethod(funcobj: _FuncT) -> _FuncT: ...
class abstractclassmethod(classmethod[_T, _P, _R_co]): # type: ignore
__isabstractmethod__: Literal[True]
def __init__(self, callable: Callable[Concatenate[type[_T], _P], _R_co]) -> None: ... # type: ignore
class abstractstaticmethod(staticmethod[_P, _R_co]): # type: ignore
__isabstractmethod__: Literal[True]
def __init__(self, callable: Callable[_P, _R_co]) -> None: ... # type: ignore
class abstractproperty(property):
__isabstractmethod__: Literal[True]
class ABC(metaclass=ABCMeta): ...
def get_cache_token() -> object: ...
if sys.version_info >= (3, 10):
def update_abstractmethods(cls: type[_T]) -> type[_T]: ...
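# Illustrative usage sketch (editor-added) of the declarations above, using the
# standard abc behaviour.
from abc import ABC, abstractmethod
class Reader(ABC):
    @abstractmethod
    def read(self) -> bytes: ...
class FileReader(Reader):
    def read(self) -> bytes:
        return b"data"
FileReader().read()   # fine; Reader() itself would raise TypeError (abstract method)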

View File

@@ -0,0 +1,45 @@
import sys
from collections.abc import Awaitable, Coroutine, Generator
from typing import Any, TypeVar
from typing_extensions import TypeAlias
# As at runtime, this depends on all submodules defining __all__ accurately.
from .base_events import *
from .coroutines import *
from .events import *
from .futures import *
from .locks import *
from .protocols import *
from .queues import *
from .runners import *
from .streams import *
# from .subprocess import *
from .tasks import *
from .transports import *
if sys.version_info >= (3, 8):
from .exceptions import *
if sys.version_info >= (3, 9):
from .threads import *
if sys.version_info >= (3, 11):
from .taskgroups import *
from .timeouts import *
if sys.platform == "win32":
from .windows_events import *
else:
from .unix_events import *
_T = TypeVar("_T")
# Aliases imported by multiple submodules in typeshed
if sys.version_info >= (3, 12):
_AwaitableLike: TypeAlias = Awaitable[_T] # noqa: Y047
_CoroutineLike: TypeAlias = Coroutine[Any, Any, _T] # noqa: Y047
else:
_AwaitableLike: TypeAlias = Generator[Any, None, _T] | Awaitable[_T]
_CoroutineLike: TypeAlias = Generator[Any, None, _T] | Coroutine[Any, Any, _T]

View File

@@ -0,0 +1,521 @@
import ssl
import sys
from asyncio import _AwaitableLike, _CoroutineLike
from asyncio.events import AbstractEventLoop, AbstractServer, Handle, TimerHandle, _TaskFactory
from asyncio.futures import Future
from asyncio.protocols import BaseProtocol
from asyncio.tasks import Task
from asyncio.transports import (
BaseTransport,
DatagramTransport,
ReadTransport,
SubprocessTransport,
Transport,
WriteTransport,
)
from collections.abc import Callable, Iterable, Sequence
from contextvars import Context
from typing import IO, Any, TypeVar, overload
from _typeshed import FileDescriptorLike, ReadableBuffer, WriteableBuffer
from socket import AddressFamily, SocketKind, _Address, _RetAddress, socket
from typing_extensions import Literal, TypeAlias
if sys.version_info >= (3, 9):
__all__ = ("BaseEventLoop", "Server")
else:
__all__ = ("BaseEventLoop",)
_T = TypeVar("_T")
_ProtocolT = TypeVar("_ProtocolT", bound=BaseProtocol)
_Context: TypeAlias = dict[str, Any]
_ExceptionHandler: TypeAlias = Callable[[AbstractEventLoop, _Context], object]
_ProtocolFactory: TypeAlias = Callable[[], BaseProtocol]
_SSLContext: TypeAlias = bool | None | ssl.SSLContext # type: ignore[misc]
class Server(AbstractServer):
if sys.version_info >= (3, 11):
def __init__(
self,
loop: AbstractEventLoop,
sockets: Iterable[socket],
protocol_factory: _ProtocolFactory,
ssl_context: _SSLContext,
backlog: int,
ssl_handshake_timeout: float | None,
ssl_shutdown_timeout: float | None = None,
) -> None: ...
else:
def __init__(
self,
loop: AbstractEventLoop,
sockets: Iterable[socket],
protocol_factory: _ProtocolFactory,
ssl_context: _SSLContext,
backlog: int,
ssl_handshake_timeout: float | None,
) -> None: ...
def get_loop(self) -> AbstractEventLoop: ...
def is_serving(self) -> bool: ...
async def start_serving(self) -> None: ...
async def serve_forever(self) -> None: ...
if sys.version_info >= (3, 8):
@property
def sockets(self) -> tuple[socket, ...]: ...
else:
@property
def sockets(self) -> list[socket]: ...
def close(self) -> None: ...
async def wait_closed(self) -> None: ...
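# Illustrative sketch (editor-added): Server instances are normally obtained
# from loop.create_server() or asyncio.start_server(); they support
# "async with", and serve_forever() runs until cancelled.
import asyncio
async def echo(reader: asyncio.StreamReader, writer: asyncio.StreamWriter) -> None:
    writer.write(await reader.read(100))
    await writer.drain()
    writer.close()
async def main() -> None:
    server = await asyncio.start_server(echo, "127.0.0.1", 8888)
    async with server:
        await server.serve_forever()
# asyncio.run(main())  # commented out: serves until interrupted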
class BaseEventLoop(AbstractEventLoop):
def run_forever(self) -> None: ...
def run_until_complete(self, future: _AwaitableLike[_T]) -> _T: ...
def stop(self) -> None: ...
def is_running(self) -> bool: ...
def is_closed(self) -> bool: ...
def close(self) -> None: ...
async def shutdown_asyncgens(self) -> None: ...
# Methods scheduling callbacks. All these return Handles.
def call_soon(
self, callback: Callable[..., object], *args: Any, context: Context | None = None
) -> Handle: ...
def call_later(
self,
delay: float,
callback: Callable[..., object],
*args: Any,
context: Context | None = None,
) -> TimerHandle: ...
def call_at(
self,
when: float,
callback: Callable[..., object],
*args: Any,
context: Context | None = None,
) -> TimerHandle: ...
def time(self) -> float: ...
# Future methods
def create_future(self) -> Future[Any]: ...
# Tasks methods
if sys.version_info >= (3, 11):
def create_task(
self, coro: _CoroutineLike[_T], *, name: object = None, context: Context | None = None
) -> Task[_T]: ...
elif sys.version_info >= (3, 8):
def create_task(self, coro: _CoroutineLike[_T], *, name: object = None) -> Task[_T]: ...
else:
def create_task(self, coro: _CoroutineLike[_T]) -> Task[_T]: ...
def set_task_factory(self, factory: _TaskFactory | None) -> None: ...
def get_task_factory(self) -> _TaskFactory | None: ...
# Methods for interacting with threads
def call_soon_threadsafe(
self, callback: Callable[..., object], *args: Any, context: Context | None = None
) -> Handle: ...
def run_in_executor(
self, executor: Any, func: Callable[..., _T], *args: Any
) -> Future[_T]: ...
def set_default_executor(self, executor: Any) -> None: ...
# Network I/O methods returning Futures.
async def getaddrinfo(
self,
host: bytes | str | None,
port: bytes | str | int | None,
*,
family: int = 0,
type: int = 0,
proto: int = 0,
flags: int = 0,
) -> list[
tuple[AddressFamily, SocketKind, int, str, tuple[str, int] | tuple[str, int, int, int]]
]: ...
async def getnameinfo(
self, sockaddr: tuple[str, int] | tuple[str, int, int, int], flags: int = 0
) -> tuple[str, str]: ...
if sys.version_info >= (3, 12):
@overload
async def create_connection(
self,
protocol_factory: Callable[[], _ProtocolT],
host: str = ...,
port: int = ...,
*,
ssl: _SSLContext = None,
family: int = 0,
proto: int = 0,
flags: int = 0,
sock: None = None,
local_addr: tuple[str, int] | None = None,
server_hostname: str | None = None,
ssl_handshake_timeout: float | None = None,
ssl_shutdown_timeout: float | None = None,
happy_eyeballs_delay: float | None = None,
interleave: int | None = None,
all_errors: bool = False,
) -> tuple[Transport, _ProtocolT]: ...
@overload
async def create_connection(
self,
protocol_factory: Callable[[], _ProtocolT],
host: None = None,
port: None = None,
*,
ssl: _SSLContext = None,
family: int = 0,
proto: int = 0,
flags: int = 0,
sock: socket,
local_addr: None = None,
server_hostname: str | None = None,
ssl_handshake_timeout: float | None = None,
ssl_shutdown_timeout: float | None = None,
happy_eyeballs_delay: float | None = None,
interleave: int | None = None,
all_errors: bool = False,
) -> tuple[Transport, _ProtocolT]: ...
elif sys.version_info >= (3, 11):
@overload
async def create_connection(
self,
protocol_factory: Callable[[], _ProtocolT],
host: str = ...,
port: int = ...,
*,
ssl: _SSLContext = None,
family: int = 0,
proto: int = 0,
flags: int = 0,
sock: None = None,
local_addr: tuple[str, int] | None = None,
server_hostname: str | None = None,
ssl_handshake_timeout: float | None = None,
ssl_shutdown_timeout: float | None = None,
happy_eyeballs_delay: float | None = None,
interleave: int | None = None,
) -> tuple[Transport, _ProtocolT]: ...
@overload
async def create_connection(
self,
protocol_factory: Callable[[], _ProtocolT],
host: None = None,
port: None = None,
*,
ssl: _SSLContext = None,
family: int = 0,
proto: int = 0,
flags: int = 0,
sock: socket,
local_addr: None = None,
server_hostname: str | None = None,
ssl_handshake_timeout: float | None = None,
ssl_shutdown_timeout: float | None = None,
happy_eyeballs_delay: float | None = None,
interleave: int | None = None,
) -> tuple[Transport, _ProtocolT]: ...
elif sys.version_info >= (3, 8):
@overload
async def create_connection(
self,
protocol_factory: Callable[[], _ProtocolT],
host: str = ...,
port: int = ...,
*,
ssl: _SSLContext = None,
family: int = 0,
proto: int = 0,
flags: int = 0,
sock: None = None,
local_addr: tuple[str, int] | None = None,
server_hostname: str | None = None,
ssl_handshake_timeout: float | None = None,
happy_eyeballs_delay: float | None = None,
interleave: int | None = None,
) -> tuple[Transport, _ProtocolT]: ...
@overload
async def create_connection(
self,
protocol_factory: Callable[[], _ProtocolT],
host: None = None,
port: None = None,
*,
ssl: _SSLContext = None,
family: int = 0,
proto: int = 0,
flags: int = 0,
sock: socket,
local_addr: None = None,
server_hostname: str | None = None,
ssl_handshake_timeout: float | None = None,
happy_eyeballs_delay: float | None = None,
interleave: int | None = None,
) -> tuple[Transport, _ProtocolT]: ...
else:
@overload
async def create_connection(
self,
protocol_factory: Callable[[], _ProtocolT],
host: str = ...,
port: int = ...,
*,
ssl: _SSLContext = None,
family: int = 0,
proto: int = 0,
flags: int = 0,
sock: None = None,
local_addr: tuple[str, int] | None = None,
server_hostname: str | None = None,
ssl_handshake_timeout: float | None = None,
) -> tuple[Transport, _ProtocolT]: ...
@overload
async def create_connection(
self,
protocol_factory: Callable[[], _ProtocolT],
host: None = None,
port: None = None,
*,
ssl: _SSLContext = None,
family: int = 0,
proto: int = 0,
flags: int = 0,
sock: socket,
local_addr: None = None,
server_hostname: str | None = None,
ssl_handshake_timeout: float | None = None,
) -> tuple[Transport, _ProtocolT]: ...
if sys.version_info >= (3, 11):
@overload
async def create_server(
self,
protocol_factory: _ProtocolFactory,
host: str | Sequence[str] | None = None,
port: int = ...,
*,
family: int = ...,
flags: int = ...,
sock: None = None,
backlog: int = 100,
ssl: _SSLContext = None,
reuse_address: bool | None = None,
reuse_port: bool | None = None,
ssl_handshake_timeout: float | None = None,
ssl_shutdown_timeout: float | None = None,
start_serving: bool = True,
) -> Server: ...
@overload
async def create_server(
self,
protocol_factory: _ProtocolFactory,
host: None = None,
port: None = None,
*,
family: int = ...,
flags: int = ...,
sock: socket = ...,
backlog: int = 100,
ssl: _SSLContext = None,
reuse_address: bool | None = None,
reuse_port: bool | None = None,
ssl_handshake_timeout: float | None = None,
ssl_shutdown_timeout: float | None = None,
start_serving: bool = True,
) -> Server: ...
async def start_tls(
self,
transport: BaseTransport,
protocol: BaseProtocol,
sslcontext: ssl.SSLContext,
*,
server_side: bool = False,
server_hostname: str | None = None,
ssl_handshake_timeout: float | None = None,
ssl_shutdown_timeout: float | None = None,
) -> Transport | None: ...
async def connect_accepted_socket(
self,
protocol_factory: Callable[[], _ProtocolT],
sock: socket,
*,
ssl: _SSLContext = None,
ssl_handshake_timeout: float | None = None,
ssl_shutdown_timeout: float | None = None,
) -> tuple[Transport, _ProtocolT]: ...
else:
@overload
async def create_server(
self,
protocol_factory: _ProtocolFactory,
host: str | Sequence[str] | None = None,
port: int = ...,
*,
family: int = ...,
flags: int = ...,
sock: None = None,
backlog: int = 100,
ssl: _SSLContext = None,
reuse_address: bool | None = None,
reuse_port: bool | None = None,
ssl_handshake_timeout: float | None = None,
start_serving: bool = True,
) -> Server: ...
@overload
async def create_server(
self,
protocol_factory: _ProtocolFactory,
host: None = None,
port: None = None,
*,
family: int = ...,
flags: int = ...,
sock: socket = ...,
backlog: int = 100,
ssl: _SSLContext = None,
reuse_address: bool | None = None,
reuse_port: bool | None = None,
ssl_handshake_timeout: float | None = None,
start_serving: bool = True,
) -> Server: ...
async def start_tls(
self,
transport: BaseTransport,
protocol: BaseProtocol,
sslcontext: ssl.SSLContext, # type: ignore[misc]
*,
server_side: bool = False,
server_hostname: str | None = None,
ssl_handshake_timeout: float | None = None,
) -> Transport | None: ...
async def connect_accepted_socket(
self,
protocol_factory: Callable[[], _ProtocolT],
sock: socket,
*,
ssl: _SSLContext = None,
ssl_handshake_timeout: float | None = None,
) -> tuple[Transport, _ProtocolT]: ...
async def sock_sendfile(
self,
sock: socket,
file: IO[bytes],
offset: int = 0,
count: int | None = None,
*,
fallback: bool | None = True,
) -> int: ...
async def sendfile(
self,
transport: WriteTransport,
file: IO[bytes],
offset: int = 0,
count: int | None = None,
*,
fallback: bool = True,
) -> int: ...
if sys.version_info >= (3, 11):
async def create_datagram_endpoint( # type: ignore[override]
self,
protocol_factory: Callable[[], _ProtocolT],
local_addr: tuple[str, int] | str | None = None,
remote_addr: tuple[str, int] | str | None = None,
*,
family: int = 0,
proto: int = 0,
flags: int = 0,
reuse_port: bool | None = None,
allow_broadcast: bool | None = None,
sock: socket | None = None,
) -> tuple[DatagramTransport, _ProtocolT]: ...
else:
async def create_datagram_endpoint(
self,
protocol_factory: Callable[[], _ProtocolT],
local_addr: tuple[str, int] | str | None = None,
remote_addr: tuple[str, int] | str | None = None,
*,
family: int = 0,
proto: int = 0,
flags: int = 0,
reuse_address: bool | None = ...,
reuse_port: bool | None = None,
allow_broadcast: bool | None = None,
sock: socket | None = None,
) -> tuple[DatagramTransport, _ProtocolT]: ...
# Pipes and subprocesses.
async def connect_read_pipe(
self, protocol_factory: Callable[[], _ProtocolT], pipe: Any
) -> tuple[ReadTransport, _ProtocolT]: ...
async def connect_write_pipe(
self, protocol_factory: Callable[[], _ProtocolT], pipe: Any
) -> tuple[WriteTransport, _ProtocolT]: ...
async def subprocess_shell(
self,
protocol_factory: Callable[[], _ProtocolT],
cmd: bytes | str,
*,
stdin: int | IO[Any] | None = -1,
stdout: int | IO[Any] | None = -1,
stderr: int | IO[Any] | None = -1,
universal_newlines: Literal[False] = False,
shell: Literal[True] = True,
bufsize: Literal[0] = 0,
encoding: None = None,
errors: None = None,
text: Literal[False, None] = None,
**kwargs: Any,
) -> tuple[SubprocessTransport, _ProtocolT]: ...
async def subprocess_exec(
self,
protocol_factory: Callable[[], _ProtocolT],
program: Any,
*args: Any,
stdin: int | IO[Any] | None = -1,
stdout: int | IO[Any] | None = -1,
stderr: int | IO[Any] | None = -1,
universal_newlines: Literal[False] = False,
shell: Literal[False] = False,
bufsize: Literal[0] = 0,
encoding: None = None,
errors: None = None,
**kwargs: Any,
) -> tuple[SubprocessTransport, _ProtocolT]: ...
def add_reader(
self, fd: FileDescriptorLike, callback: Callable[..., Any], *args: Any
) -> None: ...
def remove_reader(self, fd: FileDescriptorLike) -> bool: ...
def add_writer(
self, fd: FileDescriptorLike, callback: Callable[..., Any], *args: Any
) -> None: ...
def remove_writer(self, fd: FileDescriptorLike) -> bool: ...
# The sock_* methods (and probably some others) are not actually implemented on
# BaseEventLoop, only on subclasses. We list them here for now for convenience.
async def sock_recv(self, sock: socket, nbytes: int) -> bytes: ...
async def sock_recv_into(self, sock: socket, buf: WriteableBuffer) -> int: ...
async def sock_sendall(self, sock: socket, data: ReadableBuffer) -> None: ...
async def sock_connect(self, sock: socket, address: _Address) -> None: ...
async def sock_accept(self, sock: socket) -> tuple[socket, _RetAddress]: ...
if sys.version_info >= (3, 11):
async def sock_recvfrom(self, sock: socket, bufsize: int) -> tuple[bytes, _RetAddress]: ...
async def sock_recvfrom_into(
self, sock: socket, buf: WriteableBuffer, nbytes: int = 0
) -> tuple[int, _RetAddress]: ...
async def sock_sendto(
self, sock: socket, data: ReadableBuffer, address: _Address
) -> int: ...
# Signal handling.
def add_signal_handler(self, sig: int, callback: Callable[..., Any], *args: Any) -> None: ...
def remove_signal_handler(self, sig: int) -> bool: ...
# Error handlers.
def set_exception_handler(self, handler: _ExceptionHandler | None) -> None: ...
def get_exception_handler(self) -> _ExceptionHandler | None: ...
def default_exception_handler(self, context: _Context) -> None: ...
def call_exception_handler(self, context: _Context) -> None: ...
# Debug flag management.
def get_debug(self) -> bool: ...
def set_debug(self, enabled: bool) -> None: ...
if sys.version_info >= (3, 12):
async def shutdown_default_executor(self, timeout: float | None = None) -> None: ...
elif sys.version_info >= (3, 9):
async def shutdown_default_executor(self) -> None: ...
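Note: the stub above only declares signatures. As a minimal usage sketch against CPython's asyncio (not part of the stub; the EchoProtocol name and the 127.0.0.1:8888 address are made up), the host/port overload of create_server is exercised like this:

import asyncio

class EchoProtocol(asyncio.Protocol):
    # Echo every received chunk straight back to the peer.
    def connection_made(self, transport):
        self.transport = transport

    def data_received(self, data):
        self.transport.write(data)

async def main() -> None:
    loop = asyncio.get_running_loop()
    # Matches the first create_server overload above: host/port given, sock left as None.
    server = await loop.create_server(EchoProtocol, host="127.0.0.1", port=8888)
    async with server:
        await server.serve_forever()

# asyncio.run(main())   # left commented so the sketch has no side effects on import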

View File

@@ -0,0 +1,20 @@
from collections.abc import Callable, Sequence
from contextvars import Context
from typing import Any
from typing_extensions import Literal
from . import futures
__all__ = ()
# asyncio defines 'isfuture()' in base_futures.py and re-imports it in futures.py,
# but that leads to a circular import error in the pytype tool.
# That's why the import order is reversed.
from .futures import isfuture as isfuture
_PENDING: Literal["PENDING"] # undocumented
_CANCELLED: Literal["CANCELLED"] # undocumented
_FINISHED: Literal["FINISHED"] # undocumented
def _format_callbacks(cb: Sequence[tuple[Callable[[futures.Future[Any]], None], Context]]) -> str: ... # undocumented
def _future_repr_info(future: futures.Future[Any]) -> list[str]: ... # undocumented
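Illustrative sketch (not part of the stub; run against CPython's asyncio): isfuture() accepts anything implementing the Future duck-type, which is what the re-exported helper above checks for.

import asyncio

async def demo() -> None:
    loop = asyncio.get_running_loop()
    fut = loop.create_future()
    print(asyncio.isfuture(fut))      # True: exposes the _asyncio_future_blocking attribute
    print(asyncio.isfuture("text"))   # False: plain objects are rejected
    fut.cancel()                      # tidy up the otherwise pending future

# asyncio.run(demo())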

View File

@@ -0,0 +1,9 @@
from _typeshed import StrOrBytesPath
from types import FrameType
from typing import Any
from . import tasks
def _task_repr_info(task: tasks.Task[Any]) -> list[str]: ... # undocumented
def _task_get_stack(task: tasks.Task[Any], limit: int | None) -> list[FrameType]: ... # undocumented
def _task_print_stack(task: tasks.Task[Any], limit: int | None, file: StrOrBytesPath) -> None: ... # undocumented

View File

@@ -0,0 +1,20 @@
import enum
import sys
from typing_extensions import Literal
LOG_THRESHOLD_FOR_CONNLOST_WRITES: Literal[5]
ACCEPT_RETRY_DELAY: Literal[1]
DEBUG_STACK_DEPTH: Literal[10]
SSL_HANDSHAKE_TIMEOUT: float
SENDFILE_FALLBACK_READBUFFER_SIZE: Literal[262144]
if sys.version_info >= (3, 11):
SSL_SHUTDOWN_TIMEOUT: float
FLOW_CONTROL_HIGH_WATER_SSL_READ: Literal[256]
FLOW_CONTROL_HIGH_WATER_SSL_WRITE: Literal[512]
if sys.version_info >= (3, 12):
THREAD_JOIN_TIMEOUT: Literal[300]
class _SendfileMode(enum.Enum):
UNSUPPORTED: int
TRY_NATIVE: int
FALLBACK: int

View File

@@ -0,0 +1,28 @@
import sys
from collections.abc import Awaitable, Callable, Coroutine
from typing import Any, TypeVar, overload
from typing_extensions import ParamSpec, TypeGuard
if sys.version_info >= (3, 11):
__all__ = ("iscoroutinefunction", "iscoroutine")
else:
__all__ = ("coroutine", "iscoroutinefunction", "iscoroutine")
_T = TypeVar("_T")
_FunctionT = TypeVar("_FunctionT", bound=Callable[..., Any])
_P = ParamSpec("_P")
if sys.version_info < (3, 11):
def coroutine(func: _FunctionT) -> _FunctionT: ...
@overload
def iscoroutinefunction(func: Callable[..., Coroutine[Any, Any, Any]]) -> bool: ...
@overload
def iscoroutinefunction(func: Callable[_P, Awaitable[_T]]) -> TypeGuard[Callable[_P, Coroutine[Any, Any, _T]]]: ...
@overload
def iscoroutinefunction(func: Callable[_P, object]) -> TypeGuard[Callable[_P, Coroutine[Any, Any, Any]]]: ...
@overload
def iscoroutinefunction(func: object) -> TypeGuard[Callable[..., Coroutine[Any, Any, Any]]]: ...
# Can actually be a generator-style coroutine on Python 3.7
def iscoroutine(obj: object) -> TypeGuard[Coroutine[Any, Any, Any]]: ...
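A short sketch of the distinction the two predicates above draw, assuming CPython's asyncio (the fetch/plain names are invented):

import asyncio

async def fetch() -> int:
    return 42

def plain() -> int:
    return 42

print(asyncio.iscoroutinefunction(fetch))   # True: inspects the callable itself
print(asyncio.iscoroutinefunction(plain))   # False

coro = fetch()
print(asyncio.iscoroutine(coro))            # True: inspects the returned object
coro.close()                                # close it so no "never awaited" warning fires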

View File

@@ -0,0 +1,687 @@
import ssl
import sys
from abc import ABCMeta, abstractmethod
from collections.abc import Callable, Coroutine, Generator, Sequence
from contextvars import Context
from typing import IO, Any, Protocol, TypeVar, overload
from _typeshed import FileDescriptorLike, ReadableBuffer, StrPath, Unused, WriteableBuffer
from stdlib.socket import AddressFamily, SocketKind, _Address, _RetAddress, socket
from typing_extensions import Literal, Self, TypeAlias
from . import _AwaitableLike, _CoroutineLike
from .base_events import Server
from .futures import Future
from .protocols import BaseProtocol
from .tasks import Task
from .transports import (
BaseTransport,
DatagramTransport,
ReadTransport,
SubprocessTransport,
Transport,
WriteTransport,
)
from .unix_events import AbstractChildWatcher
if sys.version_info >= (3, 8):
__all__ = (
"AbstractEventLoopPolicy",
"AbstractEventLoop",
"AbstractServer",
"Handle",
"TimerHandle",
"get_event_loop_policy",
"set_event_loop_policy",
"get_event_loop",
"set_event_loop",
"new_event_loop",
"get_child_watcher",
"set_child_watcher",
"_set_running_loop",
"get_running_loop",
"_get_running_loop",
)
else:
__all__ = (
"AbstractEventLoopPolicy",
"AbstractEventLoop",
"AbstractServer",
"Handle",
"TimerHandle",
"SendfileNotAvailableError",
"get_event_loop_policy",
"set_event_loop_policy",
"get_event_loop",
"set_event_loop",
"new_event_loop",
"get_child_watcher",
"set_child_watcher",
"_set_running_loop",
"get_running_loop",
"_get_running_loop",
)
_T = TypeVar("_T")
_ProtocolT = TypeVar("_ProtocolT", bound=BaseProtocol)
_Context: TypeAlias = dict[str, Any]
_ExceptionHandler: TypeAlias = Callable[[AbstractEventLoop, _Context], object]
_ProtocolFactory: TypeAlias = Callable[[], BaseProtocol]
_SSLContext: TypeAlias = bool | None | ssl.SSLContext # type: ignore
class _TaskFactory(Protocol):
def __call__(
self,
__loop: AbstractEventLoop,
__factory: Coroutine[Any, Any, _T] | Generator[Any, None, _T],
) -> Future[_T]: ...
class Handle:
_cancelled: bool
_args: Sequence[Any]
def __init__(
self,
callback: Callable[..., object],
args: Sequence[Any],
loop: AbstractEventLoop,
context: Context | None = None,
) -> None: ...
def cancel(self) -> None: ...
def _run(self) -> None: ...
def cancelled(self) -> bool: ...
if sys.version_info >= (3, 12):
def get_context(self) -> Context: ...
class TimerHandle(Handle):
def __init__(
self,
when: float,
callback: Callable[..., object],
args: Sequence[Any],
loop: AbstractEventLoop,
context: Context | None = None,
) -> None: ...
def __hash__(self) -> int: ...
def when(self) -> float: ...
def __lt__(self, other: TimerHandle) -> bool: ...
def __le__(self, other: TimerHandle) -> bool: ...
def __gt__(self, other: TimerHandle) -> bool: ...
def __ge__(self, other: TimerHandle) -> bool: ...
def __eq__(self, other: object) -> bool: ...
class AbstractServer:
@abstractmethod
def close(self) -> None: ...
async def __aenter__(self) -> Self: ...
async def __aexit__(self, *exc: Unused) -> None: ...
@abstractmethod
def get_loop(self) -> AbstractEventLoop: ...
@abstractmethod
def is_serving(self) -> bool: ...
@abstractmethod
async def start_serving(self) -> None: ...
@abstractmethod
async def serve_forever(self) -> None: ...
@abstractmethod
async def wait_closed(self) -> None: ...
class AbstractEventLoop:
slow_callback_duration: float
@abstractmethod
def run_forever(self) -> None: ...
@abstractmethod
def run_until_complete(self, future: _AwaitableLike[_T]) -> _T: ...
@abstractmethod
def stop(self) -> None: ...
@abstractmethod
def is_running(self) -> bool: ...
@abstractmethod
def is_closed(self) -> bool: ...
@abstractmethod
def close(self) -> None: ...
@abstractmethod
async def shutdown_asyncgens(self) -> None: ...
# Methods scheduling callbacks. All these return Handles.
if sys.version_info >= (3, 9): # "context" added in 3.9.10/3.10.2
@abstractmethod
def call_soon(
self, callback: Callable[..., object], *args: Any, context: Context | None = None
) -> Handle: ...
@abstractmethod
def call_later(
self,
delay: float,
callback: Callable[..., object],
*args: Any,
context: Context | None = None,
) -> TimerHandle: ...
@abstractmethod
def call_at(
self,
when: float,
callback: Callable[..., object],
*args: Any,
context: Context | None = None,
) -> TimerHandle: ...
else:
@abstractmethod
def call_soon(self, callback: Callable[..., object], *args: Any) -> Handle: ...
@abstractmethod
def call_later(
self, delay: float, callback: Callable[..., object], *args: Any
) -> TimerHandle: ...
@abstractmethod
def call_at(
self, when: float, callback: Callable[..., object], *args: Any
) -> TimerHandle: ...
@abstractmethod
def time(self) -> float: ...
# Future methods
@abstractmethod
def create_future(self) -> Future[Any]: ...
# Tasks methods
if sys.version_info >= (3, 11):
@abstractmethod
def create_task(
self,
coro: _CoroutineLike[_T],
*,
name: str | None = None,
context: Context | None = None,
) -> Task[_T]: ...
elif sys.version_info >= (3, 8):
@abstractmethod
def create_task(
self, coro: _CoroutineLike[_T], *, name: str | None = None
) -> Task[_T]: ...
else:
@abstractmethod
def create_task(self, coro: _CoroutineLike[_T]) -> Task[_T]: ...
@abstractmethod
def set_task_factory(self, factory: _TaskFactory | None) -> None: ...
@abstractmethod
def get_task_factory(self) -> _TaskFactory | None: ...
# Methods for interacting with threads
if sys.version_info >= (3, 9): # "context" added in 3.9.10/3.10.2
@abstractmethod
def call_soon_threadsafe(
self, callback: Callable[..., object], *args: Any, context: Context | None = None
) -> Handle: ...
else:
@abstractmethod
def call_soon_threadsafe(self, callback: Callable[..., object], *args: Any) -> Handle: ...
@abstractmethod
def run_in_executor(
self, executor: Any, func: Callable[..., _T], *args: Any
) -> Future[_T]: ...
@abstractmethod
def set_default_executor(self, executor: Any) -> None: ...
# Network I/O methods returning Futures.
@abstractmethod
async def getaddrinfo(
self,
host: bytes | str | None,
port: bytes | str | int | None,
*,
family: int = 0,
type: int = 0,
proto: int = 0,
flags: int = 0,
) -> list[
tuple[AddressFamily, SocketKind, int, str, tuple[str, int] | tuple[str, int, int, int]]
]: ...
@abstractmethod
async def getnameinfo(
self, sockaddr: tuple[str, int] | tuple[str, int, int, int], flags: int = 0
) -> tuple[str, str]: ...
if sys.version_info >= (3, 11):
@overload
@abstractmethod
async def create_connection(
self,
protocol_factory: Callable[[], _ProtocolT],
host: str = ...,
port: int = ...,
*,
ssl: _SSLContext = None,
family: int = 0,
proto: int = 0,
flags: int = 0,
sock: None = None,
local_addr: tuple[str, int] | None = None,
server_hostname: str | None = None,
ssl_handshake_timeout: float | None = None,
ssl_shutdown_timeout: float | None = None,
happy_eyeballs_delay: float | None = None,
interleave: int | None = None,
) -> tuple[Transport, _ProtocolT]: ...
@overload
@abstractmethod
async def create_connection(
self,
protocol_factory: Callable[[], _ProtocolT],
host: None = None,
port: None = None,
*,
ssl: _SSLContext = None,
family: int = 0,
proto: int = 0,
flags: int = 0,
sock: socket,
local_addr: None = None,
server_hostname: str | None = None,
ssl_handshake_timeout: float | None = None,
ssl_shutdown_timeout: float | None = None,
happy_eyeballs_delay: float | None = None,
interleave: int | None = None,
) -> tuple[Transport, _ProtocolT]: ...
elif sys.version_info >= (3, 8):
@overload
@abstractmethod
async def create_connection(
self,
protocol_factory: Callable[[], _ProtocolT],
host: str = ...,
port: int = ...,
*,
ssl: _SSLContext = None,
family: int = 0,
proto: int = 0,
flags: int = 0,
sock: None = None,
local_addr: tuple[str, int] | None = None,
server_hostname: str | None = None,
ssl_handshake_timeout: float | None = None,
happy_eyeballs_delay: float | None = None,
interleave: int | None = None,
) -> tuple[Transport, _ProtocolT]: ...
@overload
@abstractmethod
async def create_connection(
self,
protocol_factory: Callable[[], _ProtocolT],
host: None = None,
port: None = None,
*,
ssl: _SSLContext = None,
family: int = 0,
proto: int = 0,
flags: int = 0,
sock: socket,
local_addr: None = None,
server_hostname: str | None = None,
ssl_handshake_timeout: float | None = None,
happy_eyeballs_delay: float | None = None,
interleave: int | None = None,
) -> tuple[Transport, _ProtocolT]: ...
else:
@overload
@abstractmethod
async def create_connection(
self,
protocol_factory: Callable[[], _ProtocolT],
host: str = ...,
port: int = ...,
*,
ssl: _SSLContext = None,
family: int = 0,
proto: int = 0,
flags: int = 0,
sock: None = None,
local_addr: tuple[str, int] | None = None,
server_hostname: str | None = None,
ssl_handshake_timeout: float | None = None,
) -> tuple[Transport, _ProtocolT]: ...
@overload
@abstractmethod
async def create_connection(
self,
protocol_factory: Callable[[], _ProtocolT],
host: None = None,
port: None = None,
*,
ssl: _SSLContext = None,
family: int = 0,
proto: int = 0,
flags: int = 0,
sock: socket,
local_addr: None = None,
server_hostname: str | None = None,
ssl_handshake_timeout: float | None = None,
) -> tuple[Transport, _ProtocolT]: ...
if sys.version_info >= (3, 11):
@overload
@abstractmethod
async def create_server(
self,
protocol_factory: _ProtocolFactory,
host: str | Sequence[str] | None = None,
port: int = ...,
*,
family: int = ...,
flags: int = ...,
sock: None = None,
backlog: int = 100,
ssl: _SSLContext = None,
reuse_address: bool | None = None,
reuse_port: bool | None = None,
ssl_handshake_timeout: float | None = None,
ssl_shutdown_timeout: float | None = None,
start_serving: bool = True,
) -> Server: ...
@overload
@abstractmethod
async def create_server(
self,
protocol_factory: _ProtocolFactory,
host: None = None,
port: None = None,
*,
family: int = ...,
flags: int = ...,
sock: socket = ...,
backlog: int = 100,
ssl: _SSLContext = None,
reuse_address: bool | None = None,
reuse_port: bool | None = None,
ssl_handshake_timeout: float | None = None,
ssl_shutdown_timeout: float | None = None,
start_serving: bool = True,
) -> Server: ...
@abstractmethod
async def start_tls(
self,
transport: WriteTransport,
protocol: BaseProtocol,
sslcontext: ssl.SSLContext,
*,
server_side: bool = False,
server_hostname: str | None = None,
ssl_handshake_timeout: float | None = None,
ssl_shutdown_timeout: float | None = None,
) -> Transport | None: ...
async def create_unix_server(
self,
protocol_factory: _ProtocolFactory,
path: StrPath | None = None,
*,
sock: socket | None = None,
backlog: int = 100,
ssl: _SSLContext = None,
ssl_handshake_timeout: float | None = None,
ssl_shutdown_timeout: float | None = None,
start_serving: bool = True,
) -> Server: ...
else:
@overload
@abstractmethod
async def create_server(
self,
protocol_factory: _ProtocolFactory,
host: str | Sequence[str] | None = None,
port: int = ...,
*,
family: int = ...,
flags: int = ...,
sock: None = None,
backlog: int = 100,
ssl: _SSLContext = None,
reuse_address: bool | None = None,
reuse_port: bool | None = None,
ssl_handshake_timeout: float | None = None,
start_serving: bool = True,
) -> Server: ...
@overload
@abstractmethod
async def create_server(
self,
protocol_factory: _ProtocolFactory,
host: None = None,
port: None = None,
*,
family: int = ...,
flags: int = ...,
sock: socket = ...,
backlog: int = 100,
ssl: _SSLContext = None,
reuse_address: bool | None = None,
reuse_port: bool | None = None,
ssl_handshake_timeout: float | None = None,
start_serving: bool = True,
) -> Server: ...
@abstractmethod
async def start_tls(
self,
transport: BaseTransport,
protocol: BaseProtocol,
sslcontext: ssl.SSLContext, # type: ignore
*,
server_side: bool = False,
server_hostname: str | None = None,
ssl_handshake_timeout: float | None = None,
) -> Transport | None: ...
async def create_unix_server(
self,
protocol_factory: _ProtocolFactory,
path: StrPath | None = None,
*,
sock: socket | None = None,
backlog: int = 100,
ssl: _SSLContext = None,
ssl_handshake_timeout: float | None = None,
start_serving: bool = True,
) -> Server: ...
if sys.version_info >= (3, 11):
async def connect_accepted_socket(
self,
protocol_factory: Callable[[], _ProtocolT],
sock: socket,
*,
ssl: _SSLContext = None,
ssl_handshake_timeout: float | None = None,
ssl_shutdown_timeout: float | None = None,
) -> tuple[Transport, _ProtocolT]: ...
elif sys.version_info >= (3, 10):
async def connect_accepted_socket(
self,
protocol_factory: Callable[[], _ProtocolT],
sock: socket,
*,
ssl: _SSLContext = None,
ssl_handshake_timeout: float | None = None,
) -> tuple[Transport, _ProtocolT]: ...
if sys.version_info >= (3, 11):
async def create_unix_connection(
self,
protocol_factory: Callable[[], _ProtocolT],
path: str | None = None,
*,
ssl: _SSLContext = None,
sock: socket | None = None,
server_hostname: str | None = None,
ssl_handshake_timeout: float | None = None,
ssl_shutdown_timeout: float | None = None,
) -> tuple[Transport, _ProtocolT]: ...
else:
async def create_unix_connection(
self,
protocol_factory: Callable[[], _ProtocolT],
path: str | None = None,
*,
ssl: _SSLContext = None,
sock: socket | None = None,
server_hostname: str | None = None,
ssl_handshake_timeout: float | None = None,
) -> tuple[Transport, _ProtocolT]: ...
@abstractmethod
async def sock_sendfile(
self,
sock: socket,
file: IO[bytes],
offset: int = 0,
count: int | None = None,
*,
fallback: bool | None = None,
) -> int: ...
@abstractmethod
async def sendfile(
self,
transport: WriteTransport,
file: IO[bytes],
offset: int = 0,
count: int | None = None,
*,
fallback: bool = True,
) -> int: ...
@abstractmethod
async def create_datagram_endpoint(
self,
protocol_factory: Callable[[], _ProtocolT],
local_addr: tuple[str, int] | str | None = None,
remote_addr: tuple[str, int] | str | None = None,
*,
family: int = 0,
proto: int = 0,
flags: int = 0,
reuse_address: bool | None = None,
reuse_port: bool | None = None,
allow_broadcast: bool | None = None,
sock: socket | None = None,
) -> tuple[DatagramTransport, _ProtocolT]: ...
# Pipes and subprocesses.
@abstractmethod
async def connect_read_pipe(
self, protocol_factory: Callable[[], _ProtocolT], pipe: Any
) -> tuple[ReadTransport, _ProtocolT]: ...
@abstractmethod
async def connect_write_pipe(
self, protocol_factory: Callable[[], _ProtocolT], pipe: Any
) -> tuple[WriteTransport, _ProtocolT]: ...
@abstractmethod
async def subprocess_shell(
self,
protocol_factory: Callable[[], _ProtocolT],
cmd: bytes | str,
*,
stdin: int | IO[Any] | None = -1,
stdout: int | IO[Any] | None = -1,
stderr: int | IO[Any] | None = -1,
universal_newlines: Literal[False] = False,
shell: Literal[True] = True,
bufsize: Literal[0] = 0,
encoding: None = None,
errors: None = None,
text: Literal[False, None] = ...,
**kwargs: Any,
) -> tuple[SubprocessTransport, _ProtocolT]: ...
@abstractmethod
async def subprocess_exec(
self,
protocol_factory: Callable[[], _ProtocolT],
program: Any,
*args: Any,
stdin: int | IO[Any] | None = -1,
stdout: int | IO[Any] | None = -1,
stderr: int | IO[Any] | None = -1,
universal_newlines: Literal[False] = False,
shell: Literal[False] = False,
bufsize: Literal[0] = 0,
encoding: None = None,
errors: None = None,
**kwargs: Any,
) -> tuple[SubprocessTransport, _ProtocolT]: ...
@abstractmethod
def add_reader(
self, fd: FileDescriptorLike, callback: Callable[..., Any], *args: Any
) -> None: ...
@abstractmethod
def remove_reader(self, fd: FileDescriptorLike) -> bool: ...
@abstractmethod
def add_writer(
self, fd: FileDescriptorLike, callback: Callable[..., Any], *args: Any
) -> None: ...
@abstractmethod
def remove_writer(self, fd: FileDescriptorLike) -> bool: ...
    # Completion-based I/O methods returning Futures prior to 3.7
@abstractmethod
async def sock_recv(self, sock: socket, nbytes: int) -> bytes: ...
@abstractmethod
async def sock_recv_into(self, sock: socket, buf: WriteableBuffer) -> int: ...
@abstractmethod
async def sock_sendall(self, sock: socket, data: ReadableBuffer) -> None: ...
@abstractmethod
async def sock_connect(self, sock: socket, address: _Address) -> None: ...
@abstractmethod
async def sock_accept(self, sock: socket) -> tuple[socket, _RetAddress]: ...
if sys.version_info >= (3, 11):
@abstractmethod
async def sock_recvfrom(self, sock: socket, bufsize: int) -> tuple[bytes, _RetAddress]: ...
@abstractmethod
async def sock_recvfrom_into(
self, sock: socket, buf: WriteableBuffer, nbytes: int = 0
) -> tuple[int, _RetAddress]: ...
@abstractmethod
async def sock_sendto(
self, sock: socket, data: ReadableBuffer, address: _Address
) -> int: ...
# Signal handling.
@abstractmethod
def add_signal_handler(
self, sig: int, callback: Callable[..., object], *args: Any
) -> None: ...
@abstractmethod
def remove_signal_handler(self, sig: int) -> bool: ...
# Error handlers.
@abstractmethod
def set_exception_handler(self, handler: _ExceptionHandler | None) -> None: ...
@abstractmethod
def get_exception_handler(self) -> _ExceptionHandler | None: ...
@abstractmethod
def default_exception_handler(self, context: _Context) -> None: ...
@abstractmethod
def call_exception_handler(self, context: _Context) -> None: ...
# Debug flag management.
@abstractmethod
def get_debug(self) -> bool: ...
@abstractmethod
def set_debug(self, enabled: bool) -> None: ...
if sys.version_info >= (3, 9):
@abstractmethod
async def shutdown_default_executor(self) -> None: ...
class AbstractEventLoopPolicy:
@abstractmethod
def get_event_loop(self) -> AbstractEventLoop: ...
@abstractmethod
def set_event_loop(self, loop: AbstractEventLoop | None) -> None: ...
@abstractmethod
def new_event_loop(self) -> AbstractEventLoop: ...
# Child processes handling (Unix only).
@abstractmethod
def get_child_watcher(self) -> AbstractChildWatcher: ...
@abstractmethod
def set_child_watcher(self, watcher: AbstractChildWatcher) -> None: ...
class BaseDefaultEventLoopPolicy(AbstractEventLoopPolicy, metaclass=ABCMeta):
def get_event_loop(self) -> AbstractEventLoop: ...
def set_event_loop(self, loop: AbstractEventLoop | None) -> None: ...
def new_event_loop(self) -> AbstractEventLoop: ...
def get_event_loop_policy() -> AbstractEventLoopPolicy: ...
def set_event_loop_policy(policy: AbstractEventLoopPolicy | None) -> None: ...
def get_event_loop() -> AbstractEventLoop: ...
def set_event_loop(loop: AbstractEventLoop | None) -> None: ...
def new_event_loop() -> AbstractEventLoop: ...
def get_child_watcher() -> AbstractChildWatcher: ...
def set_child_watcher(watcher: AbstractChildWatcher) -> None: ...
def _set_running_loop(__loop: AbstractEventLoop | None) -> None: ...
def _get_running_loop() -> AbstractEventLoop: ...
def get_running_loop() -> AbstractEventLoop: ...
if sys.version_info < (3, 8):
class SendfileNotAvailableError(RuntimeError): ...
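For orientation, a minimal sketch of how the loop accessors and policy functions declared above fit together on CPython (not part of the stub):

import asyncio

async def where_am_i() -> None:
    loop = asyncio.get_running_loop()    # preferred accessor inside coroutines
    print(type(loop).__name__, loop.is_running())

# Outside a coroutine, new loops come from the policy machinery.
policy = asyncio.get_event_loop_policy()
loop = policy.new_event_loop()
try:
    loop.run_until_complete(where_am_i())
finally:
    loop.close()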

View File

@@ -0,0 +1,38 @@
import sys
if sys.version_info >= (3, 11):
__all__ = (
"BrokenBarrierError",
"CancelledError",
"InvalidStateError",
"TimeoutError",
"IncompleteReadError",
"LimitOverrunError",
"SendfileNotAvailableError",
)
else:
__all__ = (
"CancelledError",
"InvalidStateError",
"TimeoutError",
"IncompleteReadError",
"LimitOverrunError",
"SendfileNotAvailableError",
)
class CancelledError(BaseException): ...
class TimeoutError(Exception): ...
class InvalidStateError(Exception): ...
class SendfileNotAvailableError(RuntimeError): ...
class IncompleteReadError(EOFError):
expected: int | None
partial: bytes
def __init__(self, partial: bytes, expected: int | None) -> None: ...
class LimitOverrunError(Exception):
consumed: int
def __init__(self, message: str, consumed: int) -> None: ...
if sys.version_info >= (3, 11):
class BrokenBarrierError(RuntimeError): ...
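A sketch of how IncompleteReadError is typically handled (not part of the stub; the five-byte read is an arbitrary example):

import asyncio

async def demo() -> None:
    reader = asyncio.StreamReader()
    reader.feed_data(b"abc")
    reader.feed_eof()
    try:
        await reader.readexactly(5)
    except asyncio.IncompleteReadError as exc:
        # .partial holds what arrived, .expected what was requested
        print(f"got {len(exc.partial)} of {exc.expected} bytes before EOF")

# asyncio.run(demo())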

View File

@@ -0,0 +1,20 @@
import functools
import traceback
from collections.abc import Iterable
from types import FrameType, FunctionType
from typing import Any, overload
from typing_extensions import TypeAlias
class _HasWrapper:
__wrapper__: _HasWrapper | FunctionType
_FuncType: TypeAlias = FunctionType | _HasWrapper | functools.partial[Any] | functools.partialmethod[Any]
@overload
def _get_function_source(func: _FuncType) -> tuple[str, int]: ...
@overload
def _get_function_source(func: object) -> tuple[str, int] | None: ...
def _format_callback_source(func: object, args: Iterable[Any]) -> str: ...
def _format_args_and_kwargs(args: Iterable[Any], kwargs: dict[str, Any]) -> str: ...
def _format_callback(func: object, args: Iterable[Any], kwargs: dict[str, Any], suffix: str = "") -> str: ...
def extract_stack(f: FrameType | None = None, limit: int | None = None) -> traceback.StackSummary: ...

View File

@@ -0,0 +1,66 @@
import sys
from collections.abc import Awaitable, Callable, Generator, Iterable
from concurrent.futures._base import Error, Future as _ConcurrentFuture
from typing import Any, TypeVar
from typing_extensions import Literal, Self, TypeGuard
from .events import AbstractEventLoop
if sys.version_info < (3, 8):
from concurrent.futures import CancelledError as CancelledError, TimeoutError as TimeoutError
class InvalidStateError(Error): ...
from contextvars import Context
if sys.version_info >= (3, 9):
from types import GenericAlias
if sys.version_info >= (3, 8):
__all__ = ("Future", "wrap_future", "isfuture")
else:
__all__ = ("CancelledError", "TimeoutError", "InvalidStateError", "Future", "wrap_future", "isfuture")
_T = TypeVar("_T")
# asyncio defines 'isfuture()' in base_futures.py and re-imports it in futures.py,
# but that leads to a circular import error in the pytype tool.
# That's why the import order is reversed.
def isfuture(obj: object) -> TypeGuard[Future[Any]]: ...
class Future(Awaitable[_T], Iterable[_T]):
_state: str
@property
def _exception(self) -> BaseException | None: ...
_blocking: bool
@property
def _log_traceback(self) -> bool: ...
@_log_traceback.setter
def _log_traceback(self, val: Literal[False]) -> None: ...
    _asyncio_future_blocking: bool  # part of the duck-typing contract for `Future`
def __init__(self, *, loop: AbstractEventLoop | None = ...) -> None: ...
def __del__(self) -> None: ...
def get_loop(self) -> AbstractEventLoop: ...
@property
def _callbacks(self) -> list[tuple[Callable[[Self], Any], Context]]: ...
def add_done_callback(self, __fn: Callable[[Self], object], *, context: Context | None = None) -> None: ...
if sys.version_info >= (3, 9):
def cancel(self, msg: Any | None = None) -> bool: ...
else:
def cancel(self) -> bool: ...
def cancelled(self) -> bool: ...
def done(self) -> bool: ...
def result(self) -> _T: ...
def exception(self) -> BaseException | None: ...
def remove_done_callback(self, __fn: Callable[[Self], object]) -> int: ...
def set_result(self, __result: _T) -> None: ...
def set_exception(self, __exception: type | BaseException) -> None: ...
def __iter__(self) -> Generator[Any, None, _T]: ...
def __await__(self) -> Generator[Any, None, _T]: ...
@property
def _loop(self) -> AbstractEventLoop: ...
if sys.version_info >= (3, 9):
def __class_getitem__(cls, item: Any) -> GenericAlias: ...
def wrap_future(future: _ConcurrentFuture[_T] | Future[_T], *, loop: AbstractEventLoop | None = None) -> Future[_T]: ...
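A small usage sketch for Future and wrap_future on CPython's asyncio (not part of the stub; names are illustrative):

import asyncio
import concurrent.futures

async def demo() -> None:
    loop = asyncio.get_running_loop()

    # A bare Future: a producer sets the result, a consumer awaits it.
    fut: "asyncio.Future[str]" = loop.create_future()
    fut.add_done_callback(lambda f: print("callback saw:", f.result()))
    loop.call_soon(fut.set_result, "hello")
    print(await fut)

    # wrap_future bridges a concurrent.futures.Future into the event loop.
    with concurrent.futures.ThreadPoolExecutor() as pool:
        print(await asyncio.wrap_future(pool.submit(sum, range(10))))

# asyncio.run(demo())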

View File

@@ -0,0 +1,116 @@
import enum
import sys
from _typeshed import Unused
from collections import deque
from collections.abc import Callable, Generator
from types import TracebackType
from typing import Any, TypeVar
from typing_extensions import Literal, Self
from .events import AbstractEventLoop
from .futures import Future
if sys.version_info >= (3, 11):
from .mixins import _LoopBoundMixin
if sys.version_info >= (3, 11):
__all__ = ("Lock", "Event", "Condition", "Semaphore", "BoundedSemaphore", "Barrier")
else:
__all__ = ("Lock", "Event", "Condition", "Semaphore", "BoundedSemaphore")
_T = TypeVar("_T")
if sys.version_info >= (3, 9):
class _ContextManagerMixin:
async def __aenter__(self) -> None: ...
async def __aexit__(
self, exc_type: type[BaseException] | None, exc: BaseException | None, tb: TracebackType | None
) -> None: ...
else:
class _ContextManager:
def __init__(self, lock: Lock | Semaphore) -> None: ...
def __enter__(self) -> None: ...
def __exit__(self, *args: Unused) -> None: ...
class _ContextManagerMixin:
# Apparently this exists to *prohibit* use as a context manager.
# def __enter__(self) -> NoReturn: ... see: https://github.com/python/typing/issues/1043
# def __exit__(self, *args: Any) -> None: ...
def __iter__(self) -> Generator[Any, None, _ContextManager]: ...
def __await__(self) -> Generator[Any, None, _ContextManager]: ...
async def __aenter__(self) -> None: ...
async def __aexit__(
self, exc_type: type[BaseException] | None, exc: BaseException | None, tb: TracebackType | None
) -> None: ...
class Lock(_ContextManagerMixin):
if sys.version_info >= (3, 10):
def __init__(self) -> None: ...
else:
def __init__(self, *, loop: AbstractEventLoop | None = None) -> None: ...
def locked(self) -> bool: ...
async def acquire(self) -> Literal[True]: ...
def release(self) -> None: ...
class Event:
if sys.version_info >= (3, 10):
def __init__(self) -> None: ...
else:
def __init__(self, *, loop: AbstractEventLoop | None = None) -> None: ...
def is_set(self) -> bool: ...
def set(self) -> None: ...
def clear(self) -> None: ...
async def wait(self) -> Literal[True]: ...
class Condition(_ContextManagerMixin):
if sys.version_info >= (3, 10):
def __init__(self, lock: Lock | None = None) -> None: ...
else:
def __init__(self, lock: Lock | None = None, *, loop: AbstractEventLoop | None = None) -> None: ...
def locked(self) -> bool: ...
async def acquire(self) -> Literal[True]: ...
def release(self) -> None: ...
async def wait(self) -> Literal[True]: ...
async def wait_for(self, predicate: Callable[[], _T]) -> _T: ...
def notify(self, n: int = 1) -> None: ...
def notify_all(self) -> None: ...
class Semaphore(_ContextManagerMixin):
_value: int
_waiters: deque[Future[Any]] # type: ignore
if sys.version_info >= (3, 10):
def __init__(self, value: int = 1) -> None: ...
else:
def __init__(self, value: int = 1, *, loop: AbstractEventLoop | None = None) -> None: ...
def locked(self) -> bool: ...
async def acquire(self) -> Literal[True]: ...
def release(self) -> None: ...
def _wake_up_next(self) -> None: ...
class BoundedSemaphore(Semaphore): ...
if sys.version_info >= (3, 11):
class _BarrierState(enum.Enum): # undocumented
FILLING: str
DRAINING: str
RESETTING: str
BROKEN: str
class Barrier(_LoopBoundMixin):
def __init__(self, parties: int) -> None: ...
async def __aenter__(self) -> Self: ...
async def __aexit__(self, *args: Unused) -> None: ...
async def wait(self) -> int: ...
async def abort(self) -> None: ...
async def reset(self) -> None: ...
@property
def parties(self) -> int: ...
@property
def n_waiting(self) -> int: ...
@property
def broken(self) -> bool: ...
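A brief sketch of the async-with usage these primitives gain from _ContextManagerMixin (not part of the stub; the worker names are invented):

import asyncio

async def worker(name: str, lock: asyncio.Lock, event: asyncio.Event) -> None:
    await event.wait()                 # block until the event is set
    async with lock:                   # _ContextManagerMixin provides the async-with form
        print(name, "holds the lock")

async def demo() -> None:
    lock, event = asyncio.Lock(), asyncio.Event()
    tasks = [asyncio.create_task(worker(f"w{i}", lock, event)) for i in range(3)]
    event.set()
    await asyncio.gather(*tasks)

# asyncio.run(demo())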

View File

@@ -0,0 +1,3 @@
import logging
logger: logging.Logger

View File

@@ -0,0 +1,10 @@
import sys
import threading
from typing_extensions import Never
_global_lock: threading.Lock # type: ignore
class _LoopBoundMixin:
if sys.version_info < (3, 11):
def __init__(self, *, loop: Never = ...) -> None: ...

View File

@@ -0,0 +1,74 @@
import sys
from collections.abc import Mapping
from socket import socket
from typing import Any, ClassVar, Protocol
from typing_extensions import Literal
from . import base_events, constants, events, futures, streams, transports
__all__ = ("BaseProactorEventLoop",)
if sys.version_info >= (3, 8):
class _WarnCallbackProtocol(Protocol):
def __call__(
self, message: str, category: type[Warning] | None = ..., stacklevel: int = ..., source: Any | None = ...
) -> object: ...
class _ProactorBasePipeTransport(transports._FlowControlMixin, transports.BaseTransport):
def __init__(
self,
loop: events.AbstractEventLoop,
sock: socket,
protocol: streams.StreamReaderProtocol,
waiter: futures.Future[Any] | None = None,
extra: Mapping[Any, Any] | None = None,
server: events.AbstractServer | None = None,
) -> None: ...
if sys.version_info >= (3, 8):
def __del__(self, _warn: _WarnCallbackProtocol = ...) -> None: ...
else:
def __del__(self) -> None: ...
class _ProactorReadPipeTransport(_ProactorBasePipeTransport, transports.ReadTransport):
if sys.version_info >= (3, 10):
def __init__(
self,
loop: events.AbstractEventLoop,
sock: socket,
protocol: streams.StreamReaderProtocol,
waiter: futures.Future[Any] | None = None,
extra: Mapping[Any, Any] | None = None,
server: events.AbstractServer | None = None,
buffer_size: int = 65536,
) -> None: ...
else:
def __init__(
self,
loop: events.AbstractEventLoop,
sock: socket,
protocol: streams.StreamReaderProtocol,
waiter: futures.Future[Any] | None = None,
extra: Mapping[Any, Any] | None = None,
server: events.AbstractServer | None = None,
) -> None: ...
class _ProactorBaseWritePipeTransport(_ProactorBasePipeTransport, transports.WriteTransport): ...
class _ProactorWritePipeTransport(_ProactorBaseWritePipeTransport): ...
class _ProactorDuplexPipeTransport(_ProactorReadPipeTransport, _ProactorBaseWritePipeTransport, transports.Transport): ...
class _ProactorSocketTransport(_ProactorReadPipeTransport, _ProactorBaseWritePipeTransport, transports.Transport):
_sendfile_compatible: ClassVar[constants._SendfileMode]
def __init__(
self,
loop: events.AbstractEventLoop,
sock: socket,
protocol: streams.StreamReaderProtocol,
waiter: futures.Future[Any] | None = None,
extra: Mapping[Any, Any] | None = None,
server: events.AbstractServer | None = None,
) -> None: ...
def _set_extra(self, sock: socket) -> None: ...
def can_write_eof(self) -> Literal[True]: ...
class BaseProactorEventLoop(base_events.BaseEventLoop):
def __init__(self, proactor: Any) -> None: ...

View File

@@ -0,0 +1,34 @@
from _typeshed import ReadableBuffer
from asyncio import transports
from typing import Any
__all__ = ("BaseProtocol", "Protocol", "DatagramProtocol", "SubprocessProtocol", "BufferedProtocol")
class BaseProtocol:
def connection_made(self, transport: transports.BaseTransport) -> None: ...
def connection_lost(self, exc: Exception | None) -> None: ...
def pause_writing(self) -> None: ...
def resume_writing(self) -> None: ...
class Protocol(BaseProtocol):
def data_received(self, data: bytes) -> None: ...
def eof_received(self) -> bool | None: ...
class BufferedProtocol(BaseProtocol):
def get_buffer(self, sizehint: int) -> ReadableBuffer: ...
def buffer_updated(self, nbytes: int) -> None: ...
def eof_received(self) -> bool | None: ...
class DatagramProtocol(BaseProtocol):
def connection_made(self, transport: transports.DatagramTransport) -> None: ... # type: ignore[override]
# addr can be a tuple[int, int] for some unusual protocols like socket.AF_NETLINK.
# Use tuple[str | Any, int] to not cause typechecking issues on most usual cases.
# This could be improved by using tuple[AnyOf[str, int], int] if the AnyOf feature is accepted.
# See https://github.com/python/typing/issues/566
def datagram_received(self, data: bytes, addr: tuple[str | Any, int]) -> None: ...
def error_received(self, exc: Exception) -> None: ...
class SubprocessProtocol(BaseProtocol):
def pipe_data_received(self, fd: int, data: bytes) -> None: ...
def pipe_connection_lost(self, fd: int, exc: Exception | None) -> None: ...
def process_exited(self) -> None: ...
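As a sketch of how these protocol classes are subclassed in practice (not part of the stub; UDPEcho and the 127.0.0.1:9999 address are made up):

import asyncio

class UDPEcho(asyncio.DatagramProtocol):
    def connection_made(self, transport):
        self.transport = transport

    def datagram_received(self, data, addr):
        # addr is the (host, port) pair discussed in the comment above
        self.transport.sendto(data, addr)

async def demo() -> None:
    loop = asyncio.get_running_loop()
    transport, _protocol = await loop.create_datagram_endpoint(
        UDPEcho, local_addr=("127.0.0.1", 9999)
    )
    try:
        await asyncio.sleep(60)     # serve datagrams for a minute
    finally:
        transport.close()

# asyncio.run(demo())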

View File

@@ -0,0 +1,40 @@
import sys
from asyncio.events import AbstractEventLoop
from typing import Any, Generic, TypeVar
if sys.version_info >= (3, 9):
from types import GenericAlias
__all__ = ("Queue", "PriorityQueue", "LifoQueue", "QueueFull", "QueueEmpty")
class QueueEmpty(Exception): ...
class QueueFull(Exception): ...
_T = TypeVar("_T")
class Queue(Generic[_T]):
if sys.version_info >= (3, 10):
def __init__(self, maxsize: int = 0) -> None: ...
else:
def __init__(self, maxsize: int = 0, *, loop: AbstractEventLoop | None = None) -> None: ...
def _init(self, maxsize: int) -> None: ...
def _get(self) -> _T: ...
def _put(self, item: _T) -> None: ...
def _format(self) -> str: ...
def qsize(self) -> int: ...
@property
def maxsize(self) -> int: ...
def empty(self) -> bool: ...
def full(self) -> bool: ...
async def put(self, item: _T) -> None: ...
def put_nowait(self, item: _T) -> None: ...
async def get(self) -> _T: ...
def get_nowait(self) -> _T: ...
async def join(self) -> None: ...
def task_done(self) -> None: ...
if sys.version_info >= (3, 9):
def __class_getitem__(cls, type: Any) -> GenericAlias: ...
class PriorityQueue(Queue[_T]): ...
class LifoQueue(Queue[_T]): ...
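A producer/consumer sketch showing how put(), get(), task_done() and join() cooperate (not part of the stub; maxsize=2 is arbitrary):

import asyncio

async def producer(q: "asyncio.Queue[int]") -> None:
    for i in range(5):
        await q.put(i)          # blocks while the queue is at maxsize
    await q.join()              # wait until every item has been task_done()

async def consumer(q: "asyncio.Queue[int]") -> None:
    while True:
        item = await q.get()
        print("consumed", item)
        q.task_done()

async def demo() -> None:
    q: "asyncio.Queue[int]" = asyncio.Queue(maxsize=2)
    worker = asyncio.create_task(consumer(q))
    await producer(q)
    worker.cancel()

# asyncio.run(demo())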

View File

@@ -0,0 +1,35 @@
import sys
from _typeshed import Unused
from collections.abc import Callable, Coroutine
from contextvars import Context
from typing import Any, TypeVar
from typing_extensions import Self, final
from .events import AbstractEventLoop
if sys.version_info >= (3, 11):
__all__ = ("Runner", "run")
else:
__all__ = ("run",)
_T = TypeVar("_T")
if sys.version_info >= (3, 11):
@final
class Runner:
def __init__(self, *, debug: bool | None = None, loop_factory: Callable[[], AbstractEventLoop] | None = None) -> None: ...
def __enter__(self) -> Self: ...
def __exit__(self, exc_type: Unused, exc_val: Unused, exc_tb: Unused) -> None: ...
def close(self) -> None: ...
def get_loop(self) -> AbstractEventLoop: ...
def run(self, coro: Coroutine[Any, Any, _T], *, context: Context | None = None) -> _T: ...
if sys.version_info >= (3, 12):
def run(
main: Coroutine[Any, Any, _T], *, debug: bool | None = ..., loop_factory: Callable[[], AbstractEventLoop] | None = ...
) -> _T: ...
elif sys.version_info >= (3, 8):
def run(main: Coroutine[Any, Any, _T], *, debug: bool | None = None) -> _T: ...
else:
def run(main: Coroutine[Any, Any, _T], *, debug: bool = False) -> _T: ...
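A minimal sketch contrasting the run() helper with the 3.11+ Runner class (not part of the stub):

import asyncio
import sys

async def job() -> str:
    await asyncio.sleep(0)
    return "done"

print(asyncio.run(job(), debug=True))      # the one-shot entry point

if sys.version_info >= (3, 11):
    # Runner keeps one loop alive across several top-level calls.
    with asyncio.Runner(debug=True) as runner:
        print(runner.run(job()))
        print(runner.run(job()))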

View File

@@ -0,0 +1,8 @@
import selectors
from . import base_events
__all__ = ("BaseSelectorEventLoop",)
class BaseSelectorEventLoop(base_events.BaseEventLoop):
def __init__(self, selector: selectors.BaseSelector | None = None) -> None: ...

View File

@@ -0,0 +1,176 @@
import sys
from collections import deque
from collections.abc import Callable
from enum import Enum
from typing import Any, ClassVar
import stdlib.ssl as ssl # type: ignore
from typing_extensions import Literal, TypeAlias
from . import constants, events, futures, protocols, transports
def _create_transport_context(
server_side: bool, server_hostname: str | None
) -> ssl.SSLContext: ...
if sys.version_info >= (3, 11):
SSLAgainErrors: tuple[type[ssl.SSLWantReadError], type[ssl.SSLSyscallError]]
class SSLProtocolState(Enum):
UNWRAPPED: str
DO_HANDSHAKE: str
WRAPPED: str
FLUSHING: str
SHUTDOWN: str
class AppProtocolState(Enum):
STATE_INIT: str
STATE_CON_MADE: str
STATE_EOF: str
STATE_CON_LOST: str
def add_flowcontrol_defaults(
high: int | None, low: int | None, kb: int
) -> tuple[int, int]: ...
else:
_UNWRAPPED: Literal["UNWRAPPED"]
_DO_HANDSHAKE: Literal["DO_HANDSHAKE"]
_WRAPPED: Literal["WRAPPED"]
_SHUTDOWN: Literal["SHUTDOWN"]
if sys.version_info < (3, 11):
class _SSLPipe:
max_size: ClassVar[int]
_context: ssl.SSLContext
_server_side: bool
_server_hostname: str | None
_state: str
_incoming: ssl.MemoryBIO
_outgoing: ssl.MemoryBIO
_sslobj: ssl.SSLObject | None
_need_ssldata: bool
_handshake_cb: Callable[[BaseException | None], None] | None
_shutdown_cb: Callable[[], None] | None
def __init__(
self, context: ssl.SSLContext, server_side: bool, server_hostname: str | None = None
) -> None: ...
@property
def context(self) -> ssl.SSLContext: ...
@property
def ssl_object(self) -> ssl.SSLObject | None: ...
@property
def need_ssldata(self) -> bool: ...
@property
def wrapped(self) -> bool: ...
def do_handshake(
self, callback: Callable[[BaseException | None], object] | None = None
) -> list[bytes]: ...
def shutdown(self, callback: Callable[[], object] | None = None) -> list[bytes]: ...
def feed_eof(self) -> None: ...
def feed_ssldata(
self, data: bytes, only_handshake: bool = False
) -> tuple[list[bytes], list[bytes]]: ...
def feed_appdata(self, data: bytes, offset: int = 0) -> tuple[list[bytes], int]: ...
class _SSLProtocolTransport(transports._FlowControlMixin, transports.Transport):
_sendfile_compatible: ClassVar[constants._SendfileMode]
_loop: events.AbstractEventLoop
if sys.version_info >= (3, 11):
_ssl_protocol: SSLProtocol | None
else:
_ssl_protocol: SSLProtocol
_closed: bool
def __init__(self, loop: events.AbstractEventLoop, ssl_protocol: SSLProtocol) -> None: ...
def get_extra_info(self, name: str, default: Any | None = None) -> dict[str, Any]: ...
@property
def _protocol_paused(self) -> bool: ...
def write(self, data: bytes | bytearray | memoryview) -> None: ...
def can_write_eof(self) -> Literal[False]: ...
if sys.version_info >= (3, 11):
def get_write_buffer_limits(self) -> tuple[int, int]: ...
def get_read_buffer_limits(self) -> tuple[int, int]: ...
def set_read_buffer_limits(
self, high: int | None = None, low: int | None = None
) -> None: ...
def get_read_buffer_size(self) -> int: ...
if sys.version_info >= (3, 11):
_SSLProtocolBase: TypeAlias = protocols.BufferedProtocol
else:
_SSLProtocolBase: TypeAlias = protocols.Protocol
class SSLProtocol(_SSLProtocolBase):
_server_side: bool
_server_hostname: str | None
_sslcontext: ssl.SSLContext
_extra: dict[str, Any]
_write_backlog: deque[tuple[bytes, int]] # type: ignore
_write_buffer_size: int
_waiter: futures.Future[Any]
_loop: events.AbstractEventLoop
_app_transport: _SSLProtocolTransport
_transport: transports.BaseTransport | None
_ssl_handshake_timeout: int | None
_app_protocol: protocols.BaseProtocol
_app_protocol_is_buffer: bool
if sys.version_info >= (3, 11):
max_size: ClassVar[int]
else:
_sslpipe: _SSLPipe | None
_session_established: bool
_call_connection_made: bool
_in_handshake: bool
_in_shutdown: bool
if sys.version_info >= (3, 11):
def __init__(
self,
loop: events.AbstractEventLoop,
app_protocol: protocols.BaseProtocol,
sslcontext: ssl.SSLContext,
waiter: futures.Future[Any],
server_side: bool = False,
server_hostname: str | None = None,
call_connection_made: bool = True,
ssl_handshake_timeout: int | None = None,
ssl_shutdown_timeout: float | None = None,
) -> None: ...
else:
def __init__(
self,
loop: events.AbstractEventLoop,
app_protocol: protocols.BaseProtocol,
sslcontext: ssl.SSLContext,
waiter: futures.Future[Any],
server_side: bool = False,
server_hostname: str | None = None,
call_connection_made: bool = True,
ssl_handshake_timeout: int | None = None,
) -> None: ...
def _set_app_protocol(self, app_protocol: protocols.BaseProtocol) -> None: ...
def _wakeup_waiter(self, exc: BaseException | None = None) -> None: ...
def connection_lost(self, exc: BaseException | None) -> None: ...
def eof_received(self) -> None: ...
def _get_extra_info(self, name: str, default: Any | None = None) -> Any: ...
def _start_shutdown(self) -> None: ...
if sys.version_info >= (3, 11):
def _write_appdata(self, list_of_data: list[bytes]) -> None: ...
else:
def _write_appdata(self, data: bytes) -> None: ...
def _start_handshake(self) -> None: ...
def _check_handshake_timeout(self) -> None: ...
def _on_handshake_complete(self, handshake_exc: BaseException | None) -> None: ...
def _fatal_error(
self, exc: BaseException, message: str = "Fatal error on transport"
) -> None: ...
def _abort(self) -> None: ...
if sys.version_info >= (3, 11):
def get_buffer(self, n: int) -> memoryview: ...
else:
def _finalize(self) -> None: ...
def _process_write_backlog(self) -> None: ...

View File

@@ -0,0 +1,10 @@
from collections.abc import Awaitable, Callable, Iterable
from typing import Any
from . import events
__all__ = ("staggered_race",)
async def staggered_race(
coro_fns: Iterable[Callable[[], Awaitable[Any]]], delay: float | None, *, loop: events.AbstractEventLoop | None = None
) -> tuple[Any, int | None, list[Exception | None]]: ...

View File

@@ -0,0 +1,179 @@
import ssl
import sys
from _typeshed import StrPath
from collections.abc import AsyncIterator, Awaitable, Callable, Iterable, Sequence
from typing import Any
from typing_extensions import Self, SupportsIndex, TypeAlias
from . import events, protocols, transports
from .base_events import Server
if sys.platform == "win32":
if sys.version_info >= (3, 8):
__all__ = ("StreamReader", "StreamWriter", "StreamReaderProtocol", "open_connection", "start_server")
else:
__all__ = (
"StreamReader",
"StreamWriter",
"StreamReaderProtocol",
"open_connection",
"start_server",
"IncompleteReadError",
"LimitOverrunError",
)
else:
if sys.version_info >= (3, 8):
__all__ = (
"StreamReader",
"StreamWriter",
"StreamReaderProtocol",
"open_connection",
"start_server",
"open_unix_connection",
"start_unix_server",
)
else:
__all__ = (
"StreamReader",
"StreamWriter",
"StreamReaderProtocol",
"open_connection",
"start_server",
"IncompleteReadError",
"LimitOverrunError",
"open_unix_connection",
"start_unix_server",
)
_ClientConnectedCallback: TypeAlias = Callable[[StreamReader, StreamWriter], Awaitable[None] | None]
if sys.version_info < (3, 8):
class IncompleteReadError(EOFError):
expected: int | None
partial: bytes
def __init__(self, partial: bytes, expected: int | None) -> None: ...
class LimitOverrunError(Exception):
consumed: int
def __init__(self, message: str, consumed: int) -> None: ...
if sys.version_info >= (3, 10):
async def open_connection(
host: str | None = None,
port: int | str | None = None,
*,
limit: int = 65536,
ssl_handshake_timeout: float | None = ...,
**kwds: Any,
) -> tuple[StreamReader, StreamWriter]: ...
async def start_server(
client_connected_cb: _ClientConnectedCallback,
host: str | Sequence[str] | None = None,
port: int | str | None = None,
*,
limit: int = 65536,
ssl_handshake_timeout: float | None = ...,
**kwds: Any,
) -> Server: ...
else:
async def open_connection(
host: str | None = None,
port: int | str | None = None,
*,
loop: events.AbstractEventLoop | None = None,
limit: int = 65536,
ssl_handshake_timeout: float | None = ...,
**kwds: Any,
) -> tuple[StreamReader, StreamWriter]: ...
async def start_server(
client_connected_cb: _ClientConnectedCallback,
host: str | None = None,
port: int | str | None = None,
*,
loop: events.AbstractEventLoop | None = None,
limit: int = 65536,
ssl_handshake_timeout: float | None = ...,
**kwds: Any,
) -> Server: ...
if sys.platform != "win32":
if sys.version_info >= (3, 10):
async def open_unix_connection(
path: StrPath | None = None, *, limit: int = 65536, **kwds: Any
) -> tuple[StreamReader, StreamWriter]: ...
async def start_unix_server(
client_connected_cb: _ClientConnectedCallback, path: StrPath | None = None, *, limit: int = 65536, **kwds: Any
) -> Server: ...
else:
async def open_unix_connection(
path: StrPath | None = None, *, loop: events.AbstractEventLoop | None = None, limit: int = 65536, **kwds: Any
) -> tuple[StreamReader, StreamWriter]: ...
async def start_unix_server(
client_connected_cb: _ClientConnectedCallback,
path: StrPath | None = None,
*,
loop: events.AbstractEventLoop | None = None,
limit: int = 65536,
**kwds: Any,
) -> Server: ...
class FlowControlMixin(protocols.Protocol):
def __init__(self, loop: events.AbstractEventLoop | None = None) -> None: ...
class StreamReaderProtocol(FlowControlMixin, protocols.Protocol):
def __init__(
self,
stream_reader: StreamReader,
client_connected_cb: _ClientConnectedCallback | None = None,
loop: events.AbstractEventLoop | None = None,
) -> None: ...
class StreamWriter:
def __init__(
self,
transport: transports.WriteTransport,
protocol: protocols.BaseProtocol,
reader: StreamReader | None,
loop: events.AbstractEventLoop,
) -> None: ...
@property
def transport(self) -> transports.WriteTransport: ...
def write(self, data: bytes | bytearray | memoryview) -> None: ...
def writelines(self, data: Iterable[bytes | bytearray | memoryview]) -> None: ...
def write_eof(self) -> None: ...
def can_write_eof(self) -> bool: ...
def close(self) -> None: ...
def is_closing(self) -> bool: ...
async def wait_closed(self) -> None: ...
def get_extra_info(self, name: str, default: Any = None) -> Any: ...
async def drain(self) -> None: ...
if sys.version_info >= (3, 12):
async def start_tls(
self,
sslcontext: ssl.SSLContext,
*,
server_hostname: str | None = None,
ssl_handshake_timeout: float | None = None,
ssl_shutdown_timeout: float | None = None,
) -> None: ...
elif sys.version_info >= (3, 11):
async def start_tls(
self, sslcontext: ssl.SSLContext, *, server_hostname: str | None = None, ssl_handshake_timeout: float | None = None
) -> None: ...
class StreamReader(AsyncIterator[bytes]):
def __init__(self, limit: int = 65536, loop: events.AbstractEventLoop | None = None) -> None: ...
def exception(self) -> Exception: ...
def set_exception(self, exc: Exception) -> None: ...
def set_transport(self, transport: transports.BaseTransport) -> None: ...
def feed_eof(self) -> None: ...
def at_eof(self) -> bool: ...
def feed_data(self, data: Iterable[SupportsIndex]) -> None: ...
async def readline(self) -> bytes: ...
# Can be any buffer that supports len(); consider changing to a Protocol if PEP 688 is accepted
async def readuntil(self, separator: bytes | bytearray | memoryview = b"\n") -> bytes: ...
async def read(self, n: int = -1) -> bytes: ...
async def readexactly(self, n: int) -> bytes: ...
def __aiter__(self) -> Self: ...
async def __anext__(self) -> bytes: ...
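A compact sketch wiring start_server() and open_connection() together on CPython (not part of the stub; the handler and the 127.0.0.1:8889 address are invented):

import asyncio

async def handle(reader: asyncio.StreamReader, writer: asyncio.StreamWriter) -> None:
    line = await reader.readline()
    writer.write(line.upper())          # shout the request back
    await writer.drain()
    writer.close()
    await writer.wait_closed()

async def demo() -> None:
    server = await asyncio.start_server(handle, host="127.0.0.1", port=8889)
    async with server:                  # start_serving=True, so connections are accepted here
        reader, writer = await asyncio.open_connection("127.0.0.1", 8889)
        writer.write(b"hello\n")
        await writer.drain()
        print(await reader.readline())  # b'HELLO\n'
        writer.close()
        await writer.wait_closed()

# asyncio.run(demo())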

Some files were not shown because too many files have changed in this diff