diff --git a/botkit/__init__.py b/botkit/__init__.py
index 6027023..ec26c6a 100644
--- a/botkit/__init__.py
+++ b/botkit/__init__.py
@@ -4,8 +4,12 @@
from importlib.metadata import version
+from ._settings import _BotkitSettings
+
try:
__version__ = version(__name__)
except:
__version__ = None
+settings = _BotkitSettings()
+botkit_settings = settings
diff --git a/botkit/settings.py b/botkit/_settings.py
similarity index 93%
rename from botkit/settings.py
rename to botkit/_settings.py
index 892add5..769927f 100644
--- a/botkit/settings.py
+++ b/botkit/_settings.py
@@ -21,7 +21,7 @@ class _BotkitSettings:
# region Callback manager
- callback_manager_qualifier: Literal["memory", "redis"] = "memory"
+ callback_store_qualifier: Literal["memory", "redis"] = "memory"
"""
Qualifier key of the kind of callback manager to be used. Should be "memory" for an in-memory store (without
persistence) and "redis" if you have the `redis_collections` package installed.
@@ -60,6 +60,3 @@ def log_level(self, value: Optional[str]) -> None:
self._current_log_level = value
# endregion
-
-
-botkit_settings = _BotkitSettings()
diff --git a/botkit/agnostic/_pyrogram_update_type_inference.py b/botkit/agnostic/_pyrogram_update_type_inference.py
index a5a1501..075e935 100644
--- a/botkit/agnostic/_pyrogram_update_type_inference.py
+++ b/botkit/agnostic/_pyrogram_update_type_inference.py
@@ -6,7 +6,7 @@
from boltons.iterutils import flatten
from pyrogram.handlers.handler import Handler
-from botkit.routing.update_types.updatetype import UpdateType
+from tgtypes.updatetype import UpdateType
from botkit.utils.typed_callable import TypedCallable
PYROGRAM_UPDATE_TYPES: Dict[Type[pyrogram.types.Update], UpdateType] = {
diff --git a/botkit/agnostic/annotations.py b/botkit/agnostic/annotations.py
index 94a0716..9fbed8f 100644
--- a/botkit/agnostic/annotations.py
+++ b/botkit/agnostic/annotations.py
@@ -10,7 +10,7 @@
)
from botkit.agnostic.library_checks import is_installed
-from botkit.views.botkit_context import Context
+from botkit.botkit_context import Context
if TYPE_CHECKING:
from botkit.clients.client import IClient
diff --git a/botkit/agnostic/pyrogram_chat_resolver.py b/botkit/agnostic/pyrogram_chat_resolver.py
index 0572ff7..f670f3c 100644
--- a/botkit/agnostic/pyrogram_chat_resolver.py
+++ b/botkit/agnostic/pyrogram_chat_resolver.py
@@ -1,15 +1,26 @@
-from typing import Pattern, cast
+import asyncio
+from datetime import timedelta
+from typing import (
+ Optional,
+ Pattern,
+ cast,
+)
+from pyrogram.errors import BotMethodInvalid
+
+from botkit.tghelpers.names import display_name
from botkit.utils.botkit_logging.setup import create_logger
+from tgtypes.interfaces.resolvercache import IResolverCache
+from tgtypes.persistence.json_file_resolver_cache import JsonFileResolverCache
+from tgtypes.utils.debounce import DebouncedTask
+from tgtypes.primitives import Username
from tgtypes.identities.chat_identity import ChatIdentity, ChatType
from tgtypes.interfaces.chatresolver import IChatResolver
-from tgtypes.primitives import Username
-from tgtypes.utils.async_lazy_dict import AsyncLazyDict
try:
# TODO: Turn this into a contextmanager, `with lib_check('Pyrogram'): import ...`
from pyrogram import Client as PyrogramClient
- from pyrogram.types import Message, User
+ from pyrogram.types import Chat, Message, User
except ImportError as e:
raise ImportError(
"The Pyrogram library does not seem to be installed, so using Botkit in Pyrogram flavor is not possible. "
@@ -19,30 +30,58 @@
class PyrogramChatResolver(IChatResolver):
- def __init__(self, client: PyrogramClient):
+ def __init__(self, client: PyrogramClient, cache: Optional[IResolverCache] = None):
self.client = client
- self.context = AsyncLazyDict()
+ self.cache: IResolverCache = cache or JsonFileResolverCache()
+ self._iter_dialogs_lock = asyncio.Lock()
+ self._save_func = DebouncedTask(
+ lambda: self.cache.dump_data(), delta=timedelta(seconds=20), num_runs=3
+ )
async def resolve_chat_by_username(self, username: Username) -> ChatIdentity:
- chat = await self.context.setdefault_lazy("chat", self.client.get_chat(username))
+ await self.cache.ensure_initialized()
+ chat = await self.cache.setdefault_lazy(f"chat:{username}", self.client.get_chat(username))
return ChatIdentity(type=cast(ChatType, chat.type), peers=chat.id)
async def resolve_chat_by_chat_id(self, chat_id: int) -> ChatIdentity:
- chat = await self.context.setdefault_lazy("chat", self.client.get_chat(chat_id))
+ await self.cache.ensure_initialized()
+ chat = await self.cache.setdefault_lazy(f"chat:{chat_id}", self.client.get_chat(chat_id))
return ChatIdentity(type=cast(ChatType, chat.type), peers=chat.id)
async def resolve_chat_by_title_regex(self, title_regex: Pattern) -> ChatIdentity:
- LIMIT = 1000
-
- async for d in self.client.iter_dialogs(limit=LIMIT):
- # noinspection PyUnboundLocalVariable
- if (
- (chat := getattr(d, "chat", None))
- and (title := getattr(chat, "title", None))
- and title_regex.match(title)
- ):
- return ChatIdentity(type=cast(ChatType, chat.type), peers=chat.id)
-
- raise ValueError(
- f"No chat found matching pattern {title_regex} in the uppermost {LIMIT} dialogs."
- )
+ await self.cache.ensure_initialized()
+ LIMIT = 500
+
+ self.cache.setdefault("identity_titles", [])
+
+ try:
+ # In order to make use of caching, disallow running multiple iter_dialogs methods concurrently
+ async with self._iter_dialogs_lock:
+ # Check cached items first
+ for ident, t in self.cache["identity_titles"]:
+ if title_regex.match(t):
+ return ident
+
+ async for d in self.client.iter_dialogs(limit=LIMIT):
+ chat: Chat = getattr(d, "chat", None)
+
+ if not chat:
+ continue
+
+ title: str = display_name(chat)
+ identity = ChatIdentity(type=cast(ChatType, chat.type), peers=chat.id)
+
+ self.cache["identity_titles"].append((identity, title))
+
+ # noinspection PyUnboundLocalVariable
+ if title_regex.match(title):
+ return identity
+
+ raise ValueError(
+ f"No chat found matching pattern {title_regex} in the uppermost {LIMIT} dialogs."
+ )
+ except BotMethodInvalid as ex:
+ raise ValueError(
+ "Method invalid: Bots cannot read chat lists and thus not match via `title_regex`. "
+ "You should resolve with a user client instead."
+ ) from ex
diff --git a/botkit/views/botkit_context.py b/botkit/botkit_context.py
similarity index 82%
rename from botkit/views/botkit_context.py
rename to botkit/botkit_context.py
index 3d4c9ca..4a1d3c1 100644
--- a/botkit/views/botkit_context.py
+++ b/botkit/botkit_context.py
@@ -3,12 +3,12 @@
from typing import Any, Generic, Iterator, Optional, TypeVar
from botkit.dispatching.types import CallbackActionType
+from botkit.views.rendered_messages import RenderedMessage
from tgtypes.identities.chat_identity import ChatIdentity
from tgtypes.identities.message_identity import MessageIdentity
-from tgtypes.update_field_extractor import UpdateFieldExtractor
-from .rendered_messages import RenderedMessage
-from ..routing.types import TViewState
-from ..routing.update_types.updatetype import UpdateType
+from botkit.dispatching.update_field_extractor import UpdateFieldExtractor
+from tgtypes.updatetype import UpdateType
+from botkit.routing.types import TViewState
TPayload = TypeVar("TPayload")
@@ -46,14 +46,14 @@ def __init__(self):
@dataclass
-class Context(Generic[TViewState, TPayload], UpdateFieldExtractor): # TODO: maybe `RouteContext`?
+class Context(UpdateFieldExtractor): # TODO: maybe `RouteContext`?
# TODO: rename to `view_state`?
# TODO: maybe this shouldn't even be part of the context but always be passed separately (because of reducers)?
update_type: UpdateType
- view_state: TViewState
+ view_state: Any
action: Optional[CallbackActionType] = None
- payload: Optional[TPayload] = None
+ payload: Optional[Any] = None
message_state: Optional[Any] = None # TODO: wtf
user_state: Optional[UserState] = None
diff --git a/botkit/builders/__init__.py b/botkit/builders/__init__.py
index 9183168..ff8f672 100644
--- a/botkit/builders/__init__.py
+++ b/botkit/builders/__init__.py
@@ -1,60 +1,17 @@
-from typing import Any, TYPE_CHECKING
-
-from haps import Container, inject
+from injector import Binder
from .callbackbuilder import CallbackBuilder
-
-if TYPE_CHECKING:
- from botkit.widgets import Widget
-
from .htmlbuilder import HtmlBuilder
from .menubuilder import MenuBuilder
from .metabuilder import MetaBuilder
-from ..persistence.callback_store import ICallbackStore
-from ..settings import botkit_settings
-from ..views.rendered_messages import RenderedMessage, RenderedTextMessage
-
-
-class ViewBuilder:
- html: HtmlBuilder
- menu: MenuBuilder
- meta: MetaBuilder
-
- def __init__(self, callback_builder: CallbackBuilder):
- self.html = HtmlBuilder(callback_builder)
- self.menu = MenuBuilder(callback_builder)
- self.meta = MetaBuilder()
-
- def add(self, widget: "Widget"):
- self.html.add(widget)
- self.menu.add(widget)
- self.meta.add(widget)
- widget.render_html(self.html)
-
- @property
- def is_dirty(self) -> bool:
- return any((x.is_dirty for x in [self.html, self.menu, self.meta]))
-
- def render(self) -> RenderedMessage:
- # TODO: implement the other message types aswell
- html_text = self.html.render_html()
- rendered_menu = self.menu.render()
- return RenderedTextMessage(
- text=html_text,
- inline_buttons=rendered_menu,
- title=self.meta.title,
- description=self.meta.description,
- )
+from .quizbuilder import QuizBuilder
+from .viewbuilder import ViewBuilder
-# def _determine_message_type(msg: RenderedMessageMarkup) -> MessageType:
-# if isinstance(msg, RenderedMessage):
-# if msg.media and msg.sticker: # keep this check updated with new values!
-# raise ValueError("Ambiguous message type.")
-# if msg.sticker:
-# return MessageType.sticker
-# elif msg.media:
-# return MessageType.media
-# return MessageType.text
-# elif isinstance(msg, RenderedPollMessage):
-# return MessageType.poll
+def configure_builders(binder: Binder) -> None:
+ binder.bind(CallbackBuilder)
+ binder.bind(QuizBuilder)
+ binder.bind(HtmlBuilder)
+ binder.bind(MenuBuilder)
+ binder.bind(MetaBuilder)
+ binder.bind(ViewBuilder)
diff --git a/botkit/builders/callbackbuilder.py b/botkit/builders/callbackbuilder.py
index 3dd06f8..f74fec2 100644
--- a/botkit/builders/callbackbuilder.py
+++ b/botkit/builders/callbackbuilder.py
@@ -1,6 +1,8 @@
from contextlib import contextmanager
from typing import Any, Literal, Optional
+from injector import NoInject, inject
+
from botkit.abstractions._named import INamed
from botkit.core.services import service
from botkit.dispatching.types import CallbackActionType
@@ -12,6 +14,7 @@
class CallbackBuilder:
_SEPARATOR = "##"
+ @inject
def __init__(self, state: TViewState, callback_store: ICallbackStore):
self.state = state
self._callback_store = callback_store
diff --git a/botkit/builders/htmlbuilder.py b/botkit/builders/htmlbuilder.py
index 3a40c07..419e934 100644
--- a/botkit/builders/htmlbuilder.py
+++ b/botkit/builders/htmlbuilder.py
@@ -1,18 +1,58 @@
-from typing import Any, Callable, NoReturn, TYPE_CHECKING, Union
+from typing import Any, Callable, List, NoReturn, TYPE_CHECKING, Union
+from injector import inject
+from botkit.builders.callbackbuilder import CallbackBuilder
from botkit.builders.text.basetextbuilder import TState
+from botkit.builders.text.htmltextbuilder import _HtmlTextBuilder
from botkit.builders.text.telegram_entity_builder import EntityBuilder
from botkit.builders.text.typographybuilder import TypographyBuilder
if TYPE_CHECKING:
from botkit.widgets import HtmlWidget
+"""
+# More ideas:
+
+- `html.desc("https://blabla", "")` --> ""
+"""
+
class HtmlBuilder(TypographyBuilder, EntityBuilder):
+ @inject
+ def __init__(self, callback_builder: CallbackBuilder = None):
+ super().__init__(callback_builder)
+
def add(self, widget: "HtmlWidget") -> "HtmlBuilder":
with self.callback_builder.scope(widget):
widget.render_html(self)
return self
+ s = _HtmlTextBuilder.strike
+ u = _HtmlTextBuilder.underline
+ i = _HtmlTextBuilder.italic
+ b = _HtmlTextBuilder.bold
+ lin = _HtmlTextBuilder.link
+
HtmlRenderer = Callable[[TState, HtmlBuilder], Union[NoReturn, Any]]
+
+
+if __name__ == "__main__":
+ from botkit.widgets import HtmlWidget
+
+ class ListView(HtmlWidget):
+ def __init__(self, items: List[Any]):
+ self.items = items
+
+ unique_name = "my_list_view"
+
+ def render_html(self, html: HtmlBuilder):
+ html.list(self.items)
+
+ html = HtmlBuilder(None)
+
+ html.add(ListView(["henlo", "fren", "waddup"]))
+
+ html("I am ").b("testing")
+
+ print(html.render_html())
diff --git a/botkit/builders/text/basetextbuilder.py b/botkit/builders/text/basetextbuilder.py
index 5148532..bf66bd0 100644
--- a/botkit/builders/text/basetextbuilder.py
+++ b/botkit/builders/text/basetextbuilder.py
@@ -2,18 +2,19 @@
from haps import Container
from typing import Any, Optional, TypeVar
+from injector import inject
from pyrogram.parser import Parser
from pyrogram.types.messages_and_media.message import Str
from botkit.builders.callbackbuilder import CallbackBuilder
from botkit.persistence.callback_store import ICallbackStore
-from botkit.settings import botkit_settings
TState = TypeVar("TState")
class BaseTextBuilder:
- def __init__(self, callback_builder: CallbackBuilder): # TODO: make non-optional
+ @inject
+ def __init__(self, callback_builder: CallbackBuilder):
self.parts = []
self.callback_builder = callback_builder
@@ -32,8 +33,11 @@ def br(self, count: int = 1):
self.parts.append("\n" * count)
return self
+ def as_para(self):
+ return "\n\n"
+
def para(self):
- self.parts.append("\n\n")
+ self.parts.append(self.as_para())
return self
def _append(self, text: str):
diff --git a/botkit/builders/text/emoji.py b/botkit/builders/text/emoji.py
index 5c27c82..c90064b 100644
--- a/botkit/builders/text/emoji.py
+++ b/botkit/builders/text/emoji.py
@@ -1274,6 +1274,8 @@
}
aliases_unicode = {
+ ":duck:": "🦆",
+ ":crossed_fingers:": "🤞",
":turtle:": "🐢",
":bike:": "🚲",
":family_mwg:": "👨‍👩‍👧",
@@ -6373,7 +6375,7 @@ def contains_emoji(text: str):
return False
-def replace_aliases(sentence: str):
+def replace_emoji_aliases(sentence: str):
pattern = r"(:[A-Za-z0-9_-]+:)"
matches = re.search(pattern, sentence)
if matches is None:
diff --git a/botkit/builders/text/htmltextbuilder.py b/botkit/builders/text/htmltextbuilder.py
index fe53314..414bcb6 100644
--- a/botkit/builders/text/htmltextbuilder.py
+++ b/botkit/builders/text/htmltextbuilder.py
@@ -32,6 +32,13 @@ def _wrap_and_escape(cls, text: str, tag: str, if_: bool = True) -> str:
return cls._wrap_html(cls.as_escaped_html(text), tag, if_)
# endregion internals
+ def __call__(self, *args):
+ if len(args) == 0:
+ self.spc()
+ else:
+ for a in args:
+ self.raw(a)
+ return self
def text(self, text: str, end=""):
return self._append_with_end(self.as_escaped_html(text), end)
@@ -68,6 +75,13 @@ def bold_and_underline(self, text: str, end=""):
def as_bold_and_underline(cls, text: str, end="") -> str:
return cls._apply_end(f"<b><u>{cls.as_escaped_html(text)}</u></b>", end)
+ def bold_and_italic(self, text: str, end=""):
+ return self._append(self.as_bold_and_italic(text=text, end=end))
+
+ @classmethod
+ def as_bold_and_italic(cls, text: str, end="") -> str:
+ return cls._apply_end(f"<b><i>{cls.as_escaped_html(text)}</i></b>", end)
+
@classmethod
def as_mono(cls, text: str, end="", if_: bool = True) -> str:
return cls.as_code(text, end, if_)
@@ -110,5 +124,5 @@ def as_link(cls, text: str, href: str, end="") -> str:
return cls._apply_end(html, end)
def link(self, text: str, href: str, end=""):
- html = f'<a href="{href}">{self.as_escaped_html(text)}</a>'
+ html = f'<a href="{href}">{text}</a>'
return self._append_with_end(html, end)
diff --git a/botkit/builders/text/iconographybuilder.py b/botkit/builders/text/iconographybuilder.py
index 61d88bc..91607e3 100644
--- a/botkit/builders/text/iconographybuilder.py
+++ b/botkit/builders/text/iconographybuilder.py
@@ -1,26 +1,31 @@
from typing import Optional
from botkit.builders.text.basetextbuilder import BaseTextBuilder
-from botkit.builders.text.emoji import replace_aliases
+from botkit.builders.text.emoji import replace_emoji_aliases
class Iconography:
ZERO_WIDTH_WHITESPACE = "\xad"
EMOJI_NUMBERS = "0️⃣1️⃣2️⃣3️⃣4️⃣5️⃣6️⃣7️⃣8️⃣9️⃣"
+ EMOJI_SPACE = "\u2002\u2002"  # two en-spaces (U+2002), matching emoji_spc() below
class IconographyBuilder(BaseTextBuilder):
def emoji_spc(self):
""" Renders the horizontal width of an emoji as two `en` whitespace characters (U+2002) """
- self.parts.append(" ")
+ self.parts.append(Iconography.EMOJI_SPACE)
return self
def zero_width_whitespace_1(self):
self.parts.append(Iconography.ZERO_WIDTH_WHITESPACE)
return self
+ @classmethod
+ def as_emojized(cls, alias: str):
+ return replace_emoji_aliases(alias)
+
def emojize(self, alias: str):
- self.parts.append(replace_aliases(alias))
+ self.parts.append(replace_emoji_aliases(alias))
return self
def dash_long(self, end: Optional[str] = " "):
diff --git a/botkit/builders/text/telegram_entity_builder.py b/botkit/builders/text/telegram_entity_builder.py
index eeea47e..2565b63 100644
--- a/botkit/builders/text/telegram_entity_builder.py
+++ b/botkit/builders/text/telegram_entity_builder.py
@@ -6,8 +6,10 @@
from botkit.builders.text.htmltextbuilder import _HtmlTextBuilder
from botkit.routing.triggers import ActionIdType
-from botkit.tghelpers.direct_links import direct_link, direct_link_user
+from botkit.tghelpers.direct_links import direct_link, direct_link_with_invite, direct_link_user
from botkit.tghelpers.names import display_name
+from tgtypes.protocols.chat import Chat
+from tgtypes.protocols.user import User
if TYPE_CHECKING:
from botkit.clients.client import IClient
@@ -17,20 +19,28 @@
class EntityBuilder(_HtmlTextBuilder):
@classmethod
- def as_user(cls, user: Any):
+ def as_user(cls, user: User):
link = cls.as_link(display_name(user), direct_link_user(user))
return f"{BUST_IN_SILHOUETTE} {link}"
- def user(self, user: Any, end: Optional[str] = " "):
+ def user(self, user: User, end: Optional[str] = " "):
return self._append_with_end(self.as_user(user=user), end)
@classmethod
- def as_chat(cls, user: Any):
- link = cls.as_link(display_name(user), direct_link_user(user))
+ def as_chat(cls, chat: Chat):
+ link = cls.as_link(display_name(chat), direct_link(chat))
+ return f"{BUSTS_IN_SILHOUETTE} {link}"
+
+ def chat(self, chat: Chat, end: Optional[str] = " "):
+ return self._append_with_end(self.as_chat(chat=chat), end)
+
+ @classmethod
+ def as_peer(cls, peer: Any):
+ link = cls.as_link(display_name(peer), direct_link(peer))
return f"{BUSTS_IN_SILHOUETTE} {link}"
- def chat(self, user: Any, end: Optional[str] = " "):
- return self._append_with_end(self.as_chat(user=user), end)
+ def peer(self, peer: Any, end: Optional[str] = " "):
+ return self._append_with_end(self.as_peer(peer=peer), end)
@classmethod
def as_command(cls, name: str, to_lower: bool = False):
diff --git a/botkit/builders/text/typographybuilder.py b/botkit/builders/text/typographybuilder.py
index 23c6734..050c16c 100644
--- a/botkit/builders/text/typographybuilder.py
+++ b/botkit/builders/text/typographybuilder.py
@@ -18,17 +18,24 @@ class TypographyBuilder(_HtmlTextBuilder, IconographyBuilder):
icons = IconSettings
def h1(self, title: str, breaks: int = 2):
- self.parts.append(f"▶️ ")
- self.bold(title)
+ self.br()
+ self.bold_and_underline(title)
self.br(breaks)
return self
def h2(self, title: str, breaks: int = 2):
- self.bold_and_underline(title)
+ self.br()
+ self.parts.append(f"▶️ ")
+ self.bold(title)
self.br(breaks)
return self
def h3(self, title: str):
+ self.br()
+ self.underline(title)
+ return self
+
+ def h4(self, title: str):
self.bold(title.upper(), end=" — ")
return self
@@ -36,6 +43,9 @@ def desc(self, key: str, value: str):
self.bold(key.rstrip(": "), end=": ").raw(value).br()
return self
+ def link_info(self, name_to_upper: str, link: str, desc: str):
+ return self.bold(self.as_link(name_to_upper.upper(), link)).raw(" ℹ️ ").text(desc).br()
+
def headline(self, title: str, level: int):
if level == 1:
return self.h1(title)
diff --git a/botkit/builders/viewbuilder.py b/botkit/builders/viewbuilder.py
new file mode 100644
index 0000000..91e0472
--- /dev/null
+++ b/botkit/builders/viewbuilder.py
@@ -0,0 +1,34 @@
+from botkit.builders import CallbackBuilder, HtmlBuilder, MenuBuilder, MetaBuilder
+from botkit.views.rendered_messages import RenderedMessage, RenderedTextMessage
+
+
+class ViewBuilder:
+ html: HtmlBuilder
+ menu: MenuBuilder
+ meta: MetaBuilder
+
+ def __init__(self, callback_builder: CallbackBuilder):
+ self.html = HtmlBuilder(callback_builder)
+ self.menu = MenuBuilder(callback_builder)
+ self.meta = MetaBuilder()
+
+ def add(self, widget: "Widget"):
+ self.html.add(widget)
+ self.menu.add(widget)
+ self.meta.add(widget)
+ widget.render_html(self.html)
+
+ @property
+ def is_dirty(self) -> bool:
+ return any((x.is_dirty for x in [self.html, self.menu, self.meta]))
+
+ def render(self) -> RenderedMessage:
+ # TODO: implement the other message types aswell
+ html_text = self.html.render_html()
+ rendered_menu = self.menu.render()
+ return RenderedTextMessage(
+ text=html_text,
+ inline_buttons=rendered_menu,
+ title=self.meta.title,
+ description=self.meta.description,
+ )
diff --git a/botkit/builtin_modules/system/status_pings.py b/botkit/builtin_modules/system/status_pings.py
index 37e318c..c057835 100644
--- a/botkit/builtin_modules/system/status_pings.py
+++ b/botkit/builtin_modules/system/status_pings.py
@@ -29,6 +29,7 @@ def __init__(
self.log_chat = log_chat
self.environment = environment
self.client = client
+ self.expect_msg_in_last: int = 30
self.ping_interval: int = 6
self.reactivate_after_seconds: int = 20 # minimum 20
self.last_sent_ping: Optional[Ping] = None
@@ -135,7 +136,7 @@ def has_higher_priority(self, env: str, compare_to: str) -> Optional[bool]:
async def query_most_recent_ping(self) -> Optional[Ping]:
found = None
- async for m in self.client.iter_history(self.log_chat, limit=100):
+ async for m in self.client.iter_history(self.log_chat, limit=self.expect_msg_in_last):
if not m or not m.text:
continue
if not m.text.startswith("{"):
diff --git a/botkit/builtin_modules/system/system_tests.py b/botkit/builtin_modules/system/system_tests.py
index 38675b4..d12938f 100644
--- a/botkit/builtin_modules/system/system_tests.py
+++ b/botkit/builtin_modules/system/system_tests.py
@@ -9,9 +9,10 @@
from botkit.persistence.callback_store import ICallbackStore
from botkit.routing.route import RouteDefinition, RouteHandler
from botkit.routing.route_builder.builder import RouteBuilder
-from botkit.routing.update_types.updatetype import UpdateType
-from botkit.settings import botkit_settings
+from tgtypes.updatetype import UpdateType
+from botkit import botkit_settings
from botkit.clients.client import IClient
+from injector import inject
def notests(func):
@@ -22,18 +23,14 @@ def notests(func):
class SelftestModule(Module):
loader: ModuleLoader = Inject()
+ @inject
+ def __init__(self, callback_store: ICallbackStore) -> None:
+ self.callback_store = callback_store
+
def register(self, routes: RouteBuilder):
pass
async def load(self) -> None:
- try:
- Container().get_object(ICallbackStore, botkit_settings.callback_manager_qualifier)
- except Exception as ex:
- self.log.exception("Callback manager could not be instantiated.")
- if botkit_settings.callback_manager_qualifier != "memory":
- self.log.warning("Falling back to `memory` callback manager.")
- botkit_settings.callback_manager_qualifier = "memory"
-
return # TODO: implement
for m in self.loader.modules:
diff --git a/botkit/builtin_modules/system/sytem_management_module.py b/botkit/builtin_modules/system/sytem_management_module.py
index 06e15dd..0411bd5 100644
--- a/botkit/builtin_modules/system/sytem_management_module.py
+++ b/botkit/builtin_modules/system/sytem_management_module.py
@@ -8,10 +8,12 @@
from pyrogram.types import Message
from typing import Optional, List, Any, Literal, Union
-from botkit.builders import ViewBuilder
+from botkit.abstractions import IAsyncLoadUnload
+from botkit.builders.viewbuilder import ViewBuilder
from botkit.builtin_modules.system.system_tests import notests
from botkit.core.modules.activation import ModuleLoader, ModuleStatus
from botkit.agnostic.annotations import IClient
+from botkit.core.modules.activation._di import discover_async_loadable
from botkit.persistence.callback_store import (
RedisCallbackStore,
ICallbackStore,
@@ -20,9 +22,9 @@
from botkit.builtin_services.eventing import command_bus
from botkit.routing.pipelines.executionplan import SendTo
from botkit.routing.route_builder.builder import RouteBuilder
-from botkit.settings import botkit_settings
+from botkit import botkit_settings
from botkit.utils.botkit_logging.setup import create_logger
-from botkit.views.botkit_context import Context
+from botkit.botkit_context import Context
log = create_logger("system_management_module", use_standard_format=False)
@@ -55,6 +57,7 @@ def __init__(
self.system_paused: bool = False
self.paused_modules: Optional[List[Module]] = None
+ self.paused_async_loadables: Optional[List[IAsyncLoadUnload]] = None
def register(self, routes: RouteBuilder):
command_bus.register(_ToggleSystemStateCommandHandler(self))
@@ -123,19 +126,32 @@ async def handle_pause_command(self, _client, message: Message):
await message.reply("Bot paused.")
async def pause_system(self):
- loaded_modules = [
+ loaded_modules: List[Module] = [
x
for x in self.module_loader.modules
if self.module_loader.get_module_status(x) == ModuleStatus.active
and not isinstance(x, type(self))
]
+ async_loadables: List[IAsyncLoadUnload] = list(discover_async_loadable(Container()))
+ to_unload = [x for x in async_loadables if x not in self.module_loader.modules]
+ unload_tasks = [x.unload() for x in to_unload]
+
+ # TODO: remove debug check
+ print("remove debug check")
+ assert len(async_loadables) != len(to_unload)
+
self.log.info(
f"Pausing modules:\n" + "\n".join([m.get_name() for m in loaded_modules]) + "\n..."
)
tasks = [self.module_loader.deactivate_module_async(m) for m in loaded_modules]
+
+ self.log.info("Unloading services:\n" + "\n".join([str(m) for m in to_unload]) + "\n...")
+ tasks.extend(unload_tasks)
+
await asyncio.gather(*tasks, return_exceptions=True)
self.system_paused = True
self.paused_modules = loaded_modules
+ self.paused_async_loadables = async_loadables
try:
callback_manager: RedisCallbackStore = Container().get_object(ICallbackStore, "redis")
diff --git a/botkit/components/questionnaire.py b/botkit/components/questionnaire.py
index 514297e..3902eb9 100644
--- a/botkit/components/questionnaire.py
+++ b/botkit/components/questionnaire.py
@@ -5,7 +5,7 @@
from botkit.core.components import Component
from botkit.routing.route_builder.builder import RouteBuilder
-from botkit.views.botkit_context import Context
+from botkit.botkit_context import Context
class Questionnaire(BaseModel):
diff --git a/botkit/configuration/client_config.py b/botkit/configuration/client_config.py
index 0396c51..ee678da 100644
--- a/botkit/configuration/client_config.py
+++ b/botkit/configuration/client_config.py
@@ -1,11 +1,9 @@
from dataclasses import field
-from dataclasses import field
from enum import Enum
from pathlib import Path
from typing import Any, Dict, Optional, Type, Union, cast
from boltons.strutils import slugify
-from haps import base
from pydantic import DirectoryPath, FilePath, constr, root_validator, validator
from pydantic.dataclasses import dataclass
@@ -30,8 +28,10 @@ def as_kwargs(self) -> Dict:
PhoneNumber = Union[int, constr(regex=r"^[+]*[(]{0,1}[0-9]{1,4}[)]{0,1}[-\s\./0-9]*$")]
-@dataclass
+@dataclass(unsafe_hash=True)
class ClientConfig:
+ name: str
+
client_type: ClientType
flavor: SupportedLibraryName
@@ -102,12 +102,12 @@ def full_session_path(self) -> Optional[FilePath]:
@property
def description(self) -> str:
- result = f"{self.flavor} {self.client_type.value} client "
+ result = f"a {self.flavor.title()} {self.client_type.value} client "
if self.session_string:
- result += "using a string session"
+ result += "with a string session"
elif self.session_file:
- result += f"using session file {self.session_file}"
+ result += f"with session file '{self.session_file}'"
return result
@property
diff --git a/botkit/core/components.py b/botkit/core/components.py
index 3cf1141..e4baccc 100644
--- a/botkit/core/components.py
+++ b/botkit/core/components.py
@@ -6,7 +6,7 @@
from botkit.abstractions import IAsyncLoadUnload, IRegisterable
from botkit.routing.types import TViewState
-from botkit.views.botkit_context import Context
+from botkit.botkit_context import Context
# TODO: make sure components get properly destroyed/garbage collected when they're not needed anymore
# TODO: components can only have parameterless constructor..???
diff --git a/botkit/core/modules/activation/__init__.py b/botkit/core/modules/activation/__init__.py
index 796210b..1ec57ec 100644
--- a/botkit/core/modules/activation/__init__.py
+++ b/botkit/core/modules/activation/__init__.py
@@ -1,13 +1,24 @@
-from ._di import haps_disambiguate_module_eggs, resolve_modules
+from typing import List
+
+from haps.container import Container
+from ._di import discover_modules, haps_disambiguate_module_eggs
from ._module_activator import ModuleActivator
from ._module_loader import ModuleLoader
from ._module_status import ModuleStatus
+from injector import Binder
+
+from .. import Module
+
+
+def configure_module_activation(binder: Binder):
+ binder.multibind(List[Module], lambda: list(discover_modules(Container())))
+
haps_disambiguate_module_eggs()
__all__ = [
+ "configure_module_activation",
"haps_disambiguate_module_eggs",
- "resolve_modules",
"ModuleLoader",
"ModuleActivator",
"ModuleStatus",
diff --git a/botkit/core/modules/activation/_di.py b/botkit/core/modules/activation/_di.py
index 5221fa2..6f2985b 100644
--- a/botkit/core/modules/activation/_di.py
+++ b/botkit/core/modules/activation/_di.py
@@ -1,12 +1,11 @@
from typing import Iterable, List
-
-
from haps import Container, Egg, SINGLETON_SCOPE, egg
-from haps.config import Configuration
+from botkit.abstractions import IAsyncLoadUnload
from botkit.core.modules._module import Module
from botkit.utils.botkit_logging.setup import create_logger
+from injector import Binder, Provider, inject, provider, Injector, multiprovider, singleton
logger = create_logger()
@@ -26,19 +25,25 @@ def haps_disambiguate_module_eggs() -> List[Egg]:
return eggs
-@Configuration.resolver("modules")
-def resolve_modules() -> List[Module]:
- return list(discover_modules(Container()))
+def discover_modules(container: Container) -> Iterable[Module]:
+ haps_eggs: Iterable[Egg] = [m for m in container.config if m.base_ is Module]
+ for e in haps_eggs:
+ try:
+ with container._lock:
+ scope = container.scopes[SINGLETON_SCOPE]
+ yield scope.get_object(e.egg)
+ except:
+ logger.exception("Could not retrieve object from scope")
-def discover_modules(container: Container) -> Iterable[Module]:
- eggs: Iterable[Egg] = [m for m in container.config if m.base_ is Module]
+def discover_async_loadable(container: Container) -> Iterable[IAsyncLoadUnload]:
+ eggs: Iterable[Egg] = [m for m in container.config if IAsyncLoadUnload in m.base_.__bases__]
for e in eggs:
try:
- scope = container.scopes[SINGLETON_SCOPE]
with container._lock:
+ scope = container.scopes[SINGLETON_SCOPE]
yield scope.get_object(e.egg)
except:
logger.exception("Could not retrieve object from scope")
diff --git a/botkit/core/modules/activation/_hmr.py b/botkit/core/modules/activation/_hmr.py
index 6ca0453..cc0e730 100644
--- a/botkit/core/modules/activation/_hmr.py
+++ b/botkit/core/modules/activation/_hmr.py
@@ -53,7 +53,7 @@ def __init__(self):
self._worker_future: Optional[Future] = None
self.log = create_logger("hmr")
- def start(self, modules: Iterable[Module]):
+ def start(self, modules: List[Module]):
if self._worker_future:
self._worker_future.cancel()
self._worker_future = asyncio.ensure_future(self.__run(modules))
@@ -61,7 +61,7 @@ def start(self, modules: Iterable[Module]):
def reload_module(self, module: Module) -> None:
pass
- async def __run(self, modules: Iterable[Module]) -> None:
+ async def __run(self, modules: List[Module]) -> None:
modules: List[Module] = list(modules)
try:
# module_files = self._get_module_dependencies(modules)
@@ -130,7 +130,7 @@ async def __run(self, modules: Iterable[Module]) -> None:
self.log.exception("Error in HMR worker.")
@classmethod
- def _get_module_dependencies(cls, modules: Iterable[Module]) -> Dict[Module, Set[str]]:
+ def _get_module_dependencies(cls, modules: List[Module]) -> Dict[Module, Set[str]]:
module_files: Dict[Module, Set[str]] = {}
for module in modules:
diff --git a/botkit/core/modules/activation/_module_activator.py b/botkit/core/modules/activation/_module_activator.py
index 3de3d68..ca02d3d 100644
--- a/botkit/core/modules/activation/_module_activator.py
+++ b/botkit/core/modules/activation/_module_activator.py
@@ -9,7 +9,7 @@
from botkit.routing.route_builder.builder import RouteBuilder
from botkit.routing.route_builder.expressions import RouteBuilderContext
from botkit.routing.route_builder.route_collection import RouteCollection
-from botkit.settings import botkit_settings
+from botkit import botkit_settings
from botkit.core.modules._module import Module
from ._module_status import ModuleStatus
@@ -18,7 +18,9 @@
@egg
@scope(SINGLETON_SCOPE)
class ModuleActivator:
- def __init__(self, dispatcher: BotkitDispatcher = None):
+ def __init__(
+ self, dispatcher: BotkitDispatcher = None,
+ ):
self.dispatcher = dispatcher or Container().get_object(BotkitDispatcher)
self.route_builder_class: Type[
RouteBuilder
diff --git a/botkit/core/modules/activation/_module_loader.py b/botkit/core/modules/activation/_module_loader.py
index f229cc7..c249f64 100644
--- a/botkit/core/modules/activation/_module_loader.py
+++ b/botkit/core/modules/activation/_module_loader.py
@@ -1,15 +1,17 @@
import asyncio
from asyncio import CancelledError
+from dataclasses import dataclass
from typing import Callable, Coroutine, Dict, Iterable, List, Optional
from haps import Container, Inject
-from haps.config import Configuration
+from injector import Binder, inject
-from botkit.builtin_services.options.base import IOptionStore
+from botkit.abstractions import IAsyncLoadUnload
+from botkit.core.modules._module import Module
from botkit.core.services import service
from botkit.dispatching.dispatcher import BotkitDispatcher
from botkit.utils.botkit_logging.setup import create_logger
-from botkit.core.modules._module import Module
+from ._di import discover_modules
from ._hmr import HotModuleReloadWorker
from ._module_activator import ModuleActivator
from ._module_status import ModuleStatus
@@ -26,6 +28,7 @@
"Html5GameModule",
# end
"NotionCollectorModule",
+ "ImageModule",
]
@@ -35,10 +38,12 @@ class ModuleLoader:
activator: ModuleActivator = Inject()
_hmr_worker: HotModuleReloadWorker = Inject()
- def __init__(self) -> None:
+ @inject
+ def __init__(self, discovered_modules: List[Module]) -> None:
self.log = create_logger("module_loader")
- discovered_modules: List[Module] = Configuration().get_var("modules")
+ # c = Container()
+ # discovered_modules: List[Module] = list(discover_modules(c))
self.log.debug(f"{len(discovered_modules)} modules discovered.")
self.__module_statuses: Dict[Module, ModuleStatus] = {
@@ -47,11 +52,11 @@ def __init__(self) -> None:
}
@property
- def modules(self) -> Iterable[Module]:
+ def modules(self) -> Iterable[Module]:
return self.__module_statuses.keys()
@property
- def active_modules(self) -> Iterable[Module]:
+ def active_modules(self) -> Iterable[Module]:
return (m for m, s in self.__module_statuses.items() if s == ModuleStatus.active)
def add_module_without_activation(self, module: Module) -> None:
@@ -118,7 +123,7 @@ async def deactivate_module_async(self, module: Module):
self.__module_statuses[module] = ModuleStatus.inactive
-# def run_validation_experiment(modules: Iterable[Module]):
+# def run_validation_experiment(modules: List[Module]):
# render_funcs = get_view_renderers(modules)
#
# for view in render_funcs:
@@ -145,7 +150,7 @@ async def deactivate_module_async(self, module: Module):
# log.exception("lala", stacklevel=1)
-def get_view_renderers(modules: Iterable[Module]) -> Iterable[Callable]:
+def get_view_renderers(modules: List[Module]) -> Iterable[Callable]:
for m in modules:
if not m.route_collection:
continue
diff --git a/botkit/core/services/_decorator.py b/botkit/core/services/_decorator.py
index f77c518..eaf280b 100644
--- a/botkit/core/services/_decorator.py
+++ b/botkit/core/services/_decorator.py
@@ -1,12 +1,12 @@
-from haps import base, egg, SINGLETON_SCOPE, scope as haps_scope
+from typing import Any, TypeVar
+
import decorators
-from typing import Any, Callable, TypeVar, no_type_check, no_type_check_decorator, overload
+from haps import SINGLETON_SCOPE, base, egg, scope as haps_scope
T = TypeVar("T")
-class _ServiceDecorator(decorators.ClassDecorator):
-
+class _ServiceDecorator(decorators.Decorator):
"""
Decorator for marking a class as an injectable service.
Defaults to SINGLETON_SCOPE as opposed to INSTANCE_SCOPE.
@@ -28,7 +28,15 @@ class MyService: ...
```
"""
- def decorate(self, klass, scope=SINGLETON_SCOPE, **kwargs) -> Any:
+ def decorate_func(self, func, scope=SINGLETON_SCOPE, *decorator_args, **decorator_kwargs):
+ base(func)
+ egg(func)
+ haps_scope(scope)(func)
+ return func
+
+ def decorate_class(
+ self, klass, scope=SINGLETON_SCOPE, *decorator_args, **decorator_kwargs
+ ) -> Any:
base(klass)
egg(klass)
haps_scope(scope)(klass)
diff --git a/botkit/core/services/_decorator.pyi b/botkit/core/services/_decorator.pyi
deleted file mode 100644
index 2ce0bb5..0000000
--- a/botkit/core/services/_decorator.pyi
+++ /dev/null
@@ -1,23 +0,0 @@
-from haps import base, egg, scope as haps_scope
-from typing import Any, Callable, TypeVar, overload
-
-_F = TypeVar("_F", bound=Any)
-
-
-@overload
-def service(class_: _F) -> _F:
- ...
-
-
-@overload
-def service(*, mode: str) -> Callable[[_F], _F]:
- ...
-
-
-
-@service(mode="abc")
-class Lala:
- x: int = 3
-
-
-x: Lala = Lala()
diff --git a/botkit/core/startup.py b/botkit/core/startup.py
index 2950db3..2bb5791 100644
--- a/botkit/core/startup.py
+++ b/botkit/core/startup.py
@@ -6,10 +6,12 @@
from haps import Inject, base
from haps.application import Application
+from injector import Injector, inject
from pyrogram import Client as PyrogramClient
from botkit.configuration import ClientConfig
-from botkit.core.modules.activation import ModuleLoader
+from botkit.core.modules import activation
+from botkit.core.modules.activation import ModuleLoader, configure_module_activation
from botkit.clients.client import IClient
from botkit.utils.botkit_logging.setup import create_logger
@@ -30,13 +32,15 @@
@base
class Startup(Application, ABC):
- module_loader: ModuleLoader = Inject()
-
- def __init__(self, clients: List[Client]):
+ def __init__(self, clients: List[Client], module_loader: ModuleLoader = None):
if not clients:
raise ValueError("Must pass at least one client for initialization.")
self.clients = clients
+ self.module_loader = module_loader or Injector(configure_module_activation).get(
+ ModuleLoader
+ )
+
self.log = create_logger("startup")
@abstractmethod
@@ -51,11 +55,15 @@ async def _start_clients(self):
start_tasks = (self.__start_client(c) for c in self.clients)
await asyncio.gather(*start_tasks)
+ async def _stop_clients(self):
+ self.log.debug("Stopping clients...")
+ stop_tasks = (self.__stop_client(c) for c in self.clients)
+ await asyncio.gather(*stop_tasks)
+
async def __start_client(self, client: Union[IClient, Any]):
# TODO(XXX): This forces the client instances to have a `config` property, which is not reflected in `IClient`.
- self.log.debug(f"Starting {client.__class__.__name__}, {client.config.description}...")
- kwargs =client.config.start_kwargs()
- self.log.warning(kwargs)
+ self.log.info(f"Starting {client.__class__.__name__}, {client.config.description}...")
+ kwargs = client.config.start_kwargs()
await client.start(**kwargs)
me = await client.get_me()
@@ -64,6 +72,11 @@ async def __start_client(self, client: Union[IClient, Any]):
self.log.info(f"Started {user_or_display_name(me)} as {client.__class__.__name__}.")
+ async def __stop_client(self, client: Union[IClient, Any]):
+ self.log.info(f"Stopping {client.__class__.__name__}, {client.config.description}...")
+ await client.stop()
+ self.log.info(f"Stopped {client.__class__.__name__}.")
+
def run(self, loop: AbstractEventLoop = None) -> None:
self.log.debug("Initializing...")
loop = loop or asyncio.get_event_loop()
diff --git a/botkit/dispatching/callbackqueryactiondispatcher.py b/botkit/dispatching/callbackqueryactiondispatcher.py
index 46e7348..61f561b 100644
--- a/botkit/dispatching/callbackqueryactiondispatcher.py
+++ b/botkit/dispatching/callbackqueryactiondispatcher.py
@@ -13,10 +13,10 @@
from botkit.persistence.callback_store import CallbackActionContext, ICallbackStore
from botkit.routing.route import RouteHandler
from botkit.routing.triggers import ActionIdType
-from botkit.routing.update_types.updatetype import UpdateType
-from botkit.settings import botkit_settings
+from tgtypes.updatetype import UpdateType
+from botkit import botkit_settings
from botkit.clients.client import IClient
-from botkit.views.botkit_context import Context
+from botkit.botkit_context import Context
from botkit.widgets import Widget
@@ -92,7 +92,7 @@ async def handle(self, client: IClient, callback_query: CallbackQuery) -> Union[
@cached_property
def callback_manager(self) -> ICallbackStore:
- return Container().get_object(ICallbackStore, botkit_settings.callback_manager_qualifier)
+ return Container().get_object(ICallbackStore, botkit_settings.callback_store_qualifier)
async def _get_context_or_respond(
self, callback_query: CallbackQuery
diff --git a/botkit/dispatching/deeplinkstartactiondispatcher.py b/botkit/dispatching/deeplinkstartactiondispatcher.py
index ee039dc..358bd50 100644
--- a/botkit/dispatching/deeplinkstartactiondispatcher.py
+++ b/botkit/dispatching/deeplinkstartactiondispatcher.py
@@ -10,11 +10,11 @@
from botkit.persistence.callback_store import ICallbackStore
from botkit.routing.route import RouteHandler
from botkit.routing.triggers import ActionIdType
-from botkit.routing.update_types.updatetype import UpdateType
-from botkit.settings import botkit_settings
+from tgtypes.updatetype import UpdateType
+from botkit import botkit_settings
from botkit.clients.client import IClient
from botkit.utils.botkit_logging.setup import create_logger
-from botkit.views.botkit_context import Context
+from botkit.botkit_context import Context
START_WITH_UUID4_ARG_REGEX = re.compile(r"^/start ([0-9a-f-]{36})$", re.MULTILINE)
@@ -63,4 +63,4 @@ async def handle(self, client: IClient, message: Message) -> Union[bool, Any]:
@cached_property
def callback_manager(self) -> ICallbackStore:
- return Container().get_object(ICallbackStore, botkit_settings.callback_manager_qualifier)
+ return Container().get_object(ICallbackStore, botkit_settings.callback_store_qualifier)
diff --git a/botkit/dispatching/dispatcher.py b/botkit/dispatching/dispatcher.py
index 77b3759..6ccce64 100644
--- a/botkit/dispatching/dispatcher.py
+++ b/botkit/dispatching/dispatcher.py
@@ -9,7 +9,7 @@
from botkit.core.modules import Module
from botkit.dispatching.deeplinkstartactiondispatcher import DeepLinkStartActionDispatcher
from botkit.routing.route import RouteHandler
-from botkit.routing.update_types.updatetype import UpdateType
+from tgtypes.updatetype import UpdateType
from botkit.clients.client import IClient
from botkit.utils.botkit_logging.setup import create_logger
diff --git a/botkit/dispatching/scopes.py b/botkit/dispatching/scopes.py
new file mode 100644
index 0000000..f5fa1d2
--- /dev/null
+++ b/botkit/dispatching/scopes.py
@@ -0,0 +1,9 @@
+from injector import Scope, ScopeDecorator
+
+
+class PerUpdateScope(Scope):
+ def get(self, key, provider):
+ return provider
+
+
+per_update = ScopeDecorator(PerUpdateScope)
diff --git a/botkit/dispatching/update_field_extractor.py b/botkit/dispatching/update_field_extractor.py
new file mode 100644
index 0000000..bb0a46a
--- /dev/null
+++ b/botkit/dispatching/update_field_extractor.py
@@ -0,0 +1,135 @@
+import re
+import traceback
+from botkit.tghelpers.entities.message_entities import ParsedEntity, MessageEntityType
+from dataclasses import dataclass
+from typing import *
+
+import pyrogram.types
+from pyrogram.types import Update
+
+from tgtypes.protocols.chat import Chat
+from tgtypes.identities.chat_identity import ChatIdentity
+from tgtypes.identities.message_identity import MessageIdentity
+
+from botkit.tghelpers.entities.message_entities import (
+ MessageEntityType,
+ ParsedEntity,
+ parse_entities,
+)
+from botkit.clients.client import IClient
+
+
+@dataclass
+class UpdateFieldExtractor:
+ update: Update
+ client: Union["IClient", Any]
+
+ @property
+ def chat(self) -> Optional[Chat]:
+ if hasattr(self.update, "chat"):
+ return self.update.chat
+ if message := getattr(self.update, "message", None):
+ return getattr(message, "chat", None)
+ return None
+
+ @property
+ def chat_identity(self) -> Optional[ChatIdentity]:
+ return ChatIdentity.from_chat_and_user(self.chat, self.user, self.client.own_user_id)
+
+ @property
+ def user(self) -> Optional[pyrogram.types.User]:
+ if isinstance(self.update, pyrogram.types.Message):
+ return self.update.from_user
+ return None
+
+ @property
+ def chat_id(self) -> Optional[int]:
+ return chat.id if (chat := self.chat) else None
+
+ @property
+ def user_id(self) -> Optional[int]:
+ return user.id if (user := self.user) else None
+
+ @property
+ def message_identity(self) -> Optional[MessageIdentity]:
+ return MessageIdentity.from_update(self.update)
+
+ @property
+ def message_id(self) -> Optional[Union[int, str]]:
+ return descriptor.message_id if (descriptor := self.message_identity) else None
+
+ @property
+ def message_text(self) -> Optional[str]:
+ if hasattr(self.update, "text"):
+ return self.update.text
+
+ @property
+ def command_name(self) -> Optional[str]:
+ """
+ Returns the name of the command without the leading slash or `None` if the update is not a command.
+ """
+ if hasattr(self.update, "command"): # Pyrogram
+ return self.update.command[0]
+
+ @property
+ def command_args(self) -> Optional[List[str]]:
+ if hasattr(self.update, "command"): # Pyrogram
+ if len(self.update.command) == 1:
+ return []
+ return self.update.command[1:]
+
+ @property
+ def command_arg_str(self) -> Optional[str]:
+ """
+ Returns everything after the /command as a string.
+ """
+ return " ".join(self.command_args) if self.command_args else None
+
+ @property
+ def replied_to_message(self) -> Optional[pyrogram.types.Message]:
+ # TODO: turn into protocols
+ if isinstance(self.update, pyrogram.types.Message):
+ return self.update.reply_to_message
+
+ @property
+ def replied_to_message_text(self) -> Optional[str]:
+ if isinstance(self.update, pyrogram.types.Message):
+ if replied_to := self.update.reply_to_message:
+ return replied_to.text
+ return None
+
+ quoted = replied_to_message
+ quoted_text = replied_to_message_text
+
+ @property
+ def replied_to_message_id(self) -> Optional[int]:
+ return reply_msg.message_id if (reply_msg := self.replied_to_message) else None
+
+ @property
+ def command_args_or_quote(self) -> Optional[str]:
+ """ Prefers the command arguments over the replied-to message text, or None if neither is present. """
+ return self.command_arg_str or self.replied_to_message_text
+
+ @property
+ def matches(self) -> Optional[List[re.Match]]:
+ if hasattr(self.update, "matches"):
+ return self.update.matches
+
+ @property
+ def entities(self) -> List[ParsedEntity]:
+ try:
+ # noinspection PydanticTypeChecker
+ return parse_entities(self.update)
+ except Exception:
+ traceback.print_exc()
+ return []
+
+ def filter_entities(
+ self, only: Union[List[MessageEntityType], MessageEntityType]
+ ) -> List[ParsedEntity]:
+ try:
+ # noinspection PydanticTypeChecker
+ return parse_entities(self.update, types=only)
+ except Exception:
+ traceback.print_exc()
+ return []
diff --git a/botkit/models/_interfaces.py b/botkit/models/_interfaces.py
index 45c675d..0aaa6fe 100644
--- a/botkit/models/_interfaces.py
+++ b/botkit/models/_interfaces.py
@@ -1,6 +1,6 @@
from abc import ABC, abstractmethod
-from botkit.views.botkit_context import Context
+from botkit.botkit_context import Context
from typing import (
Any,
Callable,
diff --git a/botkit/models/_statemodel.py b/botkit/models/_statemodel.py
index 91cac7d..41387f2 100644
--- a/botkit/models/_statemodel.py
+++ b/botkit/models/_statemodel.py
@@ -3,7 +3,7 @@
from pydantic import BaseModel
from ._interfaces import IGatherer
-from botkit.views.botkit_context import Context
+from botkit.botkit_context import Context
class StateModel(BaseModel, IGatherer, ABC):
diff --git a/botkit/persistence/callback_store/__init__.py b/botkit/persistence/callback_store/__init__.py
index 14447ba..aae504a 100644
--- a/botkit/persistence/callback_store/__init__.py
+++ b/botkit/persistence/callback_store/__init__.py
@@ -1,12 +1,18 @@
-from ._base import ICallbackStore, CallbackActionContext
+from typing import Union
-# TODO: add proper try except for opt-in install of redis for callback management
-import redis_collections
+from injector import Binder, inject
-from ._redis import RedisCallbackStore
+from ._base import CallbackActionContext, ICallbackStore
from ._local import MemoryDictCallbackStore
+from ._redis import RedisCallbackStore, RedisClientUnavailableException
from ._simple import create_callback, lookup_callback
+try:
+ from redis import Redis
+except ImportError:
+ pass
+
+from botkit import botkit_settings
__all__ = [
"ICallbackStore",
@@ -15,3 +21,27 @@
"lookup_callback",
"create_callback",
]
+
+
+ran = [] # TODO: hack
+
+
+def configure_callback_store(binder: Binder) -> None:
+ @inject
+ def select_callback_store_impl(
+ redis: RedisCallbackStore, memory: MemoryDictCallbackStore
+ ) -> Union[RedisCallbackStore, MemoryDictCallbackStore]:
+ if botkit_settings.callback_store_qualifier == "redis":
+ if not ran:
+ redis.remove_outdated(botkit_settings.callbacks_ttl_days)
+ ran.append(True)
+ return redis
+
+ if botkit_settings.callback_store_qualifier == "memory":
+ return memory
+
+ return memory
+
+ binder.bind(MemoryDictCallbackStore)
+ binder.bind(RedisCallbackStore)
+ binder.bind(ICallbackStore, to=select_callback_store_impl)
diff --git a/botkit/persistence/callback_store/_base.py b/botkit/persistence/callback_store/_base.py
index 8e555af..b4aab93 100644
--- a/botkit/persistence/callback_store/_base.py
+++ b/botkit/persistence/callback_store/_base.py
@@ -10,7 +10,7 @@
from pydantic import BaseModel, Field
from botkit.dispatching.types import CallbackActionType
-from botkit.routing.update_types.updatetype import UpdateType
+from tgtypes.updatetype import UpdateType
TViewState = TypeVar("TViewState")
diff --git a/botkit/persistence/callback_store/_redis.py b/botkit/persistence/callback_store/_redis.py
index a6077ed..734731f 100644
--- a/botkit/persistence/callback_store/_redis.py
+++ b/botkit/persistence/callback_store/_redis.py
@@ -1,7 +1,9 @@
import logging
from pprint import pprint
-from botkit.settings import botkit_settings
+from injector import NoInject, inject
+
+from ... import botkit_settings
from ...utils.botkit_logging.setup import create_logger
from haps import Container, SINGLETON_SCOPE, egg, scope
@@ -29,37 +31,21 @@ class RedisClientUnavailableException(Exception):
pass
-@egg("redis")
-@scope(SINGLETON_SCOPE)
-def create_redis_callback_manager() -> ICallbackStore:
- try:
- redis = Container().get_object(Redis)
- except Exception as e:
- raise RedisClientUnavailableException(
- "If `redis` is chosen as the qualifier for the botkit callback manager, "
- "you must provide an instantiated `Redis` client to the dependency "
- "injection. Refer to the `callback_manager_qualifier` setting documentation."
- ) from e
- redis_cbm = RedisCallbackStore(redis, "callbacks", maxsize=10)
- redis_cbm.remove_outdated(botkit_settings.callbacks_ttl_days)
- return redis_cbm
-
-
class RedisCallbackStore(ICallbackStore):
"""
# TODO: Try use json instead of pickled dicts? https://github.com/honzajavorek/redis-collections/issues/122
# TODO: Force pydantic models?
"""
+ @inject
def __init__(
self,
redis_client: Redis,
key: str = "callbacks",
- storage_type: Literal["lru", "normal"] = "normal",
+ storage_type: NoInject[Literal["lru", "normal"]] = "normal",
maxsize: int = 2000,
):
"""
-
:param redis_client:
:type redis_client:
:param key:
@@ -69,6 +55,7 @@ def __init__(
:param maxsize: Ignored if storage_type is "normal".
:type maxsize:
"""
+
# TODO: Add documentation that LRU should be used in production
if storage_type == "lru":
self.callbacks: LRUDict[str, Dict] = LRUDict(
diff --git a/botkit/persistence/data_store/data_store_base.py b/botkit/persistence/data_store/data_store_base.py
index 2872b8c..e6631f3 100644
--- a/botkit/persistence/data_store/data_store_base.py
+++ b/botkit/persistence/data_store/data_store_base.py
@@ -3,7 +3,7 @@
from haps import base
-from botkit.views.botkit_context import Context
+from botkit.botkit_context import Context
from tgtypes.identities.chat_identity import ChatIdentity
from tgtypes.identities.message_identity import MessageIdentity
diff --git a/botkit/persistence/data_store/memory_data_store.py b/botkit/persistence/data_store/memory_data_store.py
index 0192cde..d6774fe 100644
--- a/botkit/persistence/data_store/memory_data_store.py
+++ b/botkit/persistence/data_store/memory_data_store.py
@@ -4,7 +4,7 @@
from haps import SINGLETON_SCOPE, egg, scope
from botkit.persistence.data_store import DataStoreBase
-from botkit.views.botkit_context import Context
+from botkit.botkit_context import Context
from tgtypes.identities.chat_identity import ChatIdentity
from tgtypes.identities.message_identity import MessageIdentity
diff --git a/botkit/routing/pipelines/collector.py b/botkit/routing/pipelines/collector.py
index 91619d9..3faa265 100644
--- a/botkit/routing/pipelines/collector.py
+++ b/botkit/routing/pipelines/collector.py
@@ -1,5 +1,5 @@
from typing import Awaitable, Callable, Union
-from botkit.views.botkit_context import Context
+from botkit.botkit_context import Context
CollectorSignature = Callable[[Context], Union[None, Awaitable[None]]]
diff --git a/botkit/routing/pipelines/executionplan.py b/botkit/routing/pipelines/executionplan.py
index e7bd4f3..bdbb167 100644
--- a/botkit/routing/pipelines/executionplan.py
+++ b/botkit/routing/pipelines/executionplan.py
@@ -22,11 +22,11 @@
)
from botkit.routing.route_builder.types import TView
from botkit.routing.update_types.update_type_inference import infer_update_types
-from botkit.routing.update_types.updatetype import UpdateType
+from tgtypes.updatetype import UpdateType
from botkit.clients.client import IClient
from botkit.utils.botkit_logging.setup import create_logger
from botkit.utils.typed_callable import TypedCallable
-from botkit.views.botkit_context import Context
+from botkit.botkit_context import Context
from botkit.views.functional_views import ViewRenderFuncSignature
from botkit.views.views import MessageViewBase
@@ -87,7 +87,8 @@ class ViewParameters:
send_from: Optional[IClient] = None
"""
- The (user) client instance to send the response from.
+ The (user) client instance to send the response from. Will use the currently-interacting client, so this value
+ only needs to be given in special cases.
"""
send_via_bot: Optional[IClient] = None
diff --git a/botkit/routing/pipelines/factory_types.py b/botkit/routing/pipelines/factory_types.py
index 2fcc9f1..2e54c87 100644
--- a/botkit/routing/pipelines/factory_types.py
+++ b/botkit/routing/pipelines/factory_types.py
@@ -17,7 +17,7 @@
from boltons.typeutils import classproperty
-from botkit.routing.update_types.updatetype import UpdateType
+from tgtypes.updatetype import UpdateType
from botkit.utils.typed_callable import TypedCallable
TViewState = TypeVar("TViewState")
diff --git a/botkit/routing/pipelines/gatherer.py b/botkit/routing/pipelines/gatherer.py
index 329d251..578897e 100644
--- a/botkit/routing/pipelines/gatherer.py
+++ b/botkit/routing/pipelines/gatherer.py
@@ -3,7 +3,7 @@
from botkit.routing.types import TViewState
# noinspection PyMissingTypeHints
-from botkit.views.botkit_context import Context
+from botkit.botkit_context import Context
GathererSignature = Union[
Callable[[], Union[Any, Awaitable[TViewState]]],
diff --git a/botkit/routing/pipelines/reducer.py b/botkit/routing/pipelines/reducer.py
index 3d6838d..2d02ef6 100644
--- a/botkit/routing/pipelines/reducer.py
+++ b/botkit/routing/pipelines/reducer.py
@@ -2,7 +2,7 @@
from typing import Awaitable, Callable, Union, List
from botkit.routing.types import TViewState
-from botkit.views.botkit_context import Context
+from botkit.botkit_context import Context
ReducerSignature = Union[
Callable[[TViewState, Context], Union[TViewState, Awaitable[TViewState]]],
diff --git a/botkit/routing/pipelines/steps/call_step_factory.py b/botkit/routing/pipelines/steps/call_step_factory.py
index 2e69544..b2adcb4 100644
--- a/botkit/routing/pipelines/steps/call_step_factory.py
+++ b/botkit/routing/pipelines/steps/call_step_factory.py
@@ -6,7 +6,7 @@
from botkit.routing.pipelines.steps._base import StepError
from botkit.utils.typed_callable import TypedCallable
from botkit.views.base import ModelViewBase
-from botkit.views.botkit_context import Context
+from botkit.botkit_context import Context
class HandleStepError(StepError[HandlerSignature]):
diff --git a/botkit/routing/pipelines/steps/commit_rendered_view_step_factory.py b/botkit/routing/pipelines/steps/commit_rendered_view_step_factory.py
index dfde732..d9ce09b 100644
--- a/botkit/routing/pipelines/steps/commit_rendered_view_step_factory.py
+++ b/botkit/routing/pipelines/steps/commit_rendered_view_step_factory.py
@@ -6,10 +6,10 @@
from botkit.routing.pipelines.executionplan import SendTarget, SendTo, ViewParameters
from botkit.routing.pipelines.factory_types import IStepFactory
from botkit.routing.pipelines.steps._base import StepError
-from botkit.routing.update_types.updatetype import UpdateType
+from tgtypes.updatetype import UpdateType
from botkit.services.companionbotservice import CompanionBotService
from botkit.utils.botkit_logging.setup import create_logger
-from botkit.views.botkit_context import Context
+from botkit.botkit_context import Context
from tgtypes.identities.chat_identity import ChatIdentity
from tgtypes.identities.message_identity import MessageIdentity
@@ -41,7 +41,7 @@ def create_step(cls, view_params: ViewParameters):
async def send_view(context: Context) -> None:
try:
- client = view_params.send_from if view_params.send_from else context.client
+ client = view_params.send_from or context.client
target = evaluate_send_target(send_target, context)
reply_log = (
diff --git a/botkit/routing/pipelines/steps/gather_step_factory.py b/botkit/routing/pipelines/steps/gather_step_factory.py
index 3f5a694..beaccda 100644
--- a/botkit/routing/pipelines/steps/gather_step_factory.py
+++ b/botkit/routing/pipelines/steps/gather_step_factory.py
@@ -9,10 +9,10 @@
)
from botkit.routing.pipelines.steps._base import StepError
from botkit.routing.pipelines.steps.helpers.state_generators import update_view_state_if_applicable
-from botkit.routing.update_types.updatetype import UpdateType
+from tgtypes.updatetype import UpdateType
from botkit.utils.botkit_logging.setup import create_logger
from botkit.utils.typed_callable import TypedCallable
-from botkit.views.botkit_context import Context
+from botkit.botkit_context import Context
class GatherStepError(StepError[GathererSignature]):
diff --git a/botkit/routing/pipelines/steps/helpers/state_generators.py b/botkit/routing/pipelines/steps/helpers/state_generators.py
index 28e2a90..c86f930 100644
--- a/botkit/routing/pipelines/steps/helpers/state_generators.py
+++ b/botkit/routing/pipelines/steps/helpers/state_generators.py
@@ -1,7 +1,7 @@
from typing import Any
from botkit.utils.botkit_logging.setup import create_logger
-from botkit.views.botkit_context import Context
+from botkit.botkit_context import Context
log = create_logger("state_generation")
diff --git a/botkit/routing/pipelines/steps/initialize_context_step.py b/botkit/routing/pipelines/steps/initialize_context_step.py
index 23aa624..7b27101 100644
--- a/botkit/routing/pipelines/steps/initialize_context_step.py
+++ b/botkit/routing/pipelines/steps/initialize_context_step.py
@@ -8,9 +8,9 @@
Optional,
)
-from botkit.routing.update_types.updatetype import UpdateType
+from tgtypes.updatetype import UpdateType
from botkit.utils.botkit_logging.setup import create_logger
-from botkit.views.botkit_context import Context
+from botkit.botkit_context import Context
class InitializeContextStep(IPipelineStep):
diff --git a/botkit/routing/pipelines/steps/invoke_component_step_factory.py b/botkit/routing/pipelines/steps/invoke_component_step_factory.py
index b117f6d..fd9b735 100644
--- a/botkit/routing/pipelines/steps/invoke_component_step_factory.py
+++ b/botkit/routing/pipelines/steps/invoke_component_step_factory.py
@@ -4,7 +4,7 @@
from botkit.routing.pipelines.factory_types import IStepFactory
from botkit.routing.pipelines.steps._base import StepError
from botkit.utils.botkit_logging.setup import create_logger
-from botkit.views.botkit_context import Context
+from botkit.botkit_context import Context
class InvokeComponentStepError(StepError):
diff --git a/botkit/routing/pipelines/steps/remove_trigger_step_factory.py b/botkit/routing/pipelines/steps/remove_trigger_step_factory.py
index e9ba6b1..8e58111 100644
--- a/botkit/routing/pipelines/steps/remove_trigger_step_factory.py
+++ b/botkit/routing/pipelines/steps/remove_trigger_step_factory.py
@@ -2,9 +2,9 @@
from botkit.routing.pipelines.executionplan import RemoveTrigger, RemoveTriggerParameters
from botkit.routing.pipelines.factory_types import IStepFactory
-from botkit.routing.update_types.updatetype import UpdateType
+from tgtypes.updatetype import UpdateType
from botkit.utils.botkit_logging.setup import create_logger
-from botkit.views.botkit_context import Context
+from botkit.botkit_context import Context
class RemoveTriggerStepFactory(
diff --git a/botkit/routing/pipelines/steps/render_view_step_factory.py b/botkit/routing/pipelines/steps/render_view_step_factory.py
index e572f01..af12713 100644
--- a/botkit/routing/pipelines/steps/render_view_step_factory.py
+++ b/botkit/routing/pipelines/steps/render_view_step_factory.py
@@ -8,10 +8,10 @@
from botkit.routing.pipelines.executionplan import ViewParameters
from botkit.routing.pipelines.factory_types import IStepFactory
from botkit.routing.pipelines.steps._base import StepError
-from botkit.routing.update_types.updatetype import UpdateType
-from botkit.settings import botkit_settings
+from tgtypes.updatetype import UpdateType
+from botkit import botkit_settings
from botkit.utils.botkit_logging.setup import create_logger
-from botkit.views.botkit_context import Context
+from botkit.botkit_context import Context
from botkit.views.functional_views import (
quacks_like_view_render_func,
render_functional_view,
@@ -46,7 +46,7 @@ def create_step(cls, view_params: ViewParameters):
)
callback_store = Container().get_object(
- ICallbackStore, botkit_settings.callback_manager_qualifier
+ ICallbackStore, botkit_settings.callback_store_qualifier
)
log = create_logger("renderer")
diff --git a/botkit/routing/pipelines/updates/update_pipeline_factory.py b/botkit/routing/pipelines/updates/update_pipeline_factory.py
index cc3145f..25bd377 100644
--- a/botkit/routing/pipelines/updates/update_pipeline_factory.py
+++ b/botkit/routing/pipelines/updates/update_pipeline_factory.py
@@ -20,10 +20,10 @@
from botkit.routing.pipelines.steps.remove_trigger_step_factory import RemoveTriggerStepFactory
from botkit.routing.pipelines.steps.render_view_step_factory import RenderViewStepFactory
from botkit.routing.triggers import RouteTriggers
-from botkit.routing.update_types.updatetype import UpdateType
+from tgtypes.updatetype import UpdateType
from botkit.clients.client import IClient
from botkit.utils.botkit_logging.setup import create_logger
-from botkit.views.botkit_context import Context
+from botkit.botkit_context import Context
class UpdatePipelineFactory:
diff --git a/botkit/routing/pipelines_v2/IDEAS.md b/botkit/routing/pipelines_v2/IDEAS.md
new file mode 100644
index 0000000..9f6336b
--- /dev/null
+++ b/botkit/routing/pipelines_v2/IDEAS.md
@@ -0,0 +1,21 @@
+"""
+# Inspiration
+
+## Middleware & Pipelines:
+
+- [Redux stuff](https://blog.krawaller.se/posts/exploring-redux-middleware/)
+- https://github.com/robdmc/consecution/tree/master
+- [Middleware order](https://docs.microsoft.com/en-us/aspnet/core/fundamentals/middleware/?view=aspnetcore-5.0#middleware-order)
+- https://blog.ploeh.dk/2019/02/11/asynchronous-injection/
+- https://github.com/trooba/trooba
+- https://github.com/jondot/formation
+
+## State change immutability:
+
+- https://redux.js.org/recipes/structuring-reducers/immutable-update-patterns
+- https://www.freecodecamp.org/news/async-generators-as-an-alternative-to-state-management/
+
+# TODO
+
+- Define mutable and immutable fields in context
+"""
diff --git a/botkit/routing/pipelines_v2/__init__.py b/botkit/routing/pipelines_v2/__init__.py
new file mode 100644
index 0000000..d7eaa32
--- /dev/null
+++ b/botkit/routing/pipelines_v2/__init__.py
@@ -0,0 +1,116 @@
+import inspect
+from typing import Any, List, Type, TypeVar, Union
+
+from injector import (
+ Binder,
+ CallableProvider,
+ Injector,
+ InstanceProvider,
+ Module,
+ SingletonScope,
+ inject,
+ multiprovider,
+)
+
+from botkit.botkit_context import Context
+from botkit.routing.pipelines_v2.base._abstractions import (
+ Middleware,
+ MiddlewareChainer,
+ NextDelegate,
+ chain_middleware,
+)
+from botkit.routing.pipelines_v2.base.middleware import BaseMiddleware
+from botkit.routing.pipelines_v2.eventpipeline import EventPipeline
+from botkit.routing.pipelines_v2.middleware.gather_step_factory import GathererMiddleware
+from botkit.routing.pipelines_v2.middleware.pydepend import Dependency
+
+T = TypeVar("T")
+ClassOrInstance = Union[T, Type[T]]
+
+
+class EventPipelinesModule(Module):
+ def configure(self, binder: Binder):
+ binder.bind(EventPipeline)
+ binder.bind(
+ MiddlewareChainer, InstanceProvider(chain_middleware), SingletonScope # type: ignore
+ )
+
+
+def register_middleware(
+ binder: Binder,
+ middleware: ClassOrInstance[Middleware],
+ depends_on: List[ClassOrInstance[Middleware]],
+):
+ injector = binder.injector.get(Injector)
+
+ if inspect.isclass(middleware):
+ binder.bind(middleware, scope=SingletonScope)
+ binder.multibind(
+ List[Dependency[Middleware]],
+ CallableProvider(
+ lambda: [
+ Dependency(
+ injector.get(middleware),
+ deps=[Dependency(injector.get(d)) for d in depends_on],
+ )
+ ]
+ ),
+ scope=SingletonScope,
+ )
+ elif callable(middleware):
+ binder.bind(middleware, InstanceProvider(middleware), SingletonScope)
+ binder.multibind(
+ List[Dependency[Middleware]],
+ CallableProvider(
+ lambda: [
+ Dependency(
+ injector.get(middleware),
+ deps=[Dependency(injector.get(d)) for d in depends_on],
+ )
+ ]
+ ),
+ scope=SingletonScope,
+ )
+
+
+class A(BaseMiddleware):
+ async def __call__(self, context: Context, call_next: NextDelegate[Context]) -> Any:
+ pass
+
+
+async def my_middleware(context: Context, call_next: NextDelegate[Context]) -> Any:
+ pass
+
+
+class PrebuiltMiddlewareModule(Module):
+ def configure(self, binder: Binder) -> None:
+ register_middleware(binder, GathererMiddleware, [A])
+ register_middleware(binder, A, [])
+ register_middleware(binder, my_middleware, [])
+
+ @multiprovider
+ def list_middleware_ordered(
+ self, all_dependencies: List[Dependency[Middleware]]
+ ) -> List[Middleware[Context]]:
+ print([x.direct_deps for x in all_dependencies])
+ return [(x, x.ordered_deps) for x in all_dependencies]
+ # result = []
+ # for x in all_dependencies:
+ # deps = x.ordered_deps
+ #
+ # if x in result:
+ # pass
+ #
+ # else:
+
+
+class RoutingModule(Module):
+ def configure(self, binder: Binder) -> None:
+ binder.bind(Injector, binder.injector)
+ binder.install(EventPipelinesModule)
+ binder.install(PrebuiltMiddlewareModule)
+
+
+if __name__ == "__main__":
+ inj = Injector([PrebuiltMiddlewareModule])
+ print(inj.get(List[Middleware[Context]]))
diff --git a/botkit/routing/pipelines_v2/base/__init__.py b/botkit/routing/pipelines_v2/base/__init__.py
new file mode 100644
index 0000000..e69de29
diff --git a/botkit/routing/pipelines_v2/base/_abstractions.py b/botkit/routing/pipelines_v2/base/_abstractions.py
new file mode 100644
index 0000000..ce2d6ff
--- /dev/null
+++ b/botkit/routing/pipelines_v2/base/_abstractions.py
@@ -0,0 +1,92 @@
+import inspect
+from abc import ABC, abstractmethod
+from functools import reduce, wraps
+from typing import Any, Awaitable, Callable, Coroutine, Generic, List, Protocol, TypeVar, cast
+
+from pydantic import BaseModel
+
+TContext = TypeVar("TContext")
+
+NextDelegate = Callable[[TContext], Coroutine[Any, Any, None]]
+
+
+# MiddlewareSignature = Callable[[TContext, NextDelegate[TContext]], Coroutine[Any, Any, Any]]
+
+
+class Middleware(Protocol[TContext]):
+ def __call__(
+ self, context: TContext, call_next: NextDelegate[TContext]
+ ) -> Coroutine[Any, Any, Any]:
+ ...
+
+
+class AbstractGenericMiddleware(ABC, Generic[TContext], Middleware[TContext]):
+ @abstractmethod
+ def __call__(
+ self, context: TContext, call_next: NextDelegate[TContext]
+ ) -> Coroutine[Any, Any, Any]:
+ ...
+
+
+# region Middleware chaining
+
+MiddlewareChainer = Callable[
+ [NextDelegate[TContext], List[Middleware[TContext]]], NextDelegate[TContext]
+]
+"""
+Type annotation for a function that can be used to build a chain of middleware, with the
+result being a single callback that executes all of the `delegates` recursively and the `bottom` last.
+"""
+
+
+def chain_middleware(
+ bottom: NextDelegate[TContext], delegates: List[Middleware[TContext]]
+) -> NextDelegate[TContext]:
+ def wrap_next(
+ acc: NextDelegate[TContext], nxt: Middleware[TContext]
+ ) -> NextDelegate[TContext]:
+ @wraps(nxt)
+ async def await_and_next(ctx: TContext) -> None:
+ try:
+ res = nxt(ctx, acc)
+ if inspect.isawaitable(res):
+ await cast(Awaitable[Any], res)
+ except TypeError as te:
+ if "missing 1 required positional argument" not in str(te):
+ raise  # unrelated TypeError — don't swallow it
+ raise TypeError(
+ "Next middleware in pipeline has been called with incorrect arguments. "
+ "Make sure to pass on the context."
+ ) from te
+
+ return await_and_next
+
+ return reduce(wrap_next, reversed(delegates or []), bottom)
+
+
+# endregion
+
+# region Lifecycle
+
+
+class LifecycleEvent(BaseModel):
+ pass
+
+
+class LifecycleEventHandler(AbstractGenericMiddleware[LifecycleEvent], ABC):
+ ...
+
+
+# endregion
+
+# region Event pipeline
+
+
+class PipelineContext(BaseModel):
+ pass
+
+
+class UpdatePipelineStep(AbstractGenericMiddleware[PipelineContext], ABC):
+ pass
+
+
+# endregion
diff --git a/botkit/routing/pipelines_v2/base/middleware.py b/botkit/routing/pipelines_v2/base/middleware.py
new file mode 100644
index 0000000..dc6c195
--- /dev/null
+++ b/botkit/routing/pipelines_v2/base/middleware.py
@@ -0,0 +1,26 @@
+from abc import ABC, abstractmethod
+from typing import Any, Coroutine, Set
+
+from botkit.botkit_context import Context
+from botkit.routing.pipelines_v2.base._abstractions import (
+ NextDelegate,
+ TContext,
+ AbstractGenericMiddleware,
+)
+from tgtypes.updatetype import UpdateType
+
+
+class BaseMiddleware(AbstractGenericMiddleware[Context], ABC):
+ @abstractmethod
+ async def __call__(self, context: Context, call_next: NextDelegate[Context]) -> Any:
+ ...
+
+
+EventType = UpdateType # for now..
+
+
+class ConditionalMiddleware(BaseMiddleware, ABC):
+ @property
+ @abstractmethod
+ def applicable_event_types(self) -> Set[EventType]:
+ ...
diff --git a/botkit/routing/pipelines_v2/base/named_dependencies.py b/botkit/routing/pipelines_v2/base/named_dependencies.py
new file mode 100644
index 0000000..c038b69
--- /dev/null
+++ b/botkit/routing/pipelines_v2/base/named_dependencies.py
@@ -0,0 +1,47 @@
+import inspect
+from dataclasses import dataclass
+from functools import reduce
+from typing import Any, Awaitable, Callable, Generic, List, Type, TypeVar, Union, cast
+
+from injector import Binder, Injector, _infer_injected_bindings, inject
+
+from botkit.routing.pipelines_v2.base.middleware import (
+ MiddlewareSignature,
+ NextDelegate,
+)
+from botkit.routing.pipelines_v2.base.scopes import EventScope
+
+T = TypeVar("T")
+
+
+class _NamedDependency:
+ pass
+
+
+class Named(Generic[T]):
+ def __class_getitem__(cls, dependency: T, name: str) -> T:
+ try:
+ bindings = _infer_injected_bindings(function, only_explicit_bindings=False)
+ read_and_store_bindings(function, bindings)
+ except _BindingNotYetAvailable:
+ cast(Any, function).__bindings__ = "deferred"
+
+ dependency.__
+
+
+class Foo:
+ pass
+
+
+class Bar:
+ @inject
+ def __init__(self):
+ pass
+
+
+def configure(binder: Binder):
+ binder.bind(Named[Foo])
+
+
+inj = Injector()
+inj.get(Named[Abc, "your_mome"])
diff --git a/botkit/routing/pipelines_v2/base/scopes.py b/botkit/routing/pipelines_v2/base/scopes.py
new file mode 100644
index 0000000..35aa460
--- /dev/null
+++ b/botkit/routing/pipelines_v2/base/scopes.py
@@ -0,0 +1,37 @@
+from types import TracebackType
+from typing import Any, AsyncContextManager, Awaitable, Dict, Generic, Optional, Type, TypeVar
+from unittest.mock import Mock
+
+from injector import Injector, InstanceProvider, Provider, Scope, ScopeDecorator
+
+
+T = TypeVar("T")
+TEventContext = TypeVar("TEventContext")
+
+
+class EventScope(Scope, Generic[TEventContext]):
+ """
+ TODO: Make it a context manager and free up instantiated resources after the event pipeline is done.
+ """
+
+ REGISTRY_KEY = "UpdateScopeRegistry"
+
+ _instances: Dict[Type, Provider]
+
+ def __init__(self, injector: Injector, context: TEventContext):
+ super().__init__(injector)
+ self._event_context = context
+
+ def configure(self) -> None:
+ self._instances = {}
+
+ def get(self, key: Type[T], provider: Provider[T]) -> Provider[T]:
+ try:
+ return self._instances[key]
+ except KeyError:
+ provider = InstanceProvider(provider.get(self.injector))
+ self._instances[key] = provider
+ return provider
+
+
+update_scope = ScopeDecorator(EventScope)
diff --git a/botkit/routing/pipelines_v2/context_initializer.py b/botkit/routing/pipelines_v2/context_initializer.py
new file mode 100644
index 0000000..9c6a142
--- /dev/null
+++ b/botkit/routing/pipelines_v2/context_initializer.py
@@ -0,0 +1,60 @@
+import asyncio
+
+from loguru._logger import Logger
+
+from botkit.agnostic.annotations import IClient
+from botkit.botkit_context import Context
+from botkit.persistence.data_store import DataStoreBase
+from botkit.routing.pipelines.factory_types import IPipelineStep
+from typing import (
+ Any,
+ Optional,
+)
+
+from tgtypes.models import Update
+from tgtypes.updatetype import UpdateType
+from botkit.utils.botkit_logging.setup import create_logger
+from injector import Binder, Provider, inject, provider, Module, Injector, multiprovider, singleton
+
+
+class ContextInitializer:
+ @inject
+ def __init__(self, client: IClient, data_store: DataStoreBase, log: Logger):
+ self.client = client
+ self.data_store = data_store
+ self.log = log
+
+ async def __call__(self, event_type: UpdateType, event_data: Update) -> Context:
+ update = event_data
+
+ context = Context(
+ client=self.client,
+ update=update,
+ update_type=event_type,  # was hardcoded to UpdateType.message; the caller already supplies it
+ view_state=None,
+ )
+
+ await self.fill_context_data(context)
+
+ if context.message_state:
+ self.log.debug(f"Carrying message_state of type {type(context.message_state)}")
+ if context.user_state:
+ self.log.debug(f"Carrying user_state of type {type(context.user_state)}")
+ if context.chat_state:
+ self.log.debug(f"Carrying chat_state of type {type(context.chat_state)}")
+
+ return context
+
+ async def fill_context_data(self, context: Context):
+ tasks = [
+ self.data_store.retrieve_user_data(context.user_id),
+ self.data_store.retrieve_chat_data(context.chat_identity),
+ self.data_store.retrieve_message_data(context.message_identity),
+ ]
+ res = await asyncio.gather(*tasks)
+
+ user_data, chat_data, message_data = res
+
+ context.user_state = user_data
+ context.chat_state = chat_data
+ context.message_state = message_data
diff --git a/botkit/routing/pipelines_v2/eventpipeline.py b/botkit/routing/pipelines_v2/eventpipeline.py
new file mode 100644
index 0000000..826c753
--- /dev/null
+++ b/botkit/routing/pipelines_v2/eventpipeline.py
@@ -0,0 +1,51 @@
+import inspect
+from dataclasses import dataclass
+from functools import reduce, wraps
+from typing import Any, Awaitable, Callable, Generic, List, Protocol, Type, TypeVar, Union, cast
+
+from injector import Injector, inject
+
+from botkit.routing.pipelines_v2.base._abstractions import (
+ MiddlewareChainer,
+ Middleware,
+ NextDelegate,
+ TContext,
+ chain_middleware,
+)
+from botkit.routing.pipelines_v2.base.scopes import EventScope
+from botkit.routing.pipelines_v2.context_initializer import ContextInitializer
+
+
+async def bottom_delegate(_: Any) -> None:
+ print("Reached bottom!")
+
+
+class EventPipeline(Generic[TContext]):
+ def __init__(
+ self,
+ middleware: List[Union[Middleware[TContext], Type[Middleware[TContext]]]],
+ injector: Injector,
+ context_initializer: ContextInitializer,
+ chain: MiddlewareChainer[TContext] = chain_middleware,
+ bottom_delegate: NextDelegate[TContext] = bottom_delegate,
+ ):
+ self.delegates = middleware
+ self.global_injector = injector
+ self.initialize_context = context_initializer
+ self.chain = chain
+ self.bottom_delegate = bottom_delegate
+
+ async def dispatch(self, event_type: Any, event_data: Any) -> None:
+ context = await self.initialize_context(event_type, event_data)
+ EventScope(self.global_injector, context)
+
+ instantiated_delegates = [
+ self.global_injector.get(cast(Type[Middleware[TContext]], x))
+ if inspect.isclass(x)
+ else cast(Middleware[TContext], x)
+ for x in self.delegates
+ ]
+ await self.chain(self.bottom_delegate, instantiated_delegates)(context)
+
+
+inject(EventPipeline) # type: ignore
diff --git a/botkit/routing/pipelines_v2/middleware/__init__.py b/botkit/routing/pipelines_v2/middleware/__init__.py
new file mode 100644
index 0000000..e69de29
diff --git a/botkit/routing/pipelines_v2/middleware/gather_step_factory.py b/botkit/routing/pipelines_v2/middleware/gather_step_factory.py
new file mode 100644
index 0000000..f9f3fd0
--- /dev/null
+++ b/botkit/routing/pipelines_v2/middleware/gather_step_factory.py
@@ -0,0 +1,114 @@
+from typing import Any, Coroutine, Set
+
+from injector import Injector, inject
+from unsync import Unfuture
+
+from botkit.botkit_context import Context
+from botkit.routing.pipelines.factory_types import ICallbackStepFactory, MaybeAsyncPipelineStep
+from botkit.routing.pipelines.gatherer import (
+ GathererSignature,
+ GathererSignatureExamplesStr,
+)
+from botkit.routing.pipelines.steps._base import StepError
+from botkit.routing.pipelines.steps.helpers.state_generators import update_view_state_if_applicable
+from botkit.routing.pipelines_v2.base.middleware import (
+ BaseMiddleware,
+ AbstractGenericMiddleware,
+ ConditionalMiddleware,
+ EventType,
+ NextDelegate,
+)
+from tgtypes.updatetype import UpdateType
+from botkit.utils.botkit_logging.setup import create_logger
+from botkit.utils.typed_callable import TypedCallable
+
+
+class GatherStepError(StepError[GathererSignature]):
+ pass
+
+
+class GathererMiddleware(ConditionalMiddleware):
+ @inject
+ def __init__(self, injector: Injector):
+ self.injector = injector
+
+ @property
+ def applicable_event_types(self) -> Set[EventType]:
+ return {
+ UpdateType.message,
+ UpdateType.callback_query,
+ UpdateType.inline_query,
+ UpdateType.poll,
+ UpdateType.user_status,
+ }
+
+ async def __call__(self, context: Context, call_next: NextDelegate[Context]) -> Any:
+ self.injector.get()  # FIXME(review): incomplete stub — Injector.get() requires an interface argument
+
+ @classmethod
+ def create_step(
+ cls, gatherer: TypedCallable[GathererSignature]
+ ) -> MaybeAsyncPipelineStep[GathererSignature]:
+ if gatherer is None:
+ return (None, None)
+
+ if gatherer.num_non_optional_params == 0:
+ requires_context = False
+ elif gatherer.num_non_optional_params == 1:
+ requires_context = True
+ else:
+ raise ValueError(
+ f"Invalid number of arguments for gatherer {gatherer}. "
+ f"Expected signature is: {GathererSignatureExamplesStr}"
+ )
+
+ log = create_logger("gatherer")
+
+ is_coroutine = gatherer.is_coroutine
+
+ if is_coroutine:
+
+ async def gather_initial_state_async(context: Context):
+ log.debug(f"Gathering initial state via {gatherer.name}")
+ try:
+ if requires_context:
+ result = await gatherer.func(context)
+ else:
+ result = await gatherer.func()
+
+ if isinstance(result, Unfuture):
+ result = result.result()
+
+ if update_view_state_if_applicable(result, context):
+ log.debug("Initial state gathered")
+ else:
+ log.warning(f"No initial state has been gathered by {gatherer.name}")
+ return result
+
+ except Exception as e:
+ raise GatherStepError(e)
+
+ return gather_initial_state_async, is_coroutine
+
+ else:
+
+ def gather_initial_state(context: Context):
+ log.debug(f"Gathering initial state via {gatherer.name}")
+ try:
+ if requires_context:
+ result = gatherer.func(context)
+ else:
+ result = gatherer.func()
+
+ if isinstance(result, Unfuture):
+ result = result.result()
+
+ if update_view_state_if_applicable(result, context):
+ log.debug("Initial state gathered")
+ else:
+ log.warning(f"No initial state has been gathered by {gatherer.name}")
+ return result
+ except Exception as e:
+ raise GatherStepError(e)
+
+ return gather_initial_state, is_coroutine
diff --git a/botkit/routing/pipelines_v2/middleware/pydepend.py b/botkit/routing/pipelines_v2/middleware/pydepend.py
new file mode 100644
index 0000000..64322ae
--- /dev/null
+++ b/botkit/routing/pipelines_v2/middleware/pydepend.py
@@ -0,0 +1,137 @@
+# With unlicensed gratitude from https://github.com/Flushot/pydepend
+import collections
+from typing import Generic, List, Optional, TypeVar
+
+
+class CyclicDependencyError(Exception):
+ """
+ Indicates that there was a cyclic dependency in the graph.
+ e.g. a -> b -> c -> b
+ """
+
+ def __init__(self, dep):
+ self.dep = dep
+ self.message = "Cyclic dependency on %s" % dep
+
+
+T = TypeVar("T")
+
+
+class Dependency(Generic[T]):
+ """
+ Represents wrapped object :obj: in a dependency graph.
+
+ Usage example:
+
+ >>> a = Dependency('a')
+ >>> b = Dependency('b')
+ >>> c = Dependency('c')
+ >>> d = Dependency('d')
+ >>> a.depends_on([b, c])
+ >>> b.depends_on(d)
+ >>> a.ordered_deps
+ ['d', 'b', 'c']
+ """
+
+ def __init__(self, obj: T, deps: Optional[List[T]] = None):
+ """
+ :obj: can be any object you'd like to wrap
+ """
+ self.obj = obj
+ self._deps: List[T] = list(deps or [])
+ self._ordered_dep_cache = None
+
+ def depends_on(self, dep):
+ self._ordered_dep_cache = None
+
+ if isinstance(dep, (list, tuple)):  # collections.Sequence was removed in py3.10
+ for d in dep:  # map() is lazy in py3 — the original never recursed
+ self.depends_on(d)
+ return
+
+ if not isinstance(dep, Dependency):
+ raise ValueError("dep must be another Dependency object")
+
+ if dep not in self._deps:
+ self._deps.append(dep)
+
+ @property
+ def direct_deps(self):
+ """
+ Returns a tuple of unordered, direct dependencies.
+ Does not traverse the dependency graph.
+ """
+ return tuple(self._deps)
+
+ @property
+ def ordered_deps(self):
+ """
+ Returns a tuple of ordered dependencies by traversing dependency graph.
+ Detected cycles will raise a CyclicDependencyError.
+ """
+ if self._ordered_dep_cache is not None:
+ return self._ordered_dep_cache
+
+ # DFS graph traversal
+ def _order_deps(dep, ordered, visited):
+ if dep is None or not isinstance(dep, Dependency):
+ raise ValueError("dep must be a Dependency object but is %s" % type(dep))
+ if dep in ordered:
+ raise CyclicDependencyError(dep)
+
+ if not dep.direct_deps:
+ visited.add(dep)
+ ordered.append(dep)
+ return
+
+ for parent in dep.direct_deps:
+ if parent in visited:
+ continue
+ visited.add(parent)
+ _order_deps(parent, ordered, visited)
+
+ visited.add(dep)
+ if dep in ordered:
+ raise CyclicDependencyError(dep)
+ ordered.append(dep)
+
+ self._ordered_dep_cache = []  # OrderedSet would be more ideal
+ _order_deps(self, self._ordered_dep_cache, set())
+ # Freeze as a tuple (minus the root node itself) so cached calls return the same type
+ self._ordered_dep_cache = tuple(self._ordered_dep_cache[:-1])
+ return self._ordered_dep_cache
+
+ def __lt__(self, other):
+ if other == self:
+ return False
+ else:
+ return other in self.ordered_deps
+
+ def __gt__(self, other):
+ if other == self:
+ return False
+ else:
+ return other not in self.ordered_deps
+
+ def __eq__(self, other):
+ return self.obj == other.obj
+
+ def __hash__(self):
+ return hash(self.obj)
+
+ def __iter__(self):
+ for dep in self.ordered_deps:
+ yield dep
+
+ def __contains__(self, other):
+ return other in self.ordered_deps
+
+ def __len__(self):
+ return len(self.ordered_deps)
+
+ def __repr__(self):
+ return repr(self.obj)
+
+ def __str__(self):
+ return str(self.obj)
+
+ def __unicode__(self):
+ return str(self.obj)  # py2 leftover: `unicode` is undefined on py3
diff --git a/botkit/routing/route.py b/botkit/routing/route.py
index 54561bf..e6f359f 100644
--- a/botkit/routing/route.py
+++ b/botkit/routing/route.py
@@ -19,7 +19,7 @@
from botkit.routing.pipelines.filters import UpdateFilterSignature
from botkit.routing.pipelines.updates.update_pipeline_factory import UpdatePipelineFactory
from botkit.routing.triggers import ActionIdType, RouteTriggers
-from botkit.routing.update_types.updatetype import UpdateType
+from tgtypes.updatetype import UpdateType
M = TypeVar("M")
diff --git a/botkit/routing/route_builder/expressions/_split_this_up.py b/botkit/routing/route_builder/expressions/_split_this_up.py
index 9d93762..59e7818 100644
--- a/botkit/routing/route_builder/expressions/_split_this_up.py
+++ b/botkit/routing/route_builder/expressions/_split_this_up.py
@@ -29,7 +29,7 @@
from botkit.routing.route_builder.webhook_action_expression import WebhookActionExpressionMixin
from botkit.routing.triggers import RouteTriggers
from botkit.routing.types import TViewState
-from botkit.routing.update_types.updatetype import UpdateType
+from tgtypes.updatetype import UpdateType
from botkit.clients.client import IClient
from botkit.views.base import InlineResultViewBase
diff --git a/botkit/routing/triggers.py b/botkit/routing/triggers.py
index b0efc54..3b261d4 100644
--- a/botkit/routing/triggers.py
+++ b/botkit/routing/triggers.py
@@ -5,7 +5,7 @@
from boltons.iterutils import is_collection
from pyrogram.filters import Filter
-from botkit.routing.update_types.updatetype import UpdateType
+from tgtypes.updatetype import UpdateType
ActionIdType = Union[int, str]
diff --git a/botkit/routing/update_types/update_type_inference.py b/botkit/routing/update_types/update_type_inference.py
index 9f974a6..8db0e96 100644
--- a/botkit/routing/update_types/update_type_inference.py
+++ b/botkit/routing/update_types/update_type_inference.py
@@ -1,7 +1,7 @@
from typing import Set
from botkit.agnostic._pyrogram_update_type_inference import determine_pyrogram_handler_update_types
-from botkit.routing.update_types.updatetype import UpdateType
+from tgtypes.updatetype import UpdateType
from botkit.utils.typed_callable import TypedCallable
diff --git a/botkit/routing/update_types/updatetype.py b/botkit/routing/update_types/updatetype.py
deleted file mode 100644
index 8ca9206..0000000
--- a/botkit/routing/update_types/updatetype.py
+++ /dev/null
@@ -1,25 +0,0 @@
-from enum import Enum, auto
-
-from boltons.typeutils import classproperty
-
-
-class UpdateType(Enum):
- raw = auto()
- message = auto()
- callback_query = auto()
- inline_query = auto()
- poll = auto()
- user_status = auto()
- start_command = auto()
-
- # noinspection PyMethodParameters
- @classproperty
- def all(cls):
- return [
- cls.raw,
- cls.message,
- cls.callback_query,
- cls.inline_query,
- cls.poll,
- cls.user_status,
- ]
diff --git a/botkit/services/backgroundworker.py b/botkit/services/backgroundworker.py
new file mode 100644
index 0000000..187dc25
--- /dev/null
+++ b/botkit/services/backgroundworker.py
@@ -0,0 +1,38 @@
+import asyncio
+from abc import ABC, abstractmethod
+from asyncio.exceptions import CancelledError
+from asyncio.futures import Future
+from typing import Any, NoReturn, Optional, Union
+
+from loguru import logger as log
+
+from botkit.abstractions import IAsyncLoadUnload
+
+
+class BackgroundWorker(IAsyncLoadUnload, ABC):
+ def __init__(self, initial_delay_seconds: Optional[int] = None):
+ self.initial_delay_seconds = initial_delay_seconds
+ self.worker_future: Optional[Future] = None
+ self._has_been_running_before: bool = False
+
+ def get_name(self) -> str:
+ return self.__class__.__name__
+
+ @abstractmethod
+ async def do_work(self):
+ pass
+
+ async def load(self) -> Union[NoReturn, Any]:
+ if self.initial_delay_seconds and not self._has_been_running_before:
+ await asyncio.sleep(self.initial_delay_seconds)
+
+ self.worker_future = asyncio.ensure_future(self.do_work())  # must call it: ensure_future needs a coroutine, not the bound method
+ self._has_been_running_before = True
+ log.debug(f"Background worker {self.get_name()} started.")
+
+ async def unload(self) -> NoReturn:
+ if self.worker_future is None:
+ return  # load() was never called
+ try:
+ self.worker_future.cancel("unload")
+ log.debug(f"Background worker {self.get_name()} has shut down gracefully.")
+ except CancelledError:
+ log.debug(f"Background worker {self.get_name()} has been terminated.")
diff --git a/botkit/services/companionbotservice.py b/botkit/services/companionbotservice.py
index e1578d1..b3102aa 100644
--- a/botkit/services/companionbotservice.py
+++ b/botkit/services/companionbotservice.py
@@ -1,6 +1,7 @@
import traceback
from asyncio import Event
from contextlib import asynccontextmanager
+from pathlib import Path
from typing import AsyncIterator, Generic, Optional, TypeVar, Union, cast
from uuid import uuid4
@@ -41,70 +42,6 @@ def __init__(self, user_client: IClient, bot_client: IClient):
self.user_client = user_client
self.bot_client = bot_client
- async def one_time_inline_results(
- self,
- query: str,
- results_generator: InlineResultGenerator,
- reply_to: Union[int, str] = None,
- silent: bool = False,
- hide_via: bool = False,
- ):
- user_client_id = (await self.user_client.get_me()).id
- bot_username = (await self.bot_client.get_me()).username
-
- query_text = "henlo"
-
- async def answer_inline_query(client: IClient, query: InlineQuery):
- rendered: RenderedMessage = view._default_renderer()
-
- result = InlineQueryResultArticle(
- title="sent via userbot",
- input_message_content=InputTextMessageContent(
- message_text=rendered.text,
- parse_mode=rendered.parse_mode,
- disable_web_page_preview=rendered.disable_web_page_preview,
- ),
- id=query.id,
- reply_markup=rendered.inline_keyboard_markup,
- url=None,
- description=None,
- thumb_url=None,
- )
-
- await self.bot_client.answer_inline_query(query.id, results=[result], cache_time=0)
-
- inline_id_filter = create(lambda _, __, ilq: ilq.query == query_text, "QueryFilter")
-
- group = -99
- dispatcher = self.bot_client.dispatcher
- if group not in dispatcher.groups:
- dispatcher.groups[group] = []
-
- handler = InlineQueryHandler(answer_inline_query, inline_id_filter)
- dispatcher.groups[group].append(handler)
-
- try:
- # Fetch inline results as user
- bot_results: BotResults = await self.user_client.get_inline_bot_results(
- bot_username, query_text
- )
- if not bot_results:
- raise RuntimeError("Could not fetch any inline query results from companionbot.")
-
- # Send result as user
- return await self.user_client.send_inline_bot_result(
- chat_id,
- query_id=bot_results.query_id,
- result_id=bot_results.results[0].id,
- disable_notification=silent,
- reply_to_message_id=reply_to,
- hide_via=hide_via,
- )
- except (AttributeError, TimeoutError):
- log.error("Bot did not respond.")
- finally:
- self.bot_client.remove_handler(handler, group)
-
async def send_rendered_message_via(
self,
chat_id: Union[int, str],
@@ -270,7 +207,7 @@ async def record_message(_, message: Message):
)
):
await self.user_client.forward_messages(
- (await self.bot_client.get_me()).id,
+ self.bot_client.own_user_id,
from_chat_id=user_message.chat.id,
message_ids=[user_message.message_id],
disable_notification=True,
@@ -279,7 +216,7 @@ async def record_message(_, message: Message):
return recorded_msg._recorded
- async def make_photo_known(self, photo: str) -> Photo:
+ async def make_photo_known(self, photo: Path) -> Photo:
recorded_msg = RecordedResponseContainer()
user_id = (await self.user_client.get_me()).id
bot_id = (await self.bot_client.get_me()).id
@@ -291,11 +228,75 @@ async def record_message(_, message: Message):
async with self.add_handler(
MessageHandler(record_message, filters=filters.photo & filters.chat(user_id))
):
- await self.user_client.send_photo(bot_id, photo=photo)
+ await self.user_client.send_photo(bot_id, photo=str(photo))
await recorded_msg.wait()
return recorded_msg._recorded.photo
+ async def one_time_inline_results(
+ self,
+ query: str,
+ results_generator: InlineResultGenerator,
+ reply_to: Union[int, str] = None,
+ silent: bool = False,
+ hide_via: bool = False,
+ ):
+ user_client_id = (await self.user_client.get_me()).id
+ bot_username = (await self.bot_client.get_me()).username
+
+ query_text = "henlo"
+
+ async def answer_inline_query(client: IClient, query: InlineQuery):
+ rendered: RenderedMessage = view._default_renderer()
+
+ result = InlineQueryResultArticle(
+ title="sent via userbot",
+ input_message_content=InputTextMessageContent(
+ message_text=rendered.text,
+ parse_mode=rendered.parse_mode,
+ disable_web_page_preview=rendered.disable_web_page_preview,
+ ),
+ id=query.id,
+ reply_markup=rendered.inline_keyboard_markup,
+ url=None,
+ description=None,
+ thumb_url=None,
+ )
+
+ await self.bot_client.answer_inline_query(query.id, results=[result], cache_time=0)
+
+ inline_id_filter = create(lambda _, __, ilq: ilq.query == query_text, "QueryFilter")
+
+ group = -99
+ dispatcher = self.bot_client.dispatcher
+ if group not in dispatcher.groups:
+ dispatcher.groups[group] = []
+
+ handler = InlineQueryHandler(answer_inline_query, inline_id_filter)
+ dispatcher.groups[group].append(handler)
+
+ try:
+ # Fetch inline results as user
+ bot_results: BotResults = await self.user_client.get_inline_bot_results(
+ bot_username, query_text
+ )
+ if not bot_results:
+ raise RuntimeError("Could not fetch any inline query results from companionbot.")
+
+ # Send result as user
+ return await self.user_client.send_inline_bot_result(
+ chat_id,
+ query_id=bot_results.query_id,
+ result_id=bot_results.results[0].id,
+ disable_notification=silent,
+ reply_to_message_id=reply_to,
+ hide_via=hide_via,
+ )
+ except (AttributeError, TimeoutError):
+ log.error("Bot did not respond.")
+ finally:
+ self.bot_client.remove_handler(handler, group)
+
T = TypeVar("T")
diff --git a/botkit/tghelpers/direct_links.py b/botkit/tghelpers/direct_links.py
index 29fbff2..fc6b587 100644
--- a/botkit/tghelpers/direct_links.py
+++ b/botkit/tghelpers/direct_links.py
@@ -2,9 +2,11 @@
from pyrogram import Client
from pyrogram.raw.types import Channel
-from pyrogram.types import Message, User, Chat
+from pyrogram.types import Message, Chat
from typing import Optional, Union, cast, Dict
+from tgtypes.protocols.user import User
+
_links_cache: Dict[int, str] = {}
@@ -18,11 +20,11 @@ class Platform(IntEnum):
async def direct_link_to_message(
reference: Message, platform: Optional[Platform] = Platform.android
) -> str:
- entity_link = await direct_link(reference._client, reference.chat, platform)
+ entity_link = await direct_link_with_invite(reference._client, reference.chat, platform)
return f"{entity_link}/{reference.message_id}"
-async def direct_link(
+async def direct_link_with_invite(
client: Client,
peer: Union[User, Chat, Channel],
platform: Optional[Platform] = Platform.android,
@@ -44,11 +46,30 @@ async def direct_link(
return invite_link
+def direct_link(
+ peer: Union[User, Chat, Channel], platform: Optional[Platform] = Platform.android,
+) -> str:
+ if getattr(peer, "username", False):
+ return f"https://t.me/{peer.username}"
+
+ if isinstance(peer, User):
+ return direct_link_user(peer, platform)
+
+ peer_id = peer.id
+ if isinstance(peer, Channel):
+ return f"https://t.me/c/{peer_id}"
+ invite_link: str = _links_cache.get(peer_id, None)
+ if invite_link:
+ return invite_link
+ raise ValueError(f"Could not create direct link for peer {peer}.")
+
+
def direct_link_user(user: User, platform: Optional[Platform] = Platform.android):
if user.username:
return f"https://t.me/{user.username}"
if platform == Platform.android:
+ # Also possible: tg://user?id=23122162
return f"tg://openmessage?user_id={user.id}"
elif platform == Platform.ios:
return f"t.me/@{user.id}"
diff --git a/botkit/utils/botkit_logging/chatlogger.py b/botkit/utils/botkit_logging/chatlogger.py
index c3b17e5..9fa42da 100644
--- a/botkit/utils/botkit_logging/chatlogger.py
+++ b/botkit/utils/botkit_logging/chatlogger.py
@@ -4,12 +4,12 @@
from logging import Handler, LogRecord
from typing import Tuple, Union
-from botkit.builders import ViewBuilder
+from botkit.builders.viewbuilder import ViewBuilder
from botkit.core.components import Component
from botkit.agnostic.annotations import IClient
from botkit.routing.route_builder.builder import RouteBuilder
-from botkit.settings import botkit_settings
-from botkit.views.botkit_context import Context
+from botkit import botkit_settings
+from botkit.botkit_context import Context
from botkit.views.functional_views import ViewRenderFuncSignature, render_functional_view
diff --git a/botkit/utils/botkit_logging/setup.py b/botkit/utils/botkit_logging/setup.py
index f07ae68..f0a59aa 100644
--- a/botkit/utils/botkit_logging/setup.py
+++ b/botkit/utils/botkit_logging/setup.py
@@ -5,8 +5,6 @@
from loguru import logger
from loguru._logger import Logger
-from botkit.settings import botkit_settings
-
# def botkit_log_filter(record):
# return (
diff --git a/botkit/utils/strutils.py b/botkit/utils/strutils.py
index 2a6a469..4b09e55 100644
--- a/botkit/utils/strutils.py
+++ b/botkit/utils/strutils.py
@@ -1,3 +1,4 @@
+from typing import Optional
import difflib
@@ -10,3 +11,11 @@ def string_similarity(user_input: str, compare_to: str) -> float:
add = 0.15
return min(1.0, difflib.SequenceMatcher(None, user_input, compare_to).ratio() + add)
+
+
+def is_none_or_whitespace(text: Optional[str]) -> bool:
+ return text is None or text.strip() == ""
+
+
+def is_none_or_empty(text: Optional[str]) -> bool:
+ return text is None or text == ""
diff --git a/botkit/views/functional_views.py b/botkit/views/functional_views.py
index 6ec2006..0ff7d5e 100644
--- a/botkit/views/functional_views.py
+++ b/botkit/views/functional_views.py
@@ -9,10 +9,10 @@
from decorators import FuncDecorator
-from botkit.builders import CallbackBuilder, HtmlBuilder, MenuBuilder, MetaBuilder, ViewBuilder
+from botkit.builders import CallbackBuilder, HtmlBuilder, MenuBuilder, MetaBuilder
+from botkit.builders.viewbuilder import ViewBuilder
from botkit.persistence.callback_store import ICallbackStore
from botkit.views.rendered_messages import RenderedMessage
-from paraminjector import call_with_args
T = TypeVar("T")
diff --git a/botkit/views/views.py b/botkit/views/views.py
index 6b2579d..1605e05 100644
--- a/botkit/views/views.py
+++ b/botkit/views/views.py
@@ -8,7 +8,7 @@
from botkit.builders.menubuilder import MenuBuilder
from botkit.builders.quizbuilder import QuizBuilder
from botkit.persistence.callback_store import ICallbackStore
-from botkit.settings import botkit_settings
+from botkit import botkit_settings
from botkit.utils.typed_callable import TypedCallable
from botkit.views.base import (
IRegisterable,
@@ -40,7 +40,7 @@ def render(self) -> RenderedMessage:
class TextView(MessageViewBase[TViewState], RenderMarkupBase):
- _callback_store: ICallbackStore = Inject(botkit_settings.callback_manager_qualifier)
+ _callback_store: ICallbackStore = Inject(botkit_settings.callback_store_qualifier)
@abstractmethod
def render_body(self, builder: HtmlBuilder) -> None:
@@ -59,7 +59,7 @@ def render(self) -> RenderedTextMessage:
class MediaView(MessageViewBase[TViewState]):
- _callback_store: ICallbackStore = Inject(botkit_settings.callback_manager_qualifier)
+ _callback_store: ICallbackStore = Inject(botkit_settings.callback_store_qualifier)
def __init__(self, state: TViewState):
super().__init__(state)
@@ -152,7 +152,7 @@ def _render_message_markup(obj: Union[ModelViewBase, RenderMarkupBase]) -> Rende
menu_builder = MenuBuilder(
CallbackBuilder(
obj.state,
- Container().get_object(ICallbackStore, botkit_settings.callback_manager_qualifier),
+ Container().get_object(ICallbackStore, botkit_settings.callback_store_qualifier),
)
)
obj.render_markup(menu_builder)
diff --git a/botkit/widgets/_base/__init__.py b/botkit/widgets/_base/__init__.py
index 188d4fb..a7c8163 100644
--- a/botkit/widgets/_base/__init__.py
+++ b/botkit/widgets/_base/__init__.py
@@ -15,8 +15,8 @@ class Widget(Generic[TViewState, TWidgetState], HtmlWidget, MenuWidget, MetaWidg
- no load/unload (nothing async)
## Problems:
- - how to register views? autoregistration?
- - (how) can widgets be used by views?
+ If widgets get associated by doing e.g. `html.add(MyNewWidget())`, they won't be routable anymore.
+ --> how to register views?
"""
def mutate(self):
diff --git a/docs/Injector Library Questions.md b/docs/Injector Library Questions.md
new file mode 100644
index 0000000..f657262
--- /dev/null
+++ b/docs/Injector Library Questions.md
@@ -0,0 +1,70 @@
+#### [Usage Question / Docs] Using @inject on free functions
+
+If I have an inner function inside of a `configure` method, according to the following note it's not ok to use inject
+there... What's the current state of this claim? Can I use it on functions or will that break in the future?
+
+In haps, with its global container, this is absolutely encouraged.
+
+> This decorator is to be used on class constructors (or, as a convenience, on classes).
+> Using it on non-constructor methods worked in the past but it was an implementation
+> detail rather than a design decision.
+> Third party libraries may, however, provide support for injecting dependencies
+> into non-constructor methods or free functions in one form or another.
+>
+----
+
+#### Is type inspection expensive?
+
+----
+
+I was wondering if it'd make sense to cache the results of type inspection.
+
+----
+
+#### Hear your thoughts on FastAPI
+
+FastAPI took a slightly different approach in
+
+
+
+#### Python modules as injection modules
+
+One of the selling points of the [dpy Library](https://github.com/search?l=Python&q=lala&type=Repositories) is that
+plain old "Python modules serve as our injection modules". Is the same possible with injector?
+> Of course, you can also setup injectables behind conditionals if you like.
+>
+> Modules may import their own dependencies or you might prefer to defer importing all your dependencies in a "main"
+> module (or other organization). As long as all the dependencies are established at runtime, there's no problem.
+
+I like this approach a lot to be able to pick whether a whole python module, or a specific type should be used as
+the container.
+
+----
+
+#### [Bug] - Provide a better error message when the method signature is invalid
+
+Traceback (most recent call last):
+File "C:/projects/josxabot/app/clients/__init__.py", line 49, in
+inj.get(JosXaBotClient)
+File "C:\git\injector\injector\__init__.py", line 963, in get result = scope_instance.get(interface, binding.provider)
+.get(self)
+File "C:\git\injector\injector\__init__.py", line 291, in get return injector.create_object(self._cls)
+File "C:\git\injector\injector\__init__.py", line 990, in create_object self.call_with_injection(cls.__init__, self_
+=instance, kwargs=additional_kwargs)
+File "C:\git\injector\injector\__init__.py", line 1021, in call_with_injection dependencies = self.args_to_inject(
+File "C:\git\injector\injector\__init__.py", line 111, in wrapper return function(*args, **kwargs)
+File "C:\git\injector\injector\__init__.py", line 1069, in args_to_inject instance = self.get(interface) # type: Any
+File "C:\git\injector\injector\__init__.py", line 963, in get result = scope_instance.get(interface, binding.provider)
+.get(self)
+File "C:\git\injector\injector\__init__.py", line 329, in get return injector.call_with_injection(self._callable)
+File "C:\git\injector\injector\__init__.py", line 1011, in call_with_injection signature = inspect.signature(callable)
+File "c:\program files\python38\lib\inspect.py", line 3093, in signature return Signature.from_callable(obj,
+follow_wrapped=follow_wrapped)
+File "c:\program files\python38\lib\inspect.py", line 2842, in from_callable return _signature_from_callable(obj,
+sigcls=cls, File "c:\program files\python38\lib\inspect.py", line 2228, in _signature_from_callable return _
+signature_bound_method(sig)
+File "c:\program files\python38\lib\inspect.py", line 1808, in _signature_bound_method raise ValueError('invalid method
+signature')
+ValueError: invalid method signature
+
+(I was missing a `self` and PyCharm didn't catch it)
diff --git a/poetry.lock b/poetry.lock
index b6c70c9..ba4d413 100644
--- a/poetry.lock
+++ b/poetry.lock
@@ -1,3 +1,11 @@
+[[package]]
+name = "aiofiles"
+version = "0.6.0"
+description = "File support for asyncio."
+category = "main"
+optional = false
+python-versions = "*"
+
[[package]]
name = "appdirs"
version = "1.4.4"
@@ -217,7 +225,7 @@ python-versions = "*"
[[package]]
name = "docker"
-version = "4.3.1"
+version = "4.4.0"
description = "A Python library for the Docker Engine API."
category = "dev"
optional = false
@@ -299,6 +307,11 @@ category = "main"
optional = false
python-versions = "*"
+[package.source]
+url = "https://github.com/JosXa/haps"
+reference = "150b4424cd21af6226e3a8aa66b876f5be2fce28"
+type = "git"
+
[[package]]
name = "humanize"
version = "3.1.0"
@@ -315,7 +328,7 @@ setuptools = "*"
[[package]]
name = "identify"
-version = "1.5.9"
+version = "1.5.10"
description = "File identification library for Python"
category = "dev"
optional = false
@@ -340,6 +353,19 @@ category = "dev"
optional = false
python-versions = "*"
+[[package]]
+name = "injector"
+version = "0.18.4"
+description = "Injector - Python dependency injection framework, inspired by Guice"
+category = "main"
+optional = false
+python-versions = "*"
+
+[package.dependencies]
+[package.dependencies.typing-extensions]
+version = ">=3.7.4"
+python = "<3.9"
+
[[package]]
name = "itsdangerous"
version = "1.1.0"
@@ -362,6 +388,17 @@ i18n = ["Babel (>=0.8)"]
[package.dependencies]
MarkupSafe = ">=0.23"
+[[package]]
+name = "lambdas"
+version = "0.1.0"
+description = "Typed lambdas that are short and readable"
+category = "main"
+optional = false
+python-versions = ">=3.6,<4.0"
+
+[package.dependencies]
+typing-extensions = ">=3.7,<4.0"
+
[[package]]
name = "littleutils"
version = "0.2.2"
@@ -477,7 +514,7 @@ requests = "*"
[[package]]
name = "packaging"
-version = "20.4"
+version = "20.7"
description = "Core utilities for Python packages"
category = "dev"
optional = false
@@ -485,7 +522,6 @@ python-versions = ">=2.7, !=3.0.*, !=3.1.*, !=3.2.*, !=3.3.*"
[package.dependencies]
pyparsing = ">=2.0.2"
-six = "*"
[[package]]
name = "paraminjector"
@@ -520,7 +556,7 @@ dev = ["pre-commit", "tox"]
[[package]]
name = "pre-commit"
-version = "2.8.2"
+version = "2.9.2"
description = "A framework for managing and maintaining multi-language pre-commit hooks."
category = "dev"
optional = false
@@ -560,7 +596,7 @@ python-versions = "*"
[[package]]
name = "pydantic"
-version = "1.7.2"
+version = "1.7.3"
description = "Data validation and settings management using python 3.6 type hinting"
category = "main"
optional = false
@@ -740,7 +776,7 @@ redis = ">=3.1.0,<4.0.0"
[[package]]
name = "regex"
-version = "2020.10.28"
+version = "2020.11.13"
description = "Alternative regular expression module, to replace re."
category = "dev"
optional = false
@@ -749,7 +785,7 @@ marker = "platform_python_implementation == \"CPython\""
[[package]]
name = "requests"
-version = "2.24.0"
+version = "2.25.0"
description = "Python HTTP for Humans."
category = "dev"
optional = false
@@ -763,7 +799,18 @@ socks = ["PySocks (>=1.5.6,<1.5.7 || >1.5.7)", "win-inet-pton"]
certifi = ">=2017.4.17"
chardet = ">=3.0.2,<4"
idna = ">=2.5,<3"
-urllib3 = ">=1.21.1,<1.25.0 || >1.25.0,<1.25.1 || >1.25.1,<1.26"
+urllib3 = ">=1.21.1,<1.27"
+
+[[package]]
+name = "returns"
+version = "0.15.0"
+description = "Make your functions return something meaningful, typed, and safe!"
+category = "main"
+optional = false
+python-versions = ">=3.7,<4.0"
+
+[package.dependencies]
+typing-extensions = ">=3.7,<4.0"
[[package]]
name = "rsa"
@@ -806,7 +853,7 @@ pymysql = ["pymysql"]
[[package]]
name = "telethon"
-version = "1.17.5"
+version = "1.18.2"
description = "Full-featured Telegram client library for Python 3"
category = "dev"
optional = false
@@ -893,7 +940,7 @@ python-versions = "*"
[[package]]
name = "urllib3"
-version = "1.25.11"
+version = "1.26.2"
description = "HTTP library with thread-safe connection pooling, file post, and more."
category = "dev"
optional = false
@@ -906,7 +953,7 @@ socks = ["PySocks (>=1.5.6,<1.5.7 || >1.5.7,<2.0)"]
[[package]]
name = "virtualenv"
-version = "20.1.0"
+version = "20.2.1"
description = "Virtual Python Environment builder"
category = "dev"
optional = false
@@ -971,9 +1018,13 @@ hmr = ["watchgod"]
[metadata]
python-versions = "^3.8"
-content-hash = "20c75cef3a619661a8389936986dfe547fd43203ac294c261a6c71890f40c974"
+content-hash = "e2ee1f1d607df168564e2b641dcdf7a5c99289b5658e8053c99244ce47aeddae"
[metadata.files]
+aiofiles = [
+ {file = "aiofiles-0.6.0-py3-none-any.whl", hash = "sha256:bd3019af67f83b739f8e4053c6c0512a7f545b9a8d91aaeab55e6e0f9d123c27"},
+ {file = "aiofiles-0.6.0.tar.gz", hash = "sha256:e0281b157d3d5d59d803e3f4557dcc9a3dff28a4dd4829a9ff478adae50ca092"},
+]
appdirs = [
{file = "appdirs-1.4.4-py2.py3-none-any.whl", hash = "sha256:a841dacd6b99318a741b166adb07e19ee71a274450e68237b4650ca1055ab128"},
{file = "appdirs-1.4.4.tar.gz", hash = "sha256:7d5d0167b2b1ba821647616af46a749d1c653740dd0d2415100fe26e27afdf41"},
@@ -1082,8 +1133,8 @@ distlib = [
{file = "distlib-0.3.1.zip", hash = "sha256:edf6116872c863e1aa9d5bb7cb5e05a022c519a4594dc703843343a9ddd9bff1"},
]
docker = [
- {file = "docker-4.3.1-py2.py3-none-any.whl", hash = "sha256:13966471e8bc23b36bfb3a6fb4ab75043a5ef1dac86516274777576bed3b9828"},
- {file = "docker-4.3.1.tar.gz", hash = "sha256:bad94b8dd001a8a4af19ce4becc17f41b09f228173ffe6a4e0355389eef142f2"},
+ {file = "docker-4.4.0-py2.py3-none-any.whl", hash = "sha256:317e95a48c32de8c1aac92a48066a5b73e218ed096e03758bcdd799a7130a1a1"},
+ {file = "docker-4.4.0.tar.gz", hash = "sha256:cffc771d4ea1389fc66bc95cb72d304aa41d1a1563482a9a000fba3a84ed5071"},
]
ensure = [
{file = "ensure-1.0.0-py2.py3-none-any.whl", hash = "sha256:e8ee163a949a7b5ba3f0e2c6b1d20fe603864bb4ae034a219047547989c59ea3"},
@@ -1104,17 +1155,14 @@ flask-humanize = [
future = [
{file = "future-0.18.2.tar.gz", hash = "sha256:b1bead90b70cf6ec3f0710ae53a525360fa360d306a86583adc6bf83a4db537d"},
]
-haps = [
- {file = "haps-1.1.2-py3-none-any.whl", hash = "sha256:2f67f4fa4c272c1f11560a6de9d5ddd3662e1598c10e20048a10a8424eff2ccf"},
- {file = "haps-1.1.2.tar.gz", hash = "sha256:203f25a81d6b4c544a8dc5dbd03b14510a96bd129f961cbe3452caf938eb33cd"},
-]
+haps = []
humanize = [
{file = "humanize-3.1.0-py3-none-any.whl", hash = "sha256:6790d9ba139ce09761ae901be9b22bd32a131fa65ecc82cdfc4d86f377f7395d"},
{file = "humanize-3.1.0.tar.gz", hash = "sha256:fd3eb915310335c63a54d4507289ecc7b3a7454cd2c22ac5086d061a3cbfd592"},
]
identify = [
- {file = "identify-1.5.9-py2.py3-none-any.whl", hash = "sha256:5dd84ac64a9a115b8e0b27d1756b244b882ad264c3c423f42af8235a6e71ca12"},
- {file = "identify-1.5.9.tar.gz", hash = "sha256:c9504ba6a043ee2db0a9d69e43246bc138034895f6338d5aed1b41e4a73b1513"},
+ {file = "identify-1.5.10-py2.py3-none-any.whl", hash = "sha256:cc86e6a9a390879dcc2976cef169dd9cc48843ed70b7380f321d1b118163c60e"},
+ {file = "identify-1.5.10.tar.gz", hash = "sha256:943cd299ac7f5715fcb3f684e2fc1594c1e0f22a90d15398e5888143bd4144b5"},
]
idna = [
{file = "idna-2.10-py2.py3-none-any.whl", hash = "sha256:b97d804b1e9b523befed77c48dacec60e6dcb0b5391d57af6a65a312a90648c0"},
@@ -1123,6 +1171,10 @@ idna = [
iniconfig = [
{file = "iniconfig-1.1.1.tar.gz", hash = "sha256:bc3af051d7d14b2ee5ef9969666def0cd1a000e121eaea580d4a313df4b37f32"},
]
+injector = [
+ {file = "injector-0.18.4-py2.py3-none-any.whl", hash = "sha256:9e6e7e63050630b2fc3fe1472bdc7ae5f98ca0242ca8506f562f3d9836170f91"},
+ {file = "injector-0.18.4.tar.gz", hash = "sha256:ced88ee14183b9f95b2cb6cdb17bf7382499fad187dee0dace6891874ae4b182"},
+]
itsdangerous = [
{file = "itsdangerous-1.1.0-py2.py3-none-any.whl", hash = "sha256:b12271b2047cb23eeb98c8b5622e2e5c5e9abd9784a153e9d8ef9cb4dd09d749"},
{file = "itsdangerous-1.1.0.tar.gz", hash = "sha256:321b033d07f2a4136d3ec762eac9f16a10ccd60f53c0c91af90217ace7ba1f19"},
@@ -1131,6 +1183,10 @@ jinja2 = [
{file = "Jinja2-2.11.2-py2.py3-none-any.whl", hash = "sha256:f0a4641d3cf955324a89c04f3d94663aa4d638abe8f733ecd3582848e1c37035"},
{file = "Jinja2-2.11.2.tar.gz", hash = "sha256:89aab215427ef59c34ad58735269eb58b1a5808103067f7bb9d5836c651b3bb0"},
]
+lambdas = [
+ {file = "lambdas-0.1.0-py3-none-any.whl", hash = "sha256:b197d7cf464b9733944cf9b9cf73d05d934b19eaf0c32fc04a9e7dd25b5ce321"},
+ {file = "lambdas-0.1.0.tar.gz", hash = "sha256:a595cfbcea23bf7295a821563044785d0dc843b39d21ccaee5c3a79afe79e140"},
+]
littleutils = [
{file = "littleutils-0.2.2.tar.gz", hash = "sha256:e6cae3a4203e530d51c9667ed310ffe3b1948f2876e3d69605b3de4b7d96916f"},
]
@@ -1215,8 +1271,8 @@ outdated = [
{file = "outdated-0.2.0.tar.gz", hash = "sha256:bcb145e0e372ba467e998c327d3d1ba72a134b0d5a729749729df6c6244ce643"},
]
packaging = [
- {file = "packaging-20.4-py2.py3-none-any.whl", hash = "sha256:998416ba6962ae7fbd6596850b80e17859a5753ba17c32284f67bfff33784181"},
- {file = "packaging-20.4.tar.gz", hash = "sha256:4357f74f47b9c12db93624a82154e9b120fa8293699949152b22065d556079f8"},
+ {file = "packaging-20.7-py2.py3-none-any.whl", hash = "sha256:eb41423378682dadb7166144a4926e443093863024de508ca5c9737d6bc08376"},
+ {file = "packaging-20.7.tar.gz", hash = "sha256:05af3bb85d320377db281cf254ab050e1a7ebcbf5410685a9a407e18a1f81236"},
]
paraminjector = [
{file = "paraminjector-0.1.1-py3-none-any.whl", hash = "sha256:ff031ddcde74a1eb2e92e7f6af7d68b743967fafb833c509923ee8e0d62a5e4c"},
@@ -1231,8 +1287,8 @@ pluggy = [
{file = "pluggy-0.13.1.tar.gz", hash = "sha256:15b2acde666561e1298d71b523007ed7364de07029219b604cf808bfa1c765b0"},
]
pre-commit = [
- {file = "pre_commit-2.8.2-py2.py3-none-any.whl", hash = "sha256:22e6aa3bd571debb01eb7d34483f11c01b65237be4eebbf30c3d4fb65762d315"},
- {file = "pre_commit-2.8.2.tar.gz", hash = "sha256:905ebc9b534b991baec87e934431f2d0606ba27f2b90f7f652985f5a5b8b6ae6"},
+ {file = "pre_commit-2.9.2-py2.py3-none-any.whl", hash = "sha256:949b13efb7467ae27e2c8f9e83434dacf2682595124d8902554a4e18351e5781"},
+ {file = "pre_commit-2.9.2.tar.gz", hash = "sha256:e31c04bc23741194a7c0b983fe512801e151a0638c6001c49f2bd034f8a664a1"},
]
py = [
{file = "py-1.9.0-py2.py3-none-any.whl", hash = "sha256:366389d1db726cd2fcfc79732e75410e5fe4d31db13692115529d34069a043c2"},
@@ -1257,28 +1313,28 @@ pyasn1 = [
{file = "pyasn1-0.4.8.tar.gz", hash = "sha256:aef77c9fb94a3ac588e87841208bdec464471d9871bd5050a287cc9a475cd0ba"},
]
pydantic = [
- {file = "pydantic-1.7.2-cp36-cp36m-macosx_10_9_x86_64.whl", hash = "sha256:dfaa6ed1d509b5aef4142084206584280bb6e9014f01df931ec6febdad5b200a"},
- {file = "pydantic-1.7.2-cp36-cp36m-manylinux1_i686.whl", hash = "sha256:2182ba2a9290964b278bcc07a8d24207de709125d520efec9ad6fa6f92ee058d"},
- {file = "pydantic-1.7.2-cp36-cp36m-manylinux2014_i686.whl", hash = "sha256:0fe8b45d31ae53d74a6aa0bf801587bd49970070eac6a6326f9fa2a302703b8a"},
- {file = "pydantic-1.7.2-cp36-cp36m-manylinux2014_x86_64.whl", hash = "sha256:01f0291f4951580f320f7ae3f2ecaf0044cdebcc9b45c5f882a7e84453362420"},
- {file = "pydantic-1.7.2-cp36-cp36m-win_amd64.whl", hash = "sha256:4ba6b903e1b7bd3eb5df0e78d7364b7e831ed8b4cd781ebc3c4f1077fbcb72a4"},
- {file = "pydantic-1.7.2-cp37-cp37m-macosx_10_9_x86_64.whl", hash = "sha256:b11fc9530bf0698c8014b2bdb3bbc50243e82a7fa2577c8cfba660bcc819e768"},
- {file = "pydantic-1.7.2-cp37-cp37m-manylinux1_i686.whl", hash = "sha256:a3c274c49930dc047a75ecc865e435f3df89715c775db75ddb0186804d9b04d0"},
- {file = "pydantic-1.7.2-cp37-cp37m-manylinux2014_i686.whl", hash = "sha256:c68b5edf4da53c98bb1ccb556ae8f655575cb2e676aef066c12b08c724a3f1a1"},
- {file = "pydantic-1.7.2-cp37-cp37m-manylinux2014_x86_64.whl", hash = "sha256:95d4410c4e429480c736bba0db6cce5aaa311304aea685ebcf9ee47571bfd7c8"},
- {file = "pydantic-1.7.2-cp37-cp37m-win_amd64.whl", hash = "sha256:a2fc7bf77ed4a7a961d7684afe177ff59971828141e608f142e4af858e07dddc"},
- {file = "pydantic-1.7.2-cp38-cp38-macosx_10_9_x86_64.whl", hash = "sha256:b9572c0db13c8658b4a4cb705dcaae6983aeb9842248b36761b3fbc9010b740f"},
- {file = "pydantic-1.7.2-cp38-cp38-manylinux1_i686.whl", hash = "sha256:f83f679e727742b0c465e7ef992d6da4a7e5268b8edd8fdaf5303276374bef52"},
- {file = "pydantic-1.7.2-cp38-cp38-manylinux2014_i686.whl", hash = "sha256:e5fece30e80087d9b7986104e2ac150647ec1658c4789c89893b03b100ca3164"},
- {file = "pydantic-1.7.2-cp38-cp38-manylinux2014_x86_64.whl", hash = "sha256:ce2d452961352ba229fe1e0b925b41c0c37128f08dddb788d0fd73fd87ea0f66"},
- {file = "pydantic-1.7.2-cp38-cp38-win_amd64.whl", hash = "sha256:fc21a37ff3f545de80b166e1735c4172b41b017948a3fb2d5e2f03c219eac50a"},
- {file = "pydantic-1.7.2-cp39-cp39-macosx_10_9_x86_64.whl", hash = "sha256:c9760d1556ec59ff745f88269a8f357e2b7afc75c556b3a87b8dda5bc62da8ba"},
- {file = "pydantic-1.7.2-cp39-cp39-manylinux1_i686.whl", hash = "sha256:2c1673633ad1eea78b1c5c420a47cd48717d2ef214c8230d96ca2591e9e00958"},
- {file = "pydantic-1.7.2-cp39-cp39-manylinux2014_i686.whl", hash = "sha256:388c0c26c574ff49bad7d0fd6ed82fbccd86a0473fa3900397d3354c533d6ebb"},
- {file = "pydantic-1.7.2-cp39-cp39-manylinux2014_x86_64.whl", hash = "sha256:ab1d5e4d8de00575957e1c982b951bffaedd3204ddd24694e3baca3332e53a23"},
- {file = "pydantic-1.7.2-cp39-cp39-win_amd64.whl", hash = "sha256:f045cf7afb3352a03bc6cb993578a34560ac24c5d004fa33c76efec6ada1361a"},
- {file = "pydantic-1.7.2-py3-none-any.whl", hash = "sha256:6665f7ab7fbbf4d3c1040925ff4d42d7549a8c15fe041164adfe4fc2134d4cce"},
- {file = "pydantic-1.7.2.tar.gz", hash = "sha256:c8200aecbd1fb914e1bd061d71a4d1d79ecb553165296af0c14989b89e90d09b"},
+ {file = "pydantic-1.7.3-cp36-cp36m-macosx_10_9_x86_64.whl", hash = "sha256:c59ea046aea25be14dc22d69c97bee629e6d48d2b2ecb724d7fe8806bf5f61cd"},
+ {file = "pydantic-1.7.3-cp36-cp36m-manylinux1_i686.whl", hash = "sha256:a4143c8d0c456a093387b96e0f5ee941a950992904d88bc816b4f0e72c9a0009"},
+ {file = "pydantic-1.7.3-cp36-cp36m-manylinux2014_i686.whl", hash = "sha256:d8df4b9090b595511906fa48deda47af04e7d092318bfb291f4d45dfb6bb2127"},
+ {file = "pydantic-1.7.3-cp36-cp36m-manylinux2014_x86_64.whl", hash = "sha256:514b473d264671a5c672dfb28bdfe1bf1afd390f6b206aa2ec9fed7fc592c48e"},
+ {file = "pydantic-1.7.3-cp36-cp36m-win_amd64.whl", hash = "sha256:dba5c1f0a3aeea5083e75db9660935da90216f8a81b6d68e67f54e135ed5eb23"},
+ {file = "pydantic-1.7.3-cp37-cp37m-macosx_10_9_x86_64.whl", hash = "sha256:59e45f3b694b05a69032a0d603c32d453a23f0de80844fb14d55ab0c6c78ff2f"},
+ {file = "pydantic-1.7.3-cp37-cp37m-manylinux1_i686.whl", hash = "sha256:5b24e8a572e4b4c18f614004dda8c9f2c07328cb5b6e314d6e1bbd536cb1a6c1"},
+ {file = "pydantic-1.7.3-cp37-cp37m-manylinux2014_i686.whl", hash = "sha256:b2b054d095b6431cdda2f852a6d2f0fdec77686b305c57961b4c5dd6d863bf3c"},
+ {file = "pydantic-1.7.3-cp37-cp37m-manylinux2014_x86_64.whl", hash = "sha256:025bf13ce27990acc059d0c5be46f416fc9b293f45363b3d19855165fee1874f"},
+ {file = "pydantic-1.7.3-cp37-cp37m-win_amd64.whl", hash = "sha256:6e3874aa7e8babd37b40c4504e3a94cc2023696ced5a0500949f3347664ff8e2"},
+ {file = "pydantic-1.7.3-cp38-cp38-macosx_10_9_x86_64.whl", hash = "sha256:e682f6442ebe4e50cb5e1cfde7dda6766fb586631c3e5569f6aa1951fd1a76ef"},
+ {file = "pydantic-1.7.3-cp38-cp38-manylinux1_i686.whl", hash = "sha256:185e18134bec5ef43351149fe34fda4758e53d05bb8ea4d5928f0720997b79ef"},
+ {file = "pydantic-1.7.3-cp38-cp38-manylinux2014_i686.whl", hash = "sha256:f5b06f5099e163295b8ff5b1b71132ecf5866cc6e7f586d78d7d3fd6e8084608"},
+ {file = "pydantic-1.7.3-cp38-cp38-manylinux2014_x86_64.whl", hash = "sha256:24ca47365be2a5a3cc3f4a26dcc755bcdc9f0036f55dcedbd55663662ba145ec"},
+ {file = "pydantic-1.7.3-cp38-cp38-win_amd64.whl", hash = "sha256:d1fe3f0df8ac0f3a9792666c69a7cd70530f329036426d06b4f899c025aca74e"},
+ {file = "pydantic-1.7.3-cp39-cp39-macosx_10_9_x86_64.whl", hash = "sha256:f6864844b039805add62ebe8a8c676286340ba0c6d043ae5dea24114b82a319e"},
+ {file = "pydantic-1.7.3-cp39-cp39-manylinux1_i686.whl", hash = "sha256:ecb54491f98544c12c66ff3d15e701612fc388161fd455242447083350904730"},
+ {file = "pydantic-1.7.3-cp39-cp39-manylinux2014_i686.whl", hash = "sha256:ffd180ebd5dd2a9ac0da4e8b995c9c99e7c74c31f985ba090ee01d681b1c4b95"},
+ {file = "pydantic-1.7.3-cp39-cp39-manylinux2014_x86_64.whl", hash = "sha256:8d72e814c7821125b16f1553124d12faba88e85405b0864328899aceaad7282b"},
+ {file = "pydantic-1.7.3-cp39-cp39-win_amd64.whl", hash = "sha256:475f2fa134cf272d6631072554f845d0630907fce053926ff634cc6bc45bf1af"},
+ {file = "pydantic-1.7.3-py3-none-any.whl", hash = "sha256:38be427ea01a78206bcaf9a56f835784afcba9e5b88fbdce33bbbfbcd7841229"},
+ {file = "pydantic-1.7.3.tar.gz", hash = "sha256:213125b7e9e64713d16d988d10997dabc6a1f73f3991e1ff8e35ebb1409c7dc9"},
]
pygments = [
{file = "Pygments-2.7.2-py3-none-any.whl", hash = "sha256:88a0bbcd659fcb9573703957c6b9cff9fab7295e6e76db54c9d00ae42df32773"},
@@ -1359,37 +1415,55 @@ redis-collections = [
{file = "redis-collections-0.8.1.tar.gz", hash = "sha256:b0c1213b57ed2d5a351dcec05826ce42de9bae88f74c12f2917aa7523f94269e"},
]
regex = [
- {file = "regex-2020.10.28-cp27-cp27m-win32.whl", hash = "sha256:4b5a9bcb56cc146c3932c648603b24514447eafa6ce9295234767bf92f69b504"},
- {file = "regex-2020.10.28-cp27-cp27m-win_amd64.whl", hash = "sha256:c13d311a4c4a8d671f5860317eb5f09591fbe8259676b86a85769423b544451e"},
- {file = "regex-2020.10.28-cp36-cp36m-manylinux1_i686.whl", hash = "sha256:c8a2b7ccff330ae4c460aff36626f911f918555660cc28163417cb84ffb25789"},
- {file = "regex-2020.10.28-cp36-cp36m-manylinux1_x86_64.whl", hash = "sha256:4afa350f162551cf402bfa3cd8302165c8e03e689c897d185f16a167328cc6dd"},
- {file = "regex-2020.10.28-cp36-cp36m-manylinux2010_i686.whl", hash = "sha256:b88fa3b8a3469f22b4f13d045d9bd3eda797aa4e406fde0a2644bc92bbdd4bdd"},
- {file = "regex-2020.10.28-cp36-cp36m-manylinux2010_x86_64.whl", hash = "sha256:f43109822df2d3faac7aad79613f5f02e4eab0fc8ad7932d2e70e2a83bd49c26"},
- {file = "regex-2020.10.28-cp36-cp36m-win32.whl", hash = "sha256:8092a5a06ad9a7a247f2a76ace121183dc4e1a84c259cf9c2ce3bbb69fac3582"},
- {file = "regex-2020.10.28-cp36-cp36m-win_amd64.whl", hash = "sha256:49461446b783945597c4076aea3f49aee4b4ce922bd241e4fcf62a3e7c61794c"},
- {file = "regex-2020.10.28-cp37-cp37m-manylinux1_i686.whl", hash = "sha256:8ca9dca965bd86ea3631b975d63b0693566d3cc347e55786d5514988b6f5b84c"},
- {file = "regex-2020.10.28-cp37-cp37m-manylinux1_x86_64.whl", hash = "sha256:ea37320877d56a7f0a1e6a625d892cf963aa7f570013499f5b8d5ab8402b5625"},
- {file = "regex-2020.10.28-cp37-cp37m-manylinux2010_i686.whl", hash = "sha256:3a5f08039eee9ea195a89e180c5762bfb55258bfb9abb61a20d3abee3b37fd12"},
- {file = "regex-2020.10.28-cp37-cp37m-manylinux2010_x86_64.whl", hash = "sha256:cb905f3d2e290a8b8f1579d3984f2cfa7c3a29cc7cba608540ceeed18513f520"},
- {file = "regex-2020.10.28-cp37-cp37m-win32.whl", hash = "sha256:a62162be05edf64f819925ea88d09d18b09bebf20971b363ce0c24e8b4aa14c0"},
- {file = "regex-2020.10.28-cp37-cp37m-win_amd64.whl", hash = "sha256:03855ee22980c3e4863dc84c42d6d2901133362db5daf4c36b710dd895d78f0a"},
- {file = "regex-2020.10.28-cp38-cp38-manylinux1_i686.whl", hash = "sha256:625116aca6c4b57c56ea3d70369cacc4d62fead4930f8329d242e4fe7a58ce4b"},
- {file = "regex-2020.10.28-cp38-cp38-manylinux1_x86_64.whl", hash = "sha256:2dc522e25e57e88b4980d2bdd334825dbf6fa55f28a922fc3bfa60cc09e5ef53"},
- {file = "regex-2020.10.28-cp38-cp38-manylinux2010_i686.whl", hash = "sha256:119e0355dbdd4cf593b17f2fc5dbd4aec2b8899d0057e4957ba92f941f704bf5"},
- {file = "regex-2020.10.28-cp38-cp38-manylinux2010_x86_64.whl", hash = "sha256:cfcf28ed4ce9ced47b9b9670a4f0d3d3c0e4d4779ad4dadb1ad468b097f808aa"},
- {file = "regex-2020.10.28-cp38-cp38-win32.whl", hash = "sha256:06b52815d4ad38d6524666e0d50fe9173533c9cc145a5779b89733284e6f688f"},
- {file = "regex-2020.10.28-cp38-cp38-win_amd64.whl", hash = "sha256:c3466a84fce42c2016113101018a9981804097bacbab029c2d5b4fcb224b89de"},
- {file = "regex-2020.10.28-cp39-cp39-manylinux1_i686.whl", hash = "sha256:c2c6c56ee97485a127555c9595c069201b5161de9d05495fbe2132b5ac104786"},
- {file = "regex-2020.10.28-cp39-cp39-manylinux1_x86_64.whl", hash = "sha256:1ec66700a10e3c75f1f92cbde36cca0d3aaee4c73dfa26699495a3a30b09093c"},
- {file = "regex-2020.10.28-cp39-cp39-manylinux2010_i686.whl", hash = "sha256:11116d424734fe356d8777f89d625f0df783251ada95d6261b4c36ad27a394bb"},
- {file = "regex-2020.10.28-cp39-cp39-manylinux2010_x86_64.whl", hash = "sha256:f1fce1e4929157b2afeb4bb7069204d4370bab9f4fc03ca1fbec8bd601f8c87d"},
- {file = "regex-2020.10.28-cp39-cp39-win32.whl", hash = "sha256:832339223b9ce56b7b15168e691ae654d345ac1635eeb367ade9ecfe0e66bee0"},
- {file = "regex-2020.10.28-cp39-cp39-win_amd64.whl", hash = "sha256:654c1635f2313d0843028487db2191530bca45af61ca85d0b16555c399625b0e"},
- {file = "regex-2020.10.28.tar.gz", hash = "sha256:dd3e6547ecf842a29cf25123fbf8d2461c53c8d37aa20d87ecee130c89b7079b"},
+ {file = "regex-2020.11.13-cp36-cp36m-macosx_10_9_x86_64.whl", hash = "sha256:8b882a78c320478b12ff024e81dc7d43c1462aa4a3341c754ee65d857a521f85"},
+ {file = "regex-2020.11.13-cp36-cp36m-manylinux1_i686.whl", hash = "sha256:a63f1a07932c9686d2d416fb295ec2c01ab246e89b4d58e5fa468089cab44b70"},
+ {file = "regex-2020.11.13-cp36-cp36m-manylinux1_x86_64.whl", hash = "sha256:6e4b08c6f8daca7d8f07c8d24e4331ae7953333dbd09c648ed6ebd24db5a10ee"},
+ {file = "regex-2020.11.13-cp36-cp36m-manylinux2010_i686.whl", hash = "sha256:bba349276b126947b014e50ab3316c027cac1495992f10e5682dc677b3dfa0c5"},
+ {file = "regex-2020.11.13-cp36-cp36m-manylinux2010_x86_64.whl", hash = "sha256:56e01daca75eae420bce184edd8bb341c8eebb19dd3bce7266332258f9fb9dd7"},
+ {file = "regex-2020.11.13-cp36-cp36m-manylinux2014_aarch64.whl", hash = "sha256:6a8ce43923c518c24a2579fda49f093f1397dad5d18346211e46f134fc624e31"},
+ {file = "regex-2020.11.13-cp36-cp36m-manylinux2014_i686.whl", hash = "sha256:1ab79fcb02b930de09c76d024d279686ec5d532eb814fd0ed1e0051eb8bd2daa"},
+ {file = "regex-2020.11.13-cp36-cp36m-manylinux2014_x86_64.whl", hash = "sha256:9801c4c1d9ae6a70aeb2128e5b4b68c45d4f0af0d1535500884d644fa9b768c6"},
+ {file = "regex-2020.11.13-cp36-cp36m-win32.whl", hash = "sha256:49cae022fa13f09be91b2c880e58e14b6da5d10639ed45ca69b85faf039f7a4e"},
+ {file = "regex-2020.11.13-cp36-cp36m-win_amd64.whl", hash = "sha256:749078d1eb89484db5f34b4012092ad14b327944ee7f1c4f74d6279a6e4d1884"},
+ {file = "regex-2020.11.13-cp37-cp37m-macosx_10_9_x86_64.whl", hash = "sha256:b2f4007bff007c96a173e24dcda236e5e83bde4358a557f9ccf5e014439eae4b"},
+ {file = "regex-2020.11.13-cp37-cp37m-manylinux1_i686.whl", hash = "sha256:38c8fd190db64f513fe4e1baa59fed086ae71fa45083b6936b52d34df8f86a88"},
+ {file = "regex-2020.11.13-cp37-cp37m-manylinux1_x86_64.whl", hash = "sha256:5862975b45d451b6db51c2e654990c1820523a5b07100fc6903e9c86575202a0"},
+ {file = "regex-2020.11.13-cp37-cp37m-manylinux2010_i686.whl", hash = "sha256:262c6825b309e6485ec2493ffc7e62a13cf13fb2a8b6d212f72bd53ad34118f1"},
+ {file = "regex-2020.11.13-cp37-cp37m-manylinux2010_x86_64.whl", hash = "sha256:bafb01b4688833e099d79e7efd23f99172f501a15c44f21ea2118681473fdba0"},
+ {file = "regex-2020.11.13-cp37-cp37m-manylinux2014_aarch64.whl", hash = "sha256:e32f5f3d1b1c663af7f9c4c1e72e6ffe9a78c03a31e149259f531e0fed826512"},
+ {file = "regex-2020.11.13-cp37-cp37m-manylinux2014_i686.whl", hash = "sha256:3bddc701bdd1efa0d5264d2649588cbfda549b2899dc8d50417e47a82e1387ba"},
+ {file = "regex-2020.11.13-cp37-cp37m-manylinux2014_x86_64.whl", hash = "sha256:02951b7dacb123d8ea6da44fe45ddd084aa6777d4b2454fa0da61d569c6fa538"},
+ {file = "regex-2020.11.13-cp37-cp37m-win32.whl", hash = "sha256:0d08e71e70c0237883d0bef12cad5145b84c3705e9c6a588b2a9c7080e5af2a4"},
+ {file = "regex-2020.11.13-cp37-cp37m-win_amd64.whl", hash = "sha256:1fa7ee9c2a0e30405e21031d07d7ba8617bc590d391adfc2b7f1e8b99f46f444"},
+ {file = "regex-2020.11.13-cp38-cp38-macosx_10_9_x86_64.whl", hash = "sha256:baf378ba6151f6e272824b86a774326f692bc2ef4cc5ce8d5bc76e38c813a55f"},
+ {file = "regex-2020.11.13-cp38-cp38-manylinux1_i686.whl", hash = "sha256:e3faaf10a0d1e8e23a9b51d1900b72e1635c2d5b0e1bea1c18022486a8e2e52d"},
+ {file = "regex-2020.11.13-cp38-cp38-manylinux1_x86_64.whl", hash = "sha256:2a11a3e90bd9901d70a5b31d7dd85114755a581a5da3fc996abfefa48aee78af"},
+ {file = "regex-2020.11.13-cp38-cp38-manylinux2010_i686.whl", hash = "sha256:d1ebb090a426db66dd80df8ca85adc4abfcbad8a7c2e9a5ec7513ede522e0a8f"},
+ {file = "regex-2020.11.13-cp38-cp38-manylinux2010_x86_64.whl", hash = "sha256:b2b1a5ddae3677d89b686e5c625fc5547c6e492bd755b520de5332773a8af06b"},
+ {file = "regex-2020.11.13-cp38-cp38-manylinux2014_aarch64.whl", hash = "sha256:2c99e97d388cd0a8d30f7c514d67887d8021541b875baf09791a3baad48bb4f8"},
+ {file = "regex-2020.11.13-cp38-cp38-manylinux2014_i686.whl", hash = "sha256:c084582d4215593f2f1d28b65d2a2f3aceff8342aa85afd7be23a9cad74a0de5"},
+ {file = "regex-2020.11.13-cp38-cp38-manylinux2014_x86_64.whl", hash = "sha256:a3d748383762e56337c39ab35c6ed4deb88df5326f97a38946ddd19028ecce6b"},
+ {file = "regex-2020.11.13-cp38-cp38-win32.whl", hash = "sha256:7913bd25f4ab274ba37bc97ad0e21c31004224ccb02765ad984eef43e04acc6c"},
+ {file = "regex-2020.11.13-cp38-cp38-win_amd64.whl", hash = "sha256:6c54ce4b5d61a7129bad5c5dc279e222afd00e721bf92f9ef09e4fae28755683"},
+ {file = "regex-2020.11.13-cp39-cp39-macosx_10_9_x86_64.whl", hash = "sha256:1862a9d9194fae76a7aaf0150d5f2a8ec1da89e8b55890b1786b8f88a0f619dc"},
+ {file = "regex-2020.11.13-cp39-cp39-manylinux1_i686.whl", hash = "sha256:4902e6aa086cbb224241adbc2f06235927d5cdacffb2425c73e6570e8d862364"},
+ {file = "regex-2020.11.13-cp39-cp39-manylinux1_x86_64.whl", hash = "sha256:7a25fcbeae08f96a754b45bdc050e1fb94b95cab046bf56b016c25e9ab127b3e"},
+ {file = "regex-2020.11.13-cp39-cp39-manylinux2010_i686.whl", hash = "sha256:d2d8ce12b7c12c87e41123997ebaf1a5767a5be3ec545f64675388970f415e2e"},
+ {file = "regex-2020.11.13-cp39-cp39-manylinux2010_x86_64.whl", hash = "sha256:f7d29a6fc4760300f86ae329e3b6ca28ea9c20823df123a2ea8693e967b29917"},
+ {file = "regex-2020.11.13-cp39-cp39-manylinux2014_aarch64.whl", hash = "sha256:717881211f46de3ab130b58ec0908267961fadc06e44f974466d1887f865bd5b"},
+ {file = "regex-2020.11.13-cp39-cp39-manylinux2014_i686.whl", hash = "sha256:3128e30d83f2e70b0bed9b2a34e92707d0877e460b402faca908c6667092ada9"},
+ {file = "regex-2020.11.13-cp39-cp39-manylinux2014_x86_64.whl", hash = "sha256:8f6a2229e8ad946e36815f2a03386bb8353d4bde368fdf8ca5f0cb97264d3b5c"},
+ {file = "regex-2020.11.13-cp39-cp39-win32.whl", hash = "sha256:f8f295db00ef5f8bae530fc39af0b40486ca6068733fb860b42115052206466f"},
+ {file = "regex-2020.11.13-cp39-cp39-win_amd64.whl", hash = "sha256:a15f64ae3a027b64496a71ab1f722355e570c3fac5ba2801cafce846bf5af01d"},
+ {file = "regex-2020.11.13.tar.gz", hash = "sha256:83d6b356e116ca119db8e7c6fc2983289d87b27b3fac238cfe5dca529d884562"},
]
requests = [
- {file = "requests-2.24.0-py2.py3-none-any.whl", hash = "sha256:fe75cc94a9443b9246fc7049224f75604b113c36acb93f87b80ed42c44cbb898"},
- {file = "requests-2.24.0.tar.gz", hash = "sha256:b3559a131db72c33ee969480840fff4bb6dd111de7dd27c8ee1f820f4f00231b"},
+ {file = "requests-2.25.0-py2.py3-none-any.whl", hash = "sha256:e786fa28d8c9154e6a4de5d46a1d921b8749f8b74e28bde23768e5e16eece998"},
+ {file = "requests-2.25.0.tar.gz", hash = "sha256:7f1a0b932f4a60a1a65caa4263921bb7d9ee911957e0ae4a23a6dd08185ad5f8"},
+]
+returns = [
+ {file = "returns-0.15.0-py3-none-any.whl", hash = "sha256:5c96ca1c3ec12051ea159c1115470520d83ad0413c461b98f94b0f8f88b57ad6"},
+ {file = "returns-0.15.0.tar.gz", hash = "sha256:32f0595da0a1e2401520204eee64cae937fedaf95c516e892287e0c9275d3f6b"},
]
rsa = [
{file = "rsa-4.6-py3-none-any.whl", hash = "sha256:6166864e23d6b5195a5cfed6cd9fed0fe774e226d8f854fcb23b7bbef0350233"},
@@ -1440,8 +1514,8 @@ sqlalchemy = [
{file = "SQLAlchemy-1.3.20.tar.gz", hash = "sha256:d2f25c7f410338d31666d7ddedfa67570900e248b940d186b48461bd4e5569a1"},
]
telethon = [
- {file = "Telethon-1.17.5-py3-none-any.whl", hash = "sha256:8763f35908d694e08a33a9d9354fc74751a50e1410d2006b2c595797ff7c87f6"},
- {file = "Telethon-1.17.5.tar.gz", hash = "sha256:958432bb3849d3e1fecfb45e211832a579fb4340a1d2b21b0d1a7d30407e39ec"},
+ {file = "Telethon-1.18.2-py3-none-any.whl", hash = "sha256:5e4eedbd81baeae2187e8bc81f3d3880d965914a46ce3855850076496c53f634"},
+ {file = "Telethon-1.18.2.tar.gz", hash = "sha256:4695ff737692f745c2e44f1f22996b415ce7715d7e764e27b0528fb61d30cefb"},
]
tgtypes = [
{file = "tgtypes-0.1.1-py3-none-any.whl", hash = "sha256:ae9243086cd9a868f1a067555a5977aba07eba00970b8f871a61bfadba81e6dd"},
@@ -1492,12 +1566,12 @@ unsync = [
{file = "unsync-1.2.1.tar.gz", hash = "sha256:ae7c7928013c25f4e23d18071a061151299f71abfdc84f9382c23979c69665c6"},
]
urllib3 = [
- {file = "urllib3-1.25.11-py2.py3-none-any.whl", hash = "sha256:f5321fbe4bf3fefa0efd0bfe7fb14e90909eb62a48ccda331726b4319897dd5e"},
- {file = "urllib3-1.25.11.tar.gz", hash = "sha256:8d7eaa5a82a1cac232164990f04874c594c9453ec55eef02eab885aa02fc17a2"},
+ {file = "urllib3-1.26.2-py2.py3-none-any.whl", hash = "sha256:d8ff90d979214d7b4f8ce956e80f4028fc6860e4431f731ea4a8c08f23f99473"},
+ {file = "urllib3-1.26.2.tar.gz", hash = "sha256:19188f96923873c92ccb987120ec4acaa12f0461fa9ce5d3d0772bc965a39e08"},
]
virtualenv = [
- {file = "virtualenv-20.1.0-py2.py3-none-any.whl", hash = "sha256:b0011228208944ce71052987437d3843e05690b2f23d1c7da4263fde104c97a2"},
- {file = "virtualenv-20.1.0.tar.gz", hash = "sha256:b8d6110f493af256a40d65e29846c69340a947669eec8ce784fcf3dd3af28380"},
+ {file = "virtualenv-20.2.1-py2.py3-none-any.whl", hash = "sha256:07cff122e9d343140366055f31be4dcd61fd598c69d11cd33a9d9c8df4546dd7"},
+ {file = "virtualenv-20.2.1.tar.gz", hash = "sha256:e0aac7525e880a429764cefd3aaaff54afb5d9f25c82627563603f5d7de5a6e5"},
]
watchgod = [
{file = "watchgod-0.6-py35.py36.py37-none-any.whl", hash = "sha256:59700dab7445aa8e6067a5b94f37bae90fc367554549b1ed2e9d0f4f38a90d2a"},
diff --git a/pyproject.toml b/pyproject.toml
index e5b9e3b..c0d6663 100644
--- a/pyproject.toml
+++ b/pyproject.toml
@@ -34,7 +34,7 @@ cached_property = "^1.5.1"
python-decouple = "3.3"
more-itertools = "^8.2.0"
unsync = "^1.2.1"
-haps = "^1.1.2"
+haps = { git = "https://github.com/JosXa/haps" }
typing-inspect = ">=0.5.0"
pydantic = { extras = ["dotenv"], version = "^1.6.1" }
ordered_set = "^3.1.1"
@@ -48,6 +48,10 @@ ensure = "^1.0.0"
loguru = "^0.5.3"
paraminjector = "^0.1.0"
buslane = "^0.0.5"
+lambdas = "^0.1.0"
+returns = "^0.15.0"
+injector = "^0.18.4"
+aiofiles = "^0.6.0"
[tool.poetry.extras]
redis = ["redis-collections"]
diff --git a/setup.cfg b/setup.cfg
new file mode 100644
index 0000000..7de305c
--- /dev/null
+++ b/setup.cfg
@@ -0,0 +1,3 @@
+[mypy]
+plugins =
+ lambdas.contrib.mypy.lambdas_plugin
diff --git a/tests/builders/test_menubuilder.py b/tests/builders/test_menubuilder.py
index 0e1b575..1f93ce6 100644
--- a/tests/builders/test_menubuilder.py
+++ b/tests/builders/test_menubuilder.py
@@ -9,7 +9,7 @@
MemoryDictCallbackStore,
)
from botkit.persistence.callback_store._simple import lookup_callback
-from botkit.settings import botkit_settings
+from botkit import botkit_settings
@pytest.fixture(scope="function")
diff --git a/tests/logging/test_logging.py b/tests/logging/test_logging.py
index 7cee488..cbe56fa 100644
--- a/tests/logging/test_logging.py
+++ b/tests/logging/test_logging.py
@@ -4,7 +4,7 @@
import pytest
from loguru import logger
-from botkit.settings import botkit_settings
+from botkit import botkit_settings
from botkit.utils.botkit_logging.setup import create_logger
@@ -68,7 +68,9 @@ def test_botkit_loguru_sub_logger_can_log_when_level_set_before_creation(caplog)
with caplog.at_level(logging.DEBUG):
log.debug("debug")
- assert caplog.record_tuples == [("test_logging", 10, "debug {'identity': 'botkit.test', 'botkit': True}")]
+ assert caplog.record_tuples == [
+ ("test_logging", 10, "debug {'identity': 'botkit.test', 'botkit': True}")
+ ]
def test_botkit_loguru_sub_logger_can_log_when_level_set_after_creation(caplog):
@@ -77,10 +79,12 @@ def test_botkit_loguru_sub_logger_can_log_when_level_set_after_creation(caplog):
with caplog.at_level(logging.DEBUG):
botkit_settings.log_level = "DEBUG"
log.debug("debug")
- assert caplog.record_tuples == [("test_logging", 10, "debug {'identity': 'botkit.test', 'botkit': True}")]
+ assert caplog.record_tuples == [
+ ("test_logging", 10, "debug {'identity': 'botkit.test', 'botkit': True}")
+ ]
-def test_botkit_loguru_sub_logger_level_can_be_increased_from_root_before_creation(caplog, ):
+def test_botkit_loguru_sub_logger_level_can_be_increased_from_root_before_creation(caplog,):
botkit_settings.log_level = "INFO"
with caplog.at_level(logging.INFO):
sub_log = create_logger("sub")
@@ -107,4 +111,5 @@ def bind_mock(**kwargs):
with mock.patch.object(logger, "bind", bind_mock):
create_logger(name)
+
# endregion
diff --git a/tests/persistence/__init__.py b/tests/persistence/__init__.py
new file mode 100644
index 0000000..e69de29
diff --git a/tests/persistence/test_callback_stores.py b/tests/persistence/test_callback_stores.py
new file mode 100644
index 0000000..cbb51d9
--- /dev/null
+++ b/tests/persistence/test_callback_stores.py
@@ -0,0 +1,37 @@
+from unittest.mock import Mock
+
+from injector import Injector
+from lambdas import _
+from redis import Redis
+
+from botkit import botkit_settings
+from botkit.persistence.callback_store import (
+ ICallbackStore,
+ MemoryDictCallbackStore,
+ RedisCallbackStore,
+ configure_callback_store,
+)
+from botkit.utils import nameof
+
+
+def test_configure_callback_store():
+ inj = Injector([configure_callback_store, lambda binder: binder.bind(Redis, to=Mock(Redis))])
+ inj.get(RedisCallbackStore)
+ inj.get(MemoryDictCallbackStore)
+
+ cbs = inj.get(ICallbackStore)
+ assert (
+ type(cbs) == MemoryDictCallbackStore
+ ), f"Default store was not {nameof(MemoryDictCallbackStore)}"
+
+ botkit_settings.callback_store_qualifier = "memory"
+ cbs = inj.get(ICallbackStore)
+ assert type(cbs) == MemoryDictCallbackStore
+
+ botkit_settings.callback_store_qualifier = "redis"
+ cbs = inj.get(ICallbackStore)
+ assert type(cbs) == RedisCallbackStore
+
+ botkit_settings.callback_store_qualifier = "memory"
+ cbs = inj.get(ICallbackStore)
+ assert type(cbs) == MemoryDictCallbackStore
diff --git a/tests/routing/pipelines/factories/steps/test_evaluate_send_target.py b/tests/routing/pipelines/factories/steps/test_evaluate_send_target.py
index d6734ba..7155752 100644
--- a/tests/routing/pipelines/factories/steps/test_evaluate_send_target.py
+++ b/tests/routing/pipelines/factories/steps/test_evaluate_send_target.py
@@ -6,8 +6,8 @@
from botkit.clients.client import IClient
from botkit.routing.pipelines.executionplan import SendTo
from botkit.routing.pipelines.steps.commit_rendered_view_step_factory import evaluate_send_target
-from botkit.routing.update_types.updatetype import UpdateType
-from botkit.views.botkit_context import Context
+from tgtypes.updatetype import UpdateType
+from botkit.botkit_context import Context
from tgtypes.identities.chat_identity import ChatIdentity
SAME_CHAT_ID = 123
diff --git a/tests/routing/pipelines_v2/__init__.py b/tests/routing/pipelines_v2/__init__.py
new file mode 100644
index 0000000..e69de29
diff --git a/tests/routing/pipelines_v2/reproduce.py b/tests/routing/pipelines_v2/reproduce.py
new file mode 100644
index 0000000..7c2b9c8
--- /dev/null
+++ b/tests/routing/pipelines_v2/reproduce.py
@@ -0,0 +1,143 @@
+from typing import List, Protocol, TypeVar
+from typing import (
+ Any,
+ Callable,
+ ClassVar,
+ Generic,
+ Optional,
+ Tuple,
+ Type,
+ TypeVar,
+ Union,
+ AbstractSet,
+ Hashable,
+ Iterable,
+ Iterator,
+ Mapping,
+ MutableMapping,
+ MutableSequence,
+ MutableSet,
+ Sequence,
+ AsyncIterator,
+ AsyncIterable,
+ Coroutine,
+ Collection,
+ AsyncGenerator,
+ Deque,
+ Dict,
+ List,
+ Set,
+ FrozenSet,
+ NamedTuple,
+ Generator,
+ cast,
+ overload,
+ TYPE_CHECKING,
+)
+from typing_extensions import TypedDict
+
+from injector import Binder, Injector, InstanceProvider, MultiBindProvider, inject
+from injector import Binder, Provider, inject, provider, Module, Injector, multiprovider, singleton
+
+T = TypeVar("T")
+
+# region Callables
+
+
+class FuncWithProtocol(Protocol[T]):
+ def __call__(self, n: int) -> T:
+ ...
+
+
+class TypeUsingProtocol(FuncWithProtocol[T]):
+ def __call__(self, n: int):
+ pass
+
+
+class UsesComplicated:
+ @inject
+ def __init__(self, c: List[FuncWithProtocol]):
+ self.c = c
+
+
+def complicated_thing(n: int) -> None:
+ pass
+
+
+def test_sth():
+ def configure(binder: Binder):
+ binder.bind(FuncWithProtocol, to=InstanceProvider(complicated_thing))
+
+ inj = Injector([configure])
+ c = inj.get(FuncWithProtocol)
+ assert c is complicated_thing
+
+
+def test_manual_multibind_provider_on_functions_matching_protocol_succeeds():
+ def configure(binder: Binder):
+ binder.bind(UsesComplicated)
+
+ prov = MultiBindProvider()
+ prov.append(InstanceProvider(complicated_thing))
+ binder.multibind(List[UsesComplicated], to=prov)
+
+ inj = Injector([configure])
+ _using = inj.get(UsesComplicated)
+
+
+# endregion
+
+# region Generics
+
+T = TypeVar("T")
+
+
+def test_generic_module_without_initializer_injector_can_be_created():
+ class GenericModuleNoInit(Module, Generic[T]):
+ pass
+
+ def configure(binder: Binder):
+ binder.install(GenericModuleNoInit[str])
+
+ Injector([configure])
+
+
+def test_generic_module_injector_can_be_created():
+ class GenericModule(Module, Generic[T]):
+ def __init__(self):
+ pass
+
+ def configure(binder: Binder):
+ binder.install(GenericModule[str])
+
+ Injector([configure])
+
+
+def test_useful_generic_module_finds_generic_type():
+ class Foo(Generic[T]):
+ pass
+
+ class ClassUsingInt(Foo[int]):
+ pass
+
+ class ClassUsingStr(Foo[str]):
+ pass
+
+ class LibraryProvidedGenericModule(Module, Generic[T]):
+ @provider
+ def t_provider(self) -> List[T]:
+
+
+ def configure(binder: Binder):
+ binder.install(GenericModule[str])
+
+ Injector([configure])
+
+
+# endregion
+
+
+# region Other collections
+
+
+# endregion
diff --git a/tests/routing/pipelines_v2/test_eventpipeline.py b/tests/routing/pipelines_v2/test_eventpipeline.py
new file mode 100644
index 0000000..9ba6cee
--- /dev/null
+++ b/tests/routing/pipelines_v2/test_eventpipeline.py
@@ -0,0 +1,140 @@
+from typing import Any, Dict, List, Literal, Optional, Type, Union, cast
+
+import pytest
+from asynctest import MagicMock
+from boltons.iterutils import flatten
+from injector import Binder, Injector, InstanceProvider, MultiBindProvider
+from itertools import permutations
+
+from botkit.routing.pipelines_v2.base import chain_middleware
+from botkit.routing.pipelines_v2.eventpipeline import EventPipeline
+from botkit.routing.pipelines_v2.base.middleware import (
+ AbstractGenericMiddleware,
+ MiddlewareChainer,
+ MiddlewareSignature,
+ NextDelegate,
+ TContext,
+)
+
+pytestmark = pytest.mark.asyncio
+
+
+async def test_async_delegate_dispatch_happy_path():
+ CType = Dict[str, Optional[Literal[True]]]
+ value: CType = dict()
+
+ async def callback(ctx: CType, nxt: NextDelegate[CType]) -> None:
+ assert ctx is value
+ assert callable(next)
+ assert "called" not in ctx
+ ctx["called"] = True
+ await nxt(ctx)
+ assert ctx["called"] is True
+
+ def make_context(x: Any, y: Any) -> CType:
+ return value
+
+ # TODO: Why the heck are these typings failing...
+ pipeline = EventPipeline(
+ [callback], context_initializer=make_context, injector=cast(Injector, None)
+ )
+ res = await pipeline.dispatch(value, None)
+ assert res is None
+ assert value["called"] is True
+
+
+async def test_class_middleware_dispatch_happy_path():
+ Context = Dict[str, Optional[Literal[True]]]
+ value: Context = dict()
+
+ class MyMiddleware(AbstractGenericMiddleware[Context]):
+ async def __call__(self, ctx: Context, nxt: NextDelegate[Context]) -> Any:
+ assert ctx is value
+ assert callable(next)
+ assert "called" not in ctx
+ ctx["called"] = True
+ await nxt(ctx)
+ assert ctx["called"] is True
+
+ delegates = [MyMiddleware]
+ pipeline = EventPipeline(delegates, Injector(), lambda _1, _2: value)
+ res = await pipeline.dispatch(value, None)
+ assert res is None
+ assert (
+ value["called"] is True
+ ), "Class-based middleware has likely only been instantiated, but not called afterwards"
+
+
+async def test_coroutine_callback_gets_awaited_automatically():
+ async def async_callback(ctx, nxt): # type: ignore
+ next(ctx) # type: ignore
+
+ delegates = [async_callback] # type: ignore
+ pipeline = EventPipeline(delegates, cast(Injector, None), context_initializer=lambda c, _: c) # type: ignore
+
+ with pytest.warns(None) as record:
+ await pipeline.dispatch(dict(), None)
+ assert len(record) == 0
+
+
+async def test_call_without_context_raises():
+ pipeline = EventPipeline([lambda _, nxt: nxt()], injector=cast(Injector, None), context_initializer=lambda c, _: c) # type: ignore
+ with pytest.raises(TypeError, match=".*has been called with incorrect arguments.*"):
+ await pipeline.dispatch(dict(), None)
+
+
+async def test_mix_sync_async_delegates_in_dispatch():
+ async def async_callback(ctx, nxt): # type: ignore
+ print(next)
+ await nxt(ctx)
+
+ delegates = list(flatten(permutations([async_callback, lambda ctx, nxt: nxt(ctx)]))) # type: ignore
+ pipeline = EventPipeline(delegates, cast(Injector, None), context_initializer=lambda c, _: c) # type: ignore
+ await pipeline.dispatch(dict(), None)
+
+
+async def test_inject_():
+ def configure(binder: Binder):
+ binder.bind(MiddlewareChainer[TContext], to=InstanceProvider(chain_middleware))
+
+ inj = Injector([configure])
+ ch = inj.get(MiddlewareChainer[TContext])
+ assert ch is chain_middleware
+
+
+async def test_dispatch_with_dependency_injection():
+ ctx = object()
+ call_mock = MagicMock()
+
+ def configure(binder: Binder):
+ binder.bind(EventPipeline)
+ binder.multibind(
+ List[MiddlewareSignature[object]], to=InstanceProvider([call_mock]) # type: ignore
+ )
+
+ inj = Injector([configure])
+ pipeline = inj.get(EventPipeline[object])
+ await pipeline.dispatch(None, None)
+
+ call_mock.assert_called_with(ctx) # type: ignore
+
+
+async def test_dispatch_switch_out_chain_algorithm():
+ TCtx = Dict[str, object]
+ call_mock = MagicMock()
+
+ def chain_with_exc(_1, _2):
+ raise ValueError("TEST SUCCESS")
+
+ def configure(binder: Binder):
+ binder.bind(EventPipeline)
+ binder.bind(MiddlewareChainer[TCtx], to=InstanceProvider(chain_with_exc))
+
+ prov = MultiBindProvider()
+ prov.append(InstanceProvider(call_mock))
+ binder.multibind(
+ List[Union[MiddlewareSignature[TCtx], Type[MiddlewareSignature[TCtx]]]], to=prov
+ )
+
+ inj = Injector([configure])
+ pipeline = inj.get(EventPipeline)
diff --git a/tests/routing/pipelines_v2/test_module.py b/tests/routing/pipelines_v2/test_module.py
new file mode 100644
index 0000000..6cdf021
--- /dev/null
+++ b/tests/routing/pipelines_v2/test_module.py
@@ -0,0 +1,9 @@
+from injector import Binder, Provider, inject, provider, Module, Injector, multiprovider, singleton
+
+from botkit.routing import MiddlewareModule
+from botkit.routing.pipelines_v2.base import EventPipeline
+
+
+def test_module():
+ inj = Injector([MiddlewareModule()])
+ # module = inj.get(EventPipeline[str])
diff --git a/tests/routing/plan/test_update_types.py b/tests/routing/plan/test_update_types.py
index fab4807..b554a82 100644
--- a/tests/routing/plan/test_update_types.py
+++ b/tests/routing/plan/test_update_types.py
@@ -5,7 +5,7 @@
from botkit.agnostic.annotations import IClient
from botkit.agnostic._pyrogram_update_type_inference import determine_pyrogram_handler_update_types
-from botkit.routing.update_types.updatetype import UpdateType
+from tgtypes.updatetype import UpdateType
from botkit.utils.typed_callable import TypedCallable
diff --git a/tests/routing/test_routing.py b/tests/routing/test_routing.py
index 2a1e594..81b02d3 100644
--- a/tests/routing/test_routing.py
+++ b/tests/routing/test_routing.py
@@ -14,9 +14,9 @@
from botkit.persistence.data_store import DataStoreBase, MemoryDataStore
from botkit.routing.route import RouteDefinition
from botkit.routing.route_builder.builder import RouteBuilder
-from botkit.routing.update_types.updatetype import UpdateType
-from botkit.settings import botkit_settings
-from botkit.views.botkit_context import Context
+from tgtypes.updatetype import UpdateType
+from botkit import botkit_settings
+from botkit.botkit_context import Context
client: IClient = Mock(IClient)
callback_query: CallbackQuery = Mock(CallbackQuery)
@@ -29,7 +29,7 @@ def configure_data_stores():
Egg(
ICallbackStore,
ICallbackStore,
- botkit_settings.callback_manager_qualifier,
+ botkit_settings.callback_store_qualifier,
MemoryDictCallbackStore,
),
Egg(DataStoreBase, DataStoreBase, None, MemoryDataStore,),
diff --git a/tests/utils/test_typed_callables.py b/tests/utils/test_typed_callables.py
index eadd839..82657bc 100644
--- a/tests/utils/test_typed_callables.py
+++ b/tests/utils/test_typed_callables.py
@@ -2,7 +2,7 @@
from botkit.utils.typed_callable import TypedCallable
-from botkit.views.botkit_context import Context
+from botkit.botkit_context import Context
def my_func_1(ctx: Context[Any, Any], test_int: int = 1, test_none: Optional[str] = None) -> Any:
@@ -37,7 +37,7 @@ def test_regular_function_properties():
"ctx": Context[Any, Any],
"test_int": int,
"test_none": Optional[str],
- "return": Any
+ "return": Any,
}
diff --git a/tests/views/test_functional_views.py b/tests/views/test_functional_views.py
index 0fe7e11..7e53772 100644
--- a/tests/views/test_functional_views.py
+++ b/tests/views/test_functional_views.py
@@ -1,6 +1,6 @@
from dataclasses import dataclass
-from botkit.builders import ViewBuilder
+from botkit.builders.viewbuilder import ViewBuilder
from botkit.persistence import callback_store
from botkit.persistence.callback_store import MemoryDictCallbackStore
from botkit.views.functional_views import render_functional_view, view
diff --git a/typings/asynctest/__init__.pyi b/typings/asynctest/__init__.pyi
new file mode 100644
index 0000000..0d39be3
--- /dev/null
+++ b/typings/asynctest/__init__.pyi
@@ -0,0 +1,26 @@
+"""
+This type stub file was generated by pyright.
+"""
+
+import unittest
+from unittest import *
+from .case import *
+from .mock import *
+from ._fail_on import *
+from .helpers import *
+from .selector import *
+
+"""
+The package asynctest is built on top of the standard :mod:`unittest` module
+and cuts down boilerplate code when testing libraries for :mod:`asyncio`.
+
+asynctest imports the standard unittest package, overrides some of its features
+and adds new ones. A test author can import asynctest in place of
+:mod:`unittest` safely.
+
+It is divided in submodules, but they are all imported at the top level,
+so :class:`asynctest.case.TestCase` is equivalent to :class:`asynctest.TestCase`.
+
+Currently, asynctest targets the "selector" model. Hence, some features will
+not (yet) work with Windows' proactor.
+"""
diff --git a/typings/asynctest/_fail_on.pyi b/typings/asynctest/_fail_on.pyi
new file mode 100644
index 0000000..5cdbcc8
--- /dev/null
+++ b/typings/asynctest/_fail_on.pyi
@@ -0,0 +1,78 @@
+"""
+This type stub file was generated by pyright.
+"""
+
+"""
+:class:`asynctest.TestCase` decorator which controls checks performed after
+tests.
+
+This module is separated from :mod:`asynctest.case` to avoid circular imports
+in modules registering new checks.
+
+To implement new checks:
+
+ * its name must be added in the ``DEFAULTS`` dict,
+
+ * a static method of the same name must be added to the :class:`_fail_on`
+ class,
+
+ * an optional static method named ``before_[name of the check]`` can be
+ added to :class:`_fail_on` to implement some set-up before the test runs.
+
+A check may be only available on some platforms, activated by a conditional
+import. In this case, ``DEFAULT`` and :class:`_fail_on` can be updated in the
+module. There is an example in the :mod:`asynctest.selector` module.
+"""
+_FAIL_ON_ATTR = "_asynctest_fail_on"
+DEFAULTS = { "unused_loop": False,"active_handles": False }
+class _fail_on:
+ def __init__(self, checks=...) -> None:
+ ...
+
+ def __call__(self, func):
+ ...
+
+ def update(self, checks, override=...):
+ ...
+
+ def copy(self):
+ ...
+
+ def get_checks(self, case):
+ ...
+
+ def before_test(self, case):
+ ...
+
+ def check_test(self, case):
+ ...
+
+ @staticmethod
+ def unused_loop(case):
+ ...
+
+ @classmethod
+ def active_handles(cls, case):
+ ...
+
+
+
+def fail_on(**kwargs):
+ """
+ Enable checks on the loop state after a test ran to help testers to
+ identify common mistakes.
+ """
+ ...
+
+def strict(func=...):
+ """
+ Activate strict checking of the state of the loop after a test ran.
+ """
+ ...
+
+def lenient(func=...):
+ """
+ Deactivate all checks after a test ran.
+ """
+ ...
+
diff --git a/typings/asynctest/case.pyi b/typings/asynctest/case.pyi
new file mode 100644
index 0000000..c273a53
--- /dev/null
+++ b/typings/asynctest/case.pyi
@@ -0,0 +1,238 @@
+"""
+This type stub file was generated by pyright.
+"""
+
+import asyncio
+import unittest
+from unittest.case import *
+
+"""
+Module ``case``
+---------------
+
+Enhance :class:`unittest.TestCase`:
+
+* a new loop is issued and set as the default loop before each test, and
+ closed and disposed after,
+
+* if the loop uses a selector, it will be wrapped with
+ :class:`asynctest.TestSelector`,
+
+* a test method in a TestCase identified as a coroutine function or returning
+ a coroutine will run on the loop,
+
+* :meth:`~TestCase.setUp()` and :meth:`~TestCase.tearDown()` methods can be
+ coroutine functions,
+
+* cleanup functions registered with :meth:`~TestCase.addCleanup()` can be
+ coroutine functions,
+
+* a test fails if the loop did not run during the test.
+
+class-level set-up
+~~~~~~~~~~~~~~~~~~
+
+Since each test runs in its own loop, it is not possible to run
+:meth:`~TestCase.setUpClass()` and :meth:`~TestCase.tearDownClass()` as
+coroutines.
+
+If one needs to perform set-up actions at the class level (meaning
+once for all tests in the class), it should be done using a loop created for
+this sole purpose and that is not shared with the tests. Ideally, the loop
+shall be closed in the method which creates it.
+
+If one really needs to share a loop between tests,
+:attr:`TestCase.use_default_loop` can be set to ``True`` (as a class
+attribute). The test case will use the loop returned by
+:meth:`asyncio.get_event_loop()` instead of creating a new loop for each test.
+This way, the event loop or event loop policy can be set during class-level
+set-up and tear down.
+"""
+class _Policy(asyncio.AbstractEventLoopPolicy):
+ def __init__(self, original_policy, loop, forbid_get_event_loop) -> None:
+ ...
+
+ def get_event_loop(self):
+ ...
+
+ def new_event_loop(self):
+ ...
+
+ def set_event_loop(self, loop):
+ ...
+
+ def get_child_watcher(self):
+ ...
+
+ def set_child_watcher(self, watcher):
+ ...
+
+ def reset_watcher(self):
+ ...
+
+
+
+class TestCase(unittest.TestCase):
+ """
+ A test which is a coroutine function or which returns a coroutine will run
+ on the loop.
+
+ Once the test returned, one or more assertions are checked. For instance,
+ a test fails if the loop didn't run. These checks can be enabled or
+ disabled using the :func:`~asynctest.fail_on` decorator.
+
+ By default, a new loop is created and is set as the default loop before
+ each test. Test authors can retrieve this loop with
+ :attr:`~asynctest.TestCase.loop`.
+
+ If :attr:`~asynctest.TestCase.use_default_loop` is set to ``True``, the
+ current default event loop is used instead. In this case, it is up to the
+ test author to deal with the state of the loop in each test: the loop might
+ be closed, callbacks and tasks may be scheduled by previous tests. It is
+ also up to the test author to close the loop and dispose the related
+ resources.
+
+ If :attr:`~asynctest.TestCase.forbid_get_event_loop` is set to ``True``,
+ a call to :func:`asyncio.get_event_loop()` will raise an
+ :exc:`AssertionError`. Since Python 3.6, calling
+ :func:`asyncio.get_event_loop()` from a callback or a coroutine will return
+ the running loop (instead of raising an exception).
+
+ These behaviors should be configured when defining the test case class::
+
+ class With_Reusable_Loop_TestCase(asynctest.TestCase):
+ use_default_loop = True
+
+ forbid_get_event_loop = False
+
+ def test_something(self):
+ pass
+
+ If :meth:`setUp()` and :meth:`tearDown()` are coroutine functions, they
+ will run on the loop. Note that :meth:`setUpClass()` and
+ :meth:`tearDownClass()` can not be coroutines.
+
+ .. versionadded:: 0.5
+
+ attribute :attr:`~asynctest.TestCase.use_default_loop`.
+
+ .. versionadded:: 0.7
+
+ attribute :attr:`~asynctest.TestCase.forbid_get_event_loop`.
+ In any case, the default loop is now reset to its original state
+ outside a test function.
+
+ .. versionadded:: 0.8
+
+ ``ignore_loop`` has been deprecated in favor of the extensible
+ :func:`~asynctest.fail_on` decorator.
+ """
+ use_default_loop = ...
+ forbid_get_event_loop = ...
+ loop = ...
+ def run(self, result=...):
+ ...
+
+ def debug(self):
+ ...
+
+ @asyncio.coroutine
+ def doCleanups(self):
+ """
+ Execute all cleanup functions. Normally called for you after tearDown.
+ """
+ ...
+
+ def addCleanup(self, function, *args, **kwargs):
+ """
+ Add a function, with arguments, to be called when the test is
+ completed. If function is a coroutine function, it will run on the loop
+ before it's cleaned.
+ """
+ ...
+
+ @asyncio.coroutine
+ def assertAsyncRaises(self, exception, awaitable):
+ """
+ Test that an exception of type ``exception`` is raised when an
+ exception is raised when awaiting ``awaitable``, a future or coroutine.
+
+ :see: :meth:`unittest.TestCase.assertRaises()`
+ """
+ ...
+
+ @asyncio.coroutine
+ def assertAsyncRaisesRegex(self, exception, regex, awaitable):
+ """
+ Like :meth:`assertAsyncRaises()` but also tests that ``regex`` matches
+ on the string representation of the raised exception.
+
+ :see: :meth:`unittest.TestCase.assertRaisesRegex()`
+ """
+ ...
+
+ @asyncio.coroutine
+ def assertAsyncWarns(self, warning, awaitable):
+ """
+ Test that a warning is triggered when awaiting ``awaitable``, a future
+ or a coroutine.
+
+ :see: :meth:`unittest.TestCase.assertWarns()`
+ """
+ ...
+
+ @asyncio.coroutine
+ def assertAsyncWarnsRegex(self, warning, regex, awaitable):
+ """
+ Like :meth:`assertAsyncWarns()` but also tests that ``regex`` matches
+ on the message of the triggered warning.
+
+ :see: :meth:`unittest.TestCase.assertWarnsRegex()`
+ """
+ ...
+
+
+
+class FunctionTestCase(TestCase, unittest.FunctionTestCase):
+ """
+ Enables the same features as :class:`~asynctest.TestCase`, but for
+ :class:`~asynctest.FunctionTestCase`.
+ """
+ ...
+
+
+class ClockedTestCase(TestCase):
+ """
+ Subclass of :class:`~asynctest.TestCase` with a controlled loop clock,
+ useful for testing timer based behaviour without slowing test run time.
+
+ The clock will only advance when :meth:`advance()` is called.
+ """
+ @asyncio.coroutine
+ def advance(self, seconds):
+ """
+ Fast forward time by a number of ``seconds``.
+
+ Callbacks scheduled to run up to the destination clock time will be
+ executed on time:
+
+ >>> self.loop.call_later(1, print_time)
+ >>> self.loop.call_later(2, self.loop.call_later, 1, print_time)
+ >>> await self.advance(3)
+ 1
+ 3
+
+ In this example, the third callback is scheduled at ``t = 2`` to be
+ executed at ``t + 1``. Hence, it will run at ``t = 3``. The callback as
+ been called on time.
+ """
+ ...
+
+
+
+def ignore_loop(func=...):
+ """
+ Ignore the error case where the loop did not run during the test.
+ """
+ ...
+
diff --git a/typings/asynctest/helpers.pyi b/typings/asynctest/helpers.pyi
new file mode 100644
index 0000000..25b8620
--- /dev/null
+++ b/typings/asynctest/helpers.pyi
@@ -0,0 +1,25 @@
+"""
+This type stub file was generated by pyright.
+"""
+
+import asyncio
+
+"""
+Module ``helpers``
+------------------
+
+Helper functions and coroutines for :mod:`asynctest`.
+"""
+@asyncio.coroutine
+def exhaust_callbacks(loop):
+ """
+ Run the loop until all ready callbacks are executed.
+
+ The coroutine doesn't wait for callbacks scheduled in the future with
+ :meth:`~asyncio.BaseEventLoop.call_at()` or
+ :meth:`~asyncio.BaseEventLoop.call_later()`.
+
+ :param loop: event loop
+ """
+ ...
+
diff --git a/typings/asynctest/mock.pyi b/typings/asynctest/mock.pyi
new file mode 100644
index 0000000..52fb98c
--- /dev/null
+++ b/typings/asynctest/mock.pyi
@@ -0,0 +1,539 @@
+"""
+This type stub file was generated by pyright.
+"""
+
+import asyncio
+import enum
+
+"""
+Module ``mock``
+---------------
+
+Wrapper to unittest.mock reducing the boilerplate when testing asyncio powered
+code.
+
+A mock can behave as a coroutine, as specified in the documentation of
+:class:`~asynctest.mock.Mock`.
+"""
+class _AsyncIterator:
+ """
+ Wraps an iterator in an asynchronous iterator.
+ """
+ def __init__(self, iterator) -> None:
+ ...
+
+ def __aiter__(self):
+ ...
+
+ async def __anext__(self):
+ ...
+
+
+
+async_magic_coroutines = ("__aenter__", "__aexit__", "__anext__")
+_async_magics = async_magic_coroutines + ("__aiter__", )
+async_magic_coroutines = set(async_magic_coroutines)
+_async_magics = set(_async_magics)
+class FakeInheritanceMeta(type):
+ """
+ A metaclass which recreates the original inheritance model from
+ unittest.mock.
+
+ - NonCallableMock > NonCallableMagicMock
+ - NonCallable > Mock
+ - Mock > MagicMock
+ """
+ def __init__(self, name, bases, attrs) -> None:
+ ...
+
+ def __instancecheck__(cls, obj):
+ ...
+
+
+
+class MockMetaMixin(FakeInheritanceMeta):
+ def __new__(meta, name, base, namespace):
+ ...
+
+
+
+class IsCoroutineArgMeta(MockMetaMixin):
+ def __new__(meta, name, base, namespace):
+ ...
+
+
+
+class AsyncMagicMixin:
+ """
+ Add support for async magic methods to :class:`MagicMock` and
+ :class:`NonCallableMagicMock`.
+
+ Actually, it's a shameless copy-paste of :class:`unittest.mock.MagicMixin`:
+ when added to our classes, it will just do exactly what its
+ :mod:`unittest` counterpart does, but for magic methods. It adds some
+ behavior but should be compatible with future additions of
+ :class:`MagicMock`.
+ """
+ def __init__(self, *args, **kwargs) -> None:
+ ...
+
+ def mock_add_spec(self, *args, **kwargs):
+ ...
+
+ def __setattr__(self, name, value):
+ ...
+
+
+
+class NonCallableMock(unittest.mock.NonCallableMock, metaclass=IsCoroutineArgMeta):
+ """
+ Enhance :class:`unittest.mock.NonCallableMock` with features allowing to
+ mock a coroutine function.
+
+ If ``is_coroutine`` is set to ``True``, the :class:`NonCallableMock`
+ object will behave so :func:`asyncio.iscoroutinefunction` will return
+ ``True`` with ``mock`` as parameter.
+
+ If ``spec`` or ``spec_set`` is defined and an attribute is fetched,
+ :class:`~asynctest.CoroutineMock` is returned instead of
+ :class:`~asynctest.Mock` when the matching spec attribute is a coroutine
+ function.
+
+ The test author can also specify a wrapped object with ``wraps``. In this
+ case, the :class:`~asynctest.Mock` object behavior is the same as with an
+ :class:`unittest.mock.Mock` object: the wrapped object may have methods
+ defined as coroutine functions.
+
+ See :class:`unittest.mock.NonCallableMock`
+ """
+ def __init__(self, spec=..., wraps=..., name=..., spec_set=..., is_coroutine=..., parent=..., **kwargs) -> None:
+ ...
+
+
+
+class NonCallableMagicMock(AsyncMagicMixin, unittest.mock.NonCallableMagicMock, metaclass=IsCoroutineArgMeta):
+ """
+ A version of :class:`~asynctest.MagicMock` that isn't callable.
+ """
+ def __init__(self, spec=..., wraps=..., name=..., spec_set=..., is_coroutine=..., parent=..., **kwargs) -> None:
+ ...
+
+
+
+class Mock(unittest.mock.Mock, metaclass=MockMetaMixin):
+ """
+ Enhance :class:`unittest.mock.Mock` so it returns
+ a :class:`~asynctest.CoroutineMock` object instead of
+ a :class:`~asynctest.Mock` object where a method on a ``spec`` or
+ ``spec_set`` object is a coroutine.
+
+ For instance:
+
+ >>> class Foo:
+ ... @asyncio.coroutine
+ ... def foo(self):
+ ... pass
+ ...
+ ... def bar(self):
+ ... pass
+
+ >>> type(asynctest.mock.Mock(Foo()).foo)
+
+
+ >>> type(asynctest.mock.Mock(Foo()).bar)
+
+
+ The test author can also specify a wrapped object with ``wraps``. In this
+ case, the :class:`~asynctest.Mock` object behavior is the same as with an
+ :class:`unittest.mock.Mock` object: the wrapped object may have methods
+ defined as coroutine functions.
+
+ If you want to mock a coroutine function, use :class:`CoroutineMock`
+ instead.
+
+ See :class:`~asynctest.NonCallableMock` for details about :mod:`asynctest`
+ features, and :mod:`unittest.mock` for the comprehensive documentation
+ about mocking.
+ """
+ ...
+
+
+class MagicMock(AsyncMagicMixin, unittest.mock.MagicMock, metaclass=MockMetaMixin):
+ """
+ Enhance :class:`unittest.mock.MagicMock` so it returns
+ a :class:`~asynctest.CoroutineMock` object instead of
+ a :class:`~asynctest.Mock` object where a method on a ``spec`` or
+ ``spec_set`` object is a coroutine.
+
+ If you want to mock a coroutine function, use :class:`CoroutineMock`
+ instead.
+
+ :class:`MagicMock` allows to mock ``__aenter__``, ``__aexit__``,
+ ``__aiter__`` and ``__anext__``.
+
+ When mocking an asynchronous iterator, you can set the
+ ``return_value`` of ``__aiter__`` to an iterable to define the list of
+ values to be returned during iteration.
+
+ You can not mock ``__await__``. If you want to mock an object implementing
+ __await__, :class:`CoroutineMock` will likely be sufficient.
+
+ see :class:`~asynctest.Mock`.
+
+ .. versionadded:: 0.11
+
+ support of asynchronous iterators and asynchronous context managers.
+ """
+ ...
+
+
+class _AwaitEvent:
+ def __init__(self, mock) -> None:
+ ...
+
+ @asyncio.coroutine
+ def wait(self, skip=...):
+ """
+ Wait for await.
+
+ :param skip: How many awaits will be skipped.
+ As a result, the mock should be awaited at least
+ ``skip + 1`` times.
+ """
+ ...
+
+ @asyncio.coroutine
+ def wait_next(self, skip=...):
+ """
+ Wait for the next await.
+
+ Unlike :meth:`wait` that counts any await, mock has to be awaited once
+ more, disregarding the current
+ :attr:`asynctest.CoroutineMock.await_count`.
+
+ :param skip: How many awaits will be skipped.
+ As a result, the mock should be awaited at least
+ ``skip + 1`` more times.
+ """
+ ...
+
+ @asyncio.coroutine
+ def wait_for(self, predicate):
+ """
+ Wait for a given predicate to become True.
+
+ :param predicate: A callable that receives mock which result
+ will be interpreted as a boolean value.
+ The final predicate value is the return value.
+ """
+ ...
+
+ def __bool__(self):
+ ...
+
+
+
+class CoroutineMock(Mock):
+ """
+ Enhance :class:`~asynctest.mock.Mock` with features allowing to mock
+ a coroutine function.
+
+ The :class:`~asynctest.CoroutineMock` object will behave so the object is
+ recognized as coroutine function, and the result of a call as a coroutine:
+
+ >>> mock = CoroutineMock()
+ >>> asyncio.iscoroutinefunction(mock)
+ True
+ >>> asyncio.iscoroutine(mock())
+ True
+
+
+ The result of ``mock()`` is a coroutine which will have the outcome of
+ ``side_effect`` or ``return_value``:
+
+ - if ``side_effect`` is a function, the coroutine will return the result
+ of that function,
+ - if ``side_effect`` is an exception, the coroutine will raise the
+ exception,
+ - if ``side_effect`` is an iterable, the coroutine will return the next
+ value of the iterable, however, if the sequence of result is exhausted,
+ ``StopIteration`` is raised immediately,
+ - if ``side_effect`` is not defined, the coroutine will return the value
+ defined by ``return_value``, hence, by default, the coroutine returns
+ a new :class:`~asynctest.CoroutineMock` object.
+
+ If the outcome of ``side_effect`` or ``return_value`` is a coroutine, the
+ mock coroutine obtained when the mock object is called will be this
+ coroutine itself (and not a coroutine returning a coroutine).
+
+ The test author can also specify a wrapped object with ``wraps``. In this
+ case, the :class:`~asynctest.Mock` object behavior is the same as with an
+ :class:`unittest.mock.Mock` object: the wrapped object may have methods
+ defined as coroutine functions.
+ """
+ awaited = ...
+ await_count = ...
+ await_args = ...
+ await_args_list = ...
+ def __init__(self, *args, **kwargs) -> None:
+ ...
+
+ def assert_awaited(_mock_self):
+ """
+ Assert that the mock was awaited at least once.
+
+ .. versionadded:: 0.12
+ """
+ ...
+
+ def assert_awaited_once(_mock_self, *args, **kwargs):
+ """
+ Assert that the mock was awaited exactly once.
+
+ .. versionadded:: 0.12
+ """
+ ...
+
+ def assert_awaited_with(_mock_self, *args, **kwargs):
+ """
+ Assert that the last await was with the specified arguments.
+
+ .. versionadded:: 0.12
+ """
+ ...
+
+ def assert_awaited_once_with(_mock_self, *args, **kwargs):
+ """
+ Assert that the mock was awaited exactly once and with the specified arguments.
+
+ .. versionadded:: 0.12
+ """
+ ...
+
+ def assert_any_await(_mock_self, *args, **kwargs):
+ """
+ Assert the mock has ever been awaited with the specified arguments.
+
+ .. versionadded:: 0.12
+ """
+ ...
+
+ def assert_has_awaits(_mock_self, calls, any_order=...):
+ """
+ Assert the mock has been awaited with the specified calls.
+ The :attr:`await_args_list` list is checked for the awaits.
+
+ If `any_order` is False (the default) then the awaits must be
+ sequential. There can be extra calls before or after the
+ specified awaits.
+
+ If `any_order` is True then the awaits can be in any order, but
+ they must all appear in :attr:`await_args_list`.
+
+ .. versionadded:: 0.12
+ """
+ ...
+
+ def assert_not_awaited(_mock_self):
+ """
+ Assert that the mock was never awaited.
+
+ .. versionadded:: 0.12
+ """
+ ...
+
+ def reset_mock(self, *args, **kwargs):
+ """
+ See :func:`unittest.mock.Mock.reset_mock()`
+ """
+ ...
+
+
+
+def create_autospec(spec, spec_set=..., instance=..., _parent=..., _name=..., **kwargs):
+ """
+ Create a mock object using another object as a spec. Attributes on the mock
+ will use the corresponding attribute on the spec object as their spec.
+
+ ``spec`` can be a coroutine function, a class or object with coroutine
+ functions as attributes.
+
+ If ``spec`` is a coroutine function, and ``instance`` is not ``False``, a
+ :exc:`RuntimeError` is raised.
+
+ .. versionadded:: 0.12
+ """
+ ...
+
+def mock_open(mock=..., read_data=...):
+ """
+ A helper function to create a mock to replace the use of :func:`open()`. It
+ works for :func:`open()` called directly or used as a context manager.
+
+ :param mock: mock object to configure, by default
+ a :class:`~asynctest.MagicMock` object is
+ created with the API limited to methods or attributes
+ available on standard file handles.
+
+ :param read_data: string for the :func:`read()` and :func:`readlines()` of
+ the file handle to return. This is an empty string by
+ default.
+ """
+ ...
+
+ANY = unittest.mock.ANY
+DEFAULT = unittest.mock.sentinel.DEFAULT
+PatchScope = enum.Enum('PatchScope', 'LIMITED GLOBAL')
+LIMITED = PatchScope.LIMITED
+GLOBAL = PatchScope.GLOBAL
+class _PatchedGenerator(asyncio.coroutines.CoroWrapper):
+ def __init__(self, gen, patchings, is_coroutine) -> None:
+ ...
+
+ def __repr__(self):
+ ...
+
+ def __next__(self):
+ ...
+
+ def send(self, value):
+ ...
+
+ def throw(self, exc, value=..., traceback=...):
+ ...
+
+ def close(self):
+ ...
+
+ def __del__(self):
+ ...
+
+
+
+class _patch(unittest.mock._patch):
+ def __init__(self, *args, scope=..., **kwargs) -> None:
+ ...
+
+ def copy(self):
+ ...
+
+ def __enter__(self):
+ ...
+
+ def decorate_callable(self, func):
+ ...
+
+
+
+def patch(target, new=..., spec=..., create=..., spec_set=..., autospec=..., new_callable=..., scope=..., **kwargs):
+ """
+ A context manager, function decorator or class decorator which patches the
+ target with the value given by the ``new`` argument.
+
+ ``new`` specifies which object will replace the ``target`` when the patch
+ is applied. By default, the target will be patched with an instance of
+ :class:`~asynctest.CoroutineMock` if it is a coroutine, or
+ a :class:`~asynctest.MagicMock` object.
+
+ It is a replacement to :func:`unittest.mock.patch`, but using
+ :mod:`asynctest.mock` objects.
+
+ When a generator or a coroutine is patched using the decorator, the patch
+ is activated or deactivated according to the ``scope`` argument value:
+
+ * :const:`asynctest.GLOBAL`: the default, enables the patch until the
+ generator or the coroutine finishes (returns or raises an exception),
+
+ * :const:`asynctest.LIMITED`: the patch will be activated when the
+ generator or coroutine is being executed, and deactivated when it
+ yields a value and pauses its execution (with ``yield``, ``yield from``
+ or ``await``).
+
+ The behavior differs from :func:`unittest.mock.patch` for generators.
+
+ When used as a context manager, the patch is still active even if the
+ generator or coroutine is paused, which may affect concurrent tasks::
+
+ @asyncio.coroutine
+ def coro():
+ with asynctest.mock.patch("module.function"):
+ yield from asyncio.get_event_loop().sleep(1)
+
+ @asyncio.coroutine
+ def independent_coro():
+ assert not isinstance(module.function, asynctest.mock.Mock)
+
+ asyncio.create_task(coro())
+ asyncio.create_task(independent_coro())
+ # this will raise an AssertionError(coro() is scheduled first)!
+ loop.run_forever()
+
+ :param scope: :const:`asynctest.GLOBAL` or :const:`asynctest.LIMITED`,
+ controls when the patch is activated on generators and coroutines
+
+ When used as a decorator with a generator based coroutine, the order of
+ the decorators matters. The order of the ``@patch()`` decorators is in
+ the reverse order of the parameters produced by these patches for the
+ patched function. And the ``@asyncio.coroutine`` decorator should be
+ the last since ``@patch()`` conceptually patches the coroutine, not
+ the function::
+
+ @patch("module.function2")
+ @patch("module.function1")
+ @asyncio.coroutine
+ def test_coro(self, mock_function1, mock_function2):
+ yield from asyncio.get_event_loop().sleep(1)
+
+ see :func:`unittest.mock.patch()`.
+
+ .. versionadded:: 0.6 patch into generators and coroutines with
+ a decorator.
+ """
+ ...
+
+class _patch_dict(unittest.mock._patch_dict):
+ def __init__(self, in_dict, values=..., clear=..., scope=..., **kwargs) -> None:
+ ...
+
+ def decorate_class(self, klass):
+ ...
+
+ def __call__(self, func):
+ ...
+
+
+
+_clear_dict = unittest.mock._clear_dict
+sentinel = unittest.mock.sentinel
+call = unittest.mock.call
+PropertyMock = unittest.mock.PropertyMock
+def return_once(value, then=...):
+ """
+ Helper to use with ``side_effect``, so a mock will return a given value
+ only once, then return another value.
+
+ When used as a ``side_effect`` value, if one of ``value`` or ``then`` is an
+ :class:`Exception` type, an instance of this exception will be raised.
+
+ >>> mock.recv = Mock(side_effect=return_once(b"data"))
+ >>> mock.recv()
+ b"data"
+ >>> repr(mock.recv())
+ 'None'
+ >>> repr(mock.recv())
+ 'None'
+
+ >>> mock.recv = Mock(side_effect=return_once(b"data", then=BlockingIOError))
+ >>> mock.recv()
+ b"data"
+ >>> mock.recv()
+ Traceback BlockingIOError
+
+ :param value: value to be returned once by the mock when called.
+
+ :param then: value returned for any subsequent call.
+
+ .. versionadded:: 0.4
+ """
+ ...
+
diff --git a/typings/asynctest/selector.pyi b/typings/asynctest/selector.pyi
new file mode 100644
index 0000000..c38d430
--- /dev/null
+++ b/typings/asynctest/selector.pyi
@@ -0,0 +1,228 @@
+"""
+This type stub file was generated by pyright.
+"""
+
+import asyncio
+from . import mock
+
+"""
+Module ``selector``
+-------------------
+
+Mock of :mod:`selectors` and compatible objects performing asynchronous IO.
+
+This module provides classes to mock objects performing IO (files, sockets,
+etc). These mocks are compatible with :class:`~asynctest.TestSelector`, which
+can simulate the behavior of a selector on the mock objects, or forward actual
+work to a real selector.
+"""
+class FileDescriptor(int):
+ """
+ A subclass of int which allows to identify the virtual file-descriptor of a
+ :class:`~asynctest.FileMock`.
+
+ If :class:`~asynctest.FileDescriptor()` without argument, its value will be
+ the value of :data:`~FileDescriptor.next_fd`.
+
+ When an object is created, :data:`~FileDescriptor.next_fd` is set to the
+ highest value for a :class:`~asynctest.FileDescriptor` object + 1.
+ """
+ next_fd = ...
+ def __new__(cls, *args, **kwargs):
+ ...
+
+ def __hash__(self) -> int:
+ ...
+
+
+
+def fd(fileobj):
+ """
+ Return the :class:`~asynctest.FileDescriptor` value of ``fileobj``.
+
+ If ``fileobj`` is a :class:`~asynctest.FileDescriptor`, ``fileobj`` is
+ returned, else ``fileobj.fileno()`` is returned instead.
+
+ Note that if fileobj is an int, :exc:`ValueError` is raised.
+
+ :raise ValueError: if ``fileobj`` is not a :class:`~asynctest.FileMock`,
+ a file-like object or
+ a :class:`~asynctest.FileDescriptor`.
+ """
+ ...
+
+def isfilemock(obj):
+ """
+ Return ``True`` if the ``obj`` or ``obj.fileno()`` is
+ a :class:`asynctest.FileDescriptor`.
+ """
+ ...
+
+class FileMock(mock.Mock):
+ """
+ Mock a file-like object.
+
+ A FileMock is an intelligent mock which can work with TestSelector to
+ simulate IO events during tests.
+
+ .. method:: fileno()
+
+ Return a :class:`~asynctest.FileDescriptor` object.
+ """
+ def __init__(self, *args, **kwargs) -> None:
+ ...
+
+
+
+class SocketMock(FileMock):
+ """
+ Mock a socket.
+
+ See :class:`~asynctest.FileMock`.
+ """
+ def __init__(self, side_effect=..., return_value=..., wraps=..., name=..., spec_set=..., parent=..., **kwargs) -> None:
+ ...
+
+
+
+if ssl:
+ class SSLSocketMock(SocketMock):
+ """
+ Mock a socket wrapped by the :mod:`ssl` module.
+
+ See :class:`~asynctest.FileMock`.
+
+ .. versionadded:: 0.5
+ """
+ def __init__(self, side_effect=..., return_value=..., wraps=..., name=..., spec_set=..., parent=..., **kwargs) -> None:
+ ...
+
+
+
+def set_read_ready(fileobj, loop):
+ """
+ Schedule callbacks registered on ``loop`` as if the selector notified that
+ data is ready to be read on ``fileobj``.
+
+ :param fileobj: file object or :class:`~asynctest.FileMock` on which the
+ event is mocked.
+
+ :param loop: :class:`asyncio.SelectorEventLoop` watching for events on
+ ``fileobj``.
+
+ ::
+
+ mock = asynctest.SocketMock()
+ mock.recv.return_value = b"Data"
+
+ def read_ready(sock):
+ print("received:", sock.recv(1024))
+
+ loop.add_reader(mock, read_ready, mock)
+
+ set_read_ready(mock, loop)
+
+ loop.run_forever() # prints received: b"Data"
+
+ .. versionadded:: 0.4
+ """
+ ...
+
+def set_write_ready(fileobj, loop):
+ """
+ Schedule callbacks registered on ``loop`` as if the selector notified that
+ data can be written to ``fileobj``.
+
+ :param fileobj: file object or :class:`~asynctest.FileMock` on which the
+ event is mocked.
+ :param loop: :class:`asyncio.SelectorEventLoop` watching for events on
+ ``fileobj``.
+
+ .. versionadded:: 0.4
+ """
+ ...
+
+class TestSelector(selectors._BaseSelectorImpl):
+ """
+ A selector which supports IOMock objects.
+
+ It can wrap an actual implementation of a selector, so the selector will
+ work both with mocks and real file-like objects.
+
+ A common use case is to patch the selector loop::
+
+ loop._selector = asynctest.TestSelector(loop._selector)
+
+ :param selector: optional, if provided, this selector will be used to work
+ with real file-like objects.
+ """
+ def __init__(self, selector=...) -> None:
+ ...
+
+ def register(self, fileobj, events, data=...):
+ """
+ Register a file object or a :class:`~asynctest.FileMock`.
+
+ If a real selector object has been supplied to the
+ :class:`~asynctest.TestSelector` object and ``fileobj`` is not
+ a :class:`~asynctest.FileMock` or a :class:`~asynctest.FileDescriptor`
+ returned by :meth:`FileMock.fileno()`, the object will be registered to
+ the real selector.
+
+ See :meth:`selectors.BaseSelector.register`.
+ """
+ ...
+
+ def unregister(self, fileobj):
+ """
+ Unregister a file object or a :class:`~asynctest.FileMock`.
+
+ See :meth:`selectors.BaseSelector.unregister`.
+ """
+ ...
+
+ def modify(self, fileobj, events, data=...):
+ """
+ Shortcut when calling :meth:`TestSelector.unregister` then
+ :meth:`TestSelector.register` to update the registration of an object
+ to the selector.
+
+ See :meth:`selectors.BaseSelector.modify`.
+ """
+ ...
+
+ def select(self, timeout=...):
+ """
+ Perform the selection.
+
+ This method is a no-op if no actual selector has been supplied.
+
+ See :meth:`selectors.BaseSelector.select`.
+ """
+ ...
+
+ def close(self):
+ """
+ Close the selector.
+
+ Close the actual selector if supplied, unregister all mocks.
+
+ See :meth:`selectors.BaseSelector.close`.
+ """
+ ...
+
+
+
+def get_registered_events(selector):
+ ...
+
+if hasattr(asyncio, "format_helpers"):
+ ...
+else:
+ ...
+def fail_on_before_test_active_selector_callbacks(case):
+ ...
+
+def fail_on_active_selector_callbacks(case):
+ ...
+
diff --git a/typings/boltons/iterutils.pyi b/typings/boltons/iterutils.pyi
new file mode 100644
index 0000000..07e88ad
--- /dev/null
+++ b/typings/boltons/iterutils.pyi
@@ -0,0 +1,930 @@
+"""
+This type stub file was generated by pyright.
+"""
+
+""":mod:`itertools` is full of great examples of Python generator
+usage. However, there are still some critical gaps. ``iterutils``
+fills many of those gaps with featureful, tested, and Pythonic
+solutions.
+
+Many of the functions below have two versions, one which
+returns an iterator (denoted by the ``*_iter`` naming pattern), and a
+shorter-named convenience form that returns a list. Some of the
+following are based on examples in itertools docs.
+"""
+def is_iterable(obj):
+ """Similar in nature to :func:`callable`, ``is_iterable`` returns
+ ``True`` if an object is `iterable`_, ``False`` if not.
+
+ >>> is_iterable([])
+ True
+ >>> is_iterable(object())
+ False
+
+ .. _iterable: https://docs.python.org/2/glossary.html#term-iterable
+ """
+ ...
+
+def is_scalar(obj):
+ """A near-mirror of :func:`is_iterable`. Returns ``False`` if an
+ object is an iterable container type. Strings are considered
+ scalar as well, because strings are more often treated as whole
+ values as opposed to iterables of 1-character substrings.
+
+ >>> is_scalar(object())
+ True
+ >>> is_scalar(range(10))
+ False
+ >>> is_scalar('hello')
+ True
+ """
+ ...
+
+def is_collection(obj):
+ """The opposite of :func:`is_scalar`. Returns ``True`` if an object
+ is an iterable other than a string.
+
+ >>> is_collection(object())
+ False
+ >>> is_collection(range(10))
+ True
+ >>> is_collection('hello')
+ False
+ """
+ ...
+
+def split(src, sep=..., maxsplit=...):
+ """Splits an iterable based on a separator. Like :meth:`str.split`,
+ but for all iterables. Returns a list of lists.
+
+ >>> split(['hi', 'hello', None, None, 'sup', None, 'soap', None])
+ [['hi', 'hello'], ['sup'], ['soap']]
+
+ See :func:`split_iter` docs for more info.
+ """
+ ...
+
+def split_iter(src, sep=..., maxsplit=...):
+ """Splits an iterable based on a separator, *sep*, a max of
+ *maxsplit* times (no max by default). *sep* can be:
+
+ * a single value
+ * an iterable of separators
+ * a single-argument callable that returns True when a separator is
+ encountered
+
+ ``split_iter()`` yields lists of non-separator values. A separator will
+ never appear in the output.
+
+ >>> list(split_iter(['hi', 'hello', None, None, 'sup', None, 'soap', None]))
+ [['hi', 'hello'], ['sup'], ['soap']]
+
+ Note that ``split_iter`` is based on :func:`str.split`, so if
+ *sep* is ``None``, ``split()`` **groups** separators. If empty lists
+ are desired between two contiguous ``None`` values, simply use
+ ``sep=[None]``:
+
+ >>> list(split_iter(['hi', 'hello', None, None, 'sup', None]))
+ [['hi', 'hello'], ['sup']]
+ >>> list(split_iter(['hi', 'hello', None, None, 'sup', None], sep=[None]))
+ [['hi', 'hello'], [], ['sup'], []]
+
+ Using a callable separator:
+
+ >>> falsy_sep = lambda x: not x
+ >>> list(split_iter(['hi', 'hello', None, '', 'sup', False], falsy_sep))
+ [['hi', 'hello'], [], ['sup'], []]
+
+ See :func:`split` for a list-returning version.
+
+ """
+ ...
+
+def lstrip(iterable, strip_value=...):
+ """Strips values from the beginning of an iterable. Stripped items will
+ match the value of the argument strip_value. Functionality is analogous
+ to that of the method str.lstrip. Returns a list.
+
+ >>> lstrip(['Foo', 'Bar', 'Bam'], 'Foo')
+ ['Bar', 'Bam']
+
+ """
+ ...
+
+def lstrip_iter(iterable, strip_value=...):
+ """Strips values from the beginning of an iterable. Stripped items will
+ match the value of the argument strip_value. Functionality is analogous
+ to that of the method str.lstrip. Returns a generator.
+
+ >>> list(lstrip_iter(['Foo', 'Bar', 'Bam'], 'Foo'))
+ ['Bar', 'Bam']
+
+ """
+ ...
+
+def rstrip(iterable, strip_value=...):
+ """Strips values from the end of an iterable. Stripped items will
+ match the value of the argument strip_value. Functionality is analogous
+ to that of the method str.rstrip. Returns a list.
+
+ >>> rstrip(['Foo', 'Bar', 'Bam'], 'Bam')
+ ['Foo', 'Bar']
+
+ """
+ ...
+
+def rstrip_iter(iterable, strip_value=...):
+ """Strips values from the end of an iterable. Stripped items will
+ match the value of the argument strip_value. Functionality is analogous
+ to that of the method str.rstrip. Returns a generator.
+
+ >>> list(rstrip_iter(['Foo', 'Bar', 'Bam'], 'Bam'))
+ ['Foo', 'Bar']
+
+ """
+ ...
+
+def strip(iterable, strip_value=...):
+ """Strips values from the beginning and end of an iterable. Stripped items
+ will match the value of the argument strip_value. Functionality is
+ analogous to that of the method str.strip. Returns a list.
+
+ >>> strip(['Fu', 'Foo', 'Bar', 'Bam', 'Fu'], 'Fu')
+ ['Foo', 'Bar', 'Bam']
+
+ """
+ ...
+
+def strip_iter(iterable, strip_value=...):
+ """Strips values from the beginning and end of an iterable. Stripped items
+ will match the value of the argument strip_value. Functionality is
+ analogous to that of the method str.strip. Returns a generator.
+
+ >>> list(strip_iter(['Fu', 'Foo', 'Bar', 'Bam', 'Fu'], 'Fu'))
+ ['Foo', 'Bar', 'Bam']
+
+ """
+ ...
+
+def chunked(src, size, count=..., **kw):
+ """Returns a list of *count* chunks, each with *size* elements,
+ generated from iterable *src*. If *src* is not evenly divisible by
+ *size*, the final chunk will have fewer than *size* elements.
+ Provide the *fill* keyword argument to provide a pad value and
+ enable padding, otherwise no padding will take place.
+
+ >>> chunked(range(10), 3)
+ [[0, 1, 2], [3, 4, 5], [6, 7, 8], [9]]
+ >>> chunked(range(10), 3, fill=None)
+ [[0, 1, 2], [3, 4, 5], [6, 7, 8], [9, None, None]]
+ >>> chunked(range(10), 3, count=2)
+ [[0, 1, 2], [3, 4, 5]]
+
+ See :func:`chunked_iter` for more info.
+ """
+ ...
+
+def chunked_iter(src, size, **kw):
+ """Generates *size*-sized chunks from *src* iterable. Unless the
+ optional *fill* keyword argument is provided, iterables not evenly
+ divisible by *size* will have a final chunk that is smaller than
+ *size*.
+
+ >>> list(chunked_iter(range(10), 3))
+ [[0, 1, 2], [3, 4, 5], [6, 7, 8], [9]]
+ >>> list(chunked_iter(range(10), 3, fill=None))
+ [[0, 1, 2], [3, 4, 5], [6, 7, 8], [9, None, None]]
+
+ Note that ``fill=None`` in fact uses ``None`` as the fill value.
+ """
+ ...
+
+def pairwise(src):
+ """Convenience function for calling :func:`windowed` on *src*, with
+ *size* set to 2.
+
+ >>> pairwise(range(5))
+ [(0, 1), (1, 2), (2, 3), (3, 4)]
+ >>> pairwise([])
+ []
+
+ The number of pairs is always one less than the number of elements
+ in the iterable passed in, except on empty inputs, which returns
+ an empty list.
+ """
+ ...
+
+def pairwise_iter(src):
+ """Convenience function for calling :func:`windowed_iter` on *src*,
+ with *size* set to 2.
+
+ >>> list(pairwise_iter(range(5)))
+ [(0, 1), (1, 2), (2, 3), (3, 4)]
+ >>> list(pairwise_iter([]))
+ []
+
+ The number of pairs is always one less than the number of elements
+ in the iterable passed in, or zero, when *src* is empty.
+
+ """
+ ...
+
+def windowed(src, size):
+ """Returns tuples with exactly length *size*. If the iterable is
+ too short to make a window of length *size*, no tuples are
+ returned. See :func:`windowed_iter` for more.
+ """
+ ...
+
+def windowed_iter(src, size):
+ """Returns tuples with length *size* which represent a sliding
+ window over iterable *src*.
+
+ >>> list(windowed_iter(range(7), 3))
+ [(0, 1, 2), (1, 2, 3), (2, 3, 4), (3, 4, 5), (4, 5, 6)]
+
+ If the iterable is too short to make a window of length *size*,
+ then no window tuples are returned.
+
+ >>> list(windowed_iter(range(3), 5))
+ []
+ """
+ ...
+
+def xfrange(stop, start=..., step=...):
+ """Same as :func:`frange`, but generator-based instead of returning a
+ list.
+
+ >>> tuple(xfrange(1, 3, step=0.75))
+ (1.0, 1.75, 2.5)
+
+ See :func:`frange` for more details.
+ """
+ ...
+
+def frange(stop, start=..., step=...):
+ """A :func:`range` clone for float-based ranges.
+
+ >>> frange(5)
+ [0.0, 1.0, 2.0, 3.0, 4.0]
+ >>> frange(6, step=1.25)
+ [0.0, 1.25, 2.5, 3.75, 5.0]
+ >>> frange(100.5, 101.5, 0.25)
+ [100.5, 100.75, 101.0, 101.25]
+ >>> frange(5, 0)
+ []
+ >>> frange(5, 0, step=-1.25)
+ [5.0, 3.75, 2.5, 1.25]
+ """
+ ...
+
+def backoff(start, stop, count=..., factor=..., jitter=...):
+ """Returns a list of geometrically-increasing floating-point numbers,
+ suitable for usage with `exponential backoff`_. Exactly like
+ :func:`backoff_iter`, but without the ``'repeat'`` option for
+ *count*. See :func:`backoff_iter` for more details.
+
+ .. _exponential backoff: https://en.wikipedia.org/wiki/Exponential_backoff
+
+ >>> backoff(1, 10)
+ [1.0, 2.0, 4.0, 8.0, 10.0]
+ """
+ ...
+
+def backoff_iter(start, stop, count=..., factor=..., jitter=...):
+ """Generates a sequence of geometrically-increasing floats, suitable
+ for usage with `exponential backoff`_. Starts with *start*,
+ increasing by *factor* until *stop* is reached, optionally
+ stopping iteration once *count* numbers are yielded. *factor*
+ defaults to 2. In general retrying with properly-configured
+ backoff creates a better-behaved component for a larger service
+ ecosystem.
+
+ .. _exponential backoff: https://en.wikipedia.org/wiki/Exponential_backoff
+
+ >>> list(backoff_iter(1.0, 10.0, count=5))
+ [1.0, 2.0, 4.0, 8.0, 10.0]
+ >>> list(backoff_iter(1.0, 10.0, count=8))
+ [1.0, 2.0, 4.0, 8.0, 10.0, 10.0, 10.0, 10.0]
+ >>> list(backoff_iter(0.25, 100.0, factor=10))
+ [0.25, 2.5, 25.0, 100.0]
+
+ A simplified usage example:
+
+ .. code-block:: python
+
+ for timeout in backoff_iter(0.25, 5.0):
+ try:
+ res = network_call()
+ break
+ except Exception as e:
+ log(e)
+ time.sleep(timeout)
+
+ An enhancement for large-scale systems would be to add variation,
+ or *jitter*, to timeout values. This is done to avoid a thundering
+ herd on the receiving end of the network call.
+
+ Finally, for *count*, the special value ``'repeat'`` can be passed to
+ continue yielding indefinitely.
+
+ Args:
+
+ start (float): Positive number for baseline.
+ stop (float): Positive number for maximum.
+ count (int): Number of steps before stopping
+ iteration. Defaults to the number of steps between *start* and
+ *stop*. Pass the string, `'repeat'`, to continue iteration
+ indefinitely.
+ factor (float): Rate of exponential increase. Defaults to `2.0`,
+ e.g., `[1, 2, 4, 8, 16]`.
+ jitter (float): A factor between `-1.0` and `1.0`, used to
+ uniformly randomize and thus spread out timeouts in a distributed
+ system, avoiding rhythm effects. Positive values use the base
+ backoff curve as a maximum, negative values use the curve as a
+ minimum. Set to 1.0 or `True` for a jitter approximating
+ Ethernet's time-tested backoff solution. Defaults to `False`.
+
+ """
+ ...
+
+def bucketize(src, key=..., value_transform=..., key_filter=...):
+ """Group values in the *src* iterable by the value returned by *key*.
+
+ >>> bucketize(range(5))
+ {False: [0], True: [1, 2, 3, 4]}
+ >>> is_odd = lambda x: x % 2 == 1
+ >>> bucketize(range(5), is_odd)
+ {False: [0, 2, 4], True: [1, 3]}
+
+ *key* is :class:`bool` by default, but can either be a callable or a string
+ name of the attribute on which to bucketize objects.
+
+ >>> bucketize([1+1j, 2+2j, 1, 2], key='real')
+ {1.0: [(1+1j), 1], 2.0: [(2+2j), 2]}
+
+ Value lists are not deduplicated:
+
+ >>> bucketize([None, None, None, 'hello'])
+ {False: [None, None, None], True: ['hello']}
+
+    Bucketize into more than two groups:
+
+ >>> bucketize(range(10), lambda x: x % 3)
+ {0: [0, 3, 6, 9], 1: [1, 4, 7], 2: [2, 5, 8]}
+
+ ``bucketize`` has a couple of advanced options useful in certain
+ cases. *value_transform* can be used to modify values as they are
+ added to buckets, and *key_filter* will allow excluding certain
+ buckets from being collected.
+
+ >>> bucketize(range(5), value_transform=lambda x: x*x)
+ {False: [0], True: [1, 4, 9, 16]}
+
+ >>> bucketize(range(10), key=lambda x: x % 3, key_filter=lambda k: k % 3 != 1)
+ {0: [0, 3, 6, 9], 2: [2, 5, 8]}
+
+ Note in some of these examples there were at most two keys, ``True`` and
+ ``False``, and each key present has a list with at least one
+ item. See :func:`partition` for a version specialized for binary
+ use cases.
+
+ """
+ ...
+
+def partition(src, key=...):
+ """No relation to :meth:`str.partition`, ``partition`` is like
+ :func:`bucketize`, but for added convenience returns a tuple of
+ ``(truthy_values, falsy_values)``.
+
+ >>> nonempty, empty = partition(['', '', 'hi', '', 'bye'])
+ >>> nonempty
+ ['hi', 'bye']
+
+    *key* defaults to :class:`bool`, but can be carefully overridden to
+    use either a function that returns ``True`` or ``False`` or a
+    string name of the attribute on which to partition objects.
+
+ >>> import string
+ >>> is_digit = lambda x: x in string.digits
+ >>> decimal_digits, hexletters = partition(string.hexdigits, is_digit)
+ >>> ''.join(decimal_digits), ''.join(hexletters)
+ ('0123456789', 'abcdefABCDEF')
+ """
+ ...
+
+def unique(src, key=...):
+ """``unique()`` returns a list of unique values, as determined by
+ *key*, in the order they first appeared in the input iterable,
+ *src*.
+
+ >>> ones_n_zeros = '11010110001010010101010'
+ >>> ''.join(unique(ones_n_zeros))
+ '10'
+
+ See :func:`unique_iter` docs for more details.
+ """
+ ...
+
+def unique_iter(src, key=...):
+ """Yield unique elements from the iterable, *src*, based on *key*,
+ in the order in which they first appeared in *src*.
+
+ >>> repetitious = [1, 2, 3] * 10
+ >>> list(unique_iter(repetitious))
+ [1, 2, 3]
+
+ By default, *key* is the object itself, but *key* can either be a
+ callable or, for convenience, a string name of the attribute on
+ which to uniqueify objects, falling back on identity when the
+ attribute is not present.
+
+ >>> pleasantries = ['hi', 'hello', 'ok', 'bye', 'yes']
+ >>> list(unique_iter(pleasantries, key=lambda x: len(x)))
+ ['hi', 'hello', 'bye']
+ """
+ ...
+
+def redundant(src, key=..., groups=...):
+ """The complement of :func:`unique()`.
+
+ By default returns non-unique/duplicate values as a list of the
+ *first* redundant value in *src*. Pass ``groups=True`` to get
+ groups of all values with redundancies, ordered by position of the
+ first redundant value. This is useful in conjunction with some
+ normalizing *key* function.
+
+ >>> redundant([1, 2, 3, 4])
+ []
+ >>> redundant([1, 2, 3, 2, 3, 3, 4])
+ [2, 3]
+ >>> redundant([1, 2, 3, 2, 3, 3, 4], groups=True)
+ [[2, 2], [3, 3, 3]]
+
+ An example using a *key* function to do case-insensitive
+ redundancy detection.
+
+ >>> redundant(['hi', 'Hi', 'HI', 'hello'], key=str.lower)
+ ['Hi']
+ >>> redundant(['hi', 'Hi', 'HI', 'hello'], groups=True, key=str.lower)
+ [['hi', 'Hi', 'HI']]
+
+ *key* should also be used when the values in *src* are not hashable.
+
+ .. note::
+
+        The output of this function is designed for reporting
+ duplicates in contexts when a unique input is desired. Due to
+ the grouped return type, there is no streaming equivalent of
+ this function for the time being.
+
+ """
+ ...
+
+def one(src, default=..., key=...):
+ """Along the same lines as builtins, :func:`all` and :func:`any`, and
+ similar to :func:`first`, ``one()`` returns the single object in
+ the given iterable *src* that evaluates to ``True``, as determined
+ by callable *key*. If unset, *key* defaults to :class:`bool`. If
+ no such objects are found, *default* is returned. If *default* is
+ not passed, ``None`` is returned.
+
+ If *src* has more than one object that evaluates to ``True``, or
+ if there is no object that fulfills such condition, return
+ *default*. It's like an `XOR`_ over an iterable.
+
+ >>> one((True, False, False))
+ True
+ >>> one((True, False, True))
+ >>> one((0, 0, 'a'))
+ 'a'
+ >>> one((0, False, None))
+ >>> one((True, True), default=False)
+ False
+ >>> bool(one(('', 1)))
+ True
+ >>> one((10, 20, 30, 42), key=lambda i: i > 40)
+ 42
+
+ See `Martín Gaitán's original repo`_ for further use cases.
+
+ .. _Martín Gaitán's original repo: https://github.com/mgaitan/one
+ .. _XOR: https://en.wikipedia.org/wiki/Exclusive_or
+
+ """
+ ...
+
+def first(iterable, default=..., key=...):
+ """Return first element of *iterable* that evaluates to ``True``, else
+ return ``None`` or optional *default*. Similar to :func:`one`.
+
+ >>> first([0, False, None, [], (), 42])
+ 42
+ >>> first([0, False, None, [], ()]) is None
+ True
+ >>> first([0, False, None, [], ()], default='ohai')
+ 'ohai'
+ >>> import re
+ >>> m = first(re.match(regex, 'abc') for regex in ['b.*', 'a(.*)'])
+ >>> m.group(1)
+ 'bc'
+
+ The optional *key* argument specifies a one-argument predicate function
+ like that used for *filter()*. The *key* argument, if supplied, should be
+ in keyword form. For example, finding the first even number in an iterable:
+
+ >>> first([1, 1, 3, 4, 5], key=lambda x: x % 2 == 0)
+ 4
+
+ Contributed by Hynek Schlawack, author of `the original standalone module`_.
+
+ .. _the original standalone module: https://github.com/hynek/first
+ """
+ ...
+
+def flatten_iter(iterable):
+ """``flatten_iter()`` yields all the elements from *iterable* while
+ collapsing any nested iterables.
+
+ >>> nested = [[1, 2], [[3], [4, 5]]]
+ >>> list(flatten_iter(nested))
+ [1, 2, 3, 4, 5]
+ """
+ ...
+
+def flatten(iterable):
+ """``flatten()`` returns a collapsed list of all the elements from
+ *iterable* while collapsing any nested iterables.
+
+ >>> nested = [[1, 2], [[3], [4, 5]]]
+ >>> flatten(nested)
+ [1, 2, 3, 4, 5]
+ """
+ ...
+
+def same(iterable, ref=...):
+ """``same()`` returns ``True`` when all values in *iterable* are
+ equal to one another, or optionally a reference value,
+ *ref*. Similar to :func:`all` and :func:`any` in that it evaluates
+ an iterable and returns a :class:`bool`. ``same()`` returns
+ ``True`` for empty iterables.
+
+ >>> same([])
+ True
+ >>> same([1])
+ True
+ >>> same(['a', 'a', 'a'])
+ True
+ >>> same(range(20))
+ False
+ >>> same([[], []])
+ True
+ >>> same([[], []], ref='test')
+ False
+
+ """
+ ...
+
+def default_visit(path, key, value):
+ ...
+
+_orig_default_visit = default_visit
+def default_enter(path, key, value):
+ ...
+
+def default_exit(path, key, old_parent, new_parent, new_items):
+ ...
+
+def remap(root, visit=..., enter=..., exit=..., **kwargs):
+ """The remap ("recursive map") function is used to traverse and
+ transform nested structures. Lists, tuples, sets, and dictionaries
+    are just a few of the data structures nested into heterogeneous
+ tree-like structures that are so common in programming.
+ Unfortunately, Python's built-in ways to manipulate collections
+ are almost all flat. List comprehensions may be fast and succinct,
+ but they do not recurse, making it tedious to apply quick changes
+ or complex transforms to real-world data.
+
+ remap goes where list comprehensions cannot.
+
+ Here's an example of removing all Nones from some data:
+
+ >>> from pprint import pprint
+ >>> reviews = {'Star Trek': {'TNG': 10, 'DS9': 8.5, 'ENT': None},
+ ... 'Babylon 5': 6, 'Dr. Who': None}
+ >>> pprint(remap(reviews, lambda p, k, v: v is not None))
+ {'Babylon 5': 6, 'Star Trek': {'DS9': 8.5, 'TNG': 10}}
+
+ Notice how both Nones have been removed despite the nesting in the
+ dictionary. Not bad for a one-liner, and that's just the beginning.
+ See `this remap cookbook`_ for more delicious recipes.
+
+ .. _this remap cookbook: http://sedimental.org/remap.html
+
+ remap takes four main arguments: the object to traverse and three
+ optional callables which determine how the remapped object will be
+ created.
+
+ Args:
+
+ root: The target object to traverse. By default, remap
+ supports iterables like :class:`list`, :class:`tuple`,
+ :class:`dict`, and :class:`set`, but any object traversable by
+ *enter* will work.
+ visit (callable): This function is called on every item in
+ *root*. It must accept three positional arguments, *path*,
+ *key*, and *value*. *path* is simply a tuple of parents'
+ keys. *visit* should return the new key-value pair. It may
+ also return ``True`` as shorthand to keep the old item
+ unmodified, or ``False`` to drop the item from the new
+ structure. *visit* is called after *enter*, on the new parent.
+
+ The *visit* function is called for every item in root,
+ including duplicate items. For traversable values, it is
+ called on the new parent object, after all its children
+ have been visited. The default visit behavior simply
+ returns the key-value pair unmodified.
+ enter (callable): This function controls which items in *root*
+ are traversed. It accepts the same arguments as *visit*: the
+ path, the key, and the value of the current item. It returns a
+ pair of the blank new parent, and an iterator over the items
+ which should be visited. If ``False`` is returned instead of
+ an iterator, the value will not be traversed.
+
+ The *enter* function is only called once per unique value. The
+        default enter behavior supports mappings, sequences, and
+ sets. Strings and all other iterables will not be traversed.
+ exit (callable): This function determines how to handle items
+ once they have been visited. It gets the same three
+ arguments as the other functions -- *path*, *key*, *value*
+ -- plus two more: the blank new parent object returned
+ from *enter*, and a list of the new items, as remapped by
+ *visit*.
+
+ Like *enter*, the *exit* function is only called once per
+ unique value. The default exit behavior is to simply add
+ all new items to the new parent, e.g., using
+ :meth:`list.extend` and :meth:`dict.update` to add to the
+ new parent. Immutable objects, such as a :class:`tuple` or
+ :class:`namedtuple`, must be recreated from scratch, but
+ use the same type as the new parent passed back from the
+ *enter* function.
+ reraise_visit (bool): A pragmatic convenience for the *visit*
+ callable. When set to ``False``, remap ignores any errors
+ raised by the *visit* callback. Items causing exceptions
+ are kept. See examples for more details.
+
+ remap is designed to cover the majority of cases with just the
+ *visit* callable. While passing in multiple callables is very
+ empowering, remap is designed so very few cases should require
+ passing more than one function.
+
+ When passing *enter* and *exit*, it's common and easiest to build
+ on the default behavior. Simply add ``from boltons.iterutils import
+ default_enter`` (or ``default_exit``), and have your enter/exit
+ function call the default behavior before or after your custom
+ logic. See `this example`_.
+
+ Duplicate and self-referential objects (aka reference loops) are
+ automatically handled internally, `as shown here`_.
+
+ .. _this example: http://sedimental.org/remap.html#sort_all_lists
+ .. _as shown here: http://sedimental.org/remap.html#corner_cases
+
+ """
+ ...
+
+class PathAccessError(KeyError, IndexError, TypeError):
+ """An amalgamation of KeyError, IndexError, and TypeError,
+ representing what can occur when looking up a path in a nested
+ object.
+ """
+ def __init__(self, exc, seg, path) -> None:
+ ...
+
+ def __repr__(self):
+ ...
+
+ def __str__(self) -> str:
+ ...
+
+
+
+def get_path(root, path, default=...):
+ """Retrieve a value from a nested object via a tuple representing the
+ lookup path.
+
+ >>> root = {'a': {'b': {'c': [[1], [2], [3]]}}}
+ >>> get_path(root, ('a', 'b', 'c', 2, 0))
+ 3
+
+ The path format is intentionally consistent with that of
+ :func:`remap`.
+
+ One of get_path's chief aims is improved error messaging. EAFP is
+ great, but the error messages are not.
+
+ For instance, ``root['a']['b']['c'][2][1]`` gives back
+ ``IndexError: list index out of range``
+
+ What went out of range where? get_path currently raises
+ ``PathAccessError: could not access 2 from path ('a', 'b', 'c', 2,
+ 1), got error: IndexError('list index out of range',)``, a
+ subclass of IndexError and KeyError.
+
+ You can also pass a default that covers the entire operation,
+ should the lookup fail at any level.
+
+ Args:
+ root: The target nesting of dictionaries, lists, or other
+ objects supporting ``__getitem__``.
+ path (tuple): A list of strings and integers to be successively
+ looked up within *root*.
+ default: The value to be returned should any
+ ``PathAccessError`` exceptions be raised.
+ """
+ ...
+
+def research(root, query=..., reraise=...):
+ """The :func:`research` function uses :func:`remap` to recurse over
+ any data nested in *root*, and find values which match a given
+ criterion, specified by the *query* callable.
+
+ Results are returned as a list of ``(path, value)`` pairs. The
+ paths are tuples in the same format accepted by
+ :func:`get_path`. This can be useful for comparing values nested
+ in two or more different structures.
+
+ Here's a simple example that finds all integers:
+
+ >>> root = {'a': {'b': 1, 'c': (2, 'd', 3)}, 'e': None}
+ >>> res = research(root, query=lambda p, k, v: isinstance(v, int))
+ >>> print(sorted(res))
+ [(('a', 'b'), 1), (('a', 'c', 0), 2), (('a', 'c', 2), 3)]
+
+ Note how *query* follows the same, familiar ``path, key, value``
+ signature as the ``visit`` and ``enter`` functions on
+ :func:`remap`, and returns a :class:`bool`.
+
+ Args:
+ root: The target object to search. Supports the same types of
+ objects as :func:`remap`, including :class:`list`,
+ :class:`tuple`, :class:`dict`, and :class:`set`.
+ query (callable): The function called on every object to
+ determine whether to include it in the search results. The
+ callable must accept three arguments, *path*, *key*, and
+ *value*, commonly abbreviated *p*, *k*, and *v*, same as
+ *enter* and *visit* from :func:`remap`.
+ reraise (bool): Whether to reraise exceptions raised by *query*
+ or to simply drop the result that caused the error.
+
+
+ With :func:`research` it's easy to inspect the details of a data
+ structure, like finding values that are at a certain depth (using
+ ``len(p)``) and much more. If more advanced functionality is
+ needed, check out the code and make your own :func:`remap`
+ wrapper, and consider `submitting a patch`_!
+
+ .. _submitting a patch: https://github.com/mahmoud/boltons/pulls
+ """
+ ...
+
+class GUIDerator(object):
+ """The GUIDerator is an iterator that yields a globally-unique
+ identifier (GUID) on every iteration. The GUIDs produced are
+ hexadecimal strings.
+
+ Testing shows it to be around 12x faster than the uuid module. By
+ default it is also more compact, partly due to its default 96-bit
+ (24-hexdigit) length. 96 bits of randomness means that there is a
+ 1 in 2 ^ 32 chance of collision after 2 ^ 64 iterations. If more
+ or less uniqueness is desired, the *size* argument can be adjusted
+ accordingly.
+
+ Args:
+ size (int): character length of the GUID, defaults to 24. Lengths
+ between 20 and 36 are considered valid.
+
+ The GUIDerator has built-in fork protection that causes it to
+ detect a fork on next iteration and reseed accordingly.
+
+ """
+ def __init__(self, size=...) -> None:
+ ...
+
+ def reseed(self):
+ ...
+
+ def __iter__(self):
+ ...
+
+ if _IS_PY3:
+ def __next__(self):
+ ...
+
+ else:
+ def __next__(self):
+ ...
+
+ next = ...
+
+
+class SequentialGUIDerator(GUIDerator):
+ """Much like the standard GUIDerator, the SequentialGUIDerator is an
+ iterator that yields a globally-unique identifier (GUID) on every
+ iteration. The GUIDs produced are hexadecimal strings.
+
+ The SequentialGUIDerator differs in that it picks a starting GUID
+ value and increments every iteration. This yields GUIDs which are
+ of course unique, but also ordered and lexicographically sortable.
+
+ The SequentialGUIDerator is around 50% faster than the normal
+ GUIDerator, making it almost 20x as fast as the built-in uuid
+ module. By default it is also more compact, partly due to its
+ 96-bit (24-hexdigit) default length. 96 bits of randomness means that
+ there is a 1 in 2 ^ 32 chance of collision after 2 ^ 64
+ iterations. If more or less uniqueness is desired, the *size*
+ argument can be adjusted accordingly.
+
+ Args:
+ size (int): character length of the GUID, defaults to 24.
+
+ Note that with SequentialGUIDerator there is a chance of GUIDs
+ growing larger than the size configured. The SequentialGUIDerator
+ has built-in fork protection that causes it to detect a fork on
+ next iteration and reseed accordingly.
+
+ """
+ if _IS_PY3:
+ def reseed(self):
+ ...
+
+ else:
+ def reseed(self):
+ ...
+
+ def __next__(self):
+ ...
+
+ next = ...
+
+
+guid_iter = GUIDerator()
+seq_guid_iter = SequentialGUIDerator()
+def soft_sorted(iterable, first=..., last=..., key=..., reverse=...):
+ """For when you care about the order of some elements, but not about
+ others.
+
+ Use this to float to the top and/or sink to the bottom a specific
+ ordering, while sorting the rest of the elements according to
+ normal :func:`sorted` rules.
+
+ >>> soft_sorted(['two', 'b', 'one', 'a'], first=['one', 'two'])
+ ['one', 'two', 'a', 'b']
+ >>> soft_sorted(range(7), first=[6, 15], last=[2, 4], reverse=True)
+ [6, 5, 3, 1, 0, 2, 4]
+ >>> import string
+ >>> ''.join(soft_sorted(string.hexdigits, first='za1', last='b', key=str.lower))
+ 'aA1023456789cCdDeEfFbB'
+
+ Args:
+ iterable (list): A list or other iterable to sort.
+ first (list): A sequence to enforce for elements which should
+ appear at the beginning of the returned list.
+ last (list): A sequence to enforce for elements which should
+ appear at the end of the returned list.
+ key (callable): Callable used to generate a comparable key for
+ each item to be sorted, same as the key in
+ :func:`sorted`. Note that entries in *first* and *last*
+ should be the keys for the items. Defaults to
+ passthrough/the identity function.
+ reverse (bool): Whether or not elements not explicitly ordered
+ by *first* and *last* should be in reverse order or not.
+
+ Returns a new list in sorted order.
+ """
+ ...
+
+def untyped_sorted(iterable, key=..., reverse=...):
+ """A version of :func:`sorted` which will happily sort an iterable of
+ heterogenous types and return a new list, similar to legacy Python's
+ behavior.
+
+ >>> untyped_sorted(['abc', 2.0, 1, 2, 'def'])
+ [1, 2.0, 2, 'abc', 'def']
+
+ Note how mutually orderable types are sorted as expected, as in
+ the case of the integers and floats above.
+
+ .. note::
+
+ Results may vary across Python versions and builds, but the
+ function will produce a sorted list, except in the case of
+ explicitly unorderable objects.
+
+ """
+ class _Wrapper(object):
+ ...
+
+
+
diff --git a/typings/typing/__init__.pyi b/typings/typing/__init__.pyi
new file mode 100644
index 0000000..6dc8514
--- /dev/null
+++ b/typings/typing/__init__.pyi
@@ -0,0 +1,13 @@
+from typing import Literal, Type, TypeVar, Union, overload
+
+
+X = TypeVar("X")
+
+@overload
+def isclass(obj: Type[X]) -> Literal[True]:
+ ...
+
+
+@overload
+def isclass(obj: X) -> Literal[False]:
+ ...