diff --git a/.gitattributes b/.gitattributes index a6344aac8c09253b3b630fb776ae94478aa0275b..6a561b6968d532507dd9315452ace33c12942df6 100644 --- a/.gitattributes +++ b/.gitattributes @@ -33,3 +33,4 @@ saved_model/**/* filter=lfs diff=lfs merge=lfs -text *.zip filter=lfs diff=lfs merge=lfs -text *.zst filter=lfs diff=lfs merge=lfs -text *tfevents* filter=lfs diff=lfs merge=lfs -text +asset/external_view.pptx filter=lfs diff=lfs merge=lfs -text diff --git a/README.md b/README.md index 85c1f277951dea827c35b46fd8312f1881d1d4ff..1cb4be8f25a08cf9e1bc3ae79ae37f56c60fff28 100644 --- a/README.md +++ b/README.md @@ -1,10 +1,26 @@ ---- -title: Neo Llm Module V1.3.5 -emoji: 🐢 -colorFrom: pink -colorTo: blue -sdk: static -pinned: false ---- - -Check out the configuration reference at https://huggingface.co/docs/hub/spaces-config-reference +# neoLLM Module + +neoAIのLLMソリューションの基盤モジュール +[neoLLM Module Document](https://www.notion.so/neoLLM-Module-Document-64399d1d1db24d92bce8f9b88472833f) + +## 準備 +[neoLLM インストール方法](https://www.notion.so/c760d96f1b4240e6880a32bee96bba35) +1. install neoLLM Module ※ Python 3.10 + ```bash + $ pip install git+https://github.com/neoAI-inc/neo-llm-module.git@v1.x.x + ``` + +2. 
APIキーの設定 + `.env`ファイルの配置 + - 環境変数を`.env`ファイルで定義し,実行するパスに配置 + - `project/example_env.txt`を`.env`に名前を変えて, 必要事項を記入 + +## 使用方法 +### 概要 +灰色背景の部分を開発するだけでOK +- MyLLM: 1回のLLMへのリクエストをラップできる +- MyL3M2: 複数のLLMへのリクエストをラップできる + +詳しくは、`project/neollm-tutorial.ipynb`, `project/ex_module` +![外観図](asset/external_view.png) + diff --git a/asset/external_view.png b/asset/external_view.png new file mode 100644 index 0000000000000000000000000000000000000000..d32771de8ef74fcb1c9addf07433a4363671f7b8 Binary files /dev/null and b/asset/external_view.png differ diff --git a/asset/external_view.pptx b/asset/external_view.pptx new file mode 100644 index 0000000000000000000000000000000000000000..f4addf7db533576e076d51f002e71ebcf0f015be --- /dev/null +++ b/asset/external_view.pptx @@ -0,0 +1,3 @@ +version https://git-lfs.github.com/spec/v1 +oid sha256:7b3e9d7dbbb6f9ca5750edd9eaad8fe7ce5fcb5797e8027ae11dea90a0a47a2c +size 8728033 diff --git a/makefile b/makefile new file mode 100644 index 0000000000000000000000000000000000000000..e3ebaeaec55ca76bdcff0af0b30f519e1af5f13f --- /dev/null +++ b/makefile @@ -0,0 +1,14 @@ +.PHONY: lint +lint: ## run linters with poetry (isort, black, pflake8, mypy) + poetry run black neollm + poetry run isort neollm + poetry run pflake8 neollm + poetry run mypy neollm --explicit-package-bases + +.PHONY: test +test: + poetry run pytest + +.PHONY: unit-test +unit-test: + poetry run pytest -k "not test_neollm" diff --git a/neollm.code-workspace b/neollm.code-workspace new file mode 100644 index 0000000000000000000000000000000000000000..d856242a7f726cb27fa25cc1ecd711fb79b304a1 --- /dev/null +++ b/neollm.code-workspace @@ -0,0 +1,63 @@ +{ + "folders": [ + { + "name": "neo-llm-module", + "path": "." 
+ } + ], + "settings": { + "editor.codeActionsOnSave": { + "source.fixAll.eslint": "explicit", + "source.fixAll.stylelint": "explicit" + }, + "editor.formatOnSave": true, + "editor.formatOnPaste": true, + "editor.formatOnType": true, + "json.format.keepLines": true, + "[javascript]": { + "editor.defaultFormatter": "esbenp.prettier-vscode" + }, + "[typescript]": { + "editor.defaultFormatter": "esbenp.prettier-vscode" + }, + "[typescriptreact]": { + "editor.defaultFormatter": "esbenp.prettier-vscode" + }, + "[css]": { + "editor.defaultFormatter": "esbenp.prettier-vscode" + }, + "[json]": { + "editor.defaultFormatter": "vscode.json-language-features" + }, + "search.exclude": { + "**/node_modules": true, + "static": true + }, + "[python]": { + "editor.defaultFormatter": "ms-python.black-formatter", + "editor.codeActionsOnSave": { + "source.organizeImports": "explicit" + } + }, + "flake8.args": [ + "--max-line-length=119", + "--max-complexity=15", + "--ignore=E203,E501,E704,W503", + "--exclude=.venv,.git,__pycache__,.mypy_cache,.hg" + ], + "isort.args": ["--settings-path=pyproject.toml"], + "black-formatter.args": ["--config=pyproject.toml"], + "mypy-type-checker.args": ["--config-file=pyproject.toml"], + "python.analysis.extraPaths": ["./backend"] + }, + "extensions": { + "recommendations": [ + "esbenp.prettier-vscode", + "dbaeumer.vscode-eslint", + "ms-python.flake8", + "ms-python.isort", + "ms-python.black-formatter", + "ms-python.mypy-type-checker" + ] + } +} diff --git a/neollm/__init__.py b/neollm/__init__.py new file mode 100644 index 0000000000000000000000000000000000000000..0056ff0106251f8ca7b38f52ac3141eed7dc8df6 --- /dev/null +++ b/neollm/__init__.py @@ -0,0 +1,5 @@ +from neollm.myllm.abstract_myllm import AbstractMyLLM +from neollm.myllm.myl3m2 import MyL3M2 +from neollm.myllm.myllm import MyLLM + +__all__ = ["AbstractMyLLM", "MyLLM", "MyL3M2"] diff --git a/neollm/exceptions.py b/neollm/exceptions.py new file mode 100644 index 
0000000000000000000000000000000000000000..2ebe003364d19a78f4d3d43454ef9f120b119fcf --- /dev/null +++ b/neollm/exceptions.py @@ -0,0 +1,2 @@ +class ContentFilterError(Exception): + pass diff --git a/neollm/llm/__init__.py b/neollm/llm/__init__.py new file mode 100644 index 0000000000000000000000000000000000000000..dc68336613b5a985d136426b4e619a83f22273b1 --- /dev/null +++ b/neollm/llm/__init__.py @@ -0,0 +1,4 @@ +from neollm.llm.abstract_llm import AbstractLLM +from neollm.llm.get_llm import get_llm + +__all__ = ["AbstractLLM", "get_llm"] diff --git a/neollm/llm/abstract_llm.py b/neollm/llm/abstract_llm.py new file mode 100644 index 0000000000000000000000000000000000000000..ba58b15784cbc2ee971aefbdac2739320a0bd202 --- /dev/null +++ b/neollm/llm/abstract_llm.py @@ -0,0 +1,188 @@ +from abc import ABC, abstractmethod +from typing import Any + +from neollm.llm.utils import get_entity +from neollm.types import ( + APIPricing, + ChatCompletion, + ChatCompletionMessage, + ChatCompletionMessageToolCall, + Choice, + ChoiceDeltaToolCall, + Chunk, + ClientSettings, + CompletionUsage, + Function, + FunctionCall, + LLMSettings, + Messages, + Response, + StreamResponse, +) +from neollm.utils.utils import cprint + + +# 現状、Azure, OpenAIに対応 +class AbstractLLM(ABC): + dollar_per_ktoken: APIPricing + model: str + context_window: int + _custom_price_calculation: bool = False # self.tokenではなく、self.custom_tokenを使う場合にTrue + + def __init__(self, client_settings: ClientSettings): + """LLMクラスの初期化 + + Args: + client_settings (ClientSettings): クライアント設定 + """ + self.client_settings = client_settings + + def calculate_price(self, num_input_tokens: int = 0, num_output_tokens: int = 0) -> float: + """ + 費用の計測 + + Args: + num_input_tokens (int, optional): 入力のトークン数. Defaults to 0. + num_output_tokens (int, optional): 出力のトークン数. Defaults to 0. 
+ + Returns: + float: API利用料(USD) + """ + price = ( + self.dollar_per_ktoken.input * num_input_tokens + self.dollar_per_ktoken.output * num_output_tokens + ) / 1000 + return price + + @abstractmethod + def count_tokens(self, messages: Messages | None = None, only_response: bool = False) -> int: ... + + @abstractmethod + def encode(self, text: str) -> list[int]: ... + + @abstractmethod + def decode(self, encoded: list[int]) -> str: ... + + @abstractmethod + def generate(self, messages: Messages, llm_settings: LLMSettings) -> Response: + """生成 + + Args: + messages (Messages): OpenAI仕様のMessages(list[dict]) + + Returns: + Response: OpenAI likeなResponse + """ + + @abstractmethod + def generate_stream(self, messages: Messages, llm_settings: LLMSettings) -> StreamResponse: ... + + def __repr__(self) -> str: + return f"{self.__class__}()" + + def convert_nonstream_response( + self, chunk_list: list[Chunk], messages: Messages, functions: Any = None + ) -> Response: + # messagesとfunctionsはトークン数計測に必要 + _chunk_choices = [chunk.choices[0] for chunk in chunk_list if len(chunk.choices) > 0] + # TODO: n=2以上の場合にwarningを出したい + + # FunctionCall -------------------------------------------------- + function_call: FunctionCall | None + if all([_c.delta.function_call is None for _c in _chunk_choices]): + function_call = None + else: + function_call = FunctionCall( + arguments="".join( + [ + _c.delta.function_call.arguments + for _c in _chunk_choices + if _c.delta.function_call is not None and _c.delta.function_call.arguments is not None + ] + ), + name=get_entity( + [_c.delta.function_call.name for _c in _chunk_choices if _c.delta.function_call is not None], + default="", + ), + ) + + # ToolCalls -------------------------------------------------- + _tool_calls_dict: dict[int, list[ChoiceDeltaToolCall]] = {} # key=index + for _chunk in _chunk_choices: + if _chunk.delta.tool_calls is None: + continue + for _tool_call in _chunk.delta.tool_calls: + 
_tool_calls_dict.setdefault(_tool_call.index, []).append(_tool_call) + + tool_calls: list[ChatCompletionMessageToolCall] | None + if sum(len(_tool_calls) for _tool_calls in _tool_calls_dict.values()) == 0: + tool_calls = None + else: + tool_calls = [] + for _tool_calls in _tool_calls_dict.values(): + tool_calls.append( + ChatCompletionMessageToolCall( + id=get_entity([_tc.id for _tc in _tool_calls], default=""), + function=Function( + arguments="".join( + [ + _tc.function.arguments + for _tc in _tool_calls + if _tc.function is not None and _tc.function.arguments is not None + ] + ), + name=get_entity( + [_tc.function.name for _tc in _tool_calls if _tc.function is not None], default="" + ), + ), + type=get_entity([_tc.type for _tc in _tool_calls], default="function"), + ) + ) + message = ChatCompletionMessage( + content="".join([_c.delta.content for _c in _chunk_choices if _c.delta.content is not None]), + # TODO: ChoiceDeltaのroleなんで、assistant以外も許されてるの? + role=get_entity([_c.delta.role for _c in _chunk_choices], default="assistant"), # type: ignore + function_call=function_call, + tool_calls=tool_calls, + ) + choice = Choice( + index=get_entity([_c.index for _c in _chunk_choices], default=0), + message=message, + finish_reason=get_entity([_c.finish_reason for _c in _chunk_choices], default=None), + ) + + # Usage -------------------------------------------------- + try: + for chunk in chunk_list: + if getattr(chunk, "tokens"): + prompt_tokens = int(getattr(chunk, "tokens")["input_tokens"]) + completion_tokens = int(getattr(chunk, "tokens")["output_tokens"]) + assert prompt_tokens + assert completion_tokens + except Exception: + prompt_tokens = self.count_tokens(messages) # TODO: fcなど + completion_tokens = self.count_tokens([message.to_typeddict_message()], only_response=True) + usages = CompletionUsage( + completion_tokens=completion_tokens, + prompt_tokens=prompt_tokens, + total_tokens=prompt_tokens + completion_tokens, + ) + + # ChatCompletion 
------------------------------------------ + response = ChatCompletion( + id=get_entity([chunk.id for chunk in chunk_list], default=""), + object="chat.completion", + created=get_entity([getattr(chunk, "created", 0) for chunk in chunk_list], default=0), + model=get_entity([getattr(chunk, "model", "") for chunk in chunk_list], default=""), + choices=[choice], + system_fingerprint=get_entity( + [getattr(chunk, "system_fingerprint", None) for chunk in chunk_list], default=None + ), + usage=usages, + ) + + return response + + @property + def max_tokens(self) -> int: + cprint("max_tokensは非推奨です。context_windowを使用してください。") + return self.context_window diff --git a/neollm/llm/claude/abstract_claude.py b/neollm/llm/claude/abstract_claude.py new file mode 100644 index 0000000000000000000000000000000000000000..073ef53d4de530c4c442bccb6f75c8a3d268b86b --- /dev/null +++ b/neollm/llm/claude/abstract_claude.py @@ -0,0 +1,214 @@ +import time +from abc import abstractmethod +from typing import Any, Literal, cast + +from anthropic import Anthropic, AnthropicBedrock, AnthropicVertex, Stream +from anthropic.types import MessageParam as AnthropicMessageParam +from anthropic.types import MessageStreamEvent as AnthropicMessageStreamEvent +from anthropic.types.message import Message as AnthropicMessage + +from neollm.llm.abstract_llm import AbstractLLM +from neollm.types import ( + ChatCompletion, + LLMSettings, + Message, + Messages, + Response, + StreamResponse, +) +from neollm.types.openai.chat_completion import ( + ChatCompletionMessage, + Choice, + CompletionUsage, + FinishReason, +) +from neollm.types.openai.chat_completion_chunk import ( + ChatCompletionChunk, + ChoiceDelta, + ChunkChoice, +) +from neollm.utils.utils import cprint + +DEFAULT_MAX_TOKENS = 4_096 + + +class AbstractClaude(AbstractLLM): + @property + @abstractmethod + def client(self) -> Anthropic | AnthropicVertex | AnthropicBedrock: ... 
+ + @property + def _client_for_token(self) -> Anthropic: + """トークンカウント用のAnthropicクライアント取得 + (AnthropicBedrock, AnthropicVertexがmethodを持っていないため) + + Returns: + Anthropic: Anthropicクライアント + """ + return Anthropic() + + def encode(self, text: str) -> list[int]: + tokenizer = self._client_for_token.get_tokenizer() + encoded = cast(list[int], tokenizer.encode(text).ids) + return encoded + + def decode(self, decoded: list[int]) -> str: + tokenizer = self._client_for_token.get_tokenizer() + text = cast(str, tokenizer.decode(decoded)) + return text + + def count_tokens(self, messages: list[Message] | None = None, only_response: bool = False) -> int: + """ + トークン数の計測 + + Args: + messages (Messages): messages + + Returns: + int: トークン数 + """ + if messages is None: + return 0 + tokens = 0 + for message in messages: + content = message["content"] + if isinstance(content, str): + tokens += self._client_for_token.count_tokens(content) + continue + if isinstance(content, list): + for content_i in content: + if content_i["type"] == "text": + tokens += self._client_for_token.count_tokens(content_i["text"]) + continue + return tokens + + def _convert_finish_reason( + self, stop_reason: Literal["end_turn", "max_tokens", "stop_sequence"] | None + ) -> FinishReason | None: + if stop_reason == "max_tokens": + return "length" + if stop_reason == "stop_sequence": + return "stop" + return None + + def _convert_to_response(self, platform_response: AnthropicMessage) -> Response: + return ChatCompletion( + id=platform_response.id, + choices=[ + Choice( + index=0, + message=ChatCompletionMessage( + content=platform_response.content[0].text if len(platform_response.content) > 0 else "", + role="assistant", + ), + finish_reason=self._convert_finish_reason(platform_response.stop_reason), + ) + ], + created=int(time.time()), + model=self.model, + object="messages.create", + system_fingerprint=None, + usage=CompletionUsage( + prompt_tokens=platform_response.usage.input_tokens, + 
completion_tokens=platform_response.usage.output_tokens, + total_tokens=platform_response.usage.input_tokens + platform_response.usage.output_tokens, + ), + ) + + def _convert_to_platform_messages(self, messages: Messages) -> tuple[str, list[AnthropicMessageParam]]: + _system = "" + _message: list[AnthropicMessageParam] = [] + for message in messages: + if message["role"] == "system": + _system += "\n" + message["content"] + elif message["role"] == "user": + if isinstance(message["content"], str): + _message.append({"role": "user", "content": message["content"]}) + else: + cprint("WARNING: 未対応です", color="yellow", background=True) + elif message["role"] == "assistant": + if isinstance(message["content"], str): + _message.append({"role": "assistant", "content": message["content"]}) + else: + cprint("WARNING: 未対応です", color="yellow", background=True) + else: + cprint("WARNING: 未対応です", color="yellow", background=True) + return _system, _message + + def _convert_to_streamresponse( + self, platform_streamresponse: Stream[AnthropicMessageStreamEvent] + ) -> StreamResponse: + created = int(time.time()) + model = "" + id_ = "" + content: str | None = None + for chunk in platform_streamresponse: + input_tokens = 0 + output_tokens = 0 + if chunk.type == "message_stop" or chunk.type == "content_block_stop": + continue + if chunk.type == "message_start": + model = model or chunk.message.model + id_ = id_ or chunk.message.id + input_tokens = chunk.message.usage.input_tokens + output_tokens = chunk.message.usage.output_tokens + content = "".join([content_block.text for content_block in chunk.message.content]) + finish_reason = self._convert_finish_reason(chunk.message.stop_reason) + elif chunk.type == "message_delta": + content = "" + finish_reason = self._convert_finish_reason(chunk.delta.stop_reason) + output_tokens = chunk.usage.output_tokens + elif chunk.type == "content_block_start": + content = chunk.content_block.text + finish_reason = None + elif chunk.type == 
"content_block_delta": + content = chunk.delta.text + finish_reason = None + yield ChatCompletionChunk( + id=id_, + choices=[ + ChunkChoice( + delta=ChoiceDelta( + content=content, + role="assistant", + ), + finish_reason=finish_reason, + index=0, # 0-indexedじゃないかもしれないので0に塗り替え + ) + ], + created=created, + model=model, + object="chat.completion.chunk", + tokens={"input_tokens": input_tokens, "output_tokens": output_tokens}, # type: ignore + ) + + def generate(self, messages: Messages, llm_settings: LLMSettings) -> Response: + _system, _message = self._convert_to_platform_messages(messages) + llm_settings = self._set_max_tokens(llm_settings) + response = self.client.messages.create( + model=self.model, + system=_system, + messages=_message, + stream=False, + **llm_settings, + ) + return self._convert_to_response(platform_response=response) + + def generate_stream(self, messages: Any, llm_settings: LLMSettings) -> StreamResponse: + _system, _message = self._convert_to_platform_messages(messages) + llm_settings = self._set_max_tokens(llm_settings) + response = self.client.messages.create( + model=self.model, + system=_system, + messages=_message, + stream=True, + **llm_settings, + ) + return self._convert_to_streamresponse(platform_streamresponse=response) + + def _set_max_tokens(self, llm_settings: LLMSettings) -> LLMSettings: + # claudeはmax_tokensが必須 + if not llm_settings.get("max_tokens"): + cprint(f"max_tokens is not set. 
Set to {DEFAULT_MAX_TOKENS}.", color="yellow") + llm_settings["max_tokens"] = DEFAULT_MAX_TOKENS + return llm_settings diff --git a/neollm/llm/claude/anthropic_llm.py b/neollm/llm/claude/anthropic_llm.py new file mode 100644 index 0000000000000000000000000000000000000000..7ba376c8c7fb9df3204c676ad4facdf5773e1389 --- /dev/null +++ b/neollm/llm/claude/anthropic_llm.py @@ -0,0 +1,66 @@ +from typing import Literal, cast, get_args + +from anthropic import Anthropic + +from neollm.llm.abstract_llm import AbstractLLM +from neollm.llm.claude.abstract_claude import AbstractClaude +from neollm.types import APIPricing, ClientSettings +from neollm.utils.utils import cprint + +# price: https://www.anthropic.com/api +# models: https://docs.anthropic.com/claude/docs/models-overview + +SUPPORTED_MODELS = Literal[ + "claude-3-opus-20240229", + "claude-3-sonnet-20240229", + "claude-3-haiku-20240307", +] + + +def get_anthoropic_llm(model_name: SUPPORTED_MODELS | str, client_settings: ClientSettings) -> AbstractLLM: + # Add 日付 + replace_map_for_nodate: dict[str, SUPPORTED_MODELS] = { + "claude-3-opus": "claude-3-opus-20240229", + "claude-3-sonnet": "claude-3-sonnet-20240229", + "claude-3-haiku": "claude-3-haiku-20240307", + } + if model_name in replace_map_for_nodate: + cprint("WARNING: model_nameに日付を指定してください", color="yellow", background=True) + print(f"model_name: {model_name} -> {replace_map_for_nodate[model_name]}") + model_name = replace_map_for_nodate[model_name] + + # map to LLM + supported_model_map: dict[SUPPORTED_MODELS, AbstractLLM] = { + "claude-3-opus-20240229": AnthropicClaude3Opus20240229(client_settings), + "claude-3-sonnet-20240229": AnthropicClaude3Sonnet20240229(client_settings), + "claude-3-haiku-20240307": AnthropicClaude3Haiku20240229(client_settings), + } + if model_name in supported_model_map: + model_name = cast(SUPPORTED_MODELS, model_name) + return supported_model_map[model_name] + raise ValueError(f"model_name must be {get_args(SUPPORTED_MODELS)}, but got 
{model_name}.") + + +class AnthoropicLLM(AbstractClaude): + @property + def client(self) -> Anthropic: + client = Anthropic(**self.client_settings) + return client + + +class AnthropicClaude3Opus20240229(AnthoropicLLM): + dollar_per_ktoken = APIPricing(input=15 / 1000, output=75 / 1000) + model: str = "claude-3-opus-20240229" + context_window: int = 200_000 + + +class AnthropicClaude3Sonnet20240229(AnthoropicLLM): + dollar_per_ktoken = APIPricing(input=3 / 1000, output=15 / 1000) + model: str = "claude-3-sonnet-20240229" + context_window: int = 200_000 + + +class AnthropicClaude3Haiku20240229(AnthoropicLLM): + dollar_per_ktoken = APIPricing(input=0.25 / 1000, output=1.25 / 1000) + model: str = "claude-3-haiku-20240307" + context_window: int = 200_000 diff --git a/neollm/llm/claude/gcp_llm.py b/neollm/llm/claude/gcp_llm.py new file mode 100644 index 0000000000000000000000000000000000000000..9a281d211fa6a74a7d805fbc4b58d84b756e9745 --- /dev/null +++ b/neollm/llm/claude/gcp_llm.py @@ -0,0 +1,67 @@ +from typing import Literal, cast, get_args + +from anthropic import AnthropicVertex + +from neollm.llm.abstract_llm import AbstractLLM +from neollm.llm.claude.abstract_claude import AbstractClaude +from neollm.types import APIPricing, ClientSettings +from neollm.utils.utils import cprint + +# price: https://www.anthropic.com/api +# models: https://docs.anthropic.com/claude/docs/models-overview + +SUPPORTED_MODELS = Literal[ + "claude-3-opus@20240229", + "claude-3-sonnet@20240229", + "claude-3-haiku@20240307", +] + + +# TODO! 
google 動かしたいね +def get_gcp_llm(model_name: SUPPORTED_MODELS | str, client_settings: ClientSettings) -> AbstractLLM: + # Add 日付 + replace_map_for_nodate: dict[str, SUPPORTED_MODELS] = { + "claude-3-opus": "claude-3-opus@20240229", + "claude-3-sonnet": "claude-3-sonnet@20240229", + "claude-3-haiku": "claude-3-haiku@20240307", + } + if model_name in replace_map_for_nodate: + cprint("WARNING: model_nameに日付を指定してください", color="yellow", background=True) + print(f"model_name: {model_name} -> {replace_map_for_nodate[model_name]}") + model_name = replace_map_for_nodate[model_name] + + # map to LLM + supported_model_map: dict[SUPPORTED_MODELS, AbstractLLM] = { + "claude-3-opus@20240229": GCPClaude3Opus20240229(client_settings), + "claude-3-sonnet@20240229": GCPClaude3Sonnet20240229(client_settings), + "claude-3-haiku@20240307": GCPClaude3Haiku20240229(client_settings), + } + if model_name in supported_model_map: + model_name = cast(SUPPORTED_MODELS, model_name) + return supported_model_map[model_name] + raise ValueError(f"model_name must be {get_args(SUPPORTED_MODELS)}, but got {model_name}.") + + +class GoogleLLM(AbstractClaude): + @property + def client(self) -> AnthropicVertex: + client = AnthropicVertex(**self.client_settings) + return client + + +class GCPClaude3Opus20240229(GoogleLLM): + dollar_per_ktoken = APIPricing(input=15 / 1000, output=75 / 1000) + model: str = "claude-3-opus@20240229" + context_window: int = 200_000 + + +class GCPClaude3Sonnet20240229(GoogleLLM): + dollar_per_ktoken = APIPricing(input=3 / 1000, output=15 / 1000) + model: str = "claude-3-sonnet@20240229" + context_window: int = 200_000 + + +class GCPClaude3Haiku20240229(GoogleLLM): + dollar_per_ktoken = APIPricing(input=0.25 / 1000, output=1.25 / 1000) + model: str = "claude-3-haiku@20240307" + context_window: int = 200_000 diff --git a/neollm/llm/gemini/abstract_gemini.py b/neollm/llm/gemini/abstract_gemini.py new file mode 100644 index 
0000000000000000000000000000000000000000..a960ce1fdc2a8e5d4900bef16ccc896371dcd6e9 --- /dev/null +++ b/neollm/llm/gemini/abstract_gemini.py @@ -0,0 +1,229 @@ +import time +from abc import abstractmethod +from typing import Iterable, cast + +from google.cloud.aiplatform_v1beta1.types import CountTokensResponse +from google.cloud.aiplatform_v1beta1.types.content import Candidate +from vertexai.generative_models import ( + Content, + GenerationConfig, + GenerationResponse, + GenerativeModel, + Part, +) +from vertexai.generative_models._generative_models import ContentsType + +from neollm.llm.abstract_llm import AbstractLLM +from neollm.types import ( + ChatCompletion, + CompletionUsageForCustomPriceCalculation, + LLMSettings, + Message, + Messages, + Response, + StreamResponse, +) +from neollm.types.openai.chat_completion import ( + ChatCompletionMessage, + Choice, + CompletionUsage, +) +from neollm.types.openai.chat_completion import FinishReason as FinishReasonVertex +from neollm.types.openai.chat_completion_chunk import ( + ChatCompletionChunk, + ChoiceDelta, + ChunkChoice, +) +from neollm.utils.utils import cprint + + +class AbstractGemini(AbstractLLM): + + @abstractmethod + def generate_config(self, llm_settings: LLMSettings) -> GenerationConfig: ... 
+ + # 使っていない + def encode(self, text: str) -> list[int]: + return [ord(char) for char in text] + + # 使っていない + def decode(self, decoded: list[int]) -> str: + return "".join([chr(number) for number in decoded]) + + def _count_tokens_vertex(self, contents: ContentsType) -> CountTokensResponse: + model = GenerativeModel(model_name=self.model) + return cast(CountTokensResponse, model.count_tokens(contents)) + + def count_tokens(self, messages: list[Message] | None = None, only_response: bool = False) -> int: + """ + トークン数の計測 + + Args: + messages (Messages): messages + + Returns: + int: トークン数 + """ + if messages is None: + return 0 + _system, _message = self._convert_to_platform_messages(messages) + total_tokens = 0 + if _system: + total_tokens += int(self._count_tokens_vertex(_system).total_tokens) + if _message: + total_tokens = int(self._count_tokens_vertex(_message).total_tokens) + return total_tokens + + def _convert_to_platform_messages(self, messages: Messages) -> tuple[str | None, list[Content]]: + _system = None + _message: list[Content] = [] + + for message in messages: + if message["role"] == "system": + _system = "\n" + message["content"] + elif message["role"] == "user": + if isinstance(message["content"], str): + _message.append(Content(role="user", parts=[Part.from_text(message["content"])])) + else: + try: + if isinstance(message["content"], list) and message["content"][1]["type"] == "image_url": + encoded_image = message["content"][1]["image_url"]["url"].split(",")[-1] + _message.append( + Content( + role="user", + parts=[ + Part.from_text(message["content"][0]["text"]), + Part.from_data(data=encoded_image, mime_type="image/jpeg"), + ], + ) + ) + except KeyError: + cprint("WARNING: 未対応です", color="yellow", background=True) + except IndexError: + cprint("WARNING: 未対応です", color="yellow", background=True) + except Exception as e: + cprint(e, color="red", background=True) + elif message["role"] == "assistant": + if isinstance(message["content"], str): + 
_message.append(Content(role="model", parts=[Part.from_text(message["content"])])) + else: + cprint("WARNING: 未対応です", color="yellow", background=True) + return _system, _message + + def _convert_finish_reason(self, stop_reason: Candidate.FinishReason) -> FinishReasonVertex | None: + """ + 参考記事 : https://ai.google.dev/api/python/google/ai/generativelanguage/Candidate/FinishReason + + 0: FINISH_REASON_UNSPECIFIED + Default value. This value is unused. + 1: STOP + Natural stop point of the model or provided stop sequence. + 2: MAX_TOKENS + The maximum number of tokens as specified in the request was reached. + 3: SAFETY + The candidate content was flagged for safety reasons. + 4: RECITATION + The candidate content was flagged for recitation reasons. + 5: OTHER + Unknown reason. + """ + + if stop_reason.value in [0, 3, 4, 5]: + return "stop" + + if stop_reason.value in [2]: + return "length" + + return None + + def _convert_to_response( + self, platform_response: GenerationResponse, system: str | None, message: list[Content] + ) -> Response: + # input 請求用文字数 + input_billable_characters = 0 + if system: + input_billable_characters += self._count_tokens_vertex(system).total_billable_characters + if message: + input_billable_characters += self._count_tokens_vertex(message).total_billable_characters + # output 請求用文字数 + output_billable_characters = 0 + if platform_response.text: + output_billable_characters += self._count_tokens_vertex(platform_response.text).total_billable_characters + return ChatCompletion( # type: ignore [call-arg] + id="", + choices=[ + Choice( + index=0, + message=ChatCompletionMessage( + content=platform_response.text, + role="assistant", + ), + finish_reason=self._convert_finish_reason(platform_response.candidates[0].finish_reason), + ) + ], + created=int(time.time()), + model=self.model, + object="messages.create", + system_fingerprint=None, + usage=CompletionUsage( + prompt_tokens=platform_response.usage_metadata.prompt_token_count, + 
completion_tokens=platform_response.usage_metadata.candidates_token_count, + total_tokens=platform_response.usage_metadata.prompt_token_count + + platform_response.usage_metadata.candidates_token_count, + ), + usage_for_price=CompletionUsageForCustomPriceCalculation( + prompt_tokens=input_billable_characters, + completion_tokens=output_billable_characters, + total_tokens=input_billable_characters + output_billable_characters, + ), + ) + + def _convert_to_streamresponse(self, platform_streamresponse: Iterable[GenerationResponse]) -> StreamResponse: + created = int(time.time()) + content: str | None = None + for chunk in platform_streamresponse: + content = chunk.text + yield ChatCompletionChunk( + id="", + choices=[ + ChunkChoice( + delta=ChoiceDelta( + content=content, + role="assistant", + ), + finish_reason=self._convert_finish_reason(chunk.candidates[0].finish_reason), + index=0, # 0-indexedじゃないかもしれないので0に塗り替え + ) + ], + created=created, + model=self.model, + object="chat.completion.chunk", + ) + + def generate(self, messages: Messages, llm_settings: LLMSettings) -> Response: + _system, _message = self._convert_to_platform_messages(messages) + model = GenerativeModel( + model_name=self.model, + system_instruction=_system, + ) + + response = model.generate_content( + contents=_message, + stream=False, + generation_config=self.generate_config(llm_settings), + ) + + return self._convert_to_response(platform_response=response, system=_system, message=_message) + + def generate_stream(self, messages: Messages, llm_settings: LLMSettings) -> StreamResponse: + _system, _message = self._convert_to_platform_messages(messages) + model = GenerativeModel( + model_name=self.model, + system_instruction=_system, + ) + response = model.generate_content( + contents=_message, + stream=True, + generation_config=self.generate_config(llm_settings), + ) + return self._convert_to_streamresponse(platform_streamresponse=response) diff --git a/neollm/llm/gemini/gcp_llm.py 
from copy import deepcopy
from typing import Literal, cast, get_args

import vertexai
from vertexai.generative_models import GenerationConfig

from neollm.llm.abstract_llm import AbstractLLM
from neollm.llm.gemini.abstract_gemini import AbstractGemini
from neollm.types import APIPricing, ClientSettings, LLMSettings, StreamResponse
from neollm.types.mytypes import Messages, Response
from neollm.utils.utils import cprint

# price: https://ai.google.dev/pricing?hl=ja
# models: https://ai.google.dev/gemini-api/docs/models/gemini?hl=ja

SUPPORTED_MODELS = Literal["gemini-1.0-pro", "gemini-1.0-pro-vision", "gemini-1.5-pro-preview-0409"]
AVAILABLE_CONFIG_VARIABLES = [
    "candidate_count",
    "stop_sequences",
    "temperature",
    "max_tokens",  # accepted as an alias when max_output_tokens is not set
    "max_output_tokens",
    "top_p",
    "top_k",
]


def get_gcp_llm(model_name: SUPPORTED_MODELS | str, client_settings: ClientSettings) -> AbstractLLM:
    """Return the Gemini-on-Vertex-AI wrapper for *model_name*.

    Initializes the Vertex AI SDK with *client_settings* as a side effect.

    Raises:
        ValueError: if *model_name* is not one of SUPPORTED_MODELS.
    """
    vertexai.init(**client_settings)

    supported_model_map: dict[SUPPORTED_MODELS, AbstractLLM] = {
        "gemini-1.0-pro": GCPGemini10Pro(client_settings),
        "gemini-1.0-pro-vision": GCPGemini10ProVision(client_settings),
        "gemini-1.5-pro-preview-0409": GCPGemini15Pro0409(client_settings),
    }
    if model_name in supported_model_map:
        return supported_model_map[cast(SUPPORTED_MODELS, model_name)]
    raise ValueError(f"model_name must be {get_args(SUPPORTED_MODELS)}, but got {model_name}.")


class GoogleLLM(AbstractGemini):
    """Base class for Gemini models served on Vertex AI."""

    def generate_config(self, llm_settings: LLMSettings) -> GenerationConfig:
        """Map neollm *llm_settings* onto a Vertex AI GenerationConfig.

        Reference: https://ai.google.dev/api/rest/v1/GenerationConfig?hl=ja

        Fixes:
        - pops from a copy, so the caller's dict is no longer mutated
        - a "max_tokens" given alongside "max_output_tokens" is no longer
          silently dropped; it is ignored with an explicit warning
        - any other leftover key always raises ValueError

        Raises:
            ValueError: if *llm_settings* contains an unsupported key.
        """
        settings = dict(llm_settings)  # copy: do not mutate the caller's dict
        candidate_count = settings.pop("candidate_count", None)
        stop_sequences = settings.pop("stop_sequences", None)
        temperature = settings.pop("temperature", None)
        max_output_tokens = settings.pop("max_output_tokens", None)
        top_p = settings.pop("top_p", None)
        top_k = settings.pop("top_k", None)

        # Accept neollm's generic "max_tokens" as an alias for max_output_tokens.
        if max_output_tokens is None:
            max_output_tokens = settings.pop("max_tokens", None)
        elif "max_tokens" in settings:
            settings.pop("max_tokens")
            cprint("WARNING: max_tokens is ignored because max_output_tokens is set", color="yellow", background=True)

        if settings:
            raise ValueError(f"llm_settings has unknown keys: {settings}")

        return GenerationConfig(
            candidate_count=candidate_count,
            stop_sequences=stop_sequences,
            temperature=temperature,
            max_output_tokens=max_output_tokens,
            top_p=top_p,
            top_k=top_k,
        )


class GCPGemini10Pro(GoogleLLM):
    dollar_per_ktoken = APIPricing(input=0.125 / 1000, output=0.375 / 1000)
    model: str = "gemini-1.0-pro"
    context_window: int = 32_000


class GCPGemini10ProVision(GoogleLLM):
    dollar_per_ktoken = APIPricing(input=0.125 / 1000, output=0.375 / 1000)
    model: str = "gemini-1.0-pro-vision"
    context_window: int = 32_000

    def generate(self, messages: Messages, llm_settings: LLMSettings) -> Response:
        return super().generate(self._preprocess_message_to_use_system(messages), llm_settings)

    def generate_stream(self, messages: Messages, llm_settings: LLMSettings) -> StreamResponse:
        return super().generate_stream(self._preprocess_message_to_use_system(messages), llm_settings)

    def _preprocess_message_to_use_system(self, message: Messages) -> Messages:
        """Fold a leading system message into the first content part that follows.

        NOTE(review): presumably this vision model does not accept a separate
        system instruction — confirm against the Vertex AI docs.

        Fixes: guards against empty *message* (previously IndexError) and a
        missing "text" key in the first content part (previously KeyError).
        """
        if not message or message[0]["role"] != "system":
            return message
        preprocessed = deepcopy(message)
        system = preprocessed[0]["content"]
        del preprocessed[0]
        if (
            preprocessed
            and isinstance(system, str)
            and isinstance(preprocessed[0]["content"], list)
            and len(preprocessed[0]["content"]) > 0
            and isinstance(preprocessed[0]["content"][0].get("text"), str)
        ):
            preprocessed[0]["content"][0]["text"] = system + preprocessed[0]["content"][0]["text"]
        else:
            cprint("WARNING: 入力形式が不正です", color="yellow", background=True)
        return preprocessed


class GCPGemini15Pro0409(GoogleLLM):
    dollar_per_ktoken = APIPricing(input=2.5 / 1000, output=7.5 / 1000)
    model: str = "gemini-1.5-pro-preview-0409"
    context_window: int = 1_000_000
from neollm.llm.abstract_llm import AbstractLLM
from neollm.types import ClientSettings

from .platform import Platform

# Claude models that are served through GCP (Vertex AI).
SUPPORTED_CLAUDE_MODELS = [
    "claude-3-opus",
    "claude-3-sonnet",
    "claude-3-haiku",
    "claude-3-opus@20240229",
    "claude-3-sonnet@20240229",
    "claude-3-haiku@20240307",
]

# Gemini models that are served through GCP (Vertex AI).
SUPPORTED_GEMINI_MODELS = [
    "gemini-1.5-pro-preview-0409",
    "gemini-1.0-pro",
    "gemini-1.0-pro-vision",
]


def get_llm(model_name: str, platform: str, client_settings: ClientSettings) -> AbstractLLM:
    """Resolve (*platform*, *model_name*) to a concrete LLM wrapper.

    Platform SDK modules are imported lazily so only the SDK for the chosen
    platform needs to be installed.

    Raises:
        ValueError: unknown platform, or a GCP model name that is neither a
            supported Claude nor Gemini model.
    """
    # from_string normalizes case/whitespace and raises a descriptive error,
    # consistent with the Platform API (plain Platform(platform) does neither).
    platform_enum = Platform.from_string(platform)
    if platform_enum is Platform.AZURE:
        from neollm.llm.gpt.azure_llm import get_azure_llm

        return get_azure_llm(model_name, client_settings)
    if platform_enum is Platform.OPENAI:
        from neollm.llm.gpt.openai_llm import get_openai_llm

        return get_openai_llm(model_name, client_settings)
    if platform_enum is Platform.ANTHROPIC:
        from neollm.llm.claude.anthropic_llm import get_anthoropic_llm

        return get_anthoropic_llm(model_name, client_settings)
    # Remaining platform is GCP, which hosts both Claude and Gemini models;
    # dispatch on the model name.
    if model_name in SUPPORTED_CLAUDE_MODELS:
        from neollm.llm.claude.gcp_llm import get_gcp_llm as get_gcp_llm_for_claude

        return get_gcp_llm_for_claude(model_name, client_settings)
    if model_name in SUPPORTED_GEMINI_MODELS:
        from neollm.llm.gemini.gcp_llm import get_gcp_llm as get_gcp_llm_for_gemini

        return get_gcp_llm_for_gemini(model_name, client_settings)
    raise ValueError(f"{model_name} is not supported in GCP.")
class AbstractGPT(AbstractLLM):
    """Common GPT plumbing: tiktoken encode/decode and OpenAI-format passthrough."""

    def encode(self, text: str) -> list[int]:
        """Tokenize *text* with the tiktoken encoding for this model."""
        tokenizer = tiktoken.encoding_for_model(self.model or "gpt-3.5-turbo")
        return tokenizer.encode(text)

    def decode(self, encoded: list[int]) -> str:
        """Inverse of encode(): token ids back to text."""
        tokenizer = tiktoken.encoding_for_model(self.model or "gpt-3.5-turbo")
        return tokenizer.decode(encoded)

    def count_tokens(self, messages: list[Message] | None = None, only_response: bool = False) -> int:
        """Estimate the tokens billed for *messages*.

        Args:
            messages: chat messages; None counts as 0.
            only_response: when True, *messages* must be exactly one message
                and the per-message framing overhead is excluded.

        Returns:
            int: estimated token count.

        Raises:
            ValueError: only_response=True with more than one message.
        """
        if messages is None:
            return 0

        total = 0
        for message in messages:
            total += 4  # framing overhead per message
            content = message.get("content", None)
            if isinstance(content, str):
                total += len(self.encode(content))
            elif isinstance(content, list):
                # multimodal content: only text parts are counted
                total += sum(len(self.encode(part["text"])) for part in content if part["type"] == "text")
            # TODO: ChatCompletionFunctionMessageParam.name (tokens_per_name = 1)
            # TODO: ChatCompletionAssistantMessageParam.function_call / tool_calls

        if only_response:
            if len(messages) != 1:
                raise ValueError("only_response=Trueの場合、messagesは1つのみにしてください。")
            total -= 4  # drop the per-message framing
        else:
            total += 3  # every reply is primed with <|start|>assistant<|message|>
        return total

    def _convert_to_response(self, platform_response: OpenAIResponse) -> Response:
        """Re-wrap an OpenAI response as neollm's ChatCompletion."""
        return ChatCompletion(**platform_response.model_dump())

    def _convert_to_platform_messages(self, messages: Messages) -> OpenAIMessages:
        # neollm messages already use the OpenAI schema; no conversion needed.
        platform_messages: OpenAIMessages = messages
        return platform_messages

    def _convert_to_streamresponse(self, platform_streamresponse: OpenAIStreamResponse) -> StreamResponse:
        """Yield each OpenAI stream chunk re-wrapped as ChatCompletionChunk."""
        for chunk in platform_streamresponse:
            yield ChatCompletionChunk(**chunk.model_dump())
suport_unrecomended_env_var(old_key="AZURE_ENGINE_GPT35_0613", new_key="AZURE_ENGINE_GPT35T_0613")
suport_unrecomended_env_var(old_key="AZURE_ENGINE_GPT35_16K_0613", new_key="AZURE_ENGINE_GPT35T_16K_0613")

# Pricing: https://azure.microsoft.com/en-us/pricing/details/cognitive-services/openai-service/

SUPPORTED_MODELS = Literal[
    "gpt-4o-2024-05-13",
    "gpt-4-turbo-2024-04-09",
    "gpt-3.5-turbo-0125",
    "gpt-4-turbo-0125",
    "gpt-3.5-turbo-1106",
    "gpt-4-turbo-1106",
    "gpt-4v-turbo-1106",
    "gpt-3.5-turbo-0613",
    "gpt-3.5-turbo-16k-0613",
    "gpt-4-0613",
    "gpt-4-32k-0613",
]


def get_azure_llm(model_name: SUPPORTED_MODELS | str, client_settings: ClientSettings) -> AbstractLLM:
    """Resolve an Azure model name (dated or dateless) to its LLM wrapper.

    Unknown names fall through to the fine-tuned-model wrapper, where
    *model_name* is taken to be the deployment (engine) name.
    """
    # Azure spelling ("gpt-35-turbo") -> OpenAI spelling
    model_name = model_name.replace("gpt-35-turbo", "gpt-3.5-turbo")

    # Dateless aliases map to a concrete dated model (with a warning).
    dateless_aliases: dict[str, SUPPORTED_MODELS] = {
        "gpt-4o": "gpt-4o-2024-05-13",
        "gpt-3.5-turbo": "gpt-3.5-turbo-0613",
        "gpt-3.5-turbo-16k": "gpt-3.5-turbo-16k-0613",
        "gpt-4": "gpt-4-0613",
        "gpt-4-32k": "gpt-4-32k-0613",
        "gpt-4-turbo": "gpt-4-turbo-1106",
        "gpt-4v-turbo": "gpt-4v-turbo-1106",
    }
    if model_name in dateless_aliases:
        cprint("WARNING: model_nameに日付を指定してください", color="yellow", background=True)
        print(f"model_name: {model_name} -> {dateless_aliases[model_name]}")
        model_name = dateless_aliases[model_name]

    wrappers: dict[SUPPORTED_MODELS, AbstractLLM] = {
        "gpt-4o-2024-05-13": AzureGPT4O_20240513(client_settings),
        "gpt-4-turbo-2024-04-09": AzureGPT4T_20240409(client_settings),
        "gpt-3.5-turbo-0125": AzureGPT35T_0125(client_settings),
        "gpt-4-turbo-0125": AzureGPT4T_0125(client_settings),
        "gpt-3.5-turbo-1106": AzureGPT35T_1106(client_settings),
        "gpt-4-turbo-1106": AzureGPT4T_1106(client_settings),
        "gpt-4v-turbo-1106": AzureGPT4VT_1106(client_settings),
        "gpt-3.5-turbo-0613": AzureGPT35T_0613(client_settings),
        "gpt-3.5-turbo-16k-0613": AzureGPT35T16k_0613(client_settings),
        "gpt-4-0613": AzureGPT4_0613(client_settings),
        "gpt-4-32k-0613": AzureGPT432k_0613(client_settings),
    }
    if model_name in wrappers:
        return wrappers[cast(SUPPORTED_MODELS, model_name)]
    # Anything else is treated as a fine-tuned deployment.
    return AzureGPT35FT(model_name, client_settings)


class AzureLLM(AbstractGPT):
    """Base class for Azure OpenAI models; deployment name comes from an env var."""

    # Env var holding the Azure deployment ("engine") name; set per subclass.
    _engine_name_env_key: str | None = None

    @property
    def client(self) -> AzureOpenAI:
        # AzureOpenAI picks up endpoint/api-key/api-version from env unless
        # overridden via client_settings (api_key, timeout, max_retries, ...).
        azure_client: AzureOpenAI = AzureOpenAI(**self.client_settings)
        return azure_client

    @property
    def engine(self) -> str:
        """Azure deployment name, read from the subclass's env var."""
        return ensure_env_var(self._engine_name_env_key)

    def generate(self, messages: Messages, llm_settings: LLMSettings) -> Response:
        """One non-streaming chat completion call."""
        raw = self.client.chat.completions.create(
            model=self.engine,
            messages=self._convert_to_platform_messages(messages),
            stream=False,
            **llm_settings,
        )
        return self._convert_to_response(raw)

    def generate_stream(self, messages: Messages, llm_settings: LLMSettings) -> StreamResponse:
        """One streaming chat completion call."""
        raw_stream = self.client.chat.completions.create(
            model=self.engine,
            messages=self._convert_to_platform_messages(messages),
            stream=True,
            **llm_settings,
        )
        return self._convert_to_streamresponse(raw_stream)


# omni 2024-05-13 --------------------------------------------------------------------------------------------
class AzureGPT4O_20240513(AzureLLM):
    dollar_per_ktoken = APIPricing(input=0.005, output=0.015)  # 30x/45x vs gpt-3.5-turbo-0125
    model: str = "gpt-4o-2024-05-13"
    _engine_name_env_key: str = "AZURE_ENGINE_GPT4O_20240513"
    context_window: int = 128_000


# 2024-04-09 --------------------------------------------------------------------------------------------
class AzureGPT4T_20240409(AzureLLM):
    dollar_per_ktoken = APIPricing(input=0.01, output=0.03)
    model: str = "gpt-4-turbo-2024-04-09"
    _engine_name_env_key: str = "AZURE_ENGINE_GPT4T_20240409"
    context_window: int = 128_000


# 0125 --------------------------------------------------------------------------------------------
class AzureGPT35T_0125(AzureLLM):
    dollar_per_ktoken = APIPricing(input=0.0005, output=0.0015)
    model: str = "gpt-3.5-turbo-0125"
    _engine_name_env_key: str = "AZURE_ENGINE_GPT35T_0125"
    context_window: int = 16_385


class AzureGPT4T_0125(AzureLLM):
    dollar_per_ktoken = APIPricing(input=0.01, output=0.03)
    model: str = "gpt-4-turbo-0125"
    _engine_name_env_key: str = "AZURE_ENGINE_GPT4T_0125"
    context_window: int = 128_000


# 1106 --------------------------------------------------------------------------------------------
class AzureGPT35T_1106(AzureLLM):
    dollar_per_ktoken = APIPricing(input=0.001, output=0.002)
    model: str = "gpt-3.5-turbo-1106"
    _engine_name_env_key: str = "AZURE_ENGINE_GPT35T_1106"
    context_window: int = 16_385


class AzureGPT4VT_1106(AzureLLM):
    dollar_per_ktoken = APIPricing(input=0.01, output=0.03)  # 10x/15x
    model: str = "gpt-4-1106-vision-preview"
    _engine_name_env_key: str = "AZURE_ENGINE_GPT4VT_1106"
    context_window: int = 128_000


class AzureGPT4T_1106(AzureLLM):
    dollar_per_ktoken = APIPricing(input=0.01, output=0.03)
    model: str = "gpt-4-turbo-1106"
    _engine_name_env_key: str = "AZURE_ENGINE_GPT4T_1106"
    context_window: int = 128_000


# FT --------------------------------------------------------------------------------------------
class AzureGPT35FT(AzureLLM):
    dollar_per_ktoken = APIPricing(input=0.0005, output=0.0015)  # 1x + hosting time
    model: str = "gpt-3.5-turbo-ft"
    context_window: int = 4_096

    def __init__(self, model_name: str, client_setting: ClientSettings) -> None:
        # Fine-tuned deployments use the caller-supplied name directly.
        super().__init__(client_setting)
        self._engine = model_name

    @property
    def engine(self) -> str:
        return self._engine


# 0613 --------------------------------------------------------------------------------------------
class AzureGPT35T_0613(AzureLLM):
    dollar_per_ktoken = APIPricing(input=0.0015, output=0.002)
    model: str = "gpt-3.5-turbo-0613"
    _engine_name_env_key: str = "AZURE_ENGINE_GPT35T_0613"
    context_window: int = 4_096


class AzureGPT35T16k_0613(AzureLLM):
    dollar_per_ktoken = APIPricing(input=0.003, output=0.004)  # 2x
    model: str = "gpt-3.5-turbo-16k-0613"
    _engine_name_env_key: str = "AZURE_ENGINE_GPT35T_16K_0613"
    context_window: int = 16_385


class AzureGPT4_0613(AzureLLM):
    dollar_per_ktoken = APIPricing(input=0.03, output=0.06)  # 20x/30x
    model: str = "gpt-4-0613"
    _engine_name_env_key: str = "AZURE_ENGINE_GPT4_0613"
    context_window: int = 8_192


class AzureGPT432k_0613(AzureLLM):
    dollar_per_ktoken = APIPricing(input=0.06, output=0.12)  # 40x/60x
    model: str = "gpt-4-32k-0613"
    _engine_name_env_key: str = "AZURE_ENGINE_GPT4_32K_0613"
    context_window: int = 32_768
def get_openai_llm(model_name: SUPPORTED_MODELS | str, client_settings: ClientSettings) -> AbstractLLM:
    """Resolve an OpenAI model name (dated, dateless, or fine-tuned) to its wrapper."""
    # Dateless aliases map to a concrete dated model (with a warning).
    dateless_aliases: dict[str, SUPPORTED_MODELS] = {
        "gpt-4o": "gpt-4o-2024-05-13",
        "gpt-3.5-turbo": "gpt-3.5-turbo-0613",
        "gpt-3.5-turbo-16k": "gpt-3.5-turbo-16k-0613",
        "gpt-4": "gpt-4-0613",
        "gpt-4-32k": "gpt-4-32k-0613",
        "gpt-4-turbo": "gpt-4-turbo-1106",
        "gpt-4v-turbo": "gpt-4v-turbo-1106",
    }
    if model_name in dateless_aliases:
        cprint("WARNING: model_nameに日付を指定してください", color="yellow", background=True)
        print(f"model_name: {model_name} -> {dateless_aliases[model_name]}")
        model_name = dateless_aliases[model_name]

    wrappers: dict[SUPPORTED_MODELS, AbstractLLM] = {
        "gpt-4o-2024-05-13": OpenAIGPT4O_20240513(client_settings),
        "gpt-4-turbo-2024-04-09": OpenAIGPT4T_20240409(client_settings),
        "gpt-3.5-turbo-0125": OpenAIGPT35T_0125(client_settings),
        "gpt-4-turbo-0125": OpenAIGPT4T_0125(client_settings),
        "gpt-3.5-turbo-1106": OpenAIGPT35T_1106(client_settings),
        "gpt-4-turbo-1106": OpenAIGPT4T_1106(client_settings),
        "gpt-4v-turbo-1106": OpenAIGPT4VT_1106(client_settings),
        "gpt-3.5-turbo-0613": OpenAIGPT35T_0613(client_settings),
        "gpt-3.5-turbo-16k-0613": OpenAIGPT35T16k_0613(client_settings),
        "gpt-4-0613": OpenAIGPT4_0613(client_settings),
        "gpt-4-32k-0613": OpenAIGPT432k_0613(client_settings),
    }
    # Plain (non-fine-tuned) models
    if model_name in wrappers:
        return wrappers[cast(SUPPORTED_MODELS, model_name)]
    # Fine-tuned models: dispatch on the base model embedded in the FT name.
    if "gpt-3.5-turbo-1106" in model_name:
        return OpenAIGPT35TFT_1106(model_name, client_settings)
    if "gpt-3.5-turbo-0613" in model_name:
        return OpenAIGPT35TFT_0613(model_name, client_settings)
    if "gpt-3.5-turbo-0125" in model_name:
        return OpenAIGPT35TFT_0125(model_name, client_settings)
    if "gpt4" in model_name.replace("-", ""):  # TODO! もっといい条件に修正
        return OpenAIGPT4FT_0613(model_name, client_settings)

    cprint(
        f"WARNING: このFTモデルは何?: {model_name} -> OpenAIGPT35TFT_1106として設定", color="yellow", background=True
    )
    return OpenAIGPT35TFT_1106(model_name, client_settings)


class OpenAILLM(AbstractGPT):
    """Base class for models served by the OpenAI API."""

    model: str

    @property
    def client(self) -> OpenAI:
        # api_key/timeout/max_retries etc. may be overridden via client_settings.
        openai_client: OpenAI = OpenAI(**self.client_settings)
        return openai_client

    def generate(self, messages: Messages, llm_settings: LLMSettings) -> Response:
        """One non-streaming chat completion call."""
        raw = self.client.chat.completions.create(
            model=self.model,
            messages=self._convert_to_platform_messages(messages),
            stream=False,
            **llm_settings,
        )
        return self._convert_to_response(raw)

    def generate_stream(self, messages: Messages, llm_settings: LLMSettings) -> StreamResponse:
        """One streaming chat completion call."""
        raw_stream = self.client.chat.completions.create(
            model=self.model,
            messages=self._convert_to_platform_messages(messages),
            stream=True,
            **llm_settings,
        )
        return self._convert_to_streamresponse(raw_stream)


# omni 2024-05-13 --------------------------------------------------------------------------------------------
class OpenAIGPT4O_20240513(OpenAILLM):
    dollar_per_ktoken = APIPricing(input=0.005, output=0.015)
    model: str = "gpt-4o-2024-05-13"
    context_window: int = 128_000


# 2024-04-09 --------------------------------------------------------------------------------------------
class OpenAIGPT4T_20240409(OpenAILLM):
    dollar_per_ktoken = APIPricing(input=0.01, output=0.03)  # 10x/15x
    model: str = "gpt-4-turbo-2024-04-09"
    context_window: int = 128_000


# 0125 --------------------------------------------------------------------------------------------
class OpenAIGPT35T_0125(OpenAILLM):
    dollar_per_ktoken = APIPricing(input=0.0005, output=0.0015)
    model: str = "gpt-3.5-turbo-0125"
    context_window: int = 16_385


class OpenAIGPT4T_0125(OpenAILLM):
    dollar_per_ktoken = APIPricing(input=0.01, output=0.03)
    model: str = "gpt-4-0125-preview"
    context_window: int = 128_000


class OpenAIGPT35TFT_0125(OpenAILLM):
    dollar_per_ktoken = APIPricing(input=0.003, output=0.006)
    context_window: int = 16_385

    def __init__(self, model_name: str, client_setting: ClientSettings) -> None:
        # Fine-tuned model: the full FT name is the API model id.
        super().__init__(client_setting)
        self.model = model_name


# 1106 --------------------------------------------------------------------------------------------
class OpenAIGPT35T_1106(OpenAILLM):
    dollar_per_ktoken = APIPricing(input=0.0010, output=0.0020)
    model: str = "gpt-3.5-turbo-1106"
    context_window: int = 16_385


class OpenAIGPT4T_1106(OpenAILLM):
    dollar_per_ktoken = APIPricing(input=0.01, output=0.03)
    model: str = "gpt-4-1106-preview"
    context_window: int = 128_000


class OpenAIGPT4VT_1106(OpenAILLM):
    dollar_per_ktoken = APIPricing(input=0.01, output=0.03)
    model: str = "gpt-4-1106-vision-preview"
    context_window: int = 128_000


class OpenAIGPT35TFT_1106(OpenAILLM):
    dollar_per_ktoken = APIPricing(input=0.003, output=0.006)
    context_window: int = 4_096

    def __init__(self, model_name: str, client_setting: ClientSettings) -> None:
        # Fine-tuned model: the full FT name is the API model id.
        super().__init__(client_setting)
        self.model = model_name


# 0613 --------------------------------------------------------------------------------------------
class OpenAIGPT35T_0613(OpenAILLM):
    dollar_per_ktoken = APIPricing(input=0.0015, output=0.002)
    model: str = "gpt-3.5-turbo-0613"
    context_window: int = 4_096


class OpenAIGPT35T16k_0613(OpenAILLM):
    dollar_per_ktoken = APIPricing(input=0.003, output=0.004)
    model: str = "gpt-3.5-turbo-16k-0613"
    context_window: int = 16_385


class OpenAIGPT4_0613(OpenAILLM):
    dollar_per_ktoken = APIPricing(input=0.03, output=0.06)
    model: str = "gpt-4-0613"
    context_window: int = 8_192


class OpenAIGPT432k_0613(OpenAILLM):
    dollar_per_ktoken = APIPricing(input=0.06, output=0.12)
    model: str = "gpt-4-32k-0613"
    context_window: int = 32_768


class OpenAIGPT35TFT_0613(OpenAILLM):
    dollar_per_ktoken = APIPricing(input=0.003, output=0.006)
    context_window: int = 4_096

    def __init__(self, model_name: str, client_setting: ClientSettings) -> None:
        # Fine-tuned model: the full FT name is the API model id.
        super().__init__(client_setting)
        self.model = model_name


class OpenAIGPT4FT_0613(OpenAILLM):
    dollar_per_ktoken = APIPricing(input=0.045, output=0.090)
    context_window: int = 8_192

    def __init__(self, model_name: str, client_setting: ClientSettings) -> None:
        # Fine-tuned model: the full FT name is the API model id.
        super().__init__(client_setting)
        self.model = model_name
def get_tokenizer(model_name: str) -> tiktoken.Encoding:
    """Resolve *model_name* to a tiktoken Encoding, falling back to gpt-3.5-turbo.

    see: https://platform.openai.com/docs/models/gpt-3-5
    """
    MODEL_NAME_MAP = [
        ("gpt-3.5-turbo-16k", "gpt-3.5-turbo-16k-0613"),
        ("gpt-3.5-turbo", "gpt-3.5-turbo-0613"),
        ("gpt-4-32k", "gpt-4-32k-0613"),
        ("gpt-4", "gpt-4-0613"),
    ]
    ALL_VERSION_MODELS = [
        # gpt-3.5-turbo
        "gpt-3.5-turbo-0125",
        "gpt-3.5-turbo-1106",
        "gpt-3.5-turbo-0613",
        "gpt-3.5-turbo-16k-0613",
        "gpt-3.5-turbo-0301",  # Legacy
        # gpt-4
        "gpt-4o-2024-05-13",
        "gpt-4-turbo-0125",
        "gpt-4-turbo-1106",
        "gpt-4-0613",
        "gpt-4-32k-0613",
        "gpt-4-0314",  # Legacy
        "gpt-4-32k-0314",  # Legacy
    ]
    # Unify Azure spelling -> OpenAI spelling
    model_name = model_name.replace("gpt-35", "gpt-3.5")
    # Map dateless / fine-tuned names to a concrete dated model
    if model_name not in ALL_VERSION_MODELS:
        for prefix, dated_name in MODEL_NAME_MAP:
            if prefix in model_name:
                model_name = dated_name
                break
    try:
        return tiktoken.encoding_for_model(model_name)
    except Exception as e:
        cprint(f"WARNING: Tokenizerの取得に失敗。{model_name}: {e}", color="yellow", background=True)
        return tiktoken.encoding_for_model("gpt-3.5-turbo")


@overload
def count_tokens(messages: str, model_name: str | None = None) -> int: ...


@overload
def count_tokens(
    messages: Iterator[dict[str, str]], model_name: str | None = None, functions: Any | None = None
) -> int: ...


def count_tokens(
    messages: Iterator[dict[str, str]] | str,
    model_name: str | None = None,
    functions: Any | None = None,
) -> int:
    """Count tokens for either a raw string or a messages list (plus functions)."""
    if isinstance(messages, str):
        tokenizer = get_tokenizer(model_name or DEFAULT_MODEL_NAME)
        return len(tokenizer.encode(messages))
    return _count_messages_and_function_tokens(messages, model_name, functions)


def _count_messages_and_function_tokens(
    messages: Iterator[dict[str, str]], model_name: str | None = None, functions: Any | None = None
) -> int:
    """Token count for messages plus an optional functions= definition block.

    Args:
        messages: chat messages in OpenAI format.
        model_name: model name; defaults to DEFAULT_MODEL_NAME.
        functions: functions definitions, or None.

    Returns:
        int: total token count.
    """
    num_tokens = _count_messages_tokens(messages, model_name or DEFAULT_MODEL_NAME)
    if functions is not None:
        num_tokens += _count_functions_tokens(functions, model_name)
    return num_tokens


# https://github.com/openai/openai-cookbook/blob/main/examples/How_to_count_tokens_with_tiktoken.ipynb
def _count_messages_tokens(messages: Iterator[dict[str, str]] | None, model_name: str) -> int:
    """Token count for a messages list, per the OpenAI cookbook recipe.

    Args:
        messages: chat messages; None counts as 0.
        model_name: model name used to pick the encoding and per-message overhead.

    Returns:
        int: total token count.
    """
    if messages is None:
        return 0
    encoding_model = get_tokenizer(model_name)

    # Per-message framing overhead differs for the legacy 0301 model.
    if "gpt-3.5-turbo-0301" in model_name:
        tokens_per_message = 4  # every message follows <|start|>{role/name}\n{content}<|end|>\n
        tokens_per_name = -1  # if there's a name, the role is omitted
    else:
        tokens_per_message = 3
        tokens_per_name = 1

    num_tokens = 3  # every reply is primed with <|start|>assistant<|message|>
    for message in messages:
        num_tokens += tokens_per_message
        for key, value in message.items():
            if isinstance(value, str):
                num_tokens += len(encoding_model.encode(value))
                if key == "name":
                    num_tokens += tokens_per_name
    return num_tokens


# https://gist.github.com/CGamesPlay/dd4f108f27e2eec145eedf5c717318f5
def _count_functions_tokens(functions: Any, model_name: str | None = None) -> int:
    """Token count contributed by a functions= definition block.

    Args:
        functions: functions definitions in OpenAI format.
        model_name: model name; defaults to DEFAULT_MODEL_NAME.

    Returns:
        int: token count.
    """
    # fix: was "encoding_model = encoding_model = get_tokenizer(...)" (duplicated assignment)
    encoding_model = get_tokenizer(model_name or DEFAULT_MODEL_NAME)
    return 3 + len(encoding_model.encode(__functions2string(functions)))


# functions -> string helpers ---------------------------------------------------------------------------
def __functions2string(functions: Any) -> str:
    """Render *functions* as the TypeScript-ish text the API is believed to use.

    NOTE(review): the namespace block in `prefix` is closed before the function
    signatures are appended — possibly intentional to mimic billing, possibly a
    bug; changing it would change token counts, so it is kept as-is.
    """
    prefix = "# Tools\n\n## functions\n\nnamespace functions {\n\n} // namespace functions\n"
    return prefix + "".join(__function2string(function) for function in functions)


def __function2string(function: Function) -> str:
    """Render a single function definition as a TypeScript-ish signature."""
    object_string = __format_object(function["parameters"])
    if object_string is not None:
        object_string = "_: " + object_string
    else:
        object_string = ""
    return f"// {function['description']}\ntype {function['name']} = (" + object_string + ") => any;\n\n"


def __format_object(schema: dict[str, Any], indent: int = 0) -> str | None:
    """Render an object schema; None when it has no renderable properties."""
    if "properties" not in schema or len(schema["properties"]) == 0:
        if schema.get("additionalProperties", False):
            return "object"
        return None

    result = "{\n"
    for key, value in dict(schema["properties"]).items():
        value_rendered = __format_schema(value, indent + 1)
        if value_rendered is None:
            continue
        # fix: a property without "description" previously left `description`
        # undefined (NameError) or stale from the previous iteration
        if "description" in value:
            description = "".join(
                " " * indent + f"// {description_i}\n"
                for description_i in textwrap.dedent(value["description"]).strip().split("\n")
            )
        else:
            description = ""
        # "?" marks properties not listed in "required"
        optional = "" if key in schema.get("required", {}) else "?"
        default_comment = "" if "default" not in value else f" // default: {__format_default(value)}"
        result += description + " " * indent + f"{key}{optional}: {value_rendered},{default_comment}\n"
    result += (" " * (indent - 1)) + "}"
    return result


def __format_schema(schema: dict[str, Any], indent: int) -> str | None:
    """Render one schema node (enum/object/number/string/array)."""
    if "enum" in schema:
        return __format_enum(schema)
    elif schema["type"] == "object":
        return __format_object(schema, indent)
    elif schema["type"] in {"integer", "number"}:
        return "number"
    elif schema["type"] in {"string"}:
        return "string"
    elif schema["type"] == "array":
        return str(__format_schema(schema["items"], indent)) + "[]"
    else:
        raise ValueError("unknown schema type " + schema["type"])


def __format_enum(schema: dict[str, Any]) -> str:
    # e.g. "A" | "B" | "C"
    return " | ".join(json.dumps(element, ensure_ascii=False) for element in schema["enum"])


def __format_default(schema: dict[str, Any]) -> str:
    """Render a default value; integral floats are shown as 0.0-style."""
    default = schema["default"]
    if schema["type"] == "number" and float(default).is_integer():
        return f"{default:.1f}"
    else:
        return str(default)
from typing import Any, TypeVar

from neollm.utils.utils import cprint

Immutable = tuple[Any, ...] | str | int | float | bool
_T = TypeVar("_T")
_TD = TypeVar("_TD")


def _to_immutable(x: Any) -> Immutable:
    """Recursively convert *x* into a hashable value so it can live in a set.

    Lists, dicts and sets become (sorted) tuples; scalars pass through;
    anything else is coerced to str with a warning.
    """
    if isinstance(x, list):
        return tuple(_to_immutable(item) for item in x)
    if isinstance(x, dict):
        return tuple((key, _to_immutable(value)) for key, value in sorted(x.items()))
    if isinstance(x, (set, frozenset)):
        return tuple(sorted(_to_immutable(item) for item in x))
    if isinstance(x, (str, int, float, bool)):
        return x
    cprint("_to_immutable: not supported: 無理やりstr(*)", color="yellow", background=True)
    return str(x)


def _remove_duplicate(arr: list[_T | None]) -> list[_T]:
    """Drop None/falsy entries from *arr* and deduplicate, preserving order.

    Returns:
        list: the filtered, order-preserving unique elements.
    """
    seen: set[Immutable] = set()
    unique: list[_T] = []
    for item in arr:
        if not item:  # None and falsy "default" values are discarded
            continue
        key = _to_immutable(item)
        if key in seen:
            continue
        seen.add(key)
        unique.append(item)
    return unique


def get_entity(arr: list[_T | None], default: _TD, index: int | None = None) -> _T | _TD:
    """Pick the single meaningful element of *arr*.

    Falsy entries and duplicates are removed first. *default* is returned when
    nothing remains. When several distinct values remain, *index* selects one;
    with index=None the first is returned after a warning.
    """
    candidates = _remove_duplicate(arr)
    if len(candidates) == 0:
        return default
    if len(candidates) == 1:
        return candidates[0]
    if index is not None:
        return candidates[index]
    cprint("get_entity: not unique", color="yellow", background=True)
    cprint(candidates, color="yellow", background=True)
    return candidates[0]
Generator[StreamOutputType, None, OutputType]: + """MyLLMの子クラスのメインロジック + + streamとnon-streamの両方のコードを書く必要がある + + Args: + inputs (InputType): LLMへの入力 + stream (bool, optional): streamの有無. Defaults to False. + + Yields: + Generator[StreamOutputType, None, OutputType]: LLMのstream出力 + + Returns: + OutputType: LLMの出力 + """ + + def __call__(self, inputs: InputType) -> OutputType: + """MyLLMのメインロジック + + Args: + inputs (InputType): LLMへの入力 + + Returns: + OutputType: LLMの出力 + """ + it: Generator[StreamOutputType, None, OutputType] = self._call(inputs, stream=self.do_stream) + while True: + try: + next(it) + except StopIteration as e: + outputs = cast(OutputType, e.value) + return outputs + except Exception as e: + raise e + + def call_stream(self, inputs: InputType) -> Generator[StreamOutputType, None, OutputType]: + """MyLLMのメインロジック(stream処理) + + Args: + inputs (InputType): LLMへの入力 + + Yields: + Generator[StreamOutputType, None, OutputType]: LLMのstream出力 + + Returns: + LLMの出力 + """ + it: Generator[StreamOutputType, None, OutputType] = self._call(inputs, stream=True) + while True: + try: + delta_content = next(it) + yield delta_content + except StopIteration as e: + outputs = cast(OutputType, e.value) + return outputs + except Exception as e: + raise e + + def _print_inputs(self) -> None: + if self.inputs is None: + return + if not ("inputs" not in self.silent_set and self.verbose): + return + print_inputs(self.inputs) + + def _print_outputs(self) -> None: + if self.outputs is None: + return + if not ("outputs" not in self.silent_set and self.verbose): + return + print_outputs(self.outputs) + + def _print_metadata(self) -> None: + if not ("metadata" not in self.silent_set and self.verbose): + return + print_metadata(self.time, self.token, self.price) + + def _print_start(self, sep: str = "-") -> None: + if not self.verbose: + return + if self.parent is None: + cprint("PARENT", color="red", background=True) + print(self, sep * (99 - len(str(self)))) + + def _print_end(self, 
sep: str = "-") -> None: + if not self.verbose: + return + print(sep * 100) diff --git a/neollm/myllm/myl3m2.py b/neollm/myllm/myl3m2.py new file mode 100644 index 0000000000000000000000000000000000000000..a84f69c450ae68354f7c768af99f608f5d93b171 --- /dev/null +++ b/neollm/myllm/myl3m2.py @@ -0,0 +1,165 @@ +from __future__ import annotations + +import time +from typing import Any, Generator, Literal, Optional, cast + +from neollm.myllm.abstract_myllm import AbstractMyLLM +from neollm.myllm.myllm import MyLLM +from neollm.myllm.print_utils import TITLE_COLOR +from neollm.types import ( + InputType, + OutputType, + PriceInfo, + StreamOutputType, + TimeInfo, + TokenInfo, +) +from neollm.utils.utils import cprint + + +class MyL3M2(AbstractMyLLM[InputType, OutputType]): + """LLMの複数リクエストをまとめるクラス""" + + do_stream: bool = False # stream_verboseがないため、__call__ではstreamを使わない + + def __init__( + self, + parent: Optional["MyL3M2[Any, Any]"] = None, + verbose: bool = False, + silent_list: list[Literal["inputs", "outputs", "metadata", "all_myllm"]] | None = None, + ) -> None: + """ + MyL3M2の初期化 + + Args: + parent (MyL3M2, optional): + 親のMyL3M2のインスタンス(self or None) + verbose (bool, optional): + 出力をするかどうかのフラグ. Defaults to False. + sileznt_list (list[Literal["inputs", "outputs", "metadata", "all_myllm"]], optional): + サイレントモードのリスト。出力を抑制する要素を指定する。. Defaults to None(=[]). 
+ """ + self.parent = parent + self.verbose = verbose + self.silent_set = set(silent_list or []) + self.myllm_list: list["MyL3M2[Any, Any]" | MyLLM[Any, Any]] = [] + self.inputs: InputType | None = None + self.outputs: OutputType | None = None + self.called: bool = False + + def _link(self, inputs: InputType) -> OutputType: + """複数のLLMの処理を行う + + Args: + inputs (InputType): 入力データを保持する辞書 + + Returns: + OutputType: 処理結果の出力データ + """ + raise NotImplementedError("_link(self, inputs: InputType) -> OutputType:を実装してください") + + def _stream_link(self, inputs: InputType) -> Generator[StreamOutputType, None, OutputType]: + """複数のLLMの処理を行う(stream処理) + + Args: + inputs (InputType): 入力データを保持する辞書 + + Yields: + Generator[StreamOutputType, None, OutputType]: 処理結果の出力データ(stream) + + Returns: + self.outputsに入れたいもの + """ + raise NotImplementedError( + "_stream_link(self, inputs: InputType) -> Generator[StreamOutputType, None, None]を実装してください" + ) + + def _call(self, inputs: InputType, stream: bool = False) -> Generator[StreamOutputType, None, OutputType]: + if self.called: + raise RuntimeError("MyLLMは1回しか呼び出せない") + + self._print_start(sep="=") + + # main ----------------------------------------------------------- + t_start = time.time() + self.inputs = inputs + # [stream] + if stream: + it = self._stream_link(inputs) + while True: + try: + yield next(it) + except StopIteration as e: + self.outputs = cast(OutputType, e.value) + break + except Exception as e: + raise e + # [non-stream] + else: + self.outputs = self._link(inputs) + self._print_inputs() + self._print_outputs() + self._print_all_myllm() + self.time = time.time() - t_start + self.time_detail = TimeInfo(total=self.time, main=self.time) + + # metadata ----------------------------------------------------------- + self._print_metadata() + self._print_end(sep="=") + + # 親MyL3M2にAppend ----------------------------------------------------------- + if self.parent is not None: + self.parent.myllm_list.append(self) + self.called = True + 
+ return self.outputs + + @property + def token(self) -> TokenInfo: + token = TokenInfo(input=0, output=0, total=0) + for myllm in self.myllm_list: + # TODO: token += myllm.token + token.input += myllm.token.input + token.output += myllm.token.output + token.total += myllm.token.total + return token + + @property + def price(self) -> PriceInfo: + price = PriceInfo(input=0.0, output=0.0, total=0.0) + for myllm in self.myllm_list: + # TODO: price += myllm.price + price.input += myllm.price.input + price.output += myllm.price.output + price.total += myllm.price.total + return price + + @property + def logs(self) -> list[Any]: + logs: list[Any] = [] + for myllm in self.myllm_list: + if isinstance(myllm, MyLLM): + logs.append(myllm.log) + elif isinstance(myllm, MyL3M2): + logs.extend(myllm.logs) + return logs + + def _print_all_myllm(self, prefix: str = "", title: bool = True) -> None: + if not ("all_myllm" not in self.silent_set and self.verbose): + return + try: + if title: + cprint("[all_myllm]", color=TITLE_COLOR) + print(" ", end="") + cprint(f"{self}", color="magenta", bold=True, underline=True) + for myllm in self.myllm_list: + if isinstance(myllm, MyLLM): + cprint(f" {prefix}- {myllm}", color="cyan") + elif isinstance(myllm, MyL3M2): + cprint(f" {prefix}- {myllm}", color="magenta") + myllm._print_all_myllm(prefix=prefix + " ", title=False) + except Exception as e: + cprint(e, color="red", background=True) + + def __repr__(self) -> str: + return f"MyL3M2({self.__class__.__name__})" diff --git a/neollm/myllm/myllm.py b/neollm/myllm/myllm.py new file mode 100644 index 0000000000000000000000000000000000000000..ab0178f4f20f77e487cdcff88ee322edc960564d --- /dev/null +++ b/neollm/myllm/myllm.py @@ -0,0 +1,449 @@ +import os +import time +from abc import abstractmethod +from typing import TYPE_CHECKING, Any, Final, Generator, Literal, Optional + +from neollm.exceptions import ContentFilterError +from neollm.llm import AbstractLLM, get_llm +from neollm.llm.gpt.azure_llm 
import AzureLLM +from neollm.myllm.abstract_myllm import AbstractMyLLM +from neollm.myllm.print_utils import ( + print_client_settings, + print_delta, + print_llm_settings, + print_messages, +) +from neollm.types import ( + Chunk, + ClientSettings, + Functions, + InputType, + LLMSettings, + Message, + Messages, + OutputType, + PriceInfo, + Response, + StreamOutputType, + TimeInfo, + TokenInfo, + Tools, +) +from neollm.types.openai.chat_completion import CompletionUsageForCustomPriceCalculation +from neollm.utils.preprocess import dict2json +from neollm.utils.utils import cprint + +if TYPE_CHECKING: + from neollm.myllm.myl3m2 import MyL3M2 + + _MyL3M2 = MyL3M2[Any, Any] + _State = dict[Any, Any] + +DEFAULT_LLM_SETTINGS: LLMSettings = {"temperature": 0} +DEFAULT_PLATFORM: Final[str] = "azure" + + +class MyLLM(AbstractMyLLM[InputType, OutputType]): + """LLMの単一リクエストをまとめるクラス""" + + def __init__( + self, + model: str, + parent: Optional["_MyL3M2"] = None, + llm_settings: LLMSettings | None = None, + client_settings: ClientSettings | None = None, + platform: str | None = None, + verbose: bool = False, + stream_verbose: bool = False, + silent_list: list[Literal["llm_settings", "inputs", "outputs", "messages", "metadata"]] | None = None, + log_dir: str | None = None, + ) -> None: + """ + MyLLMクラスの初期化 + + Args: + model (Optional[str]): LLMモデル名 + parent (Optional[MyL3M2]): 親のMyL3M2のインスタンス (self or None) + llm_settings (LLMSettings): LLMの設定パラメータ + client_settings (ClientSettings): llmのclientの設定パラメータ + platform (Optional[str]): LLMのプラットフォーム名 (デフォルト: os.environ["PLATFORM"] or "azure") + (enum: openai, azure) + verbose (bool): 出力をするかどうかのフラグ + stream_verbose (bool): assitantをstreamで出力するか(verbose=False, message in "messages"の時、無効) + silent_list (list[Literal["llm_settings", "inputs", "outputs", "messages", "metadata"]]): + verbose=True時, 出力を抑制する要素のリスト + log_dir (Optional[str]): ログを保存するディレクトリのパス Noneの時、保存しない + """ + self.parent: _MyL3M2 | None = parent + self.llm_settings = 
llm_settings or DEFAULT_LLM_SETTINGS + self.client_settings = client_settings or {} + self.model: str = model + self.platform: str = platform or os.environ.get("LLM_PLATFORM", DEFAULT_PLATFORM) or DEFAULT_PLATFORM + self.verbose: bool = verbose & (True if self.parent is None else self.parent.verbose) # 親に合わせる + self.silent_set = set(silent_list or []) + self.stream_verbose: bool = stream_verbose if verbose and ("messages" not in self.silent_set) else False + self.log_dir: str | None = log_dir + + self.inputs: InputType | None = None + self.outputs: OutputType | None = None + self.messages: Messages | None = None + self.functions: Functions | None = None + self.tools: Tools | None = None + self.response: Response | None = None + self.called: bool = False + self.do_stream: bool = self.stream_verbose + + self.llm: AbstractLLM = get_llm( + model_name=self.model, platform=self.platform, client_settings=self.client_settings + ) + + @abstractmethod + def _preprocess(self, inputs: InputType) -> Messages: + """ + inputs を API入力 の messages に前処理する + + Args: + inputs (InputType): 入力 + + Returns: + Messages: API入力 の messages + >>> [{"role": "system", "content": "system_prompt"}, {"role": "user", "content": "user_prompt"}] + """ + + @abstractmethod + def _postprocess(self, response: Response) -> OutputType: + """ + API の response を outputs に後処理する + + Args: + response (Response): API の response + >>> {"choices": [{"message": {"role": "assistant", + >>> "content": "This is a test!"}}]} + >>> {"choices": [{"message": {"role": "assistant", + >>> "function_call": {"name": "func", "arguments": "{a: 1}"}}]} + + Returns: + OutputType: 出力 + """ + + def _ruleprocess(self, inputs: InputType) -> OutputType | None: + """ + ルールベース処理 or APIリクエスト の判断 + + Args: + inputs (InputType): MyLLMの入力 + + Returns: + RuleOutputs: + ルールベース処理の時、MyLLMの出力を返す + APIリクエストの時、Noneを返す + """ + return None + + def _update_settings(self) -> None: + """ + APIの設定の更新 + Note: + messageのトークン数 + >>> 
self.llm.count_tokens(self.messsage) + + モデル変更 + >>> self.model = "gpt-3.5-turbo-16k" + + パラメータ変更 + >>> self.llm_settings = {"temperature": 0.2} + """ + return None + + def _add_tools(self, inputs: InputType) -> Tools | None: + return None + + def _add_functions(self, inputs: InputType) -> Functions | None: + """ + functions の追加 + + Args: + inputs (InputType): 入力 + + Returns: + Functions | None: functions。追加しない場合None + https://json-schema.org/understanding-json-schema/reference/index.html + >>> { + >>> "name": "関数名", + >>> "description": "関数の動作の説明。GPTは説明を見て利用するか選ぶ", + >>> "parameters": { + >>> "type": "object", "properties": {"city_name": {"type": "string", "description": "都市名"}}, + >>> json-schema[https://json-schema.org/understanding-json-schema/reference/index.html] + >>> } + >>> } + """ + return None + + def _stream_postprocess( + self, + new_chunk: Chunk, + state: "_State", + ) -> StreamOutputType: + """call_streamのGeneratorのpostprocess + + Args: + new_chunk (OpenAIChunkResponse): 新しいchunk + state (dict[Any, Any]): 状態を持てるdict. 初めは、default {}. 
状態が消えてしまうのでoverwriteしない。 + + Returns: + StreamOutputType: 一時的なoutput + """ + if len(new_chunk.choices) == 0: + return "" + return new_chunk.choices[0].delta.content + + def _generate(self, stream: bool) -> Generator[StreamOutputType, None, None]: + """ + LLMの出力を得て、`self.response`に格納する + + Args: + messages (list[dict[str, str]]): LLMの入力メッセージ + """ + # 例外処理 ----------------------------------------------------------- + if self.messages is None: + raise ValueError("MessagesがNoneです。") + + # kwargs ----------------------------------------------------------- + generate_kwargs = dict(**self.llm_settings) + if self.functions is not None: + generate_kwargs["functions"] = self.functions + if self.functions is not None: + generate_kwargs["tools"] = self.tools + + # generate ---------------------------------------------------------- + self._print_messages() # verbose + self.llm = get_llm(model_name=self.model, platform=self.platform, client_settings=self.client_settings) + # [stream] + if stream or self.stream_verbose: + it = self.llm.generate_stream(messages=self.messages, llm_settings=generate_kwargs) + chunk_list: list[Chunk] = [] + state: "_State" = {} + for chunk in it: + chunk_list.append(chunk) + self._print_delta(chunk=chunk) # verbose: stop→改行、conent, TODO: fc→出力 + yield self._stream_postprocess(new_chunk=chunk, state=state) + self.response = self.llm.convert_nonstream_response(chunk_list, self.messages, self.functions) + # [non-stream] + else: + try: + self.response = self.llm.generate(messages=self.messages, llm_settings=generate_kwargs) + self._print_message_assistant() + except Exception as e: + raise e + + # ContentFilterError ------------------------------------------------- + if len(self.response.choices) == 0: + cprint(self.response, color="red", background=True) + raise ContentFilterError("入力のコンテンツフィルターに引っかかりました。") + if self.response.choices[0].finish_reason == "content_filter": + cprint(self.response, color="red", background=True) + raise 
ContentFilterError("出力のコンテンツフィルターに引っかかりました。") + + def _call(self, inputs: InputType, stream: bool = False) -> Generator[StreamOutputType, None, OutputType]: + """ + LLMの処理を行う (preprocess, check_input, generate, postprocess) + + Args: + inputs (InputType): 入力データを保持する辞書 + + Returns: + OutputType: 処理結果の出力データ + + Raises: + RuntimeError: 既に呼び出されている場合に発生 + """ + if self.called: + raise RuntimeError("MyLLMは1回しか呼び出せない") + + self._print_start(sep="-") + + # main ----------------------------------------------------------- + t_start = time.time() + self.inputs = inputs + self._print_inputs() + rulebase_output = self._ruleprocess(inputs) + if rulebase_output is None: # API リクエストを送る場合 + self._update_settings() + self.messages = self._preprocess(inputs) + self.functions = self._add_functions(inputs) + self.tools = self._add_tools(inputs) + t_preprocessed = time.time() + # [generate] + it = self._generate(stream=stream) + for delta_content in it: # stream=Falseの時、空のGenerator + yield delta_content + if self.response is None: + raise ValueError("responseがNoneです。") + t_generated = time.time() + # [postprocess] + self.outputs = self._postprocess(self.response) + t_postprocessed = time.time() + else: # ルールベースの場合 + self.outputs = rulebase_output + t_preprocessed = t_generated = t_postprocessed = time.time() + self.time_detail = TimeInfo( + total=t_postprocessed - t_start, + preprocess=t_preprocessed - t_start, + main=t_generated - t_preprocessed, + postprocess=t_postprocessed - t_generated, + ) + self.time = t_postprocessed - t_start + + # print ----------------------------------------------------------- + self._print_outputs() + self._print_client_settings() + self._print_llm_settings() + self._print_metadata() + self._print_end(sep="-") + + # 親MyL3M2にAppend ----------------------------------------------------------- + if self.parent is not None: + self.parent.myllm_list.append(self) + self.called = True + + # log ----------------------------------------------------------- + 
self._save_log() + + return self.outputs + + @property + def log(self) -> dict[str, Any]: + return { + "inputs": self.inputs, + "outputs": self.outputs, + "resposnse": self.response.model_dump() if self.response is not None else None, + "input_token": self.token.input, + "output_token": self.token.output, + "total_token": self.token.total, + "input_price": self.price.input, + "output_price": self.price.output, + "total_price": self.price.total, + "time": self.time, + "time_stamp": time.time(), + "llm_settings": self.llm_settings, + "client_settings": self.client_settings, + "model": self.model, + "platform": self.platform, + "verbose": self.verbose, + "messages": self.messages, + "assistant_message": self.assistant_message, + "functions": self.functions, + "tools": self.tools, + } + + def _save_log(self) -> None: + if self.log_dir is None: + return + try: + log = self.log + json_string = dict2json(log) + + save_log_path = os.path.join(self.log_dir, f"{log['time_stamp']}.json") + os.makedirs(self.log_dir, exist_ok=True) + with open(save_log_path, mode="w") as f: + f.write(json_string) + except Exception as e: + cprint(e, color="red", background=True) + + @property + def token(self) -> TokenInfo: + if self.response is None or self.response.usage is None: + return TokenInfo(input=0, output=0, total=0) + return TokenInfo( + input=self.response.usage.prompt_tokens, + output=self.response.usage.completion_tokens, + total=self.response.usage.total_tokens, + ) + + @property + def custom_token(self) -> TokenInfo | None: + if not self.llm._custom_price_calculation: + return None + if self.response is None: + return TokenInfo(input=0, output=0, total=0) + usage_for_price = getattr(self.response, "usage_for_price", None) + if not isinstance(usage_for_price, CompletionUsageForCustomPriceCalculation): + cprint("usage_for_priceがNoneです。正しくトークン計算できません", color="red", background=True) + return TokenInfo(input=0, output=0, total=0) + return TokenInfo( + 
input=usage_for_price.prompt_tokens, + output=usage_for_price.completion_tokens, + total=usage_for_price.total_tokens, + ) + + @property + def price(self) -> PriceInfo: + if self.response is None: + return PriceInfo(input=0.0, output=0.0, total=0.0) + if self.llm._custom_price_calculation: + # Geniniの時は必ずcustom_tokenがある想定 + if self.custom_token is None: + cprint("custom_tokenがNoneです。正しくトークン計算できません", color="red", background=True) + else: + return PriceInfo( + input=self.llm.calculate_price(num_input_tokens=self.custom_token.input), + output=self.llm.calculate_price(num_output_tokens=self.custom_token.output), + total=self.llm.calculate_price( + num_input_tokens=self.custom_token.input, num_output_tokens=self.custom_token.output + ), + ) + return PriceInfo( + input=self.llm.calculate_price(num_input_tokens=self.token.input), + output=self.llm.calculate_price(num_output_tokens=self.token.output), + total=self.llm.calculate_price(num_input_tokens=self.token.input, num_output_tokens=self.token.output), + ) + + @property + def assistant_message(self) -> Message | None: + if self.response is None or len(self.response.choices) == 0: + return None + return self.response.choices[0].message.to_typeddict_message() + + @property + def chat_history(self) -> Messages: + chat_history: Messages = [] + if self.messages: + chat_history += self.messages + if self.assistant_message is not None: + chat_history.append(self.assistant_message) + return chat_history + + def _print_llm_settings(self) -> None: + if not ("llm_settings" not in self.silent_set and self.verbose): + return + print_llm_settings( + llm_settings=self.llm_settings, + model=self.model, + platform=self.platform, + engine=self.llm.engine if isinstance(self.llm, AzureLLM) else None, + ) + + def _print_messages(self) -> None: + if not ("messages" not in self.silent_set and self.verbose): + return + print_messages(self.messages, title=True) + + def _print_message_assistant(self) -> None: + if self.response is None or 
len(self.response.choices) == 0: + return + if not ("messages" not in self.silent_set and self.verbose): + return + print_messages(messages=[self.response.choices[0].message], title=False) + + def _print_delta(self, chunk: Chunk) -> None: + if not ("messages" not in self.silent_set and self.verbose): + return + print_delta(chunk) + + def _print_client_settings(self) -> None: + if not ("client_settings" not in self.silent_set and self.verbose): + return + print_client_settings(self.llm.client_settings) + + def __repr__(self) -> str: + return f"MyLLM({self.__class__.__name__})" diff --git a/neollm/myllm/print_utils.py b/neollm/myllm/print_utils.py new file mode 100644 index 0000000000000000000000000000000000000000..c19e6054886a83c07389ffe9c5231621e43d2d04 --- /dev/null +++ b/neollm/myllm/print_utils.py @@ -0,0 +1,235 @@ +import json +from typing import Any + +from openai.types.chat import ChatCompletionAssistantMessageParam +from openai.types.chat.chat_completion_assistant_message_param import FunctionCall +from openai.types.chat.chat_completion_message_tool_call_param import ( + ChatCompletionMessageToolCallParam, + Function, +) + +from neollm.types import ( + ChatCompletionMessage, + Chunk, + ClientSettings, + InputType, + LLMSettings, + Message, + Messages, + OutputType, + PriceInfo, + PrintColor, + Role, + TokenInfo, +) +from neollm.utils.postprocess import json2dict +from neollm.utils.utils import CPrintParam, cprint + +TITLE_COLOR: PrintColor = "blue" +YEN_PAR_DOLLAR: float = 140.0 # 150円になってしまったぴえん(231027) + + +def _ChatCompletionMessage2dict(message: ChatCompletionMessage) -> Message: + message_dict = ChatCompletionAssistantMessageParam(content=message.content, role=message.role) + if message.function_call is not None: + message_dict["function_call"] = FunctionCall( + arguments=message.function_call.arguments, name=message.function_call.name + ) + if message.tool_calls is not None: + message_dict["tool_calls"] = [ + ChatCompletionMessageToolCallParam( + 
id=tool_call.id, + function=Function(arguments=tool_call.function.arguments, name=tool_call.function.name), + type=tool_call.type, + ) + for tool_call in message.tool_calls + ] + return message_dict + + +def _get_tool_calls(message_dict: Message) -> list[ChatCompletionMessageToolCallParam]: + tool_calls: list[ChatCompletionMessageToolCallParam] = [] + if "tool_calls" in message_dict: + _tool_calls = message_dict.get("tool_calls", None) + if _tool_calls is not None and isinstance(_tool_calls, list): # isinstance(_tool_calls, list)ないと通らん,,, + for _tool_call in _tool_calls: + tool_call = ChatCompletionMessageToolCallParam( + id=_tool_call["id"], + function=Function( + arguments=_tool_call["function"]["arguments"], + name=_tool_call["function"]["name"], + ), + type=_tool_call["type"], + ) + tool_calls.append(tool_call) + if "function_call" in message_dict: + function_call = message_dict.get("function_call", None) + if function_call is not None and isinstance( + function_call, dict + ): # isinstance(function_call, dict)ないと通らん,,, + tool_calls.append( + ChatCompletionMessageToolCallParam( + id="", + function=Function( + arguments=function_call["arguments"], + name=function_call["name"], + ), + type="function", + ) + ) + return tool_calls + + +def print_metadata(time: float, token: TokenInfo, price: PriceInfo) -> None: + try: + cprint("[metadata]", color=TITLE_COLOR, kwargs={"end": " "}) + print( + f"{time:.1f}s; " + f"{token.total:,}({token.input:,}+{token.output:,})tokens; " + f"${price.total:.2g}; ¥{price.total*YEN_PAR_DOLLAR:.2g}" + ) + except Exception as e: + cprint(e, color="red", background=True) + + +def print_inputs(inputs: InputType) -> None: + try: + cprint("[inputs]", color=TITLE_COLOR) + print(json.dumps(_arange_dumpable_object(inputs), indent=2, ensure_ascii=False)) + except Exception as e: + cprint(e, color="red", background=True) + + +def print_outputs(outputs: OutputType) -> None: + try: + cprint("[outputs]", color=TITLE_COLOR) + 
print(json.dumps(_arange_dumpable_object(outputs), indent=2, ensure_ascii=False)) + except Exception as e: + cprint(e, color="red", background=True) + + +def print_messages(messages: list[ChatCompletionMessage] | Messages | None, title: bool = True) -> None: + if messages is None: + cprint("Not yet running _preprocess", color="red") + return + # try: + if title: + cprint("[messages]", color=TITLE_COLOR) + role2prarams: dict[Role, CPrintParam] = { + "system": {"color": "green"}, + "user": {"color": "green"}, + "assistant": {"color": "green"}, + "function": {"color": "green", "background": True}, + "tool": {"color": "green", "background": True}, + } + for message in messages: + message_dict: Message + if isinstance(message, ChatCompletionMessage): + message_dict = _ChatCompletionMessage2dict(message) + else: + message_dict = message + + # roleの出力 ---------------------------------------- + print(" ", end="") + role = message_dict["role"] + cprint(role, **role2prarams[role]) + + # contentの出力 ---------------------------------------- + content = message_dict.get("content", None) + if isinstance(content, str): + print(" " + content.replace("\n", "\n ")) + elif isinstance(content, list): + for content_part in content: + if content_part["type"] == "text": + print(" " + content_part["text"].replace("\n", "\n ")) + elif content_part["type"] == "image_url": + cprint(" ", color="green", kwargs={"end": " "}) + print(content_part["image_url"]) + # TODO: 画像出力 + # TODO: Preview用、content_part["image"]: str, dict両方いけてしまう + else: + # TODO: 未対応のcontentの出力 + pass + + # tool_callの出力 ---------------------------------------- + for tool_call in _get_tool_calls(message_dict): + print(" ", end="") + cprint(tool_call["function"]["name"], color="green", background=True) + print(" " + str(json2dict(tool_call["function"]["arguments"], error_key=None)).replace("\n", "\n ")) + + # except Exception as e: + # cprint(e, color="red", background=True) + + +def print_delta(chunk: Chunk) -> None: + if 
len(chunk.choices) == 0: + return + choice = chunk.choices[0] # TODO: n>2の対応 + if choice.delta.role is not None: + print(" ", end="") + cprint(choice.delta.role, color="green") + print(" ", end="") + if choice.delta.content is not None: + print(choice.delta.content.replace("\n", "\n "), end="") + if choice.delta.function_call is not None: + if choice.delta.function_call.name is not None: + cprint(choice.delta.function_call.name, color="green", background=True) + print(" ", end="") + if choice.delta.function_call.arguments is not None: + print(choice.delta.function_call.arguments.replace("\n", "\n "), end="") + if choice.delta.tool_calls is not None: + for tool_call in choice.delta.tool_calls: + if tool_call.function is not None: + if tool_call.function.name is not None: + if tool_call.index != 0: + print("\n ", end="") + cprint(tool_call.function.name, color="green", background=True) + print(" ", end="") + if tool_call.function.arguments is not None: + print(tool_call.function.arguments.replace("\n", "\n "), end="") + if choice.finish_reason is not None: + print() + + +def print_llm_settings(llm_settings: LLMSettings, model: str, engine: str | None, platform: str) -> None: + try: + cprint("[llm_settings]", color=TITLE_COLOR, kwargs={"end": " "}) + llm_settings_copy = dict(platform=platform, **llm_settings) + llm_settings_copy["model"] = model + # Azureの場合 + if platform == "azure": + llm_settings_copy["engine"] = engine # engineを追加 + print(llm_settings_copy or "-") + except Exception as e: + cprint(e, color="red", background=True) + + +def print_client_settings(client_settings: ClientSettings) -> None: + try: + cprint("[client_settings]", color=TITLE_COLOR, kwargs={"end": " "}) + print(client_settings or "-") + except Exception as e: + cprint(e, color="red", background=True) + + +# ------- + +_DumplableEntity = int | float | str | bool | None | list[Any] | dict[Any, Any] +DumplableType = _DumplableEntity | list["DumplableType"] | dict["DumplableType", 
"DumplableType"] + + +def _arange_dumpable_object(obj: Any) -> DumplableType: + # 基本データ型の場合、そのまま返す + if isinstance(obj, (int, float, str, bool, type(None))): + return obj + + # リストの場合、再帰的に各要素を変換 + if isinstance(obj, list): + return [_arange_dumpable_object(item) for item in obj] + + # 辞書の場合、再帰的に各キーと値を変換 + if isinstance(obj, dict): + return {_arange_dumpable_object(key): _arange_dumpable_object(value) for key, value in obj.items()} + + # それ以外の型の場合、型情報を含めて文字列に変換 + return f"<{type(obj).__name__}>{str(obj)}" diff --git a/neollm/types/__init__.py b/neollm/types/__init__.py new file mode 100644 index 0000000000000000000000000000000000000000..f96e71d9e6cafc8794729a4ea471074693a3fe72 --- /dev/null +++ b/neollm/types/__init__.py @@ -0,0 +1,4 @@ +from neollm.types.info import * # NOQA +from neollm.types.mytypes import * # NOQA +from neollm.types.openai.chat_completion import * # NOQA +from neollm.types.openai.chat_completion_chunk import * # NOQA diff --git a/neollm/types/_model.py b/neollm/types/_model.py new file mode 100644 index 0000000000000000000000000000000000000000..3070da8c68474c7b398db77b2147a8dd005b7ea3 --- /dev/null +++ b/neollm/types/_model.py @@ -0,0 +1,8 @@ +from typing import Any + +from openai._models import BaseModel + + +class DictableBaseModel(BaseModel): # openaiのBaseModelをDictAccessできるようにした + def __getitem__(self, item: str) -> Any: + return getattr(self, item) diff --git a/neollm/types/info.py b/neollm/types/info.py new file mode 100644 index 0000000000000000000000000000000000000000..3ffeba19a4139af2d08702ef2167b99315dd8e7c --- /dev/null +++ b/neollm/types/info.py @@ -0,0 +1,82 @@ +from typing import Literal + +from neollm.types._model import DictableBaseModel + +PrintColor = Literal["black", "red", "green", "yellow", "blue", "magenta", "cyan", "white"] + + +class TimeInfo(DictableBaseModel): + total: float = 0.0 + """処理時間合計 preprocess + main + postprocess""" + preprocess: float = 0.0 + """前処理時間""" + main: float = 0.0 + """メイン処理時間""" + postprocess: 
class TokenInfo(DictableBaseModel):
    """Token counts for an LLM request: prompt, completion, and their sum."""

    input: int
    """Number of tokens in the input (prompt) part."""
    output: int
    """Number of tokens in the output (completion) part."""
    total: int
    """Total number of tokens (input + output)."""

    def __add__(self, other: "TokenInfo") -> "TokenInfo":
        """Return a new TokenInfo with field-wise sums.

        Raises:
            TypeError: If *other* is not a TokenInfo.
        """
        if not isinstance(other, TokenInfo):
            raise TypeError(f"{other} is not TokenInfo")
        return TokenInfo(
            input=self.input + other.input, output=self.output + other.output, total=self.total + other.total
        )

    def __iadd__(self, other: "TokenInfo") -> "TokenInfo":
        """Accumulate *other* into self in place (supports `+=`).

        Raises:
            TypeError: If *other* is not a TokenInfo.
        """
        if not isinstance(other, TokenInfo):
            raise TypeError(f"{other} is not TokenInfo")
        self.input += other.input
        self.output += other.output
        self.total += other.total
        return self
--- /dev/null +++ b/neollm/types/mytypes.py @@ -0,0 +1,31 @@ +from typing import Any, Iterator, Literal, TypeVar + +import openai.types.chat as openai_types +from openai._streaming import Stream + +from neollm.types.openai.chat_completion import ChatCompletion +from neollm.types.openai.chat_completion_chunk import ChatCompletionChunk + +Role = Literal["system", "user", "assistant", "tool", "function"] +# Settings +LLMSettings = dict[str, Any] +ClientSettings = dict[str, Any] +# Message +Message = openai_types.ChatCompletionMessageParam +Messages = list[Message] +Tools = Any +Functions = Any +# Response +Response = ChatCompletion +Chunk = ChatCompletionChunk +StreamResponse = Iterator[Chunk] +# IO +InputType = TypeVar("InputType") +OutputType = TypeVar("OutputType") +StreamOutputType = Any + +# OpenAI -------------------------------------------- +OpenAIResponse = openai_types.ChatCompletion +OpenAIChunk = openai_types.ChatCompletionChunk +OpenAIStreamResponse = Stream[OpenAIChunk] # OpneAI StreamResponse +OpenAIMessages = list[openai_types.ChatCompletionMessageParam] diff --git a/neollm/types/openai/__init__.py b/neollm/types/openai/__init__.py new file mode 100644 index 0000000000000000000000000000000000000000..9d18b3d0bac608c5501f5480f1863c14112f3218 --- /dev/null +++ b/neollm/types/openai/__init__.py @@ -0,0 +1,2 @@ +from neollm.types.openai.chat_completion import * # NOQA +from neollm.types.openai.chat_completion_chunk import * # NOQA diff --git a/neollm/types/openai/chat_completion.py b/neollm/types/openai/chat_completion.py new file mode 100644 index 0000000000000000000000000000000000000000..5151e71bf6a00eaca607eabc8e827e5956a3f2f6 --- /dev/null +++ b/neollm/types/openai/chat_completion.py @@ -0,0 +1,170 @@ +from typing import List, Literal, Optional + +from openai.types.chat import ChatCompletionAssistantMessageParam +from openai.types.chat.chat_completion_assistant_message_param import ( + FunctionCall as FunctionCallParams, +) +from 
class CompletionUsage(DictableBaseModel):
    """Token-usage statistics reported by the API for one completion request."""

    completion_tokens: int
    """Number of tokens in the generated completion."""

    prompt_tokens: int
    """Number of tokens in the prompt."""

    total_tokens: int
    """Total number of tokens used in the request (prompt + completion)."""

    # ADDED for the gpt-4v preview models, whose responses may report these
    # usage fields as None: coerce None to 0 before int validation runs.
    # NOTE(review): "validate_name" looks like a copy-paste name — it validates
    # token counts, not a name.
    @field_validator("completion_tokens", "prompt_tokens", "total_tokens", mode="before")
    def validate_name(cls, v: int | None) -> int:
        return v or 0
class ChatCompletionMessage(DictableBaseModel):
    """An assistant message as returned inside a chat-completion choice."""

    content: Optional[str]
    """The contents of the message."""

    role: Literal["assistant"]
    """The role of the author of this message."""

    function_call: Optional[FunctionCall] = None
    """Deprecated and replaced by `tool_calls`.

    The name and arguments of a function that should be called, as generated by the
    model.
    """

    tool_calls: Optional[List[ChatCompletionMessageToolCall]] = None
    """The tool calls generated by the model, such as function calls."""

    def to_typeddict_message(self) -> ChatCompletionAssistantMessageParam:
        """Convert this model to the TypedDict param shape the OpenAI API accepts.

        The `function_call` / `tool_calls` keys are added only when present, so
        the result can be sent straight back as an assistant message.
        """
        message_dict = ChatCompletionAssistantMessageParam(role=self.role, content=self.content)
        if self.function_call is not None:
            message_dict["function_call"] = FunctionCallParams(
                arguments=self.function_call.arguments, name=self.function_call.name
            )
        if self.tool_calls is not None:
            message_dict["tool_calls"] = [
                ChatCompletionMessageToolCallParam(
                    id=tool_call.id,
                    function=FunctionParams(arguments=tool_call.function.arguments, name=tool_call.function.name),
                    type=tool_call.type,
                )
                for tool_call in self.tool_calls
            ]
        return message_dict
+ + This will be `stop` if the model hit a natural stop point or a provided stop + sequence, `length` if the maximum number of tokens specified in the request was + reached, `content_filter` if content was omitted due to a flag from our content + filters, `tool_calls` if the model called a tool, or `function_call` + (deprecated) if the model called a function. + """ + + index: int + """The index of the choice in the list of choices.""" + + message: ChatCompletionMessage + """A chat completion message generated by the model.""" + + +class ChatCompletion(DictableBaseModel): + id: str + """A unique identifier for the chat completion.""" + + choices: List[Choice] + """A list of chat completion choices. + + Can be more than one if `n` is greater than 1. + """ + + created: int + """The Unix timestamp (in seconds) of when the chat completion was created.""" + + model: str + """The model used for the chat completion.""" + + object: Literal["chat.completion"] | str + """The object type, which is always `chat.completion`.""" + + system_fingerprint: Optional[str] = None + """This fingerprint represents the backend configuration that the model runs with. + + Can be used in conjunction with the `seed` request parameter to understand when + backend changes have been made that might impact determinism. 
+ """ + + usage: Optional[CompletionUsage] = None + """Usage statistics for the completion request.""" diff --git a/neollm/types/openai/chat_completion_chunk.py b/neollm/types/openai/chat_completion_chunk.py new file mode 100644 index 0000000000000000000000000000000000000000..e98020830a46a2a3ac1a0152332bbf8eddcf22a6 --- /dev/null +++ b/neollm/types/openai/chat_completion_chunk.py @@ -0,0 +1,109 @@ +from typing import List, Literal, Optional + +from pydantic import field_validator + +from neollm.types._model import DictableBaseModel +from neollm.utils.utils import cprint + + +class ChoiceDeltaFunctionCall(DictableBaseModel): + arguments: Optional[str] = None + """ + The arguments to call the function with, as generated by the model in JSON + format. Note that the model does not always generate valid JSON, and may + hallucinate parameters not defined by your function schema. Validate the + arguments in your code before calling your function. + """ + + name: Optional[str] = None + """The name of the function to call.""" + + +class ChoiceDeltaToolCallFunction(DictableBaseModel): + arguments: Optional[str] = None + """ + The arguments to call the function with, as generated by the model in JSON + format. Note that the model does not always generate valid JSON, and may + hallucinate parameters not defined by your function schema. Validate the + arguments in your code before calling your function. + """ + + name: Optional[str] = None + """The name of the function to call.""" + + +class ChoiceDeltaToolCall(DictableBaseModel): + index: int + + id: Optional[str] = None + """The ID of the tool call.""" + + function: Optional[ChoiceDeltaToolCallFunction] = None + + type: Optional[Literal["function"]] = None + """The type of the tool. 
class ChatCompletionChunk(DictableBaseModel):
    """One streamed chunk of a chat completion."""

    id: str
    """A unique identifier for the chat completion. Each chunk has the same ID."""

    choices: List[ChunkChoice]
    """A list of chat completion choices.

    Can be more than one if `n` is greater than 1.
    """

    created: int
    """The Unix timestamp (in seconds) of when the chat completion was created.

    Each chunk has the same timestamp.
    """

    model: str
    """The model to generate the completion."""

    object: Literal["chat.completion.chunk"] = "chat.completion.chunk"  # for azure
    """The object type, which is always `chat.completion.chunk`."""

    # ADDED for Azure, which may send object == "" on some chunks:
    # normalize every incoming value to "chat.completion.chunk",
    # warning only on unexpected non-empty values.
    @field_validator("object", mode="before")
    def validate_name(cls, v: str) -> Literal["chat.completion.chunk"]:
        if v != "" and v != "chat.completion.chunk":
            # BUGFIX: "yellow" was passed positionally, so cprint treated it as a
            # second piece of text to print instead of as the color.
            cprint(f"ChatCompletionChunk.object is not 'chat.completion.chunk': {v}", color="yellow")
        return "chat.completion.chunk"
+ """ + # ディレクトリ内のJSONファイルのリストを取得 + # TODO: エラーキャッチ + json_files = sorted([f for f in glob.glob(f"{log_dir}/*.json")], key=lambda x: int(x.split("/")[-1].split(".")[0])) + + # すべてのJSONファイルからユニークなキーを取得 + columns = [] + data_list: list[dict[Any, Any]] = [] + keys_set = set() + for json_file in json_files: + data = _load_json_file(json_file) + if isinstance(data, dict): + for key in data.keys(): + if key not in keys_set: + keys_set.add(key) + columns.append(key) + data_list.append(data) + + # CSVファイルを作成し、ヘッダーを書き込む + with open(csv_file_name, "w", encoding="utf-8", newline="") as csv_file: + writer = csv.writer(csv_file) + writer.writerow(columns) + + # JSONファイルからデータを読み取り、CSVファイルに書き込む + for data in data_list: + row = [data.get(key, "") for key in columns] + writer.writerow(row) + + print(f"saved csv file: {csv_file_name}") diff --git a/neollm/utils/postprocess.py b/neollm/utils/postprocess.py new file mode 100644 index 0000000000000000000000000000000000000000..b9653b00042da5754120aa6fdf5c9ff699538bb8 --- /dev/null +++ b/neollm/utils/postprocess.py @@ -0,0 +1,120 @@ +import json +from typing import Any, overload + + +# string --------------------------------------- +def _extract_string(text: str, start_string: str | None = None, end_string: str | None = None) -> str: + """ + テキストから必要な文字列を抽出する + + Args: + text (str): 抽出するテキスト + + Returns: + str: 抽出された必要な文字列 + """ + # 最初の文字 + if start_string is not None and start_string in text: + idx_head = text.index(start_string) + text = text[idx_head:] + # 最後の文字 + if end_string is not None and end_string in text: + idx_tail = len(text) - text[::-1].index(end_string[::-1]) + text = text[:idx_tail] + return text + + +def _delete_first_chapter_tag(text: str, first_character_tag: str | list[str]) -> str: + """_summary_ + + Args: + text (str): テキスト + first_character_tag (str | list[str]): 最初にある余分な文字列 + + Returns: + str: 除去済みのテキスト + """ + # first_character_tagのlist化 + if isinstance(first_character_tag, str): + first_character_tag = 
def strip_string(
    text: str,
    first_character: str | list[str] = ["", ""],
    start_string: str | None = None,
    end_string: str | None = None,
    strip_quotes: str | list[str] = ["'", '"'],
) -> str:
    """Strip redundant leading/trailing characters from an LLM output string.

    Args:
        text (str): Raw output string from the LLM.
        first_character (str | list[str], optional): Leading tag(s) to remove
            when the text starts with one of them. Defaults to ["", ""].
        start_string (str | None, optional): Keep the text from the first
            occurrence of this marker. Defaults to None.
        end_string (str | None, optional): Keep the text through the last
            occurrence of this marker. Defaults to None.
        strip_quotes (str | list[str], optional): Quote characters to strip
            from both ends. Defaults to ["'", '"'].

    Returns:
        str: The cleaned-up string.

    Examples:
        >>> strip_string("'''ChatGPT is smart!'''", "")
        'ChatGPT is smart!'
        >>> strip_string('{"a": 1}', start_string="{", end_string="}")
        '{"a": 1}'
        >>> strip_string(" `neoAI`", strip_quotes="`")
        'neoAI'
    """
    # NOTE(review): the list defaults are never mutated here, so they are safe,
    # but immutable (tuple) defaults would be the safer idiom.
    # Remove a leading tag, if present.
    text = _delete_first_chapter_tag(text, first_character)
    # Strip surrounding quote characters (each pass also strips whitespace).
    if isinstance(strip_quotes, str):
        strip_quotes = [strip_quotes]
    for quote in strip_quotes:
        text = text.strip(quote).strip()
    # Finally, cut down to the start/end markers.
    text = _extract_string(text, start_string, end_string)
    return text.strip()
+ + Returns: + dict: 変換されたPython dict + """ + try: + python_dict = json.loads(_extract_string(json_string, start_string="{", end_string="}"), strict=False) + except ValueError: + if error_key is None: + return json_string + python_dict = {error_key: json_string} + if isinstance(python_dict, dict): + return python_dict + return {error_key: python_dict} + + +# calender +# YYYY年MM月YY日 -> YYYY-MM-DD diff --git a/neollm/utils/preprocess.py b/neollm/utils/preprocess.py new file mode 100644 index 0000000000000000000000000000000000000000..5a01ad6a6cda9f89b98b5f090c3aeb647b7cd318 --- /dev/null +++ b/neollm/utils/preprocess.py @@ -0,0 +1,107 @@ +import json +import re +from typing import Any, Callable + + +# dict2json -------------------------------- +def dict2json(python_dict: dict[str, Any]) -> str: + """ + Python dictをJSON文字列に変換する + + Args: + python_dict (dict): 変換するPython dict + + Returns: + str: 変換されたJSON文字列 + """ + # ensure_ascii: 日本語とかを出力するため + json_string = json.dumps(python_dict, indent=2, ensure_ascii=False) + return json_string + + +# optimize token -------------------------------- +def optimize_token(text: str, funcs: list[Callable[[str], str]] | None = None) -> str: + """ + テキストのトークンを最適化をする + + Args: + text (str): 最適化するテキスト + + Returns: + str: 最適化されたテキスト + """ + funcs = funcs or [minimize_newline, zenkaku_to_hankaku, remove_trailing_spaces] + for func in funcs: + text = func(text) + return text.strip() + + +def _replace_consecutive(text: str, pattern: str, replacing_text: str) -> str: + """ + テキスト内の連続するパターンに対して、指定された置換テキストで置換する + + Args: + text (str): テキスト + pattern (str): 置換するパターン + replacing_text (str): 置換テキスト + + Returns: + str: 置換されたテキスト + """ + p = re.compile(pattern) + matches = [(m.start(), m.end()) for m in p.finditer(text)][::-1] + + text_replaced = list(text) + + for i_start, i_end in matches: + text_replaced[i_start:i_end] = [replacing_text] + return "".join(text_replaced) + + +def minimize_newline(text: str) -> str: + """ + テキスト内の連続する改行を2以下にする + + 
def remove_trailing_spaces(text: str) -> str:
    """Remove trailing whitespace from every line of *text*.

    Args:
        text: Input text with lines separated by "\\n".

    Returns:
        The text with each line right-stripped.
    """
    stripped_lines = (line.rstrip() for line in text.split("\n"))
    return "\n".join(stripped_lines)
"[その他気になる点]\n" + "- ...\n" + "- ...\n" + "[提案]\n" + "- ...\n" + "- ...\n" + ) + if inputs["myllm"].messages is None: + return [] + user_prompt = "# プロンプト\n" + "\n".join( + # [f"<{message['role']}>\n{message['content']}\n" for message in inputs.messages] + [str(message) for message in inputs["myllm"].messages] + ) + messages: Messages = [ + {"role": "system", "content": system_prompt}, + {"role": "user", "content": user_prompt}, + ] + return messages + + def _postprocess(self, response: Response) -> str: + if response.choices[0].message.content is None: + return "contentがないンゴ" + return response.choices[0].message.content + + def _ruleprocess(self, inputs: APromptCheckerInput) -> str | None: + if inputs["myllm"].messages is None: + return "ruleprocessが走って、リクエストしてないよ!" + return None + + def __call__(self, inputs: APromptCheckerInput) -> str: + outputs: str = super().__call__(inputs) + return outputs + + +class PromptsChecker(MyL3M2[PromptCheckerInput, None]): + def _link(self, inputs: PromptCheckerInput) -> None: + if isinstance(inputs["myllm"], MyL3M2): + for myllm in inputs["myllm"].myllm_list: + prompts_checker = PromptsChecker(parent=self, verbose=True) + prompts_checker( + inputs={ + "myllm": myllm, + "model": inputs["model"], + "platform": inputs["platform"], + "llm_settings": inputs["llm_settings"], + } + ) + elif isinstance(inputs["myllm"], MyLLM): + a_prompt_checker = APromptChecker( + parent=self, + llm_settings=inputs["llm_settings"], + verbose=True, + platform=inputs["platform"], + model=inputs["model"], + ) + a_prompt_checker(inputs={"myllm": inputs["myllm"]}) + + def __call__(self, inputs: PromptCheckerInput) -> None: + super().__call__(inputs) + + +def check_prompt( + myllm: _MyLLM | _MyL3M2, + llm_settings: LLMSettings | None = None, + model: str = "gpt-3.5-turbo", + platform: str = "openai", +) -> MyL3M2[Any, Any]: + prompt_checker_2 = PromptsChecker(verbose=True) + prompt_checker_2(inputs={"myllm": myllm, "llm_settings": llm_settings, "model": model, 
"platform": platform}) + return prompt_checker_2 diff --git a/neollm/utils/tokens.py b/neollm/utils/tokens.py new file mode 100644 index 0000000000000000000000000000000000000000..2794936fb4d3c8638c4b21479597184ba05c2fd8 --- /dev/null +++ b/neollm/utils/tokens.py @@ -0,0 +1,229 @@ +import json +import textwrap +from typing import Any + +import tiktoken + +from neollm.types import Function # , Functions, Messages + + +def normalize_model_name(model_name: str) -> str: + """model_nameのトークン数計測のための標準化 + + Args: + model_name (str): model_name + OpenAI: gpt-3.5-turbo-0613, gpt-3.5-turbo-16k-0613, gpt-4-0613, gpt-4-32k-0613 + OpenAIFT: ft:gpt-3.5-turbo:org_id + Azure: gpt-35-turbo-0613, gpt-35-turbo-16k-0613, gpt-4-0613, gpt-4-32k-0613 + + Returns: + str: 標準化されたmodel_name + + Raises: + ValueError: model_nameが不適切 + """ + # 参考: https://platform.openai.com/docs/models/gpt-3-5 + NEWEST_MAP = [ + ("gpt-3.5-turbo-16k", "gpt-3.5-turbo-16k-0613"), + ("gpt-3.5-turbo", "gpt-3.5-turbo-0613"), + ("gpt-4-32k", "gpt-4-32k-0613"), + ("gpt-4", "gpt-4-0613"), + ] + ALL_VERSION_MODELS = [ + # gpt-3.5-turbo + "gpt-3.5-turbo-0613", + "gpt-3.5-turbo-16k-0613", + "gpt-3.5-turbo-0301", # Legacy + # gpt-4 + "gpt-4-0613", + "gpt-4-32k-0613", + "gpt-4-0314", # Legacy + "gpt-4-32k-0314", # Legacy + ] + # Azure表記 → OpenAI表記に統一 + model_name = model_name.replace("gpt-35", "gpt-3.5") + # 最新モデルを正式名称に & 新モデル, FTモデルをキャッチ + if model_name not in ALL_VERSION_MODELS: + for key, model_name_version in NEWEST_MAP: + if key in model_name: + model_name = model_name_version + break + # Return + if model_name in ALL_VERSION_MODELS: + return model_name + raise ValueError("model_name は以下から選んで.\n" + ",".join(ALL_VERSION_MODELS)) + + +def count_tokens(messages: Any | None = None, model_name: str | None = None, functions: Any | None = None) -> int: + """トークン数計測 + + Args: + messages (Messages): GPTAPIの入力のmessages + model_name (str | None, optional): モデル名. Defaults to None. 
def _count_messages_tokens(messages: Any | None, model_name: str) -> int:
    """Count the tokens consumed by a chat `messages` payload.

    Follows the OpenAI cookbook recipe ("How to count tokens with tiktoken"):
    a fixed per-message overhead plus the encoded length of every string value,
    with a correction when a "name" field is present.

    Args:
        messages (Messages | None): `messages` payload for the chat API;
            None counts as 0.
        model_name (str): Model whose tokenizer and overheads to use, e.g.
            "gpt-3.5-turbo-0613", "gpt-3.5-turbo-16k-0613", "gpt-4-0314",
            "gpt-4-32k-0314", "gpt-4-0613", "gpt-4-32k-0613",
            "gpt-3.5-turbo", "gpt-4".

    Returns:
        int: Estimated total number of prompt tokens.
    """
    if messages is None:
        return 0
    # Tokenizer for this model family (cl100k_base for the models above).
    encoding_model = tiktoken.encoding_for_model(model_name)  # "cl100k_base"

    # Per-message overheads; only the legacy 0301 model differs.
    if model_name == "gpt-3.5-turbo-0301":
        tokens_per_message = 4  # every message follows <|start|>{role/name}\n{content}<|end|>\n
        tokens_per_name = -1  # if there's a name, the role is omitted
    else:
        tokens_per_message = 3
        tokens_per_name = 1

    # count tokens
    num_tokens = 3  # every reply is primed with <|start|>assistant<|message|>
    for message in messages:
        num_tokens += tokens_per_message
        for key, value in message.items():
            # Only string values are encoded; non-string parts (e.g. nested
            # content lists) are skipped by this estimate.
            if isinstance(value, str):
                num_tokens += len(encoding_model.encode(value))
            if key == "name":
                num_tokens += tokens_per_name
    return num_tokens
def __functions2string(functions: Any) -> str:
    """Render *functions* as the TypeScript-style text used for token counting.

    The token counter measures this rendered text, so its shape should match
    the format the model actually sees:

        # Tools

        ## functions

        namespace functions {

        <type declarations>

        } // namespace functions

    Args:
        functions (Functions): `functions` payload of the chat-completions API.

    Returns:
        str: The rendered functions block.
    """
    header = "# Tools\n\n## functions\n\nnamespace functions {\n\n"
    footer = "} // namespace functions\n"
    # BUGFIX: the type declarations must sit INSIDE the namespace braces; the
    # previous version closed the namespace first and appended them after it.
    body = "".join(__function2string(function) for function in functions)
    return header + body + footer
def __format_schema(schema: dict[str, Any], indent: int) -> str | None:
    """Render one JSON-schema node as its TypeScript type expression.

    Args:
        schema: JSON-schema fragment (must carry "enum" or "type").
        indent: Current indentation level, forwarded to nested objects.

    Returns:
        str | None: TypeScript type text, or None for an empty object schema.

    Raises:
        ValueError: For schema types with no TypeScript rendering here.
    """
    # schema <- resolve_ref(schema)
    if "enum" in schema:
        return __format_enum(schema)
    schema_type = schema["type"]
    if schema_type == "object":
        return __format_object(schema, indent)
    if schema_type in {"integer", "number"}:
        return "number"
    if schema_type == "string":
        return "string"
    if schema_type == "boolean":  # FIX: boolean parameters used to raise ValueError
        return "boolean"
    if schema_type == "array":
        return str(__format_schema(schema["items"], indent)) + "[]"
    raise ValueError("unknown schema type " + schema_type)
= None, + background: bool = False, + light: bool = False, + bold: bool = False, + italic: bool = False, + underline: bool = False, + kwargs: dict[str, Any] = {}, +) -> None: + """ + 色付けなどリッチにprint + + Args: + *text: 表示するテキスト。 + color (PrintColor): テキストの色: 'black', 'red', 'green', 'yellow', 'blue', 'magenta', 'cyan', 'white'。 + background (bool): 背景色 + light (bool): 淡い色にするか + bold (bool): 太字 + italic (bool): 斜体 + underline (bool): 下線 + **kwargs: printの引数 + """ + # ANSIエスケープシーケンスを使用して、テキストを書式設定して表示する + format_string = "" + + # 色の設定 + color2code: dict[PrintColor, int] = { + "black": 30, + "red": 31, + "green": 32, + "yellow": 33, + "blue": 34, + "magenta": 35, + "cyan": 36, + "white": 37, + } + if color is not None and color in color2code: + code = color2code[color] + if background: + code += 10 + elif light: + code += 60 + format_string += f"\033[{code}m" + if bold: + format_string += "\033[1m" + if italic: + format_string += "\033[3m" + if underline: + format_string += "\033[4m" + + # テキストの表示 + for text_i in text: + print(format_string + str(text_i) + "\033[0m", **kwargs) + + +def ensure_env_var(var_name: str | None = None, default: str | None = None) -> str: + if var_name is None: + return "" + if os.environ.get(var_name, "") == "": + if default is None: + raise ValueError(f"{var_name}をenvで設定しよう") + cprint(f"WARNING: {var_name}が設定されていません。{default}を使用します。", color="yellow", background=True) + os.environ[var_name] = default + return os.environ[var_name] + + +def suport_unrecomended_env_var(old_key: str, new_key: str) -> None: + """非推奨の環境変数をサポートする + + Args: + old_key (str): 非推奨の環境変数名 + new_key (str): 推奨の環境変数名 + """ + if os.getenv(old_key) is not None and os.getenv(new_key) is None: + cprint(f"WARNING: {old_key}ではなく、{new_key}にしてね", color="yellow", background=True) + os.environ[new_key] = os.environ[old_key] diff --git a/poetry.lock b/poetry.lock new file mode 100644 index 0000000000000000000000000000000000000000..54855ab7a557fce861190c751b8f25c8783e74b9 --- /dev/null 
+++ b/poetry.lock @@ -0,0 +1,3694 @@ +# This file is automatically @generated by Poetry 1.8.3 and should not be changed by hand. + +[[package]] +name = "annotated-types" +version = "0.6.0" +description = "Reusable constraint types to use with typing.Annotated" +optional = false +python-versions = ">=3.8" +files = [ + {file = "annotated_types-0.6.0-py3-none-any.whl", hash = "sha256:0641064de18ba7a25dee8f96403ebc39113d0cb953a01429249d5c7564666a43"}, + {file = "annotated_types-0.6.0.tar.gz", hash = "sha256:563339e807e53ffd9c267e99fc6d9ea23eb8443c08f112651963e24e22f84a5d"}, +] + +[[package]] +name = "anthropic" +version = "0.18.1" +description = "The official Python library for the anthropic API" +optional = false +python-versions = ">=3.7" +files = [ + {file = "anthropic-0.18.1-py3-none-any.whl", hash = "sha256:b85aee64f619ce1b1964ba733a09adc4053e7bc4e6d4186001229ec191099dcf"}, + {file = "anthropic-0.18.1.tar.gz", hash = "sha256:f5d1caafd43f6cc933a79753a93531605095f040a384f6a900c3de9c3fb6694e"}, +] + +[package.dependencies] +anyio = ">=3.5.0,<5" +distro = ">=1.7.0,<2" +google-auth = {version = ">=2,<3", optional = true, markers = "extra == \"vertex\""} +httpx = ">=0.23.0,<1" +pydantic = ">=1.9.0,<3" +sniffio = "*" +tokenizers = ">=0.13.0" +typing-extensions = ">=4.7,<5" + +[package.extras] +bedrock = ["boto3 (>=1.28.57)", "botocore (>=1.31.57)"] +vertex = ["google-auth (>=2,<3)"] + +[[package]] +name = "anyio" +version = "4.3.0" +description = "High level compatibility layer for multiple asynchronous event loop implementations" +optional = false +python-versions = ">=3.8" +files = [ + {file = "anyio-4.3.0-py3-none-any.whl", hash = "sha256:048e05d0f6caeed70d731f3db756d35dcc1f35747c8c403364a8332c630441b8"}, + {file = "anyio-4.3.0.tar.gz", hash = "sha256:f75253795a87df48568485fd18cdd2a3fa5c4f7c5be8e5e36637733fce06fed6"}, +] + +[package.dependencies] +exceptiongroup = {version = ">=1.0.2", markers = "python_version < \"3.11\""} +idna = ">=2.8" +sniffio = ">=1.1" 
+typing-extensions = {version = ">=4.1", markers = "python_version < \"3.11\""} + +[package.extras] +doc = ["Sphinx (>=7)", "packaging", "sphinx-autodoc-typehints (>=1.2.0)", "sphinx-rtd-theme"] +test = ["anyio[trio]", "coverage[toml] (>=7)", "exceptiongroup (>=1.2.0)", "hypothesis (>=4.0)", "psutil (>=5.9)", "pytest (>=7.0)", "pytest-mock (>=3.6.1)", "trustme", "uvloop (>=0.17)"] +trio = ["trio (>=0.23)"] + +[[package]] +name = "appnope" +version = "0.1.4" +description = "Disable App Nap on macOS >= 10.9" +optional = false +python-versions = ">=3.6" +files = [ + {file = "appnope-0.1.4-py2.py3-none-any.whl", hash = "sha256:502575ee11cd7a28c0205f379b525beefebab9d161b7c964670864014ed7213c"}, + {file = "appnope-0.1.4.tar.gz", hash = "sha256:1de3860566df9caf38f01f86f65e0e13e379af54f9e4bee1e66b48f2efffd1ee"}, +] + +[[package]] +name = "argon2-cffi" +version = "23.1.0" +description = "Argon2 for Python" +optional = false +python-versions = ">=3.7" +files = [ + {file = "argon2_cffi-23.1.0-py3-none-any.whl", hash = "sha256:c670642b78ba29641818ab2e68bd4e6a78ba53b7eff7b4c3815ae16abf91c7ea"}, + {file = "argon2_cffi-23.1.0.tar.gz", hash = "sha256:879c3e79a2729ce768ebb7d36d4609e3a78a4ca2ec3a9f12286ca057e3d0db08"}, +] + +[package.dependencies] +argon2-cffi-bindings = "*" + +[package.extras] +dev = ["argon2-cffi[tests,typing]", "tox (>4)"] +docs = ["furo", "myst-parser", "sphinx", "sphinx-copybutton", "sphinx-notfound-page"] +tests = ["hypothesis", "pytest"] +typing = ["mypy"] + +[[package]] +name = "argon2-cffi-bindings" +version = "21.2.0" +description = "Low-level CFFI bindings for Argon2" +optional = false +python-versions = ">=3.6" +files = [ + {file = "argon2-cffi-bindings-21.2.0.tar.gz", hash = "sha256:bb89ceffa6c791807d1305ceb77dbfacc5aa499891d2c55661c6459651fc39e3"}, + {file = "argon2_cffi_bindings-21.2.0-cp36-abi3-macosx_10_9_x86_64.whl", hash = "sha256:ccb949252cb2ab3a08c02024acb77cfb179492d5701c7cbdbfd776124d4d2367"}, + {file = 
"argon2_cffi_bindings-21.2.0-cp36-abi3-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:9524464572e12979364b7d600abf96181d3541da11e23ddf565a32e70bd4dc0d"}, + {file = "argon2_cffi_bindings-21.2.0-cp36-abi3-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:b746dba803a79238e925d9046a63aa26bf86ab2a2fe74ce6b009a1c3f5c8f2ae"}, + {file = "argon2_cffi_bindings-21.2.0-cp36-abi3-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:58ed19212051f49a523abb1dbe954337dc82d947fb6e5a0da60f7c8471a8476c"}, + {file = "argon2_cffi_bindings-21.2.0-cp36-abi3-musllinux_1_1_aarch64.whl", hash = "sha256:bd46088725ef7f58b5a1ef7ca06647ebaf0eb4baff7d1d0d177c6cc8744abd86"}, + {file = "argon2_cffi_bindings-21.2.0-cp36-abi3-musllinux_1_1_i686.whl", hash = "sha256:8cd69c07dd875537a824deec19f978e0f2078fdda07fd5c42ac29668dda5f40f"}, + {file = "argon2_cffi_bindings-21.2.0-cp36-abi3-musllinux_1_1_x86_64.whl", hash = "sha256:f1152ac548bd5b8bcecfb0b0371f082037e47128653df2e8ba6e914d384f3c3e"}, + {file = "argon2_cffi_bindings-21.2.0-cp36-abi3-win32.whl", hash = "sha256:603ca0aba86b1349b147cab91ae970c63118a0f30444d4bc80355937c950c082"}, + {file = "argon2_cffi_bindings-21.2.0-cp36-abi3-win_amd64.whl", hash = "sha256:b2ef1c30440dbbcba7a5dc3e319408b59676e2e039e2ae11a8775ecf482b192f"}, + {file = "argon2_cffi_bindings-21.2.0-cp38-abi3-macosx_10_9_universal2.whl", hash = "sha256:e415e3f62c8d124ee16018e491a009937f8cf7ebf5eb430ffc5de21b900dad93"}, + {file = "argon2_cffi_bindings-21.2.0-pp37-pypy37_pp73-macosx_10_9_x86_64.whl", hash = "sha256:3e385d1c39c520c08b53d63300c3ecc28622f076f4c2b0e6d7e796e9f6502194"}, + {file = "argon2_cffi_bindings-21.2.0-pp37-pypy37_pp73-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:2c3e3cc67fdb7d82c4718f19b4e7a87123caf8a93fde7e23cf66ac0337d3cb3f"}, + {file = "argon2_cffi_bindings-21.2.0-pp37-pypy37_pp73-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = 
"sha256:6a22ad9800121b71099d0fb0a65323810a15f2e292f2ba450810a7316e128ee5"}, + {file = "argon2_cffi_bindings-21.2.0-pp37-pypy37_pp73-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:f9f8b450ed0547e3d473fdc8612083fd08dd2120d6ac8f73828df9b7d45bb351"}, + {file = "argon2_cffi_bindings-21.2.0-pp37-pypy37_pp73-win_amd64.whl", hash = "sha256:93f9bf70084f97245ba10ee36575f0c3f1e7d7724d67d8e5b08e61787c320ed7"}, + {file = "argon2_cffi_bindings-21.2.0-pp38-pypy38_pp73-macosx_10_9_x86_64.whl", hash = "sha256:3b9ef65804859d335dc6b31582cad2c5166f0c3e7975f324d9ffaa34ee7e6583"}, + {file = "argon2_cffi_bindings-21.2.0-pp38-pypy38_pp73-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:d4966ef5848d820776f5f562a7d45fdd70c2f330c961d0d745b784034bd9f48d"}, + {file = "argon2_cffi_bindings-21.2.0-pp38-pypy38_pp73-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:20ef543a89dee4db46a1a6e206cd015360e5a75822f76df533845c3cbaf72670"}, + {file = "argon2_cffi_bindings-21.2.0-pp38-pypy38_pp73-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:ed2937d286e2ad0cc79a7087d3c272832865f779430e0cc2b4f3718d3159b0cb"}, + {file = "argon2_cffi_bindings-21.2.0-pp38-pypy38_pp73-win_amd64.whl", hash = "sha256:5e00316dabdaea0b2dd82d141cc66889ced0cdcbfa599e8b471cf22c620c329a"}, +] + +[package.dependencies] +cffi = ">=1.0.1" + +[package.extras] +dev = ["cogapp", "pre-commit", "pytest", "wheel"] +tests = ["pytest"] + +[[package]] +name = "arrow" +version = "1.3.0" +description = "Better dates & times for Python" +optional = false +python-versions = ">=3.8" +files = [ + {file = "arrow-1.3.0-py3-none-any.whl", hash = "sha256:c728b120ebc00eb84e01882a6f5e7927a53960aa990ce7dd2b10f39005a67f80"}, + {file = "arrow-1.3.0.tar.gz", hash = "sha256:d4540617648cb5f895730f1ad8c82a65f2dad0166f57b75f3ca54759c4d67a85"}, +] + +[package.dependencies] +python-dateutil = ">=2.7.0" +types-python-dateutil = ">=2.8.10" + 
+[package.extras] +doc = ["doc8", "sphinx (>=7.0.0)", "sphinx-autobuild", "sphinx-autodoc-typehints", "sphinx_rtd_theme (>=1.3.0)"] +test = ["dateparser (==1.*)", "pre-commit", "pytest", "pytest-cov", "pytest-mock", "pytz (==2021.1)", "simplejson (==3.*)"] + +[[package]] +name = "asttokens" +version = "2.4.1" +description = "Annotate AST trees with source code positions" +optional = false +python-versions = "*" +files = [ + {file = "asttokens-2.4.1-py2.py3-none-any.whl", hash = "sha256:051ed49c3dcae8913ea7cd08e46a606dba30b79993209636c4875bc1d637bc24"}, + {file = "asttokens-2.4.1.tar.gz", hash = "sha256:b03869718ba9a6eb027e134bfdf69f38a236d681c83c160d510768af11254ba0"}, +] + +[package.dependencies] +six = ">=1.12.0" + +[package.extras] +astroid = ["astroid (>=1,<2)", "astroid (>=2,<4)"] +test = ["astroid (>=1,<2)", "astroid (>=2,<4)", "pytest"] + +[[package]] +name = "async-lru" +version = "2.0.4" +description = "Simple LRU cache for asyncio" +optional = false +python-versions = ">=3.8" +files = [ + {file = "async-lru-2.0.4.tar.gz", hash = "sha256:b8a59a5df60805ff63220b2a0c5b5393da5521b113cd5465a44eb037d81a5627"}, + {file = "async_lru-2.0.4-py3-none-any.whl", hash = "sha256:ff02944ce3c288c5be660c42dbcca0742b32c3b279d6dceda655190240b99224"}, +] + +[package.dependencies] +typing-extensions = {version = ">=4.0.0", markers = "python_version < \"3.11\""} + +[[package]] +name = "attrs" +version = "23.2.0" +description = "Classes Without Boilerplate" +optional = false +python-versions = ">=3.7" +files = [ + {file = "attrs-23.2.0-py3-none-any.whl", hash = "sha256:99b87a485a5820b23b879f04c2305b44b951b502fd64be915879d77a7e8fc6f1"}, + {file = "attrs-23.2.0.tar.gz", hash = "sha256:935dc3b529c262f6cf76e50877d35a4bd3c1de194fd41f47a2b7ae8f19971f30"}, +] + +[package.extras] +cov = ["attrs[tests]", "coverage[toml] (>=5.3)"] +dev = ["attrs[tests]", "pre-commit"] +docs = ["furo", "myst-parser", "sphinx", "sphinx-notfound-page", "sphinxcontrib-towncrier", "towncrier", "zope-interface"] 
+tests = ["attrs[tests-no-zope]", "zope-interface"] +tests-mypy = ["mypy (>=1.6)", "pytest-mypy-plugins"] +tests-no-zope = ["attrs[tests-mypy]", "cloudpickle", "hypothesis", "pympler", "pytest (>=4.3.0)", "pytest-xdist[psutil]"] + +[[package]] +name = "babel" +version = "2.14.0" +description = "Internationalization utilities" +optional = false +python-versions = ">=3.7" +files = [ + {file = "Babel-2.14.0-py3-none-any.whl", hash = "sha256:efb1a25b7118e67ce3a259bed20545c29cb68be8ad2c784c83689981b7a57287"}, + {file = "Babel-2.14.0.tar.gz", hash = "sha256:6919867db036398ba21eb5c7a0f6b28ab8cbc3ae7a73a44ebe34ae74a4e7d363"}, +] + +[package.extras] +dev = ["freezegun (>=1.0,<2.0)", "pytest (>=6.0)", "pytest-cov"] + +[[package]] +name = "beautifulsoup4" +version = "4.12.3" +description = "Screen-scraping library" +optional = false +python-versions = ">=3.6.0" +files = [ + {file = "beautifulsoup4-4.12.3-py3-none-any.whl", hash = "sha256:b80878c9f40111313e55da8ba20bdba06d8fa3969fc68304167741bbf9e082ed"}, + {file = "beautifulsoup4-4.12.3.tar.gz", hash = "sha256:74e3d1928edc070d21748185c46e3fb33490f22f52a3addee9aee0f4f7781051"}, +] + +[package.dependencies] +soupsieve = ">1.2" + +[package.extras] +cchardet = ["cchardet"] +chardet = ["chardet"] +charset-normalizer = ["charset-normalizer"] +html5lib = ["html5lib"] +lxml = ["lxml"] + +[[package]] +name = "black" +version = "24.3.0" +description = "The uncompromising code formatter." 
+optional = false +python-versions = ">=3.8" +files = [ + {file = "black-24.3.0-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:7d5e026f8da0322b5662fa7a8e752b3fa2dac1c1cbc213c3d7ff9bdd0ab12395"}, + {file = "black-24.3.0-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:9f50ea1132e2189d8dff0115ab75b65590a3e97de1e143795adb4ce317934995"}, + {file = "black-24.3.0-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:e2af80566f43c85f5797365077fb64a393861a3730bd110971ab7a0c94e873e7"}, + {file = "black-24.3.0-cp310-cp310-win_amd64.whl", hash = "sha256:4be5bb28e090456adfc1255e03967fb67ca846a03be7aadf6249096100ee32d0"}, + {file = "black-24.3.0-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:4f1373a7808a8f135b774039f61d59e4be7eb56b2513d3d2f02a8b9365b8a8a9"}, + {file = "black-24.3.0-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:aadf7a02d947936ee418777e0247ea114f78aff0d0959461057cae8a04f20597"}, + {file = "black-24.3.0-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:65c02e4ea2ae09d16314d30912a58ada9a5c4fdfedf9512d23326128ac08ac3d"}, + {file = "black-24.3.0-cp311-cp311-win_amd64.whl", hash = "sha256:bf21b7b230718a5f08bd32d5e4f1db7fc8788345c8aea1d155fc17852b3410f5"}, + {file = "black-24.3.0-cp312-cp312-macosx_10_9_x86_64.whl", hash = "sha256:2818cf72dfd5d289e48f37ccfa08b460bf469e67fb7c4abb07edc2e9f16fb63f"}, + {file = "black-24.3.0-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:4acf672def7eb1725f41f38bf6bf425c8237248bb0804faa3965c036f7672d11"}, + {file = "black-24.3.0-cp312-cp312-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:c7ed6668cbbfcd231fa0dc1b137d3e40c04c7f786e626b405c62bcd5db5857e4"}, + {file = "black-24.3.0-cp312-cp312-win_amd64.whl", hash = "sha256:56f52cfbd3dabe2798d76dbdd299faa046a901041faf2cf33288bc4e6dae57b5"}, + {file = "black-24.3.0-cp38-cp38-macosx_10_9_x86_64.whl", hash = "sha256:79dcf34b33e38ed1b17434693763301d7ccbd1c5860674a8f871bd15139e7837"}, + {file = 
"black-24.3.0-cp38-cp38-macosx_11_0_arm64.whl", hash = "sha256:e19cb1c6365fd6dc38a6eae2dcb691d7d83935c10215aef8e6c38edee3f77abd"}, + {file = "black-24.3.0-cp38-cp38-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:65b76c275e4c1c5ce6e9870911384bff5ca31ab63d19c76811cb1fb162678213"}, + {file = "black-24.3.0-cp38-cp38-win_amd64.whl", hash = "sha256:b5991d523eee14756f3c8d5df5231550ae8993e2286b8014e2fdea7156ed0959"}, + {file = "black-24.3.0-cp39-cp39-macosx_10_9_x86_64.whl", hash = "sha256:c45f8dff244b3c431b36e3224b6be4a127c6aca780853574c00faf99258041eb"}, + {file = "black-24.3.0-cp39-cp39-macosx_11_0_arm64.whl", hash = "sha256:6905238a754ceb7788a73f02b45637d820b2f5478b20fec82ea865e4f5d4d9f7"}, + {file = "black-24.3.0-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:d7de8d330763c66663661a1ffd432274a2f92f07feeddd89ffd085b5744f85e7"}, + {file = "black-24.3.0-cp39-cp39-win_amd64.whl", hash = "sha256:7bb041dca0d784697af4646d3b62ba4a6b028276ae878e53f6b4f74ddd6db99f"}, + {file = "black-24.3.0-py3-none-any.whl", hash = "sha256:41622020d7120e01d377f74249e677039d20e6344ff5851de8a10f11f513bf93"}, + {file = "black-24.3.0.tar.gz", hash = "sha256:a0c9c4a0771afc6919578cec71ce82a3e31e054904e7197deacbc9382671c41f"}, +] + +[package.dependencies] +click = ">=8.0.0" +mypy-extensions = ">=0.4.3" +packaging = ">=22.0" +pathspec = ">=0.9.0" +platformdirs = ">=2" +tomli = {version = ">=1.1.0", markers = "python_version < \"3.11\""} +typing-extensions = {version = ">=4.0.1", markers = "python_version < \"3.11\""} + +[package.extras] +colorama = ["colorama (>=0.4.3)"] +d = ["aiohttp (>=3.7.4)", "aiohttp (>=3.7.4,!=3.9.0)"] +jupyter = ["ipython (>=7.8.0)", "tokenize-rt (>=3.2.0)"] +uvloop = ["uvloop (>=0.15.2)"] + +[[package]] +name = "bleach" +version = "6.1.0" +description = "An easy safelist-based HTML-sanitizing tool." 
+optional = false +python-versions = ">=3.8" +files = [ + {file = "bleach-6.1.0-py3-none-any.whl", hash = "sha256:3225f354cfc436b9789c66c4ee030194bee0568fbf9cbdad3bc8b5c26c5f12b6"}, + {file = "bleach-6.1.0.tar.gz", hash = "sha256:0a31f1837963c41d46bbf1331b8778e1308ea0791db03cc4e7357b97cf42a8fe"}, +] + +[package.dependencies] +six = ">=1.9.0" +webencodings = "*" + +[package.extras] +css = ["tinycss2 (>=1.1.0,<1.3)"] + +[[package]] +name = "cachetools" +version = "5.3.3" +description = "Extensible memoizing collections and decorators" +optional = false +python-versions = ">=3.7" +files = [ + {file = "cachetools-5.3.3-py3-none-any.whl", hash = "sha256:0abad1021d3f8325b2fc1d2e9c8b9c9d57b04c3932657a72465447332c24d945"}, + {file = "cachetools-5.3.3.tar.gz", hash = "sha256:ba29e2dfa0b8b556606f097407ed1aa62080ee108ab0dc5ec9d6a723a007d105"}, +] + +[[package]] +name = "certifi" +version = "2024.2.2" +description = "Python package for providing Mozilla's CA Bundle." +optional = false +python-versions = ">=3.6" +files = [ + {file = "certifi-2024.2.2-py3-none-any.whl", hash = "sha256:dc383c07b76109f368f6106eee2b593b04a011ea4d55f652c6ca24a754d1cdd1"}, + {file = "certifi-2024.2.2.tar.gz", hash = "sha256:0569859f95fc761b18b45ef421b1290a0f65f147e92a1e5eb3e635f9a5e4e66f"}, +] + +[[package]] +name = "cffi" +version = "1.16.0" +description = "Foreign Function Interface for Python calling C code." 
+optional = false +python-versions = ">=3.8" +files = [ + {file = "cffi-1.16.0-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:6b3d6606d369fc1da4fd8c357d026317fbb9c9b75d36dc16e90e84c26854b088"}, + {file = "cffi-1.16.0-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:ac0f5edd2360eea2f1daa9e26a41db02dd4b0451b48f7c318e217ee092a213e9"}, + {file = "cffi-1.16.0-cp310-cp310-manylinux_2_12_i686.manylinux2010_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:7e61e3e4fa664a8588aa25c883eab612a188c725755afff6289454d6362b9673"}, + {file = "cffi-1.16.0-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:a72e8961a86d19bdb45851d8f1f08b041ea37d2bd8d4fd19903bc3083d80c896"}, + {file = "cffi-1.16.0-cp310-cp310-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:5b50bf3f55561dac5438f8e70bfcdfd74543fd60df5fa5f62d94e5867deca684"}, + {file = "cffi-1.16.0-cp310-cp310-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:7651c50c8c5ef7bdb41108b7b8c5a83013bfaa8a935590c5d74627c047a583c7"}, + {file = "cffi-1.16.0-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:e4108df7fe9b707191e55f33efbcb2d81928e10cea45527879a4749cbe472614"}, + {file = "cffi-1.16.0-cp310-cp310-musllinux_1_1_i686.whl", hash = "sha256:32c68ef735dbe5857c810328cb2481e24722a59a2003018885514d4c09af9743"}, + {file = "cffi-1.16.0-cp310-cp310-musllinux_1_1_x86_64.whl", hash = "sha256:673739cb539f8cdaa07d92d02efa93c9ccf87e345b9a0b556e3ecc666718468d"}, + {file = "cffi-1.16.0-cp310-cp310-win32.whl", hash = "sha256:9f90389693731ff1f659e55c7d1640e2ec43ff725cc61b04b2f9c6d8d017df6a"}, + {file = "cffi-1.16.0-cp310-cp310-win_amd64.whl", hash = "sha256:e6024675e67af929088fda399b2094574609396b1decb609c55fa58b028a32a1"}, + {file = "cffi-1.16.0-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:b84834d0cf97e7d27dd5b7f3aca7b6e9263c56308ab9dc8aae9784abb774d404"}, + {file = "cffi-1.16.0-cp311-cp311-macosx_11_0_arm64.whl", hash = 
"sha256:1b8ebc27c014c59692bb2664c7d13ce7a6e9a629be20e54e7271fa696ff2b417"}, + {file = "cffi-1.16.0-cp311-cp311-manylinux_2_12_i686.manylinux2010_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:ee07e47c12890ef248766a6e55bd38ebfb2bb8edd4142d56db91b21ea68b7627"}, + {file = "cffi-1.16.0-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:d8a9d3ebe49f084ad71f9269834ceccbf398253c9fac910c4fd7053ff1386936"}, + {file = "cffi-1.16.0-cp311-cp311-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:e70f54f1796669ef691ca07d046cd81a29cb4deb1e5f942003f401c0c4a2695d"}, + {file = "cffi-1.16.0-cp311-cp311-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:5bf44d66cdf9e893637896c7faa22298baebcd18d1ddb6d2626a6e39793a1d56"}, + {file = "cffi-1.16.0-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:7b78010e7b97fef4bee1e896df8a4bbb6712b7f05b7ef630f9d1da00f6444d2e"}, + {file = "cffi-1.16.0-cp311-cp311-musllinux_1_1_i686.whl", hash = "sha256:c6a164aa47843fb1b01e941d385aab7215563bb8816d80ff3a363a9f8448a8dc"}, + {file = "cffi-1.16.0-cp311-cp311-musllinux_1_1_x86_64.whl", hash = "sha256:e09f3ff613345df5e8c3667da1d918f9149bd623cd9070c983c013792a9a62eb"}, + {file = "cffi-1.16.0-cp311-cp311-win32.whl", hash = "sha256:2c56b361916f390cd758a57f2e16233eb4f64bcbeee88a4881ea90fca14dc6ab"}, + {file = "cffi-1.16.0-cp311-cp311-win_amd64.whl", hash = "sha256:db8e577c19c0fda0beb7e0d4e09e0ba74b1e4c092e0e40bfa12fe05b6f6d75ba"}, + {file = "cffi-1.16.0-cp312-cp312-macosx_10_9_x86_64.whl", hash = "sha256:fa3a0128b152627161ce47201262d3140edb5a5c3da88d73a1b790a959126956"}, + {file = "cffi-1.16.0-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:68e7c44931cc171c54ccb702482e9fc723192e88d25a0e133edd7aff8fcd1f6e"}, + {file = "cffi-1.16.0-cp312-cp312-manylinux_2_12_i686.manylinux2010_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:abd808f9c129ba2beda4cfc53bde801e5bcf9d6e0f22f095e45327c038bfe68e"}, + {file = 
"cffi-1.16.0-cp312-cp312-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:88e2b3c14bdb32e440be531ade29d3c50a1a59cd4e51b1dd8b0865c54ea5d2e2"}, + {file = "cffi-1.16.0-cp312-cp312-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:fcc8eb6d5902bb1cf6dc4f187ee3ea80a1eba0a89aba40a5cb20a5087d961357"}, + {file = "cffi-1.16.0-cp312-cp312-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:b7be2d771cdba2942e13215c4e340bfd76398e9227ad10402a8767ab1865d2e6"}, + {file = "cffi-1.16.0-cp312-cp312-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:e715596e683d2ce000574bae5d07bd522c781a822866c20495e52520564f0969"}, + {file = "cffi-1.16.0-cp312-cp312-musllinux_1_1_x86_64.whl", hash = "sha256:2d92b25dbf6cae33f65005baf472d2c245c050b1ce709cc4588cdcdd5495b520"}, + {file = "cffi-1.16.0-cp312-cp312-win32.whl", hash = "sha256:b2ca4e77f9f47c55c194982e10f058db063937845bb2b7a86c84a6cfe0aefa8b"}, + {file = "cffi-1.16.0-cp312-cp312-win_amd64.whl", hash = "sha256:68678abf380b42ce21a5f2abde8efee05c114c2fdb2e9eef2efdb0257fba1235"}, + {file = "cffi-1.16.0-cp38-cp38-macosx_10_9_x86_64.whl", hash = "sha256:0c9ef6ff37e974b73c25eecc13952c55bceed9112be2d9d938ded8e856138bcc"}, + {file = "cffi-1.16.0-cp38-cp38-manylinux_2_12_i686.manylinux2010_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:a09582f178759ee8128d9270cd1344154fd473bb77d94ce0aeb2a93ebf0feaf0"}, + {file = "cffi-1.16.0-cp38-cp38-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:e760191dd42581e023a68b758769e2da259b5d52e3103c6060ddc02c9edb8d7b"}, + {file = "cffi-1.16.0-cp38-cp38-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:80876338e19c951fdfed6198e70bc88f1c9758b94578d5a7c4c91a87af3cf31c"}, + {file = "cffi-1.16.0-cp38-cp38-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:a6a14b17d7e17fa0d207ac08642c8820f84f25ce17a442fd15e27ea18d67c59b"}, + {file = "cffi-1.16.0-cp38-cp38-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = 
"sha256:6602bc8dc6f3a9e02b6c22c4fc1e47aa50f8f8e6d3f78a5e16ac33ef5fefa324"}, + {file = "cffi-1.16.0-cp38-cp38-win32.whl", hash = "sha256:131fd094d1065b19540c3d72594260f118b231090295d8c34e19a7bbcf2e860a"}, + {file = "cffi-1.16.0-cp38-cp38-win_amd64.whl", hash = "sha256:31d13b0f99e0836b7ff893d37af07366ebc90b678b6664c955b54561fc36ef36"}, + {file = "cffi-1.16.0-cp39-cp39-macosx_10_9_x86_64.whl", hash = "sha256:582215a0e9adbe0e379761260553ba11c58943e4bbe9c36430c4ca6ac74b15ed"}, + {file = "cffi-1.16.0-cp39-cp39-macosx_11_0_arm64.whl", hash = "sha256:b29ebffcf550f9da55bec9e02ad430c992a87e5f512cd63388abb76f1036d8d2"}, + {file = "cffi-1.16.0-cp39-cp39-manylinux_2_12_i686.manylinux2010_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:dc9b18bf40cc75f66f40a7379f6a9513244fe33c0e8aa72e2d56b0196a7ef872"}, + {file = "cffi-1.16.0-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:9cb4a35b3642fc5c005a6755a5d17c6c8b6bcb6981baf81cea8bfbc8903e8ba8"}, + {file = "cffi-1.16.0-cp39-cp39-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:b86851a328eedc692acf81fb05444bdf1891747c25af7529e39ddafaf68a4f3f"}, + {file = "cffi-1.16.0-cp39-cp39-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:c0f31130ebc2d37cdd8e44605fb5fa7ad59049298b3f745c74fa74c62fbfcfc4"}, + {file = "cffi-1.16.0-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:8f8e709127c6c77446a8c0a8c8bf3c8ee706a06cd44b1e827c3e6a2ee6b8c098"}, + {file = "cffi-1.16.0-cp39-cp39-musllinux_1_1_i686.whl", hash = "sha256:748dcd1e3d3d7cd5443ef03ce8685043294ad6bd7c02a38d1bd367cfd968e000"}, + {file = "cffi-1.16.0-cp39-cp39-musllinux_1_1_x86_64.whl", hash = "sha256:8895613bcc094d4a1b2dbe179d88d7fb4a15cee43c052e8885783fac397d91fe"}, + {file = "cffi-1.16.0-cp39-cp39-win32.whl", hash = "sha256:ed86a35631f7bfbb28e108dd96773b9d5a6ce4811cf6ea468bb6a359b256b1e4"}, + {file = "cffi-1.16.0-cp39-cp39-win_amd64.whl", hash = 
"sha256:3686dffb02459559c74dd3d81748269ffb0eb027c39a6fc99502de37d501faa8"}, + {file = "cffi-1.16.0.tar.gz", hash = "sha256:bcb3ef43e58665bbda2fb198698fcae6776483e0c4a631aa5647806c25e02cc0"}, +] + +[package.dependencies] +pycparser = "*" + +[[package]] +name = "charset-normalizer" +version = "3.3.2" +description = "The Real First Universal Charset Detector. Open, modern and actively maintained alternative to Chardet." +optional = false +python-versions = ">=3.7.0" +files = [ + {file = "charset-normalizer-3.3.2.tar.gz", hash = "sha256:f30c3cb33b24454a82faecaf01b19c18562b1e89558fb6c56de4d9118a032fd5"}, + {file = "charset_normalizer-3.3.2-cp310-cp310-macosx_10_9_universal2.whl", hash = "sha256:25baf083bf6f6b341f4121c2f3c548875ee6f5339300e08be3f2b2ba1721cdd3"}, + {file = "charset_normalizer-3.3.2-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:06435b539f889b1f6f4ac1758871aae42dc3a8c0e24ac9e60c2384973ad73027"}, + {file = "charset_normalizer-3.3.2-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:9063e24fdb1e498ab71cb7419e24622516c4a04476b17a2dab57e8baa30d6e03"}, + {file = "charset_normalizer-3.3.2-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:6897af51655e3691ff853668779c7bad41579facacf5fd7253b0133308cf000d"}, + {file = "charset_normalizer-3.3.2-cp310-cp310-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:1d3193f4a680c64b4b6a9115943538edb896edc190f0b222e73761716519268e"}, + {file = "charset_normalizer-3.3.2-cp310-cp310-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:cd70574b12bb8a4d2aaa0094515df2463cb429d8536cfb6c7ce983246983e5a6"}, + {file = "charset_normalizer-3.3.2-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:8465322196c8b4d7ab6d1e049e4c5cb460d0394da4a27d23cc242fbf0034b6b5"}, + {file = "charset_normalizer-3.3.2-cp310-cp310-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = 
"sha256:a9a8e9031d613fd2009c182b69c7b2c1ef8239a0efb1df3f7c8da66d5dd3d537"}, + {file = "charset_normalizer-3.3.2-cp310-cp310-musllinux_1_1_aarch64.whl", hash = "sha256:beb58fe5cdb101e3a055192ac291b7a21e3b7ef4f67fa1d74e331a7f2124341c"}, + {file = "charset_normalizer-3.3.2-cp310-cp310-musllinux_1_1_i686.whl", hash = "sha256:e06ed3eb3218bc64786f7db41917d4e686cc4856944f53d5bdf83a6884432e12"}, + {file = "charset_normalizer-3.3.2-cp310-cp310-musllinux_1_1_ppc64le.whl", hash = "sha256:2e81c7b9c8979ce92ed306c249d46894776a909505d8f5a4ba55b14206e3222f"}, + {file = "charset_normalizer-3.3.2-cp310-cp310-musllinux_1_1_s390x.whl", hash = "sha256:572c3763a264ba47b3cf708a44ce965d98555f618ca42c926a9c1616d8f34269"}, + {file = "charset_normalizer-3.3.2-cp310-cp310-musllinux_1_1_x86_64.whl", hash = "sha256:fd1abc0d89e30cc4e02e4064dc67fcc51bd941eb395c502aac3ec19fab46b519"}, + {file = "charset_normalizer-3.3.2-cp310-cp310-win32.whl", hash = "sha256:3d47fa203a7bd9c5b6cee4736ee84ca03b8ef23193c0d1ca99b5089f72645c73"}, + {file = "charset_normalizer-3.3.2-cp310-cp310-win_amd64.whl", hash = "sha256:10955842570876604d404661fbccbc9c7e684caf432c09c715ec38fbae45ae09"}, + {file = "charset_normalizer-3.3.2-cp311-cp311-macosx_10_9_universal2.whl", hash = "sha256:802fe99cca7457642125a8a88a084cef28ff0cf9407060f7b93dca5aa25480db"}, + {file = "charset_normalizer-3.3.2-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:573f6eac48f4769d667c4442081b1794f52919e7edada77495aaed9236d13a96"}, + {file = "charset_normalizer-3.3.2-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:549a3a73da901d5bc3ce8d24e0600d1fa85524c10287f6004fbab87672bf3e1e"}, + {file = "charset_normalizer-3.3.2-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:f27273b60488abe721a075bcca6d7f3964f9f6f067c8c4c605743023d7d3944f"}, + {file = "charset_normalizer-3.3.2-cp311-cp311-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:1ceae2f17a9c33cb48e3263960dc5fc8005351ee19db217e9b1bb15d28c02574"}, + {file = 
"charset_normalizer-3.3.2-cp311-cp311-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:65f6f63034100ead094b8744b3b97965785388f308a64cf8d7c34f2f2e5be0c4"}, + {file = "charset_normalizer-3.3.2-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:753f10e867343b4511128c6ed8c82f7bec3bd026875576dfd88483c5c73b2fd8"}, + {file = "charset_normalizer-3.3.2-cp311-cp311-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:4a78b2b446bd7c934f5dcedc588903fb2f5eec172f3d29e52a9096a43722adfc"}, + {file = "charset_normalizer-3.3.2-cp311-cp311-musllinux_1_1_aarch64.whl", hash = "sha256:e537484df0d8f426ce2afb2d0f8e1c3d0b114b83f8850e5f2fbea0e797bd82ae"}, + {file = "charset_normalizer-3.3.2-cp311-cp311-musllinux_1_1_i686.whl", hash = "sha256:eb6904c354526e758fda7167b33005998fb68c46fbc10e013ca97f21ca5c8887"}, + {file = "charset_normalizer-3.3.2-cp311-cp311-musllinux_1_1_ppc64le.whl", hash = "sha256:deb6be0ac38ece9ba87dea880e438f25ca3eddfac8b002a2ec3d9183a454e8ae"}, + {file = "charset_normalizer-3.3.2-cp311-cp311-musllinux_1_1_s390x.whl", hash = "sha256:4ab2fe47fae9e0f9dee8c04187ce5d09f48eabe611be8259444906793ab7cbce"}, + {file = "charset_normalizer-3.3.2-cp311-cp311-musllinux_1_1_x86_64.whl", hash = "sha256:80402cd6ee291dcb72644d6eac93785fe2c8b9cb30893c1af5b8fdd753b9d40f"}, + {file = "charset_normalizer-3.3.2-cp311-cp311-win32.whl", hash = "sha256:7cd13a2e3ddeed6913a65e66e94b51d80a041145a026c27e6bb76c31a853c6ab"}, + {file = "charset_normalizer-3.3.2-cp311-cp311-win_amd64.whl", hash = "sha256:663946639d296df6a2bb2aa51b60a2454ca1cb29835324c640dafb5ff2131a77"}, + {file = "charset_normalizer-3.3.2-cp312-cp312-macosx_10_9_universal2.whl", hash = "sha256:0b2b64d2bb6d3fb9112bafa732def486049e63de9618b5843bcdd081d8144cd8"}, + {file = "charset_normalizer-3.3.2-cp312-cp312-macosx_10_9_x86_64.whl", hash = "sha256:ddbb2551d7e0102e7252db79ba445cdab71b26640817ab1e3e3648dad515003b"}, + {file = 
"charset_normalizer-3.3.2-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:55086ee1064215781fff39a1af09518bc9255b50d6333f2e4c74ca09fac6a8f6"}, + {file = "charset_normalizer-3.3.2-cp312-cp312-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:8f4a014bc36d3c57402e2977dada34f9c12300af536839dc38c0beab8878f38a"}, + {file = "charset_normalizer-3.3.2-cp312-cp312-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:a10af20b82360ab00827f916a6058451b723b4e65030c5a18577c8b2de5b3389"}, + {file = "charset_normalizer-3.3.2-cp312-cp312-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:8d756e44e94489e49571086ef83b2bb8ce311e730092d2c34ca8f7d925cb20aa"}, + {file = "charset_normalizer-3.3.2-cp312-cp312-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:90d558489962fd4918143277a773316e56c72da56ec7aa3dc3dbbe20fdfed15b"}, + {file = "charset_normalizer-3.3.2-cp312-cp312-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:6ac7ffc7ad6d040517be39eb591cac5ff87416c2537df6ba3cba3bae290c0fed"}, + {file = "charset_normalizer-3.3.2-cp312-cp312-musllinux_1_1_aarch64.whl", hash = "sha256:7ed9e526742851e8d5cc9e6cf41427dfc6068d4f5a3bb03659444b4cabf6bc26"}, + {file = "charset_normalizer-3.3.2-cp312-cp312-musllinux_1_1_i686.whl", hash = "sha256:8bdb58ff7ba23002a4c5808d608e4e6c687175724f54a5dade5fa8c67b604e4d"}, + {file = "charset_normalizer-3.3.2-cp312-cp312-musllinux_1_1_ppc64le.whl", hash = "sha256:6b3251890fff30ee142c44144871185dbe13b11bab478a88887a639655be1068"}, + {file = "charset_normalizer-3.3.2-cp312-cp312-musllinux_1_1_s390x.whl", hash = "sha256:b4a23f61ce87adf89be746c8a8974fe1c823c891d8f86eb218bb957c924bb143"}, + {file = "charset_normalizer-3.3.2-cp312-cp312-musllinux_1_1_x86_64.whl", hash = "sha256:efcb3f6676480691518c177e3b465bcddf57cea040302f9f4e6e191af91174d4"}, + {file = "charset_normalizer-3.3.2-cp312-cp312-win32.whl", hash = 
"sha256:d965bba47ddeec8cd560687584e88cf699fd28f192ceb452d1d7ee807c5597b7"}, + {file = "charset_normalizer-3.3.2-cp312-cp312-win_amd64.whl", hash = "sha256:96b02a3dc4381e5494fad39be677abcb5e6634bf7b4fa83a6dd3112607547001"}, + {file = "charset_normalizer-3.3.2-cp37-cp37m-macosx_10_9_x86_64.whl", hash = "sha256:95f2a5796329323b8f0512e09dbb7a1860c46a39da62ecb2324f116fa8fdc85c"}, + {file = "charset_normalizer-3.3.2-cp37-cp37m-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:c002b4ffc0be611f0d9da932eb0f704fe2602a9a949d1f738e4c34c75b0863d5"}, + {file = "charset_normalizer-3.3.2-cp37-cp37m-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:a981a536974bbc7a512cf44ed14938cf01030a99e9b3a06dd59578882f06f985"}, + {file = "charset_normalizer-3.3.2-cp37-cp37m-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:3287761bc4ee9e33561a7e058c72ac0938c4f57fe49a09eae428fd88aafe7bb6"}, + {file = "charset_normalizer-3.3.2-cp37-cp37m-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:42cb296636fcc8b0644486d15c12376cb9fa75443e00fb25de0b8602e64c1714"}, + {file = "charset_normalizer-3.3.2-cp37-cp37m-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:0a55554a2fa0d408816b3b5cedf0045f4b8e1a6065aec45849de2d6f3f8e9786"}, + {file = "charset_normalizer-3.3.2-cp37-cp37m-musllinux_1_1_aarch64.whl", hash = "sha256:c083af607d2515612056a31f0a8d9e0fcb5876b7bfc0abad3ecd275bc4ebc2d5"}, + {file = "charset_normalizer-3.3.2-cp37-cp37m-musllinux_1_1_i686.whl", hash = "sha256:87d1351268731db79e0f8e745d92493ee2841c974128ef629dc518b937d9194c"}, + {file = "charset_normalizer-3.3.2-cp37-cp37m-musllinux_1_1_ppc64le.whl", hash = "sha256:bd8f7df7d12c2db9fab40bdd87a7c09b1530128315d047a086fa3ae3435cb3a8"}, + {file = "charset_normalizer-3.3.2-cp37-cp37m-musllinux_1_1_s390x.whl", hash = "sha256:c180f51afb394e165eafe4ac2936a14bee3eb10debc9d9e4db8958fe36afe711"}, + {file = 
"charset_normalizer-3.3.2-cp37-cp37m-musllinux_1_1_x86_64.whl", hash = "sha256:8c622a5fe39a48f78944a87d4fb8a53ee07344641b0562c540d840748571b811"}, + {file = "charset_normalizer-3.3.2-cp37-cp37m-win32.whl", hash = "sha256:db364eca23f876da6f9e16c9da0df51aa4f104a972735574842618b8c6d999d4"}, + {file = "charset_normalizer-3.3.2-cp37-cp37m-win_amd64.whl", hash = "sha256:86216b5cee4b06df986d214f664305142d9c76df9b6512be2738aa72a2048f99"}, + {file = "charset_normalizer-3.3.2-cp38-cp38-macosx_10_9_universal2.whl", hash = "sha256:6463effa3186ea09411d50efc7d85360b38d5f09b870c48e4600f63af490e56a"}, + {file = "charset_normalizer-3.3.2-cp38-cp38-macosx_10_9_x86_64.whl", hash = "sha256:6c4caeef8fa63d06bd437cd4bdcf3ffefe6738fb1b25951440d80dc7df8c03ac"}, + {file = "charset_normalizer-3.3.2-cp38-cp38-macosx_11_0_arm64.whl", hash = "sha256:37e55c8e51c236f95b033f6fb391d7d7970ba5fe7ff453dad675e88cf303377a"}, + {file = "charset_normalizer-3.3.2-cp38-cp38-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:fb69256e180cb6c8a894fee62b3afebae785babc1ee98b81cdf68bbca1987f33"}, + {file = "charset_normalizer-3.3.2-cp38-cp38-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:ae5f4161f18c61806f411a13b0310bea87f987c7d2ecdbdaad0e94eb2e404238"}, + {file = "charset_normalizer-3.3.2-cp38-cp38-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:b2b0a0c0517616b6869869f8c581d4eb2dd83a4d79e0ebcb7d373ef9956aeb0a"}, + {file = "charset_normalizer-3.3.2-cp38-cp38-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:45485e01ff4d3630ec0d9617310448a8702f70e9c01906b0d0118bdf9d124cf2"}, + {file = "charset_normalizer-3.3.2-cp38-cp38-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:eb00ed941194665c332bf8e078baf037d6c35d7c4f3102ea2d4f16ca94a26dc8"}, + {file = "charset_normalizer-3.3.2-cp38-cp38-musllinux_1_1_aarch64.whl", hash = "sha256:2127566c664442652f024c837091890cb1942c30937add288223dc895793f898"}, + {file = 
"charset_normalizer-3.3.2-cp38-cp38-musllinux_1_1_i686.whl", hash = "sha256:a50aebfa173e157099939b17f18600f72f84eed3049e743b68ad15bd69b6bf99"}, + {file = "charset_normalizer-3.3.2-cp38-cp38-musllinux_1_1_ppc64le.whl", hash = "sha256:4d0d1650369165a14e14e1e47b372cfcb31d6ab44e6e33cb2d4e57265290044d"}, + {file = "charset_normalizer-3.3.2-cp38-cp38-musllinux_1_1_s390x.whl", hash = "sha256:923c0c831b7cfcb071580d3f46c4baf50f174be571576556269530f4bbd79d04"}, + {file = "charset_normalizer-3.3.2-cp38-cp38-musllinux_1_1_x86_64.whl", hash = "sha256:06a81e93cd441c56a9b65d8e1d043daeb97a3d0856d177d5c90ba85acb3db087"}, + {file = "charset_normalizer-3.3.2-cp38-cp38-win32.whl", hash = "sha256:6ef1d82a3af9d3eecdba2321dc1b3c238245d890843e040e41e470ffa64c3e25"}, + {file = "charset_normalizer-3.3.2-cp38-cp38-win_amd64.whl", hash = "sha256:eb8821e09e916165e160797a6c17edda0679379a4be5c716c260e836e122f54b"}, + {file = "charset_normalizer-3.3.2-cp39-cp39-macosx_10_9_universal2.whl", hash = "sha256:c235ebd9baae02f1b77bcea61bce332cb4331dc3617d254df3323aa01ab47bd4"}, + {file = "charset_normalizer-3.3.2-cp39-cp39-macosx_10_9_x86_64.whl", hash = "sha256:5b4c145409bef602a690e7cfad0a15a55c13320ff7a3ad7ca59c13bb8ba4d45d"}, + {file = "charset_normalizer-3.3.2-cp39-cp39-macosx_11_0_arm64.whl", hash = "sha256:68d1f8a9e9e37c1223b656399be5d6b448dea850bed7d0f87a8311f1ff3dabb0"}, + {file = "charset_normalizer-3.3.2-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:22afcb9f253dac0696b5a4be4a1c0f8762f8239e21b99680099abd9b2b1b2269"}, + {file = "charset_normalizer-3.3.2-cp39-cp39-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:e27ad930a842b4c5eb8ac0016b0a54f5aebbe679340c26101df33424142c143c"}, + {file = "charset_normalizer-3.3.2-cp39-cp39-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:1f79682fbe303db92bc2b1136016a38a42e835d932bab5b3b1bfcfbf0640e519"}, + {file = "charset_normalizer-3.3.2-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash 
= "sha256:b261ccdec7821281dade748d088bb6e9b69e6d15b30652b74cbbac25e280b796"}, + {file = "charset_normalizer-3.3.2-cp39-cp39-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:122c7fa62b130ed55f8f285bfd56d5f4b4a5b503609d181f9ad85e55c89f4185"}, + {file = "charset_normalizer-3.3.2-cp39-cp39-musllinux_1_1_aarch64.whl", hash = "sha256:d0eccceffcb53201b5bfebb52600a5fb483a20b61da9dbc885f8b103cbe7598c"}, + {file = "charset_normalizer-3.3.2-cp39-cp39-musllinux_1_1_i686.whl", hash = "sha256:9f96df6923e21816da7e0ad3fd47dd8f94b2a5ce594e00677c0013018b813458"}, + {file = "charset_normalizer-3.3.2-cp39-cp39-musllinux_1_1_ppc64le.whl", hash = "sha256:7f04c839ed0b6b98b1a7501a002144b76c18fb1c1850c8b98d458ac269e26ed2"}, + {file = "charset_normalizer-3.3.2-cp39-cp39-musllinux_1_1_s390x.whl", hash = "sha256:34d1c8da1e78d2e001f363791c98a272bb734000fcef47a491c1e3b0505657a8"}, + {file = "charset_normalizer-3.3.2-cp39-cp39-musllinux_1_1_x86_64.whl", hash = "sha256:ff8fa367d09b717b2a17a052544193ad76cd49979c805768879cb63d9ca50561"}, + {file = "charset_normalizer-3.3.2-cp39-cp39-win32.whl", hash = "sha256:aed38f6e4fb3f5d6bf81bfa990a07806be9d83cf7bacef998ab1a9bd660a581f"}, + {file = "charset_normalizer-3.3.2-cp39-cp39-win_amd64.whl", hash = "sha256:b01b88d45a6fcb69667cd6d2f7a9aeb4bf53760d7fc536bf679ec94fe9f3ff3d"}, + {file = "charset_normalizer-3.3.2-py3-none-any.whl", hash = "sha256:3e4d1f6587322d2788836a99c69062fbb091331ec940e02d12d179c1d53e25fc"}, +] + +[[package]] +name = "click" +version = "8.1.7" +description = "Composable command line interface toolkit" +optional = false +python-versions = ">=3.7" +files = [ + {file = "click-8.1.7-py3-none-any.whl", hash = "sha256:ae74fb96c20a0277a1d615f1e4d73c8414f5a98db8b799a7931d1582f3390c28"}, + {file = "click-8.1.7.tar.gz", hash = "sha256:ca9853ad459e787e2192211578cc907e7594e294c7ccc834310722b41b9ca6de"}, +] + +[package.dependencies] +colorama = {version = "*", markers = "platform_system == \"Windows\""} 
+ +[[package]] +name = "colorama" +version = "0.4.6" +description = "Cross-platform colored terminal text." +optional = false +python-versions = "!=3.0.*,!=3.1.*,!=3.2.*,!=3.3.*,!=3.4.*,!=3.5.*,!=3.6.*,>=2.7" +files = [ + {file = "colorama-0.4.6-py2.py3-none-any.whl", hash = "sha256:4f1d9991f5acc0ca119f9d443620b77f9d6b33703e51011c16baf57afb285fc6"}, + {file = "colorama-0.4.6.tar.gz", hash = "sha256:08695f5cb7ed6e0531a20572697297273c47b8cae5a63ffc6d6ed5c201be6e44"}, +] + +[[package]] +name = "comm" +version = "0.2.2" +description = "Jupyter Python Comm implementation, for usage in ipykernel, xeus-python etc." +optional = false +python-versions = ">=3.8" +files = [ + {file = "comm-0.2.2-py3-none-any.whl", hash = "sha256:e6fb86cb70ff661ee8c9c14e7d36d6de3b4066f1441be4063df9c5009f0a64d3"}, + {file = "comm-0.2.2.tar.gz", hash = "sha256:3fd7a84065306e07bea1773df6eb8282de51ba82f77c72f9c85716ab11fe980e"}, +] + +[package.dependencies] +traitlets = ">=4" + +[package.extras] +test = ["pytest"] + +[[package]] +name = "debugpy" +version = "1.8.1" +description = "An implementation of the Debug Adapter Protocol for Python" +optional = false +python-versions = ">=3.8" +files = [ + {file = "debugpy-1.8.1-cp310-cp310-macosx_11_0_x86_64.whl", hash = "sha256:3bda0f1e943d386cc7a0e71bfa59f4137909e2ed947fb3946c506e113000f741"}, + {file = "debugpy-1.8.1-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:dda73bf69ea479c8577a0448f8c707691152e6c4de7f0c4dec5a4bc11dee516e"}, + {file = "debugpy-1.8.1-cp310-cp310-win32.whl", hash = "sha256:3a79c6f62adef994b2dbe9fc2cc9cc3864a23575b6e387339ab739873bea53d0"}, + {file = "debugpy-1.8.1-cp310-cp310-win_amd64.whl", hash = "sha256:7eb7bd2b56ea3bedb009616d9e2f64aab8fc7000d481faec3cd26c98a964bcdd"}, + {file = "debugpy-1.8.1-cp311-cp311-macosx_11_0_universal2.whl", hash = "sha256:016a9fcfc2c6b57f939673c874310d8581d51a0fe0858e7fac4e240c5eb743cb"}, + {file = "debugpy-1.8.1-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", 
hash = "sha256:fd97ed11a4c7f6d042d320ce03d83b20c3fb40da892f994bc041bbc415d7a099"}, + {file = "debugpy-1.8.1-cp311-cp311-win32.whl", hash = "sha256:0de56aba8249c28a300bdb0672a9b94785074eb82eb672db66c8144fff673146"}, + {file = "debugpy-1.8.1-cp311-cp311-win_amd64.whl", hash = "sha256:1a9fe0829c2b854757b4fd0a338d93bc17249a3bf69ecf765c61d4c522bb92a8"}, + {file = "debugpy-1.8.1-cp312-cp312-macosx_11_0_universal2.whl", hash = "sha256:3ebb70ba1a6524d19fa7bb122f44b74170c447d5746a503e36adc244a20ac539"}, + {file = "debugpy-1.8.1-cp312-cp312-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:a2e658a9630f27534e63922ebf655a6ab60c370f4d2fc5c02a5b19baf4410ace"}, + {file = "debugpy-1.8.1-cp312-cp312-win32.whl", hash = "sha256:caad2846e21188797a1f17fc09c31b84c7c3c23baf2516fed5b40b378515bbf0"}, + {file = "debugpy-1.8.1-cp312-cp312-win_amd64.whl", hash = "sha256:edcc9f58ec0fd121a25bc950d4578df47428d72e1a0d66c07403b04eb93bcf98"}, + {file = "debugpy-1.8.1-cp38-cp38-macosx_11_0_x86_64.whl", hash = "sha256:7a3afa222f6fd3d9dfecd52729bc2e12c93e22a7491405a0ecbf9e1d32d45b39"}, + {file = "debugpy-1.8.1-cp38-cp38-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:d915a18f0597ef685e88bb35e5d7ab968964b7befefe1aaea1eb5b2640b586c7"}, + {file = "debugpy-1.8.1-cp38-cp38-win32.whl", hash = "sha256:92116039b5500633cc8d44ecc187abe2dfa9b90f7a82bbf81d079fcdd506bae9"}, + {file = "debugpy-1.8.1-cp38-cp38-win_amd64.whl", hash = "sha256:e38beb7992b5afd9d5244e96ad5fa9135e94993b0c551ceebf3fe1a5d9beb234"}, + {file = "debugpy-1.8.1-cp39-cp39-macosx_11_0_x86_64.whl", hash = "sha256:bfb20cb57486c8e4793d41996652e5a6a885b4d9175dd369045dad59eaacea42"}, + {file = "debugpy-1.8.1-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:efd3fdd3f67a7e576dd869c184c5dd71d9aaa36ded271939da352880c012e703"}, + {file = "debugpy-1.8.1-cp39-cp39-win32.whl", hash = "sha256:58911e8521ca0c785ac7a0539f1e77e0ce2df753f786188f382229278b4cdf23"}, + {file = 
"debugpy-1.8.1-cp39-cp39-win_amd64.whl", hash = "sha256:6df9aa9599eb05ca179fb0b810282255202a66835c6efb1d112d21ecb830ddd3"}, + {file = "debugpy-1.8.1-py2.py3-none-any.whl", hash = "sha256:28acbe2241222b87e255260c76741e1fbf04fdc3b6d094fcf57b6c6f75ce1242"}, + {file = "debugpy-1.8.1.zip", hash = "sha256:f696d6be15be87aef621917585f9bb94b1dc9e8aced570db1b8a6fc14e8f9b42"}, +] + +[[package]] +name = "decorator" +version = "5.1.1" +description = "Decorators for Humans" +optional = false +python-versions = ">=3.5" +files = [ + {file = "decorator-5.1.1-py3-none-any.whl", hash = "sha256:b8c3f85900b9dc423225913c5aace94729fe1fa9763b38939a95226f02d37186"}, + {file = "decorator-5.1.1.tar.gz", hash = "sha256:637996211036b6385ef91435e4fae22989472f9d571faba8927ba8253acbc330"}, +] + +[[package]] +name = "defusedxml" +version = "0.7.1" +description = "XML bomb protection for Python stdlib modules" +optional = false +python-versions = ">=2.7, !=3.0.*, !=3.1.*, !=3.2.*, !=3.3.*, !=3.4.*" +files = [ + {file = "defusedxml-0.7.1-py2.py3-none-any.whl", hash = "sha256:a352e7e428770286cc899e2542b6cdaedb2b4953ff269a210103ec58f6198a61"}, + {file = "defusedxml-0.7.1.tar.gz", hash = "sha256:1bb3032db185915b62d7c6209c5a8792be6a32ab2fedacc84e01b52c51aa3e69"}, +] + +[[package]] +name = "distro" +version = "1.9.0" +description = "Distro - an OS platform information API" +optional = false +python-versions = ">=3.6" +files = [ + {file = "distro-1.9.0-py3-none-any.whl", hash = "sha256:7bffd925d65168f85027d8da9af6bddab658135b840670a223589bc0c8ef02b2"}, + {file = "distro-1.9.0.tar.gz", hash = "sha256:2fa77c6fd8940f116ee1d6b94a2f90b13b5ea8d019b98bc8bafdcabcdd9bdbed"}, +] + +[[package]] +name = "docstring-parser" +version = "0.16" +description = "Parse Python docstrings in reST, Google and Numpydoc format" +optional = false +python-versions = ">=3.6,<4.0" +files = [ + {file = "docstring_parser-0.16-py3-none-any.whl", hash = "sha256:bf0a1387354d3691d102edef7ec124f219ef639982d096e26e3b60aeffa90637"}, + {file = 
"docstring_parser-0.16.tar.gz", hash = "sha256:538beabd0af1e2db0146b6bd3caa526c35a34d61af9fd2887f3a8a27a739aa6e"}, +] + +[[package]] +name = "exceptiongroup" +version = "1.2.1" +description = "Backport of PEP 654 (exception groups)" +optional = false +python-versions = ">=3.7" +files = [ + {file = "exceptiongroup-1.2.1-py3-none-any.whl", hash = "sha256:5258b9ed329c5bbdd31a309f53cbfb0b155341807f6ff7606a1e801a891b29ad"}, + {file = "exceptiongroup-1.2.1.tar.gz", hash = "sha256:a4785e48b045528f5bfe627b6ad554ff32def154f42372786903b7abcfe1aa16"}, +] + +[package.extras] +test = ["pytest (>=6)"] + +[[package]] +name = "executing" +version = "2.0.1" +description = "Get the currently executing AST node of a frame, and other information" +optional = false +python-versions = ">=3.5" +files = [ + {file = "executing-2.0.1-py2.py3-none-any.whl", hash = "sha256:eac49ca94516ccc753f9fb5ce82603156e590b27525a8bc32cce8ae302eb61bc"}, + {file = "executing-2.0.1.tar.gz", hash = "sha256:35afe2ce3affba8ee97f2d69927fa823b08b472b7b994e36a52a964b93d16147"}, +] + +[package.extras] +tests = ["asttokens (>=2.1.0)", "coverage", "coverage-enable-subprocess", "ipython", "littleutils", "pytest", "rich"] + +[[package]] +name = "fastjsonschema" +version = "2.19.1" +description = "Fastest Python implementation of JSON schema" +optional = false +python-versions = "*" +files = [ + {file = "fastjsonschema-2.19.1-py3-none-any.whl", hash = "sha256:3672b47bc94178c9f23dbb654bf47440155d4db9df5f7bc47643315f9c405cd0"}, + {file = "fastjsonschema-2.19.1.tar.gz", hash = "sha256:e3126a94bdc4623d3de4485f8d468a12f02a67921315ddc87836d6e456dc789d"}, +] + +[package.extras] +devel = ["colorama", "json-spec", "jsonschema", "pylint", "pytest", "pytest-benchmark", "pytest-cache", "validictory"] + +[[package]] +name = "filelock" +version = "3.14.0" +description = "A platform independent file lock." 
+optional = false +python-versions = ">=3.8" +files = [ + {file = "filelock-3.14.0-py3-none-any.whl", hash = "sha256:43339835842f110ca7ae60f1e1c160714c5a6afd15a2873419ab185334975c0f"}, + {file = "filelock-3.14.0.tar.gz", hash = "sha256:6ea72da3be9b8c82afd3edcf99f2fffbb5076335a5ae4d03248bb5b6c3eae78a"}, +] + +[package.extras] +docs = ["furo (>=2023.9.10)", "sphinx (>=7.2.6)", "sphinx-autodoc-typehints (>=1.25.2)"] +testing = ["covdefaults (>=2.3)", "coverage (>=7.3.2)", "diff-cover (>=8.0.1)", "pytest (>=7.4.3)", "pytest-cov (>=4.1)", "pytest-mock (>=3.12)", "pytest-timeout (>=2.2)"] +typing = ["typing-extensions (>=4.8)"] + +[[package]] +name = "flake8" +version = "6.1.0" +description = "the modular source code checker: pep8 pyflakes and co" +optional = false +python-versions = ">=3.8.1" +files = [ + {file = "flake8-6.1.0-py2.py3-none-any.whl", hash = "sha256:ffdfce58ea94c6580c77888a86506937f9a1a227dfcd15f245d694ae20a6b6e5"}, + {file = "flake8-6.1.0.tar.gz", hash = "sha256:d5b3857f07c030bdb5bf41c7f53799571d75c4491748a3adcd47de929e34cd23"}, +] + +[package.dependencies] +mccabe = ">=0.7.0,<0.8.0" +pycodestyle = ">=2.11.0,<2.12.0" +pyflakes = ">=3.1.0,<3.2.0" + +[[package]] +name = "fqdn" +version = "1.5.1" +description = "Validates fully-qualified domain names against RFC 1123, so that they are acceptable to modern bowsers" +optional = false +python-versions = ">=2.7, !=3.0, !=3.1, !=3.2, !=3.3, !=3.4, <4" +files = [ + {file = "fqdn-1.5.1-py3-none-any.whl", hash = "sha256:3a179af3761e4df6eb2e026ff9e1a3033d3587bf980a0b1b2e1e5d08d7358014"}, + {file = "fqdn-1.5.1.tar.gz", hash = "sha256:105ed3677e767fb5ca086a0c1f4bb66ebc3c100be518f0e0d755d9eae164d89f"}, +] + +[[package]] +name = "fsspec" +version = "2024.3.1" +description = "File-system specification" +optional = false +python-versions = ">=3.8" +files = [ + {file = "fsspec-2024.3.1-py3-none-any.whl", hash = "sha256:918d18d41bf73f0e2b261824baeb1b124bcf771767e3a26425cd7dec3332f512"}, + {file = "fsspec-2024.3.1.tar.gz", 
hash = "sha256:f39780e282d7d117ffb42bb96992f8a90795e4d0fb0f661a70ca39fe9c43ded9"}, +] + +[package.extras] +abfs = ["adlfs"] +adl = ["adlfs"] +arrow = ["pyarrow (>=1)"] +dask = ["dask", "distributed"] +devel = ["pytest", "pytest-cov"] +dropbox = ["dropbox", "dropboxdrivefs", "requests"] +full = ["adlfs", "aiohttp (!=4.0.0a0,!=4.0.0a1)", "dask", "distributed", "dropbox", "dropboxdrivefs", "fusepy", "gcsfs", "libarchive-c", "ocifs", "panel", "paramiko", "pyarrow (>=1)", "pygit2", "requests", "s3fs", "smbprotocol", "tqdm"] +fuse = ["fusepy"] +gcs = ["gcsfs"] +git = ["pygit2"] +github = ["requests"] +gs = ["gcsfs"] +gui = ["panel"] +hdfs = ["pyarrow (>=1)"] +http = ["aiohttp (!=4.0.0a0,!=4.0.0a1)"] +libarchive = ["libarchive-c"] +oci = ["ocifs"] +s3 = ["s3fs"] +sftp = ["paramiko"] +smb = ["smbprotocol"] +ssh = ["paramiko"] +tqdm = ["tqdm"] + +[[package]] +name = "google-ai-generativelanguage" +version = "0.6.2" +description = "Google Ai Generativelanguage API client library" +optional = false +python-versions = ">=3.7" +files = [ + {file = "google-ai-generativelanguage-0.6.2.tar.gz", hash = "sha256:308791ac3b9dad015b359172970739aa3753dd542142a416d07f9fa047e22386"}, + {file = "google_ai_generativelanguage-0.6.2-py3-none-any.whl", hash = "sha256:bf84c34c641570d7e8a1f2e6901e6771af1438f2ee8307d1801fd43585f9b1c6"}, +] + +[package.dependencies] +google-api-core = {version = ">=1.34.1,<2.0.dev0 || >=2.11.dev0,<3.0.0dev", extras = ["grpc"]} +google-auth = ">=2.14.1,<2.24.0 || >2.24.0,<2.25.0 || >2.25.0,<3.0.0dev" +proto-plus = ">=1.22.3,<2.0.0dev" +protobuf = ">=3.19.5,<3.20.0 || >3.20.0,<3.20.1 || >3.20.1,<4.21.0 || >4.21.0,<4.21.1 || >4.21.1,<4.21.2 || >4.21.2,<4.21.3 || >4.21.3,<4.21.4 || >4.21.4,<4.21.5 || >4.21.5,<5.0.0dev" + +[[package]] +name = "google-api-core" +version = "2.19.0" +description = "Google API client core library" +optional = false +python-versions = ">=3.7" +files = [ + {file = "google-api-core-2.19.0.tar.gz", hash = 
"sha256:cf1b7c2694047886d2af1128a03ae99e391108a08804f87cfd35970e49c9cd10"}, + {file = "google_api_core-2.19.0-py3-none-any.whl", hash = "sha256:8661eec4078c35428fd3f69a2c7ee29e342896b70f01d1a1cbcb334372dd6251"}, +] + +[package.dependencies] +google-auth = ">=2.14.1,<3.0.dev0" +googleapis-common-protos = ">=1.56.2,<2.0.dev0" +grpcio = [ + {version = ">=1.49.1,<2.0dev", optional = true, markers = "python_version >= \"3.11\" and extra == \"grpc\""}, + {version = ">=1.33.2,<2.0dev", optional = true, markers = "python_version < \"3.11\" and extra == \"grpc\""}, +] +grpcio-status = [ + {version = ">=1.49.1,<2.0.dev0", optional = true, markers = "python_version >= \"3.11\" and extra == \"grpc\""}, + {version = ">=1.33.2,<2.0.dev0", optional = true, markers = "python_version < \"3.11\" and extra == \"grpc\""}, +] +proto-plus = ">=1.22.3,<2.0.0dev" +protobuf = ">=3.19.5,<3.20.0 || >3.20.0,<3.20.1 || >3.20.1,<4.21.0 || >4.21.0,<4.21.1 || >4.21.1,<4.21.2 || >4.21.2,<4.21.3 || >4.21.3,<4.21.4 || >4.21.4,<4.21.5 || >4.21.5,<5.0.0.dev0" +requests = ">=2.18.0,<3.0.0.dev0" + +[package.extras] +grpc = ["grpcio (>=1.33.2,<2.0dev)", "grpcio (>=1.49.1,<2.0dev)", "grpcio-status (>=1.33.2,<2.0.dev0)", "grpcio-status (>=1.49.1,<2.0.dev0)"] +grpcgcp = ["grpcio-gcp (>=0.2.2,<1.0.dev0)"] +grpcio-gcp = ["grpcio-gcp (>=0.2.2,<1.0.dev0)"] + +[[package]] +name = "google-api-python-client" +version = "2.127.0" +description = "Google API Client Library for Python" +optional = false +python-versions = ">=3.7" +files = [ + {file = "google-api-python-client-2.127.0.tar.gz", hash = "sha256:bbb51b0fbccdf40e536c26341e372d7800f09afebb53103bbcc94e08f14b523b"}, + {file = "google_api_python_client-2.127.0-py2.py3-none-any.whl", hash = "sha256:d01c70c7840ec37888aa02b1aea5d9baba4c1701e268d1a0251640afd56e5e90"}, +] + +[package.dependencies] +google-api-core = ">=1.31.5,<2.0.dev0 || >2.3.0,<3.0.0.dev0" +google-auth = ">=1.32.0,<2.24.0 || >2.24.0,<2.25.0 || >2.25.0,<3.0.0.dev0" +google-auth-httplib2 = 
">=0.2.0,<1.0.0" +httplib2 = ">=0.19.0,<1.dev0" +uritemplate = ">=3.0.1,<5" + +[[package]] +name = "google-auth" +version = "2.29.0" +description = "Google Authentication Library" +optional = false +python-versions = ">=3.7" +files = [ + {file = "google-auth-2.29.0.tar.gz", hash = "sha256:672dff332d073227550ffc7457868ac4218d6c500b155fe6cc17d2b13602c360"}, + {file = "google_auth-2.29.0-py2.py3-none-any.whl", hash = "sha256:d452ad095688cd52bae0ad6fafe027f6a6d6f560e810fec20914e17a09526415"}, +] + +[package.dependencies] +cachetools = ">=2.0.0,<6.0" +pyasn1-modules = ">=0.2.1" +rsa = ">=3.1.4,<5" + +[package.extras] +aiohttp = ["aiohttp (>=3.6.2,<4.0.0.dev0)", "requests (>=2.20.0,<3.0.0.dev0)"] +enterprise-cert = ["cryptography (==36.0.2)", "pyopenssl (==22.0.0)"] +pyopenssl = ["cryptography (>=38.0.3)", "pyopenssl (>=20.0.0)"] +reauth = ["pyu2f (>=0.1.5)"] +requests = ["requests (>=2.20.0,<3.0.0.dev0)"] + +[[package]] +name = "google-auth-httplib2" +version = "0.2.0" +description = "Google Authentication Library: httplib2 transport" +optional = false +python-versions = "*" +files = [ + {file = "google-auth-httplib2-0.2.0.tar.gz", hash = "sha256:38aa7badf48f974f1eb9861794e9c0cb2a0511a4ec0679b1f886d108f5640e05"}, + {file = "google_auth_httplib2-0.2.0-py2.py3-none-any.whl", hash = "sha256:b65a0a2123300dd71281a7bf6e64d65a0759287df52729bdd1ae2e47dc311a3d"}, +] + +[package.dependencies] +google-auth = "*" +httplib2 = ">=0.19.0" + +[[package]] +name = "google-cloud-aiplatform" +version = "1.49.0" +description = "Vertex AI API client library" +optional = false +python-versions = ">=3.8" +files = [ + {file = "google-cloud-aiplatform-1.49.0.tar.gz", hash = "sha256:e6e6d01079bb5def49e4be4db4d12b13c624b5c661079c869c13c855e5807429"}, + {file = "google_cloud_aiplatform-1.49.0-py2.py3-none-any.whl", hash = "sha256:8072d9e0c18d8942c704233d1a93b8d6312fc7b278786a283247950e28ae98df"}, +] + +[package.dependencies] +docstring-parser = "<1" +google-api-core = {version = ">=1.34.1,<2.0.dev0 
|| >=2.8.dev0,<3.0.0dev", extras = ["grpc"]} +google-auth = ">=2.14.1,<3.0.0dev" +google-cloud-bigquery = ">=1.15.0,<3.20.0 || >3.20.0,<4.0.0dev" +google-cloud-resource-manager = ">=1.3.3,<3.0.0dev" +google-cloud-storage = ">=1.32.0,<3.0.0dev" +packaging = ">=14.3" +proto-plus = ">=1.22.0,<2.0.0dev" +protobuf = ">=3.19.5,<3.20.0 || >3.20.0,<3.20.1 || >3.20.1,<4.21.0 || >4.21.0,<4.21.1 || >4.21.1,<4.21.2 || >4.21.2,<4.21.3 || >4.21.3,<4.21.4 || >4.21.4,<4.21.5 || >4.21.5,<5.0.0dev" +pydantic = "<3" +shapely = "<3.0.0dev" + +[package.extras] +autologging = ["mlflow (>=1.27.0,<=2.1.1)"] +cloud-profiler = ["tensorboard-plugin-profile (>=2.4.0,<3.0.0dev)", "tensorflow (>=2.4.0,<3.0.0dev)", "werkzeug (>=2.0.0,<2.1.0dev)"] +datasets = ["pyarrow (>=10.0.1)", "pyarrow (>=14.0.0)", "pyarrow (>=3.0.0,<8.0dev)"] +endpoint = ["requests (>=2.28.1)"] +full = ["cloudpickle (<3.0)", "docker (>=5.0.3)", "explainable-ai-sdk (>=1.0.0)", "fastapi (>=0.71.0,<=0.109.1)", "google-cloud-bigquery", "google-cloud-bigquery-storage", "google-cloud-logging (<4.0)", "google-vizier (>=0.1.6)", "httpx (>=0.23.0,<0.25.0)", "immutabledict", "lit-nlp (==0.4.0)", "mlflow (>=1.27.0,<=2.1.1)", "nest-asyncio (>=1.0.0,<1.6.0)", "numpy (>=1.15.0)", "pandas (>=1.0.0)", "pandas (>=1.0.0,<2.2.0)", "pyarrow (>=10.0.1)", "pyarrow (>=14.0.0)", "pyarrow (>=3.0.0,<8.0dev)", "pyarrow (>=6.0.1)", "pydantic (<2)", "pyyaml (>=5.3.1,<7)", "ray[default] (>=2.4,<2.5.dev0 || >2.9.0,!=2.9.1,!=2.9.2,<=2.9.3)", "ray[default] (>=2.5,<=2.9.3)", "requests (>=2.28.1)", "starlette (>=0.17.1)", "tensorflow (>=2.3.0,<3.0.0dev)", "tensorflow (>=2.3.0,<3.0.0dev)", "urllib3 (>=1.21.1,<1.27)", "uvicorn[standard] (>=0.16.0)"] +langchain = ["langchain (>=0.1.13,<0.2)", "langchain-core (<0.2)", "langchain-google-vertexai (<0.2)"] +langchain-testing = ["absl-py", "cloudpickle (>=2.2.1,<3.0)", "langchain (>=0.1.13,<0.2)", "langchain-core (<0.2)", "langchain-google-vertexai (<0.2)", "pydantic (>=2.6.3,<3)", "pytest-xdist"] +lit = 
["explainable-ai-sdk (>=1.0.0)", "lit-nlp (==0.4.0)", "pandas (>=1.0.0)", "tensorflow (>=2.3.0,<3.0.0dev)"] +metadata = ["numpy (>=1.15.0)", "pandas (>=1.0.0)"] +pipelines = ["pyyaml (>=5.3.1,<7)"] +prediction = ["docker (>=5.0.3)", "fastapi (>=0.71.0,<=0.109.1)", "httpx (>=0.23.0,<0.25.0)", "starlette (>=0.17.1)", "uvicorn[standard] (>=0.16.0)"] +preview = ["cloudpickle (<3.0)", "google-cloud-logging (<4.0)"] +private-endpoints = ["requests (>=2.28.1)", "urllib3 (>=1.21.1,<1.27)"] +rapid-evaluation = ["nest-asyncio (>=1.0.0,<1.6.0)", "pandas (>=1.0.0,<2.2.0)"] +ray = ["google-cloud-bigquery", "google-cloud-bigquery-storage", "immutabledict", "pandas (>=1.0.0,<2.2.0)", "pyarrow (>=6.0.1)", "pydantic (<2)", "ray[default] (>=2.4,<2.5.dev0 || >2.9.0,!=2.9.1,!=2.9.2,<=2.9.3)", "ray[default] (>=2.5,<=2.9.3)"] +ray-testing = ["google-cloud-bigquery", "google-cloud-bigquery-storage", "immutabledict", "pandas (>=1.0.0,<2.2.0)", "pyarrow (>=6.0.1)", "pydantic (<2)", "pytest-xdist", "ray[default] (>=2.4,<2.5.dev0 || >2.9.0,!=2.9.1,!=2.9.2,<=2.9.3)", "ray[default] (>=2.5,<=2.9.3)", "ray[train] (>=2.4,<2.5.dev0 || >2.9.0,!=2.9.1,!=2.9.2,<=2.9.3)", "scikit-learn", "tensorflow", "torch (>=2.0.0,<2.1.0)", "xgboost", "xgboost-ray"] +reasoningengine = ["cloudpickle (>=2.2.1,<3.0)", "pydantic (>=2.6.3,<3)"] +tensorboard = ["tensorflow (>=2.3.0,<3.0.0dev)"] +testing = ["bigframes", "cloudpickle (<3.0)", "docker (>=5.0.3)", "explainable-ai-sdk (>=1.0.0)", "fastapi (>=0.71.0,<=0.109.1)", "google-api-core (>=2.11,<3.0.0)", "google-cloud-bigquery", "google-cloud-bigquery-storage", "google-cloud-logging (<4.0)", "google-vizier (>=0.1.6)", "grpcio-testing", "httpx (>=0.23.0,<0.25.0)", "immutabledict", "ipython", "kfp (>=2.6.0,<3.0.0)", "lit-nlp (==0.4.0)", "mlflow (>=1.27.0,<=2.1.1)", "nest-asyncio (>=1.0.0,<1.6.0)", "numpy (>=1.15.0)", "pandas (>=1.0.0)", "pandas (>=1.0.0,<2.2.0)", "pyarrow (>=10.0.1)", "pyarrow (>=14.0.0)", "pyarrow (>=3.0.0,<8.0dev)", "pyarrow (>=6.0.1)", "pydantic 
(<2)", "pyfakefs", "pytest-asyncio", "pytest-xdist", "pyyaml (>=5.3.1,<7)", "ray[default] (>=2.4,<2.5.dev0 || >2.9.0,!=2.9.1,!=2.9.2,<=2.9.3)", "ray[default] (>=2.5,<=2.9.3)", "requests (>=2.28.1)", "requests-toolbelt (<1.0.0)", "scikit-learn", "starlette (>=0.17.1)", "tensorboard-plugin-profile (>=2.4.0,<3.0.0dev)", "tensorflow (==2.13.0)", "tensorflow (==2.16.1)", "tensorflow (>=2.3.0,<3.0.0dev)", "tensorflow (>=2.3.0,<3.0.0dev)", "tensorflow (>=2.4.0,<3.0.0dev)", "torch (>=2.0.0,<2.1.0)", "torch (>=2.2.0)", "urllib3 (>=1.21.1,<1.27)", "uvicorn[standard] (>=0.16.0)", "werkzeug (>=2.0.0,<2.1.0dev)", "xgboost"] +vizier = ["google-vizier (>=0.1.6)"] +xai = ["tensorflow (>=2.3.0,<3.0.0dev)"] + +[[package]] +name = "google-cloud-bigquery" +version = "3.21.0" +description = "Google BigQuery API client library" +optional = false +python-versions = ">=3.7" +files = [ + {file = "google-cloud-bigquery-3.21.0.tar.gz", hash = "sha256:6265c39f9d5bdf50f11cb81a9c2a0605d285df34ac139de0d2333b1250add0ff"}, + {file = "google_cloud_bigquery-3.21.0-py2.py3-none-any.whl", hash = "sha256:83a090aae16b3a687ef22e7b0a1b551e18da615b1c4855c5f312f198959e7739"}, +] + +[package.dependencies] +google-api-core = {version = ">=1.34.1,<2.0.dev0 || >=2.11.dev0,<3.0.0dev", extras = ["grpc"]} +google-auth = ">=2.14.1,<3.0.0dev" +google-cloud-core = ">=1.6.0,<3.0.0dev" +google-resumable-media = ">=0.6.0,<3.0dev" +packaging = ">=20.0.0" +python-dateutil = ">=2.7.2,<3.0dev" +requests = ">=2.21.0,<3.0.0dev" + +[package.extras] +all = ["Shapely (>=1.8.4,<3.0.0dev)", "db-dtypes (>=0.3.0,<2.0.0dev)", "geopandas (>=0.9.0,<1.0dev)", "google-cloud-bigquery-storage (>=2.6.0,<3.0.0dev)", "grpcio (>=1.47.0,<2.0dev)", "grpcio (>=1.49.1,<2.0dev)", "importlib-metadata (>=1.0.0)", "ipykernel (>=6.0.0)", "ipython (>=7.23.1,!=8.1.0)", "ipywidgets (>=7.7.0)", "opentelemetry-api (>=1.1.0)", "opentelemetry-instrumentation (>=0.20b0)", "opentelemetry-sdk (>=1.1.0)", "pandas (>=1.1.0)", "proto-plus (>=1.15.0,<2.0.0dev)", 
"protobuf (>=3.19.5,!=3.20.0,!=3.20.1,!=4.21.0,!=4.21.1,!=4.21.2,!=4.21.3,!=4.21.4,!=4.21.5,<5.0.0dev)", "pyarrow (>=3.0.0)", "tqdm (>=4.7.4,<5.0.0dev)"] +bigquery-v2 = ["proto-plus (>=1.15.0,<2.0.0dev)", "protobuf (>=3.19.5,!=3.20.0,!=3.20.1,!=4.21.0,!=4.21.1,!=4.21.2,!=4.21.3,!=4.21.4,!=4.21.5,<5.0.0dev)"] +bqstorage = ["google-cloud-bigquery-storage (>=2.6.0,<3.0.0dev)", "grpcio (>=1.47.0,<2.0dev)", "grpcio (>=1.49.1,<2.0dev)", "pyarrow (>=3.0.0)"] +geopandas = ["Shapely (>=1.8.4,<3.0.0dev)", "geopandas (>=0.9.0,<1.0dev)"] +ipython = ["ipykernel (>=6.0.0)", "ipython (>=7.23.1,!=8.1.0)"] +ipywidgets = ["ipykernel (>=6.0.0)", "ipywidgets (>=7.7.0)"] +opentelemetry = ["opentelemetry-api (>=1.1.0)", "opentelemetry-instrumentation (>=0.20b0)", "opentelemetry-sdk (>=1.1.0)"] +pandas = ["db-dtypes (>=0.3.0,<2.0.0dev)", "importlib-metadata (>=1.0.0)", "pandas (>=1.1.0)", "pyarrow (>=3.0.0)"] +tqdm = ["tqdm (>=4.7.4,<5.0.0dev)"] + +[[package]] +name = "google-cloud-core" +version = "2.4.1" +description = "Google Cloud API client core library" +optional = false +python-versions = ">=3.7" +files = [ + {file = "google-cloud-core-2.4.1.tar.gz", hash = "sha256:9b7749272a812bde58fff28868d0c5e2f585b82f37e09a1f6ed2d4d10f134073"}, + {file = "google_cloud_core-2.4.1-py2.py3-none-any.whl", hash = "sha256:a9e6a4422b9ac5c29f79a0ede9485473338e2ce78d91f2370c01e730eab22e61"}, +] + +[package.dependencies] +google-api-core = ">=1.31.6,<2.0.dev0 || >2.3.0,<3.0.0dev" +google-auth = ">=1.25.0,<3.0dev" + +[package.extras] +grpc = ["grpcio (>=1.38.0,<2.0dev)", "grpcio-status (>=1.38.0,<2.0.dev0)"] + +[[package]] +name = "google-cloud-resource-manager" +version = "1.12.3" +description = "Google Cloud Resource Manager API client library" +optional = false +python-versions = ">=3.7" +files = [ + {file = "google-cloud-resource-manager-1.12.3.tar.gz", hash = "sha256:809851824119834e4f2310b2c4f38621c1d16b2bb14d5b9f132e69c79d355e7f"}, + {file = 
"google_cloud_resource_manager-1.12.3-py2.py3-none-any.whl", hash = "sha256:92be7d6959927b76d90eafc4028985c37975a46ded5466a018f02e8649e113d4"}, +] + +[package.dependencies] +google-api-core = {version = ">=1.34.1,<2.0.dev0 || >=2.11.dev0,<3.0.0dev", extras = ["grpc"]} +google-auth = ">=2.14.1,<2.24.0 || >2.24.0,<2.25.0 || >2.25.0,<3.0.0dev" +grpc-google-iam-v1 = ">=0.12.4,<1.0.0dev" +proto-plus = ">=1.22.3,<2.0.0dev" +protobuf = ">=3.19.5,<3.20.0 || >3.20.0,<3.20.1 || >3.20.1,<4.21.0 || >4.21.0,<4.21.1 || >4.21.1,<4.21.2 || >4.21.2,<4.21.3 || >4.21.3,<4.21.4 || >4.21.4,<4.21.5 || >4.21.5,<5.0.0dev" + +[[package]] +name = "google-cloud-storage" +version = "2.16.0" +description = "Google Cloud Storage API client library" +optional = false +python-versions = ">=3.7" +files = [ + {file = "google-cloud-storage-2.16.0.tar.gz", hash = "sha256:dda485fa503710a828d01246bd16ce9db0823dc51bbca742ce96a6817d58669f"}, + {file = "google_cloud_storage-2.16.0-py2.py3-none-any.whl", hash = "sha256:91a06b96fb79cf9cdfb4e759f178ce11ea885c79938f89590344d079305f5852"}, +] + +[package.dependencies] +google-api-core = ">=2.15.0,<3.0.0dev" +google-auth = ">=2.26.1,<3.0dev" +google-cloud-core = ">=2.3.0,<3.0dev" +google-crc32c = ">=1.0,<2.0dev" +google-resumable-media = ">=2.6.0" +requests = ">=2.18.0,<3.0.0dev" + +[package.extras] +protobuf = ["protobuf (<5.0.0dev)"] + +[[package]] +name = "google-crc32c" +version = "1.5.0" +description = "A python wrapper of the C library 'Google CRC32C'" +optional = false +python-versions = ">=3.7" +files = [ + {file = "google-crc32c-1.5.0.tar.gz", hash = "sha256:89284716bc6a5a415d4eaa11b1726d2d60a0cd12aadf5439828353662ede9dd7"}, + {file = "google_crc32c-1.5.0-cp310-cp310-macosx_10_9_universal2.whl", hash = "sha256:596d1f98fc70232fcb6590c439f43b350cb762fb5d61ce7b0e9db4539654cc13"}, + {file = "google_crc32c-1.5.0-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:be82c3c8cfb15b30f36768797a640e800513793d6ae1724aaaafe5bf86f8f346"}, + {file = 
"google_crc32c-1.5.0-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:461665ff58895f508e2866824a47bdee72497b091c730071f2b7575d5762ab65"}, + {file = "google_crc32c-1.5.0-cp310-cp310-manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:e2096eddb4e7c7bdae4bd69ad364e55e07b8316653234a56552d9c988bd2d61b"}, + {file = "google_crc32c-1.5.0-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:116a7c3c616dd14a3de8c64a965828b197e5f2d121fedd2f8c5585c547e87b02"}, + {file = "google_crc32c-1.5.0-cp310-cp310-musllinux_1_1_aarch64.whl", hash = "sha256:5829b792bf5822fd0a6f6eb34c5f81dd074f01d570ed7f36aa101d6fc7a0a6e4"}, + {file = "google_crc32c-1.5.0-cp310-cp310-musllinux_1_1_i686.whl", hash = "sha256:64e52e2b3970bd891309c113b54cf0e4384762c934d5ae56e283f9a0afcd953e"}, + {file = "google_crc32c-1.5.0-cp310-cp310-musllinux_1_1_x86_64.whl", hash = "sha256:02ebb8bf46c13e36998aeaad1de9b48f4caf545e91d14041270d9dca767b780c"}, + {file = "google_crc32c-1.5.0-cp310-cp310-win32.whl", hash = "sha256:2e920d506ec85eb4ba50cd4228c2bec05642894d4c73c59b3a2fe20346bd00ee"}, + {file = "google_crc32c-1.5.0-cp310-cp310-win_amd64.whl", hash = "sha256:07eb3c611ce363c51a933bf6bd7f8e3878a51d124acfc89452a75120bc436289"}, + {file = "google_crc32c-1.5.0-cp311-cp311-macosx_10_9_universal2.whl", hash = "sha256:cae0274952c079886567f3f4f685bcaf5708f0a23a5f5216fdab71f81a6c0273"}, + {file = "google_crc32c-1.5.0-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:1034d91442ead5a95b5aaef90dbfaca8633b0247d1e41621d1e9f9db88c36298"}, + {file = "google_crc32c-1.5.0-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:7c42c70cd1d362284289c6273adda4c6af8039a8ae12dc451dcd61cdabb8ab57"}, + {file = "google_crc32c-1.5.0-cp311-cp311-manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:8485b340a6a9e76c62a7dce3c98e5f102c9219f4cfbf896a00cf48caf078d438"}, + {file = "google_crc32c-1.5.0-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", 
hash = "sha256:77e2fd3057c9d78e225fa0a2160f96b64a824de17840351b26825b0848022906"}, + {file = "google_crc32c-1.5.0-cp311-cp311-musllinux_1_1_aarch64.whl", hash = "sha256:f583edb943cf2e09c60441b910d6a20b4d9d626c75a36c8fcac01a6c96c01183"}, + {file = "google_crc32c-1.5.0-cp311-cp311-musllinux_1_1_i686.whl", hash = "sha256:a1fd716e7a01f8e717490fbe2e431d2905ab8aa598b9b12f8d10abebb36b04dd"}, + {file = "google_crc32c-1.5.0-cp311-cp311-musllinux_1_1_x86_64.whl", hash = "sha256:72218785ce41b9cfd2fc1d6a017dc1ff7acfc4c17d01053265c41a2c0cc39b8c"}, + {file = "google_crc32c-1.5.0-cp311-cp311-win32.whl", hash = "sha256:66741ef4ee08ea0b2cc3c86916ab66b6aef03768525627fd6a1b34968b4e3709"}, + {file = "google_crc32c-1.5.0-cp311-cp311-win_amd64.whl", hash = "sha256:ba1eb1843304b1e5537e1fca632fa894d6f6deca8d6389636ee5b4797affb968"}, + {file = "google_crc32c-1.5.0-cp37-cp37m-macosx_10_9_x86_64.whl", hash = "sha256:98cb4d057f285bd80d8778ebc4fde6b4d509ac3f331758fb1528b733215443ae"}, + {file = "google_crc32c-1.5.0-cp37-cp37m-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:fd8536e902db7e365f49e7d9029283403974ccf29b13fc7028b97e2295b33556"}, + {file = "google_crc32c-1.5.0-cp37-cp37m-manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:19e0a019d2c4dcc5e598cd4a4bc7b008546b0358bd322537c74ad47a5386884f"}, + {file = "google_crc32c-1.5.0-cp37-cp37m-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:02c65b9817512edc6a4ae7c7e987fea799d2e0ee40c53ec573a692bee24de876"}, + {file = "google_crc32c-1.5.0-cp37-cp37m-manylinux_2_5_x86_64.manylinux1_x86_64.whl", hash = "sha256:6ac08d24c1f16bd2bf5eca8eaf8304812f44af5cfe5062006ec676e7e1d50afc"}, + {file = "google_crc32c-1.5.0-cp37-cp37m-musllinux_1_1_aarch64.whl", hash = "sha256:3359fc442a743e870f4588fcf5dcbc1bf929df1fad8fb9905cd94e5edb02e84c"}, + {file = "google_crc32c-1.5.0-cp37-cp37m-musllinux_1_1_i686.whl", hash = "sha256:1e986b206dae4476f41bcec1faa057851f3889503a70e1bdb2378d406223994a"}, + {file = 
"google_crc32c-1.5.0-cp37-cp37m-musllinux_1_1_x86_64.whl", hash = "sha256:de06adc872bcd8c2a4e0dc51250e9e65ef2ca91be023b9d13ebd67c2ba552e1e"}, + {file = "google_crc32c-1.5.0-cp37-cp37m-win32.whl", hash = "sha256:d3515f198eaa2f0ed49f8819d5732d70698c3fa37384146079b3799b97667a94"}, + {file = "google_crc32c-1.5.0-cp37-cp37m-win_amd64.whl", hash = "sha256:67b741654b851abafb7bc625b6d1cdd520a379074e64b6a128e3b688c3c04740"}, + {file = "google_crc32c-1.5.0-cp38-cp38-macosx_10_9_universal2.whl", hash = "sha256:c02ec1c5856179f171e032a31d6f8bf84e5a75c45c33b2e20a3de353b266ebd8"}, + {file = "google_crc32c-1.5.0-cp38-cp38-macosx_10_9_x86_64.whl", hash = "sha256:edfedb64740750e1a3b16152620220f51d58ff1b4abceb339ca92e934775c27a"}, + {file = "google_crc32c-1.5.0-cp38-cp38-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:84e6e8cd997930fc66d5bb4fde61e2b62ba19d62b7abd7a69920406f9ecca946"}, + {file = "google_crc32c-1.5.0-cp38-cp38-manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:024894d9d3cfbc5943f8f230e23950cd4906b2fe004c72e29b209420a1e6b05a"}, + {file = "google_crc32c-1.5.0-cp38-cp38-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:998679bf62b7fb599d2878aa3ed06b9ce688b8974893e7223c60db155f26bd8d"}, + {file = "google_crc32c-1.5.0-cp38-cp38-manylinux_2_5_x86_64.manylinux1_x86_64.whl", hash = "sha256:83c681c526a3439b5cf94f7420471705bbf96262f49a6fe546a6db5f687a3d4a"}, + {file = "google_crc32c-1.5.0-cp38-cp38-musllinux_1_1_aarch64.whl", hash = "sha256:4c6fdd4fccbec90cc8a01fc00773fcd5fa28db683c116ee3cb35cd5da9ef6c37"}, + {file = "google_crc32c-1.5.0-cp38-cp38-musllinux_1_1_i686.whl", hash = "sha256:5ae44e10a8e3407dbe138984f21e536583f2bba1be9491239f942c2464ac0894"}, + {file = "google_crc32c-1.5.0-cp38-cp38-musllinux_1_1_x86_64.whl", hash = "sha256:37933ec6e693e51a5b07505bd05de57eee12f3e8c32b07da7e73669398e6630a"}, + {file = "google_crc32c-1.5.0-cp38-cp38-win32.whl", hash = "sha256:fe70e325aa68fa4b5edf7d1a4b6f691eb04bbccac0ace68e34820d283b5f80d4"}, + 
{file = "google_crc32c-1.5.0-cp38-cp38-win_amd64.whl", hash = "sha256:74dea7751d98034887dbd821b7aae3e1d36eda111d6ca36c206c44478035709c"}, + {file = "google_crc32c-1.5.0-cp39-cp39-macosx_10_9_universal2.whl", hash = "sha256:c6c777a480337ac14f38564ac88ae82d4cd238bf293f0a22295b66eb89ffced7"}, + {file = "google_crc32c-1.5.0-cp39-cp39-macosx_10_9_x86_64.whl", hash = "sha256:759ce4851a4bb15ecabae28f4d2e18983c244eddd767f560165563bf9aefbc8d"}, + {file = "google_crc32c-1.5.0-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:f13cae8cc389a440def0c8c52057f37359014ccbc9dc1f0827936bcd367c6100"}, + {file = "google_crc32c-1.5.0-cp39-cp39-manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:e560628513ed34759456a416bf86b54b2476c59144a9138165c9a1575801d0d9"}, + {file = "google_crc32c-1.5.0-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:e1674e4307fa3024fc897ca774e9c7562c957af85df55efe2988ed9056dc4e57"}, + {file = "google_crc32c-1.5.0-cp39-cp39-manylinux_2_5_x86_64.manylinux1_x86_64.whl", hash = "sha256:278d2ed7c16cfc075c91378c4f47924c0625f5fc84b2d50d921b18b7975bd210"}, + {file = "google_crc32c-1.5.0-cp39-cp39-musllinux_1_1_aarch64.whl", hash = "sha256:d5280312b9af0976231f9e317c20e4a61cd2f9629b7bfea6a693d1878a264ebd"}, + {file = "google_crc32c-1.5.0-cp39-cp39-musllinux_1_1_i686.whl", hash = "sha256:8b87e1a59c38f275c0e3676fc2ab6d59eccecfd460be267ac360cc31f7bcde96"}, + {file = "google_crc32c-1.5.0-cp39-cp39-musllinux_1_1_x86_64.whl", hash = "sha256:7c074fece789b5034b9b1404a1f8208fc2d4c6ce9decdd16e8220c5a793e6f61"}, + {file = "google_crc32c-1.5.0-cp39-cp39-win32.whl", hash = "sha256:7f57f14606cd1dd0f0de396e1e53824c371e9544a822648cd76c034d209b559c"}, + {file = "google_crc32c-1.5.0-cp39-cp39-win_amd64.whl", hash = "sha256:a2355cba1f4ad8b6988a4ca3feed5bff33f6af2d7f134852cf279c2aebfde541"}, + {file = "google_crc32c-1.5.0-pp37-pypy37_pp73-macosx_10_9_x86_64.whl", hash = 
"sha256:f314013e7dcd5cf45ab1945d92e713eec788166262ae8deb2cfacd53def27325"}, + {file = "google_crc32c-1.5.0-pp37-pypy37_pp73-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:3b747a674c20a67343cb61d43fdd9207ce5da6a99f629c6e2541aa0e89215bcd"}, + {file = "google_crc32c-1.5.0-pp37-pypy37_pp73-manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:8f24ed114432de109aa9fd317278518a5af2d31ac2ea6b952b2f7782b43da091"}, + {file = "google_crc32c-1.5.0-pp37-pypy37_pp73-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:b8667b48e7a7ef66afba2c81e1094ef526388d35b873966d8a9a447974ed9178"}, + {file = "google_crc32c-1.5.0-pp37-pypy37_pp73-win_amd64.whl", hash = "sha256:1c7abdac90433b09bad6c43a43af253e688c9cfc1c86d332aed13f9a7c7f65e2"}, + {file = "google_crc32c-1.5.0-pp38-pypy38_pp73-macosx_10_9_x86_64.whl", hash = "sha256:6f998db4e71b645350b9ac28a2167e6632c239963ca9da411523bb439c5c514d"}, + {file = "google_crc32c-1.5.0-pp38-pypy38_pp73-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:9c99616c853bb585301df6de07ca2cadad344fd1ada6d62bb30aec05219c45d2"}, + {file = "google_crc32c-1.5.0-pp38-pypy38_pp73-manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:2ad40e31093a4af319dadf503b2467ccdc8f67c72e4bcba97f8c10cb078207b5"}, + {file = "google_crc32c-1.5.0-pp38-pypy38_pp73-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:cd67cf24a553339d5062eff51013780a00d6f97a39ca062781d06b3a73b15462"}, + {file = "google_crc32c-1.5.0-pp38-pypy38_pp73-win_amd64.whl", hash = "sha256:398af5e3ba9cf768787eef45c803ff9614cc3e22a5b2f7d7ae116df8b11e3314"}, + {file = "google_crc32c-1.5.0-pp39-pypy39_pp73-macosx_10_9_x86_64.whl", hash = "sha256:b1f8133c9a275df5613a451e73f36c2aea4fe13c5c8997e22cf355ebd7bd0728"}, + {file = "google_crc32c-1.5.0-pp39-pypy39_pp73-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:9ba053c5f50430a3fcfd36f75aff9caeba0440b2d076afdb79a318d6ca245f88"}, + {file = 
"google_crc32c-1.5.0-pp39-pypy39_pp73-manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:272d3892a1e1a2dbc39cc5cde96834c236d5327e2122d3aaa19f6614531bb6eb"}, + {file = "google_crc32c-1.5.0-pp39-pypy39_pp73-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:635f5d4dd18758a1fbd1049a8e8d2fee4ffed124462d837d1a02a0e009c3ab31"}, + {file = "google_crc32c-1.5.0-pp39-pypy39_pp73-win_amd64.whl", hash = "sha256:c672d99a345849301784604bfeaeba4db0c7aae50b95be04dd651fd2a7310b93"}, +] + +[package.extras] +testing = ["pytest"] + +[[package]] +name = "google-generativeai" +version = "0.5.2" +description = "Google Generative AI High level API client library and tools." +optional = false +python-versions = ">=3.9" +files = [ + {file = "google_generativeai-0.5.2-py3-none-any.whl", hash = "sha256:56f39485a0a673c93c21ec31c17809cc6a964193fb77b7ce809ad15d0dd72d7b"}, +] + +[package.dependencies] +google-ai-generativelanguage = "0.6.2" +google-api-core = "*" +google-api-python-client = "*" +google-auth = ">=2.15.0" +protobuf = "*" +pydantic = "*" +tqdm = "*" +typing-extensions = "*" + +[package.extras] +dev = ["Pillow", "absl-py", "black", "ipython", "nose2", "pandas", "pytype", "pyyaml"] + +[[package]] +name = "google-resumable-media" +version = "2.7.0" +description = "Utilities for Google Media Downloads and Resumable Uploads" +optional = false +python-versions = ">= 3.7" +files = [ + {file = "google-resumable-media-2.7.0.tar.gz", hash = "sha256:5f18f5fa9836f4b083162064a1c2c98c17239bfda9ca50ad970ccf905f3e625b"}, + {file = "google_resumable_media-2.7.0-py2.py3-none-any.whl", hash = "sha256:79543cfe433b63fd81c0844b7803aba1bb8950b47bedf7d980c38fa123937e08"}, +] + +[package.dependencies] +google-crc32c = ">=1.0,<2.0dev" + +[package.extras] +aiohttp = ["aiohttp (>=3.6.2,<4.0.0dev)", "google-auth (>=1.22.0,<2.0dev)"] +requests = ["requests (>=2.18.0,<3.0.0dev)"] + +[[package]] +name = "googleapis-common-protos" +version = "1.63.0" +description = "Common protobufs used in 
Google APIs" +optional = false +python-versions = ">=3.7" +files = [ + {file = "googleapis-common-protos-1.63.0.tar.gz", hash = "sha256:17ad01b11d5f1d0171c06d3ba5c04c54474e883b66b949722b4938ee2694ef4e"}, + {file = "googleapis_common_protos-1.63.0-py2.py3-none-any.whl", hash = "sha256:ae45f75702f7c08b541f750854a678bd8f534a1a6bace6afe975f1d0a82d6632"}, +] + +[package.dependencies] +grpcio = {version = ">=1.44.0,<2.0.0.dev0", optional = true, markers = "extra == \"grpc\""} +protobuf = ">=3.19.5,<3.20.0 || >3.20.0,<3.20.1 || >3.20.1,<4.21.1 || >4.21.1,<4.21.2 || >4.21.2,<4.21.3 || >4.21.3,<4.21.4 || >4.21.4,<4.21.5 || >4.21.5,<5.0.0.dev0" + +[package.extras] +grpc = ["grpcio (>=1.44.0,<2.0.0.dev0)"] + +[[package]] +name = "grpc-google-iam-v1" +version = "0.13.0" +description = "IAM API client library" +optional = false +python-versions = ">=3.7" +files = [ + {file = "grpc-google-iam-v1-0.13.0.tar.gz", hash = "sha256:fad318608b9e093258fbf12529180f400d1c44453698a33509cc6ecf005b294e"}, + {file = "grpc_google_iam_v1-0.13.0-py2.py3-none-any.whl", hash = "sha256:53902e2af7de8df8c1bd91373d9be55b0743ec267a7428ea638db3775becae89"}, +] + +[package.dependencies] +googleapis-common-protos = {version = ">=1.56.0,<2.0.0dev", extras = ["grpc"]} +grpcio = ">=1.44.0,<2.0.0dev" +protobuf = ">=3.19.5,<3.20.0 || >3.20.0,<3.20.1 || >3.20.1,<4.21.1 || >4.21.1,<4.21.2 || >4.21.2,<4.21.3 || >4.21.3,<4.21.4 || >4.21.4,<4.21.5 || >4.21.5,<5.0.0dev" + +[[package]] +name = "grpcio" +version = "1.63.0" +description = "HTTP/2-based RPC framework" +optional = false +python-versions = ">=3.8" +files = [ + {file = "grpcio-1.63.0-cp310-cp310-linux_armv7l.whl", hash = "sha256:2e93aca840c29d4ab5db93f94ed0a0ca899e241f2e8aec6334ab3575dc46125c"}, + {file = "grpcio-1.63.0-cp310-cp310-macosx_12_0_universal2.whl", hash = "sha256:91b73d3f1340fefa1e1716c8c1ec9930c676d6b10a3513ab6c26004cb02d8b3f"}, + {file = "grpcio-1.63.0-cp310-cp310-manylinux_2_17_aarch64.whl", hash = 
"sha256:b3afbd9d6827fa6f475a4f91db55e441113f6d3eb9b7ebb8fb806e5bb6d6bd0d"}, + {file = "grpcio-1.63.0-cp310-cp310-manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:8f3f6883ce54a7a5f47db43289a0a4c776487912de1a0e2cc83fdaec9685cc9f"}, + {file = "grpcio-1.63.0-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:cf8dae9cc0412cb86c8de5a8f3be395c5119a370f3ce2e69c8b7d46bb9872c8d"}, + {file = "grpcio-1.63.0-cp310-cp310-musllinux_1_1_i686.whl", hash = "sha256:08e1559fd3b3b4468486b26b0af64a3904a8dbc78d8d936af9c1cf9636eb3e8b"}, + {file = "grpcio-1.63.0-cp310-cp310-musllinux_1_1_x86_64.whl", hash = "sha256:5c039ef01516039fa39da8a8a43a95b64e288f79f42a17e6c2904a02a319b357"}, + {file = "grpcio-1.63.0-cp310-cp310-win32.whl", hash = "sha256:ad2ac8903b2eae071055a927ef74121ed52d69468e91d9bcbd028bd0e554be6d"}, + {file = "grpcio-1.63.0-cp310-cp310-win_amd64.whl", hash = "sha256:b2e44f59316716532a993ca2966636df6fbe7be4ab6f099de6815570ebe4383a"}, + {file = "grpcio-1.63.0-cp311-cp311-linux_armv7l.whl", hash = "sha256:f28f8b2db7b86c77916829d64ab21ff49a9d8289ea1564a2b2a3a8ed9ffcccd3"}, + {file = "grpcio-1.63.0-cp311-cp311-macosx_10_9_universal2.whl", hash = "sha256:65bf975639a1f93bee63ca60d2e4951f1b543f498d581869922910a476ead2f5"}, + {file = "grpcio-1.63.0-cp311-cp311-manylinux_2_17_aarch64.whl", hash = "sha256:b5194775fec7dc3dbd6a935102bb156cd2c35efe1685b0a46c67b927c74f0cfb"}, + {file = "grpcio-1.63.0-cp311-cp311-manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:e4cbb2100ee46d024c45920d16e888ee5d3cf47c66e316210bc236d5bebc42b3"}, + {file = "grpcio-1.63.0-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:1ff737cf29b5b801619f10e59b581869e32f400159e8b12d7a97e7e3bdeee6a2"}, + {file = "grpcio-1.63.0-cp311-cp311-musllinux_1_1_i686.whl", hash = "sha256:cd1e68776262dd44dedd7381b1a0ad09d9930ffb405f737d64f505eb7f77d6c7"}, + {file = "grpcio-1.63.0-cp311-cp311-musllinux_1_1_x86_64.whl", hash = 
"sha256:93f45f27f516548e23e4ec3fbab21b060416007dbe768a111fc4611464cc773f"}, + {file = "grpcio-1.63.0-cp311-cp311-win32.whl", hash = "sha256:878b1d88d0137df60e6b09b74cdb73db123f9579232c8456f53e9abc4f62eb3c"}, + {file = "grpcio-1.63.0-cp311-cp311-win_amd64.whl", hash = "sha256:756fed02dacd24e8f488f295a913f250b56b98fb793f41d5b2de6c44fb762434"}, + {file = "grpcio-1.63.0-cp312-cp312-linux_armv7l.whl", hash = "sha256:93a46794cc96c3a674cdfb59ef9ce84d46185fe9421baf2268ccb556f8f81f57"}, + {file = "grpcio-1.63.0-cp312-cp312-macosx_10_9_universal2.whl", hash = "sha256:a7b19dfc74d0be7032ca1eda0ed545e582ee46cd65c162f9e9fc6b26ef827dc6"}, + {file = "grpcio-1.63.0-cp312-cp312-manylinux_2_17_aarch64.whl", hash = "sha256:8064d986d3a64ba21e498b9a376cbc5d6ab2e8ab0e288d39f266f0fca169b90d"}, + {file = "grpcio-1.63.0-cp312-cp312-manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:219bb1848cd2c90348c79ed0a6b0ea51866bc7e72fa6e205e459fedab5770172"}, + {file = "grpcio-1.63.0-cp312-cp312-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:a2d60cd1d58817bc5985fae6168d8b5655c4981d448d0f5b6194bbcc038090d2"}, + {file = "grpcio-1.63.0-cp312-cp312-musllinux_1_1_i686.whl", hash = "sha256:9e350cb096e5c67832e9b6e018cf8a0d2a53b2a958f6251615173165269a91b0"}, + {file = "grpcio-1.63.0-cp312-cp312-musllinux_1_1_x86_64.whl", hash = "sha256:56cdf96ff82e3cc90dbe8bac260352993f23e8e256e063c327b6cf9c88daf7a9"}, + {file = "grpcio-1.63.0-cp312-cp312-win32.whl", hash = "sha256:3a6d1f9ea965e750db7b4ee6f9fdef5fdf135abe8a249e75d84b0a3e0c668a1b"}, + {file = "grpcio-1.63.0-cp312-cp312-win_amd64.whl", hash = "sha256:d2497769895bb03efe3187fb1888fc20e98a5f18b3d14b606167dacda5789434"}, + {file = "grpcio-1.63.0-cp38-cp38-linux_armv7l.whl", hash = "sha256:fdf348ae69c6ff484402cfdb14e18c1b0054ac2420079d575c53a60b9b2853ae"}, + {file = "grpcio-1.63.0-cp38-cp38-macosx_10_9_universal2.whl", hash = "sha256:a3abfe0b0f6798dedd2e9e92e881d9acd0fdb62ae27dcbbfa7654a57e24060c0"}, + {file = 
"grpcio-1.63.0-cp38-cp38-manylinux_2_17_aarch64.whl", hash = "sha256:6ef0ad92873672a2a3767cb827b64741c363ebaa27e7f21659e4e31f4d750280"}, + {file = "grpcio-1.63.0-cp38-cp38-manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:b416252ac5588d9dfb8a30a191451adbf534e9ce5f56bb02cd193f12d8845b7f"}, + {file = "grpcio-1.63.0-cp38-cp38-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:e3b77eaefc74d7eb861d3ffbdf91b50a1bb1639514ebe764c47773b833fa2d91"}, + {file = "grpcio-1.63.0-cp38-cp38-musllinux_1_1_i686.whl", hash = "sha256:b005292369d9c1f80bf70c1db1c17c6c342da7576f1c689e8eee4fb0c256af85"}, + {file = "grpcio-1.63.0-cp38-cp38-musllinux_1_1_x86_64.whl", hash = "sha256:cdcda1156dcc41e042d1e899ba1f5c2e9f3cd7625b3d6ebfa619806a4c1aadda"}, + {file = "grpcio-1.63.0-cp38-cp38-win32.whl", hash = "sha256:01799e8649f9e94ba7db1aeb3452188048b0019dc37696b0f5ce212c87c560c3"}, + {file = "grpcio-1.63.0-cp38-cp38-win_amd64.whl", hash = "sha256:6a1a3642d76f887aa4009d92f71eb37809abceb3b7b5a1eec9c554a246f20e3a"}, + {file = "grpcio-1.63.0-cp39-cp39-linux_armv7l.whl", hash = "sha256:75f701ff645858a2b16bc8c9fc68af215a8bb2d5a9b647448129de6e85d52bce"}, + {file = "grpcio-1.63.0-cp39-cp39-macosx_10_9_universal2.whl", hash = "sha256:cacdef0348a08e475a721967f48206a2254a1b26ee7637638d9e081761a5ba86"}, + {file = "grpcio-1.63.0-cp39-cp39-manylinux_2_17_aarch64.whl", hash = "sha256:0697563d1d84d6985e40ec5ec596ff41b52abb3fd91ec240e8cb44a63b895094"}, + {file = "grpcio-1.63.0-cp39-cp39-manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:6426e1fb92d006e47476d42b8f240c1d916a6d4423c5258ccc5b105e43438f61"}, + {file = "grpcio-1.63.0-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:e48cee31bc5f5a31fb2f3b573764bd563aaa5472342860edcc7039525b53e46a"}, + {file = "grpcio-1.63.0-cp39-cp39-musllinux_1_1_i686.whl", hash = "sha256:50344663068041b34a992c19c600236e7abb42d6ec32567916b87b4c8b8833b3"}, + {file = "grpcio-1.63.0-cp39-cp39-musllinux_1_1_x86_64.whl", hash = 
"sha256:259e11932230d70ef24a21b9fb5bb947eb4703f57865a404054400ee92f42f5d"}, + {file = "grpcio-1.63.0-cp39-cp39-win32.whl", hash = "sha256:a44624aad77bf8ca198c55af811fd28f2b3eaf0a50ec5b57b06c034416ef2d0a"}, + {file = "grpcio-1.63.0-cp39-cp39-win_amd64.whl", hash = "sha256:166e5c460e5d7d4656ff9e63b13e1f6029b122104c1633d5f37eaea348d7356d"}, + {file = "grpcio-1.63.0.tar.gz", hash = "sha256:f3023e14805c61bc439fb40ca545ac3d5740ce66120a678a3c6c2c55b70343d1"}, +] + +[package.extras] +protobuf = ["grpcio-tools (>=1.63.0)"] + +[[package]] +name = "grpcio-status" +version = "1.62.2" +description = "Status proto mapping for gRPC" +optional = false +python-versions = ">=3.6" +files = [ + {file = "grpcio-status-1.62.2.tar.gz", hash = "sha256:62e1bfcb02025a1cd73732a2d33672d3e9d0df4d21c12c51e0bbcaf09bab742a"}, + {file = "grpcio_status-1.62.2-py3-none-any.whl", hash = "sha256:206ddf0eb36bc99b033f03b2c8e95d319f0044defae9b41ae21408e7e0cda48f"}, +] + +[package.dependencies] +googleapis-common-protos = ">=1.5.5" +grpcio = ">=1.62.2" +protobuf = ">=4.21.6" + +[[package]] +name = "h11" +version = "0.14.0" +description = "A pure-Python, bring-your-own-I/O implementation of HTTP/1.1" +optional = false +python-versions = ">=3.7" +files = [ + {file = "h11-0.14.0-py3-none-any.whl", hash = "sha256:e3fe4ac4b851c468cc8363d500db52c2ead036020723024a109d37346efaa761"}, + {file = "h11-0.14.0.tar.gz", hash = "sha256:8f19fbbe99e72420ff35c00b27a34cb9937e902a8b810e2c88300c6f0a3b699d"}, +] + +[[package]] +name = "httpcore" +version = "1.0.5" +description = "A minimal low-level HTTP client." 
+optional = false +python-versions = ">=3.8" +files = [ + {file = "httpcore-1.0.5-py3-none-any.whl", hash = "sha256:421f18bac248b25d310f3cacd198d55b8e6125c107797b609ff9b7a6ba7991b5"}, + {file = "httpcore-1.0.5.tar.gz", hash = "sha256:34a38e2f9291467ee3b44e89dd52615370e152954ba21721378a87b2960f7a61"}, +] + +[package.dependencies] +certifi = "*" +h11 = ">=0.13,<0.15" + +[package.extras] +asyncio = ["anyio (>=4.0,<5.0)"] +http2 = ["h2 (>=3,<5)"] +socks = ["socksio (==1.*)"] +trio = ["trio (>=0.22.0,<0.26.0)"] + +[[package]] +name = "httplib2" +version = "0.22.0" +description = "A comprehensive HTTP client library." +optional = false +python-versions = ">=2.7, !=3.0.*, !=3.1.*, !=3.2.*, !=3.3.*" +files = [ + {file = "httplib2-0.22.0-py3-none-any.whl", hash = "sha256:14ae0a53c1ba8f3d37e9e27cf37eabb0fb9980f435ba405d546948b009dd64dc"}, + {file = "httplib2-0.22.0.tar.gz", hash = "sha256:d7a10bc5ef5ab08322488bde8c726eeee5c8618723fdb399597ec58f3d82df81"}, +] + +[package.dependencies] +pyparsing = {version = ">=2.4.2,<3.0.0 || >3.0.0,<3.0.1 || >3.0.1,<3.0.2 || >3.0.2,<3.0.3 || >3.0.3,<4", markers = "python_version > \"3.0\""} + +[[package]] +name = "httpx" +version = "0.27.0" +description = "The next generation HTTP client." 
+optional = false +python-versions = ">=3.8" +files = [ + {file = "httpx-0.27.0-py3-none-any.whl", hash = "sha256:71d5465162c13681bff01ad59b2cc68dd838ea1f10e51574bac27103f00c91a5"}, + {file = "httpx-0.27.0.tar.gz", hash = "sha256:a0cb88a46f32dc874e04ee956e4c2764aba2aa228f650b06788ba6bda2962ab5"}, +] + +[package.dependencies] +anyio = "*" +certifi = "*" +httpcore = "==1.*" +idna = "*" +sniffio = "*" + +[package.extras] +brotli = ["brotli", "brotlicffi"] +cli = ["click (==8.*)", "pygments (==2.*)", "rich (>=10,<14)"] +http2 = ["h2 (>=3,<5)"] +socks = ["socksio (==1.*)"] + +[[package]] +name = "huggingface-hub" +version = "0.23.0" +description = "Client library to download and publish models, datasets and other repos on the huggingface.co hub" +optional = false +python-versions = ">=3.8.0" +files = [ + {file = "huggingface_hub-0.23.0-py3-none-any.whl", hash = "sha256:075c30d48ee7db2bba779190dc526d2c11d422aed6f9044c5e2fdc2c432fdb91"}, + {file = "huggingface_hub-0.23.0.tar.gz", hash = "sha256:7126dedd10a4c6fac796ced4d87a8cf004efc722a5125c2c09299017fa366fa9"}, +] + +[package.dependencies] +filelock = "*" +fsspec = ">=2023.5.0" +packaging = ">=20.9" +pyyaml = ">=5.1" +requests = "*" +tqdm = ">=4.42.1" +typing-extensions = ">=3.7.4.3" + +[package.extras] +all = ["InquirerPy (==0.3.4)", "Jinja2", "Pillow", "aiohttp", "fastapi", "gradio", "jedi", "minijinja (>=1.0)", "mypy (==1.5.1)", "numpy", "pytest", "pytest-asyncio", "pytest-cov", "pytest-env", "pytest-rerunfailures", "pytest-vcr", "pytest-xdist", "ruff (>=0.3.0)", "soundfile", "types-PyYAML", "types-requests", "types-simplejson", "types-toml", "types-tqdm", "types-urllib3", "typing-extensions (>=4.8.0)", "urllib3 (<2.0)"] +cli = ["InquirerPy (==0.3.4)"] +dev = ["InquirerPy (==0.3.4)", "Jinja2", "Pillow", "aiohttp", "fastapi", "gradio", "jedi", "minijinja (>=1.0)", "mypy (==1.5.1)", "numpy", "pytest", "pytest-asyncio", "pytest-cov", "pytest-env", "pytest-rerunfailures", "pytest-vcr", "pytest-xdist", "ruff (>=0.3.0)", 
"soundfile", "types-PyYAML", "types-requests", "types-simplejson", "types-toml", "types-tqdm", "types-urllib3", "typing-extensions (>=4.8.0)", "urllib3 (<2.0)"] +fastai = ["fastai (>=2.4)", "fastcore (>=1.3.27)", "toml"] +hf-transfer = ["hf-transfer (>=0.1.4)"] +inference = ["aiohttp", "minijinja (>=1.0)"] +quality = ["mypy (==1.5.1)", "ruff (>=0.3.0)"] +tensorflow = ["graphviz", "pydot", "tensorflow"] +tensorflow-testing = ["keras (<3.0)", "tensorflow"] +testing = ["InquirerPy (==0.3.4)", "Jinja2", "Pillow", "aiohttp", "fastapi", "gradio", "jedi", "minijinja (>=1.0)", "numpy", "pytest", "pytest-asyncio", "pytest-cov", "pytest-env", "pytest-rerunfailures", "pytest-vcr", "pytest-xdist", "soundfile", "urllib3 (<2.0)"] +torch = ["safetensors", "torch"] +typing = ["types-PyYAML", "types-requests", "types-simplejson", "types-toml", "types-tqdm", "types-urllib3", "typing-extensions (>=4.8.0)"] + +[[package]] +name = "idna" +version = "3.7" +description = "Internationalized Domain Names in Applications (IDNA)" +optional = false +python-versions = ">=3.5" +files = [ + {file = "idna-3.7-py3-none-any.whl", hash = "sha256:82fee1fc78add43492d3a1898bfa6d8a904cc97d8427f683ed8e798d07761aa0"}, + {file = "idna-3.7.tar.gz", hash = "sha256:028ff3aadf0609c1fd278d8ea3089299412a7a8b9bd005dd08b9f8285bcb5cfc"}, +] + +[[package]] +name = "iniconfig" +version = "2.0.0" +description = "brain-dead simple config-ini parsing" +optional = false +python-versions = ">=3.7" +files = [ + {file = "iniconfig-2.0.0-py3-none-any.whl", hash = "sha256:b6a85871a79d2e3b22d2d1b94ac2824226a63c6b741c88f7ae975f18b6778374"}, + {file = "iniconfig-2.0.0.tar.gz", hash = "sha256:2d91e135bf72d31a410b17c16da610a82cb55f6b0477d1a902134b24a455b8b3"}, +] + +[[package]] +name = "ipykernel" +version = "6.29.4" +description = "IPython Kernel for Jupyter" +optional = false +python-versions = ">=3.8" +files = [ + {file = "ipykernel-6.29.4-py3-none-any.whl", hash = 
"sha256:1181e653d95c6808039c509ef8e67c4126b3b3af7781496c7cbfb5ed938a27da"}, + {file = "ipykernel-6.29.4.tar.gz", hash = "sha256:3d44070060f9475ac2092b760123fadf105d2e2493c24848b6691a7c4f42af5c"}, +] + +[package.dependencies] +appnope = {version = "*", markers = "platform_system == \"Darwin\""} +comm = ">=0.1.1" +debugpy = ">=1.6.5" +ipython = ">=7.23.1" +jupyter-client = ">=6.1.12" +jupyter-core = ">=4.12,<5.0.dev0 || >=5.1.dev0" +matplotlib-inline = ">=0.1" +nest-asyncio = "*" +packaging = "*" +psutil = "*" +pyzmq = ">=24" +tornado = ">=6.1" +traitlets = ">=5.4.0" + +[package.extras] +cov = ["coverage[toml]", "curio", "matplotlib", "pytest-cov", "trio"] +docs = ["myst-parser", "pydata-sphinx-theme", "sphinx", "sphinx-autodoc-typehints", "sphinxcontrib-github-alt", "sphinxcontrib-spelling", "trio"] +pyqt5 = ["pyqt5"] +pyside6 = ["pyside6"] +test = ["flaky", "ipyparallel", "pre-commit", "pytest (>=7.0)", "pytest-asyncio (>=0.23.5)", "pytest-cov", "pytest-timeout"] + +[[package]] +name = "ipython" +version = "8.24.0" +description = "IPython: Productive Interactive Computing" +optional = false +python-versions = ">=3.10" +files = [ + {file = "ipython-8.24.0-py3-none-any.whl", hash = "sha256:d7bf2f6c4314984e3e02393213bab8703cf163ede39672ce5918c51fe253a2a3"}, + {file = "ipython-8.24.0.tar.gz", hash = "sha256:010db3f8a728a578bb641fdd06c063b9fb8e96a9464c63aec6310fbcb5e80501"}, +] + +[package.dependencies] +colorama = {version = "*", markers = "sys_platform == \"win32\""} +decorator = "*" +exceptiongroup = {version = "*", markers = "python_version < \"3.11\""} +jedi = ">=0.16" +matplotlib-inline = "*" +pexpect = {version = ">4.3", markers = "sys_platform != \"win32\" and sys_platform != \"emscripten\""} +prompt-toolkit = ">=3.0.41,<3.1.0" +pygments = ">=2.4.0" +stack-data = "*" +traitlets = ">=5.13.0" +typing-extensions = {version = ">=4.6", markers = "python_version < \"3.12\""} + +[package.extras] +all = 
["ipython[black,doc,kernel,matplotlib,nbconvert,nbformat,notebook,parallel,qtconsole]", "ipython[test,test-extra]"] +black = ["black"] +doc = ["docrepr", "exceptiongroup", "ipykernel", "ipython[test]", "matplotlib", "setuptools (>=18.5)", "sphinx (>=1.3)", "sphinx-rtd-theme", "sphinxcontrib-jquery", "stack-data", "typing-extensions"] +kernel = ["ipykernel"] +matplotlib = ["matplotlib"] +nbconvert = ["nbconvert"] +nbformat = ["nbformat"] +notebook = ["ipywidgets", "notebook"] +parallel = ["ipyparallel"] +qtconsole = ["qtconsole"] +test = ["pickleshare", "pytest", "pytest-asyncio (<0.22)", "testpath"] +test-extra = ["curio", "ipython[test]", "matplotlib (!=3.2.0)", "nbformat", "numpy (>=1.23)", "pandas", "trio"] + +[[package]] +name = "ipywidgets" +version = "8.1.2" +description = "Jupyter interactive widgets" +optional = false +python-versions = ">=3.7" +files = [ + {file = "ipywidgets-8.1.2-py3-none-any.whl", hash = "sha256:bbe43850d79fb5e906b14801d6c01402857996864d1e5b6fa62dd2ee35559f60"}, + {file = "ipywidgets-8.1.2.tar.gz", hash = "sha256:d0b9b41e49bae926a866e613a39b0f0097745d2b9f1f3dd406641b4a57ec42c9"}, +] + +[package.dependencies] +comm = ">=0.1.3" +ipython = ">=6.1.0" +jupyterlab-widgets = ">=3.0.10,<3.1.0" +traitlets = ">=4.3.1" +widgetsnbextension = ">=4.0.10,<4.1.0" + +[package.extras] +test = ["ipykernel", "jsonschema", "pytest (>=3.6.0)", "pytest-cov", "pytz"] + +[[package]] +name = "isoduration" +version = "20.11.0" +description = "Operations with ISO 8601 durations" +optional = false +python-versions = ">=3.7" +files = [ + {file = "isoduration-20.11.0-py3-none-any.whl", hash = "sha256:b2904c2a4228c3d44f409c8ae8e2370eb21a26f7ac2ec5446df141dde3452042"}, + {file = "isoduration-20.11.0.tar.gz", hash = "sha256:ac2f9015137935279eac671f94f89eb00584f940f5dc49462a0c4ee692ba1bd9"}, +] + +[package.dependencies] +arrow = ">=0.15.0" + +[[package]] +name = "isort" +version = "5.13.2" +description = "A Python utility / library to sort Python imports." 
+optional = false +python-versions = ">=3.8.0" +files = [ + {file = "isort-5.13.2-py3-none-any.whl", hash = "sha256:8ca5e72a8d85860d5a3fa69b8745237f2939afe12dbf656afbcb47fe72d947a6"}, + {file = "isort-5.13.2.tar.gz", hash = "sha256:48fdfcb9face5d58a4f6dde2e72a1fb8dcaf8ab26f95ab49fab84c2ddefb0109"}, +] + +[package.extras] +colors = ["colorama (>=0.4.6)"] + +[[package]] +name = "jedi" +version = "0.19.1" +description = "An autocompletion tool for Python that can be used for text editors." +optional = false +python-versions = ">=3.6" +files = [ + {file = "jedi-0.19.1-py2.py3-none-any.whl", hash = "sha256:e983c654fe5c02867aef4cdfce5a2fbb4a50adc0af145f70504238f18ef5e7e0"}, + {file = "jedi-0.19.1.tar.gz", hash = "sha256:cf0496f3651bc65d7174ac1b7d043eff454892c708a87d1b683e57b569927ffd"}, +] + +[package.dependencies] +parso = ">=0.8.3,<0.9.0" + +[package.extras] +docs = ["Jinja2 (==2.11.3)", "MarkupSafe (==1.1.1)", "Pygments (==2.8.1)", "alabaster (==0.7.12)", "babel (==2.9.1)", "chardet (==4.0.0)", "commonmark (==0.8.1)", "docutils (==0.17.1)", "future (==0.18.2)", "idna (==2.10)", "imagesize (==1.2.0)", "mock (==1.0.1)", "packaging (==20.9)", "pyparsing (==2.4.7)", "pytz (==2021.1)", "readthedocs-sphinx-ext (==2.1.4)", "recommonmark (==0.5.0)", "requests (==2.25.1)", "six (==1.15.0)", "snowballstemmer (==2.1.0)", "sphinx (==1.8.5)", "sphinx-rtd-theme (==0.4.3)", "sphinxcontrib-serializinghtml (==1.1.4)", "sphinxcontrib-websupport (==1.2.4)", "urllib3 (==1.26.4)"] +qa = ["flake8 (==5.0.4)", "mypy (==0.971)", "types-setuptools (==67.2.0.1)"] +testing = ["Django", "attrs", "colorama", "docopt", "pytest (<7.0.0)"] + +[[package]] +name = "jinja2" +version = "3.1.3" +description = "A very fast and expressive template engine." 
+optional = false +python-versions = ">=3.7" +files = [ + {file = "Jinja2-3.1.3-py3-none-any.whl", hash = "sha256:7d6d50dd97d52cbc355597bd845fabfbac3f551e1f99619e39a35ce8c370b5fa"}, + {file = "Jinja2-3.1.3.tar.gz", hash = "sha256:ac8bd6544d4bb2c9792bf3a159e80bba8fda7f07e81bc3aed565432d5925ba90"}, +] + +[package.dependencies] +MarkupSafe = ">=2.0" + +[package.extras] +i18n = ["Babel (>=2.7)"] + +[[package]] +name = "json5" +version = "0.9.25" +description = "A Python implementation of the JSON5 data format." +optional = false +python-versions = ">=3.8" +files = [ + {file = "json5-0.9.25-py3-none-any.whl", hash = "sha256:34ed7d834b1341a86987ed52f3f76cd8ee184394906b6e22a1e0deb9ab294e8f"}, + {file = "json5-0.9.25.tar.gz", hash = "sha256:548e41b9be043f9426776f05df8635a00fe06104ea51ed24b67f908856e151ae"}, +] + +[[package]] +name = "jsonpointer" +version = "2.4" +description = "Identify specific nodes in a JSON document (RFC 6901)" +optional = false +python-versions = ">=2.7, !=3.0.*, !=3.1.*, !=3.2.*, !=3.3.*, !=3.4.*, !=3.5.*, !=3.6.*" +files = [ + {file = "jsonpointer-2.4-py2.py3-none-any.whl", hash = "sha256:15d51bba20eea3165644553647711d150376234112651b4f1811022aecad7d7a"}, + {file = "jsonpointer-2.4.tar.gz", hash = "sha256:585cee82b70211fa9e6043b7bb89db6e1aa49524340dde8ad6b63206ea689d88"}, +] + +[[package]] +name = "jsonschema" +version = "4.22.0" +description = "An implementation of JSON Schema validation for Python" +optional = false +python-versions = ">=3.8" +files = [ + {file = "jsonschema-4.22.0-py3-none-any.whl", hash = "sha256:ff4cfd6b1367a40e7bc6411caec72effadd3db0bbe5017de188f2d6108335802"}, + {file = "jsonschema-4.22.0.tar.gz", hash = "sha256:5b22d434a45935119af990552c862e5d6d564e8f6601206b305a61fdf661a2b7"}, +] + +[package.dependencies] +attrs = ">=22.2.0" +fqdn = {version = "*", optional = true, markers = "extra == \"format-nongpl\""} +idna = {version = "*", optional = true, markers = "extra == \"format-nongpl\""} +isoduration = {version = "*", optional 
= true, markers = "extra == \"format-nongpl\""} +jsonpointer = {version = ">1.13", optional = true, markers = "extra == \"format-nongpl\""} +jsonschema-specifications = ">=2023.03.6" +referencing = ">=0.28.4" +rfc3339-validator = {version = "*", optional = true, markers = "extra == \"format-nongpl\""} +rfc3986-validator = {version = ">0.1.0", optional = true, markers = "extra == \"format-nongpl\""} +rpds-py = ">=0.7.1" +uri-template = {version = "*", optional = true, markers = "extra == \"format-nongpl\""} +webcolors = {version = ">=1.11", optional = true, markers = "extra == \"format-nongpl\""} + +[package.extras] +format = ["fqdn", "idna", "isoduration", "jsonpointer (>1.13)", "rfc3339-validator", "rfc3987", "uri-template", "webcolors (>=1.11)"] +format-nongpl = ["fqdn", "idna", "isoduration", "jsonpointer (>1.13)", "rfc3339-validator", "rfc3986-validator (>0.1.0)", "uri-template", "webcolors (>=1.11)"] + +[[package]] +name = "jsonschema-specifications" +version = "2023.12.1" +description = "The JSON Schema meta-schemas and vocabularies, exposed as a Registry" +optional = false +python-versions = ">=3.8" +files = [ + {file = "jsonschema_specifications-2023.12.1-py3-none-any.whl", hash = "sha256:87e4fdf3a94858b8a2ba2778d9ba57d8a9cafca7c7489c46ba0d30a8bc6a9c3c"}, + {file = "jsonschema_specifications-2023.12.1.tar.gz", hash = "sha256:48a76787b3e70f5ed53f1160d2b81f586e4ca6d1548c5de7085d1682674764cc"}, +] + +[package.dependencies] +referencing = ">=0.31.0" + +[[package]] +name = "jupyter" +version = "1.0.0" +description = "Jupyter metapackage. Install all the Jupyter components in one go." 
+optional = false +python-versions = "*" +files = [ + {file = "jupyter-1.0.0-py2.py3-none-any.whl", hash = "sha256:5b290f93b98ffbc21c0c7e749f054b3267782166d72fa5e3ed1ed4eaf34a2b78"}, + {file = "jupyter-1.0.0.tar.gz", hash = "sha256:d9dc4b3318f310e34c82951ea5d6683f67bed7def4b259fafbfe4f1beb1d8e5f"}, + {file = "jupyter-1.0.0.zip", hash = "sha256:3e1f86076bbb7c8c207829390305a2b1fe836d471ed54be66a3b8c41e7f46cc7"}, +] + +[package.dependencies] +ipykernel = "*" +ipywidgets = "*" +jupyter-console = "*" +nbconvert = "*" +notebook = "*" +qtconsole = "*" + +[[package]] +name = "jupyter-client" +version = "8.6.1" +description = "Jupyter protocol implementation and client libraries" +optional = false +python-versions = ">=3.8" +files = [ + {file = "jupyter_client-8.6.1-py3-none-any.whl", hash = "sha256:3b7bd22f058434e3b9a7ea4b1500ed47de2713872288c0d511d19926f99b459f"}, + {file = "jupyter_client-8.6.1.tar.gz", hash = "sha256:e842515e2bab8e19186d89fdfea7abd15e39dd581f94e399f00e2af5a1652d3f"}, +] + +[package.dependencies] +jupyter-core = ">=4.12,<5.0.dev0 || >=5.1.dev0" +python-dateutil = ">=2.8.2" +pyzmq = ">=23.0" +tornado = ">=6.2" +traitlets = ">=5.3" + +[package.extras] +docs = ["ipykernel", "myst-parser", "pydata-sphinx-theme", "sphinx (>=4)", "sphinx-autodoc-typehints", "sphinxcontrib-github-alt", "sphinxcontrib-spelling"] +test = ["coverage", "ipykernel (>=6.14)", "mypy", "paramiko", "pre-commit", "pytest", "pytest-cov", "pytest-jupyter[client] (>=0.4.1)", "pytest-timeout"] + +[[package]] +name = "jupyter-console" +version = "6.6.3" +description = "Jupyter terminal console" +optional = false +python-versions = ">=3.7" +files = [ + {file = "jupyter_console-6.6.3-py3-none-any.whl", hash = "sha256:309d33409fcc92ffdad25f0bcdf9a4a9daa61b6f341177570fdac03de5352485"}, + {file = "jupyter_console-6.6.3.tar.gz", hash = "sha256:566a4bf31c87adbfadf22cdf846e3069b59a71ed5da71d6ba4d8aaad14a53539"}, +] + +[package.dependencies] +ipykernel = ">=6.14" +ipython = "*" +jupyter-client = 
">=7.0.0" +jupyter-core = ">=4.12,<5.0.dev0 || >=5.1.dev0" +prompt-toolkit = ">=3.0.30" +pygments = "*" +pyzmq = ">=17" +traitlets = ">=5.4" + +[package.extras] +test = ["flaky", "pexpect", "pytest"] + +[[package]] +name = "jupyter-core" +version = "5.7.2" +description = "Jupyter core package. A base package on which Jupyter projects rely." +optional = false +python-versions = ">=3.8" +files = [ + {file = "jupyter_core-5.7.2-py3-none-any.whl", hash = "sha256:4f7315d2f6b4bcf2e3e7cb6e46772eba760ae459cd1f59d29eb57b0a01bd7409"}, + {file = "jupyter_core-5.7.2.tar.gz", hash = "sha256:aa5f8d32bbf6b431ac830496da7392035d6f61b4f54872f15c4bd2a9c3f536d9"}, +] + +[package.dependencies] +platformdirs = ">=2.5" +pywin32 = {version = ">=300", markers = "sys_platform == \"win32\" and platform_python_implementation != \"PyPy\""} +traitlets = ">=5.3" + +[package.extras] +docs = ["myst-parser", "pydata-sphinx-theme", "sphinx-autodoc-typehints", "sphinxcontrib-github-alt", "sphinxcontrib-spelling", "traitlets"] +test = ["ipykernel", "pre-commit", "pytest (<8)", "pytest-cov", "pytest-timeout"] + +[[package]] +name = "jupyter-events" +version = "0.10.0" +description = "Jupyter Event System library" +optional = false +python-versions = ">=3.8" +files = [ + {file = "jupyter_events-0.10.0-py3-none-any.whl", hash = "sha256:4b72130875e59d57716d327ea70d3ebc3af1944d3717e5a498b8a06c6c159960"}, + {file = "jupyter_events-0.10.0.tar.gz", hash = "sha256:670b8229d3cc882ec782144ed22e0d29e1c2d639263f92ca8383e66682845e22"}, +] + +[package.dependencies] +jsonschema = {version = ">=4.18.0", extras = ["format-nongpl"]} +python-json-logger = ">=2.0.4" +pyyaml = ">=5.3" +referencing = "*" +rfc3339-validator = "*" +rfc3986-validator = ">=0.1.1" +traitlets = ">=5.3" + +[package.extras] +cli = ["click", "rich"] +docs = ["jupyterlite-sphinx", "myst-parser", "pydata-sphinx-theme", "sphinxcontrib-spelling"] +test = ["click", "pre-commit", "pytest (>=7.0)", "pytest-asyncio (>=0.19.0)", "pytest-console-scripts", 
"rich"] + +[[package]] +name = "jupyter-lsp" +version = "2.2.5" +description = "Multi-Language Server WebSocket proxy for Jupyter Notebook/Lab server" +optional = false +python-versions = ">=3.8" +files = [ + {file = "jupyter-lsp-2.2.5.tar.gz", hash = "sha256:793147a05ad446f809fd53ef1cd19a9f5256fd0a2d6b7ce943a982cb4f545001"}, + {file = "jupyter_lsp-2.2.5-py3-none-any.whl", hash = "sha256:45fbddbd505f3fbfb0b6cb2f1bc5e15e83ab7c79cd6e89416b248cb3c00c11da"}, +] + +[package.dependencies] +jupyter-server = ">=1.1.2" + +[[package]] +name = "jupyter-server" +version = "2.14.0" +description = "The backend—i.e. core services, APIs, and REST endpoints—to Jupyter web applications." +optional = false +python-versions = ">=3.8" +files = [ + {file = "jupyter_server-2.14.0-py3-none-any.whl", hash = "sha256:fb6be52c713e80e004fac34b35a0990d6d36ba06fd0a2b2ed82b899143a64210"}, + {file = "jupyter_server-2.14.0.tar.gz", hash = "sha256:659154cea512083434fd7c93b7fe0897af7a2fd0b9dd4749282b42eaac4ae677"}, +] + +[package.dependencies] +anyio = ">=3.1.0" +argon2-cffi = ">=21.1" +jinja2 = ">=3.0.3" +jupyter-client = ">=7.4.4" +jupyter-core = ">=4.12,<5.0.dev0 || >=5.1.dev0" +jupyter-events = ">=0.9.0" +jupyter-server-terminals = ">=0.4.4" +nbconvert = ">=6.4.4" +nbformat = ">=5.3.0" +overrides = ">=5.0" +packaging = ">=22.0" +prometheus-client = ">=0.9" +pywinpty = {version = ">=2.0.1", markers = "os_name == \"nt\""} +pyzmq = ">=24" +send2trash = ">=1.8.2" +terminado = ">=0.8.3" +tornado = ">=6.2.0" +traitlets = ">=5.6.0" +websocket-client = ">=1.7" + +[package.extras] +docs = ["ipykernel", "jinja2", "jupyter-client", "jupyter-server", "myst-parser", "nbformat", "prometheus-client", "pydata-sphinx-theme", "send2trash", "sphinx-autodoc-typehints", "sphinxcontrib-github-alt", "sphinxcontrib-openapi (>=0.8.0)", "sphinxcontrib-spelling", "sphinxemoji", "tornado", "typing-extensions"] +test = ["flaky", "ipykernel", "pre-commit", "pytest (>=7.0,<9)", "pytest-console-scripts", "pytest-jupyter[server] 
(>=0.7)", "pytest-timeout", "requests"] + +[[package]] +name = "jupyter-server-terminals" +version = "0.5.3" +description = "A Jupyter Server Extension Providing Terminals." +optional = false +python-versions = ">=3.8" +files = [ + {file = "jupyter_server_terminals-0.5.3-py3-none-any.whl", hash = "sha256:41ee0d7dc0ebf2809c668e0fc726dfaf258fcd3e769568996ca731b6194ae9aa"}, + {file = "jupyter_server_terminals-0.5.3.tar.gz", hash = "sha256:5ae0295167220e9ace0edcfdb212afd2b01ee8d179fe6f23c899590e9b8a5269"}, +] + +[package.dependencies] +pywinpty = {version = ">=2.0.3", markers = "os_name == \"nt\""} +terminado = ">=0.8.3" + +[package.extras] +docs = ["jinja2", "jupyter-server", "mistune (<4.0)", "myst-parser", "nbformat", "packaging", "pydata-sphinx-theme", "sphinxcontrib-github-alt", "sphinxcontrib-openapi", "sphinxcontrib-spelling", "sphinxemoji", "tornado"] +test = ["jupyter-server (>=2.0.0)", "pytest (>=7.0)", "pytest-jupyter[server] (>=0.5.3)", "pytest-timeout"] + +[[package]] +name = "jupyterlab" +version = "4.1.8" +description = "JupyterLab computational environment" +optional = false +python-versions = ">=3.8" +files = [ + {file = "jupyterlab-4.1.8-py3-none-any.whl", hash = "sha256:c3baf3a2f91f89d110ed5786cd18672b9a357129d4e389d2a0dead15e11a4d2c"}, + {file = "jupyterlab-4.1.8.tar.gz", hash = "sha256:3384aded8680e7ce504fd63b8bb89a39df21c9c7694d9e7dc4a68742cdb30f9b"}, +] + +[package.dependencies] +async-lru = ">=1.0.0" +httpx = ">=0.25.0" +ipykernel = ">=6.5.0" +jinja2 = ">=3.0.3" +jupyter-core = "*" +jupyter-lsp = ">=2.0.0" +jupyter-server = ">=2.4.0,<3" +jupyterlab-server = ">=2.27.1,<3" +notebook-shim = ">=0.2" +packaging = "*" +tomli = {version = ">=1.2.2", markers = "python_version < \"3.11\""} +tornado = ">=6.2.0" +traitlets = "*" + +[package.extras] +dev = ["build", "bump2version", "coverage", "hatch", "pre-commit", "pytest-cov", "ruff (==0.2.0)"] +docs = ["jsx-lexer", "myst-parser", "pydata-sphinx-theme (>=0.13.0)", "pytest", "pytest-check-links", 
"pytest-jupyter", "sphinx (>=1.8,<7.3.0)", "sphinx-copybutton"] +docs-screenshots = ["altair (==5.2.0)", "ipython (==8.16.1)", "ipywidgets (==8.1.1)", "jupyterlab-geojson (==3.4.0)", "jupyterlab-language-pack-zh-cn (==4.0.post6)", "matplotlib (==3.8.2)", "nbconvert (>=7.0.0)", "pandas (==2.2.0)", "scipy (==1.12.0)", "vega-datasets (==0.9.0)"] +test = ["coverage", "pytest (>=7.0)", "pytest-check-links (>=0.7)", "pytest-console-scripts", "pytest-cov", "pytest-jupyter (>=0.5.3)", "pytest-timeout", "pytest-tornasync", "requests", "requests-cache", "virtualenv"] +upgrade-extension = ["copier (>=8.0,<9.0)", "jinja2-time (<0.3)", "pydantic (<2.0)", "pyyaml-include (<2.0)", "tomli-w (<2.0)"] + +[[package]] +name = "jupyterlab-pygments" +version = "0.3.0" +description = "Pygments theme using JupyterLab CSS variables" +optional = false +python-versions = ">=3.8" +files = [ + {file = "jupyterlab_pygments-0.3.0-py3-none-any.whl", hash = "sha256:841a89020971da1d8693f1a99997aefc5dc424bb1b251fd6322462a1b8842780"}, + {file = "jupyterlab_pygments-0.3.0.tar.gz", hash = "sha256:721aca4d9029252b11cfa9d185e5b5af4d54772bb8072f9b7036f4170054d35d"}, +] + +[[package]] +name = "jupyterlab-server" +version = "2.27.1" +description = "A set of server components for JupyterLab and JupyterLab like applications." 
+optional = false +python-versions = ">=3.8" +files = [ + {file = "jupyterlab_server-2.27.1-py3-none-any.whl", hash = "sha256:f5e26156e5258b24d532c84e7c74cc212e203bff93eb856f81c24c16daeecc75"}, + {file = "jupyterlab_server-2.27.1.tar.gz", hash = "sha256:097b5ac709b676c7284ac9c5e373f11930a561f52cd5a86e4fc7e5a9c8a8631d"}, +] + +[package.dependencies] +babel = ">=2.10" +jinja2 = ">=3.0.3" +json5 = ">=0.9.0" +jsonschema = ">=4.18.0" +jupyter-server = ">=1.21,<3" +packaging = ">=21.3" +requests = ">=2.31" + +[package.extras] +docs = ["autodoc-traits", "jinja2 (<3.2.0)", "mistune (<4)", "myst-parser", "pydata-sphinx-theme", "sphinx", "sphinx-copybutton", "sphinxcontrib-openapi (>0.8)"] +openapi = ["openapi-core (>=0.18.0,<0.19.0)", "ruamel-yaml"] +test = ["hatch", "ipykernel", "openapi-core (>=0.18.0,<0.19.0)", "openapi-spec-validator (>=0.6.0,<0.8.0)", "pytest (>=7.0,<8)", "pytest-console-scripts", "pytest-cov", "pytest-jupyter[server] (>=0.6.2)", "pytest-timeout", "requests-mock", "ruamel-yaml", "sphinxcontrib-spelling", "strict-rfc3339", "werkzeug"] + +[[package]] +name = "jupyterlab-widgets" +version = "3.0.10" +description = "Jupyter interactive widgets for JupyterLab" +optional = false +python-versions = ">=3.7" +files = [ + {file = "jupyterlab_widgets-3.0.10-py3-none-any.whl", hash = "sha256:dd61f3ae7a5a7f80299e14585ce6cf3d6925a96c9103c978eda293197730cb64"}, + {file = "jupyterlab_widgets-3.0.10.tar.gz", hash = "sha256:04f2ac04976727e4f9d0fa91cdc2f1ab860f965e504c29dbd6a65c882c9d04c0"}, +] + +[[package]] +name = "markupsafe" +version = "2.1.5" +description = "Safely add untrusted strings to HTML/XML markup." 
+optional = false +python-versions = ">=3.7" +files = [ + {file = "MarkupSafe-2.1.5-cp310-cp310-macosx_10_9_universal2.whl", hash = "sha256:a17a92de5231666cfbe003f0e4b9b3a7ae3afb1ec2845aadc2bacc93ff85febc"}, + {file = "MarkupSafe-2.1.5-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:72b6be590cc35924b02c78ef34b467da4ba07e4e0f0454a2c5907f473fc50ce5"}, + {file = "MarkupSafe-2.1.5-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:e61659ba32cf2cf1481e575d0462554625196a1f2fc06a1c777d3f48e8865d46"}, + {file = "MarkupSafe-2.1.5-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:2174c595a0d73a3080ca3257b40096db99799265e1c27cc5a610743acd86d62f"}, + {file = "MarkupSafe-2.1.5-cp310-cp310-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:ae2ad8ae6ebee9d2d94b17fb62763125f3f374c25618198f40cbb8b525411900"}, + {file = "MarkupSafe-2.1.5-cp310-cp310-musllinux_1_1_aarch64.whl", hash = "sha256:075202fa5b72c86ad32dc7d0b56024ebdbcf2048c0ba09f1cde31bfdd57bcfff"}, + {file = "MarkupSafe-2.1.5-cp310-cp310-musllinux_1_1_i686.whl", hash = "sha256:598e3276b64aff0e7b3451b72e94fa3c238d452e7ddcd893c3ab324717456bad"}, + {file = "MarkupSafe-2.1.5-cp310-cp310-musllinux_1_1_x86_64.whl", hash = "sha256:fce659a462a1be54d2ffcacea5e3ba2d74daa74f30f5f143fe0c58636e355fdd"}, + {file = "MarkupSafe-2.1.5-cp310-cp310-win32.whl", hash = "sha256:d9fad5155d72433c921b782e58892377c44bd6252b5af2f67f16b194987338a4"}, + {file = "MarkupSafe-2.1.5-cp310-cp310-win_amd64.whl", hash = "sha256:bf50cd79a75d181c9181df03572cdce0fbb75cc353bc350712073108cba98de5"}, + {file = "MarkupSafe-2.1.5-cp311-cp311-macosx_10_9_universal2.whl", hash = "sha256:629ddd2ca402ae6dbedfceeba9c46d5f7b2a61d9749597d4307f943ef198fc1f"}, + {file = "MarkupSafe-2.1.5-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:5b7b716f97b52c5a14bffdf688f971b2d5ef4029127f1ad7a513973cfd818df2"}, + {file = 
"MarkupSafe-2.1.5-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:6ec585f69cec0aa07d945b20805be741395e28ac1627333b1c5b0105962ffced"}, + {file = "MarkupSafe-2.1.5-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:b91c037585eba9095565a3556f611e3cbfaa42ca1e865f7b8015fe5c7336d5a5"}, + {file = "MarkupSafe-2.1.5-cp311-cp311-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:7502934a33b54030eaf1194c21c692a534196063db72176b0c4028e140f8f32c"}, + {file = "MarkupSafe-2.1.5-cp311-cp311-musllinux_1_1_aarch64.whl", hash = "sha256:0e397ac966fdf721b2c528cf028494e86172b4feba51d65f81ffd65c63798f3f"}, + {file = "MarkupSafe-2.1.5-cp311-cp311-musllinux_1_1_i686.whl", hash = "sha256:c061bb86a71b42465156a3ee7bd58c8c2ceacdbeb95d05a99893e08b8467359a"}, + {file = "MarkupSafe-2.1.5-cp311-cp311-musllinux_1_1_x86_64.whl", hash = "sha256:3a57fdd7ce31c7ff06cdfbf31dafa96cc533c21e443d57f5b1ecc6cdc668ec7f"}, + {file = "MarkupSafe-2.1.5-cp311-cp311-win32.whl", hash = "sha256:397081c1a0bfb5124355710fe79478cdbeb39626492b15d399526ae53422b906"}, + {file = "MarkupSafe-2.1.5-cp311-cp311-win_amd64.whl", hash = "sha256:2b7c57a4dfc4f16f7142221afe5ba4e093e09e728ca65c51f5620c9aaeb9a617"}, + {file = "MarkupSafe-2.1.5-cp312-cp312-macosx_10_9_universal2.whl", hash = "sha256:8dec4936e9c3100156f8a2dc89c4b88d5c435175ff03413b443469c7c8c5f4d1"}, + {file = "MarkupSafe-2.1.5-cp312-cp312-macosx_10_9_x86_64.whl", hash = "sha256:3c6b973f22eb18a789b1460b4b91bf04ae3f0c4234a0a6aa6b0a92f6f7b951d4"}, + {file = "MarkupSafe-2.1.5-cp312-cp312-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:ac07bad82163452a6884fe8fa0963fb98c2346ba78d779ec06bd7a6262132aee"}, + {file = "MarkupSafe-2.1.5-cp312-cp312-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:f5dfb42c4604dddc8e4305050aa6deb084540643ed5804d7455b5df8fe16f5e5"}, + {file = 
"MarkupSafe-2.1.5-cp312-cp312-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:ea3d8a3d18833cf4304cd2fc9cbb1efe188ca9b5efef2bdac7adc20594a0e46b"}, + {file = "MarkupSafe-2.1.5-cp312-cp312-musllinux_1_1_aarch64.whl", hash = "sha256:d050b3361367a06d752db6ead6e7edeb0009be66bc3bae0ee9d97fb326badc2a"}, + {file = "MarkupSafe-2.1.5-cp312-cp312-musllinux_1_1_i686.whl", hash = "sha256:bec0a414d016ac1a18862a519e54b2fd0fc8bbfd6890376898a6c0891dd82e9f"}, + {file = "MarkupSafe-2.1.5-cp312-cp312-musllinux_1_1_x86_64.whl", hash = "sha256:58c98fee265677f63a4385256a6d7683ab1832f3ddd1e66fe948d5880c21a169"}, + {file = "MarkupSafe-2.1.5-cp312-cp312-win32.whl", hash = "sha256:8590b4ae07a35970728874632fed7bd57b26b0102df2d2b233b6d9d82f6c62ad"}, + {file = "MarkupSafe-2.1.5-cp312-cp312-win_amd64.whl", hash = "sha256:823b65d8706e32ad2df51ed89496147a42a2a6e01c13cfb6ffb8b1e92bc910bb"}, + {file = "MarkupSafe-2.1.5-cp37-cp37m-macosx_10_9_x86_64.whl", hash = "sha256:c8b29db45f8fe46ad280a7294f5c3ec36dbac9491f2d1c17345be8e69cc5928f"}, + {file = "MarkupSafe-2.1.5-cp37-cp37m-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:ec6a563cff360b50eed26f13adc43e61bc0c04d94b8be985e6fb24b81f6dcfdf"}, + {file = "MarkupSafe-2.1.5-cp37-cp37m-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:a549b9c31bec33820e885335b451286e2969a2d9e24879f83fe904a5ce59d70a"}, + {file = "MarkupSafe-2.1.5-cp37-cp37m-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:4f11aa001c540f62c6166c7726f71f7573b52c68c31f014c25cc7901deea0b52"}, + {file = "MarkupSafe-2.1.5-cp37-cp37m-musllinux_1_1_aarch64.whl", hash = "sha256:7b2e5a267c855eea6b4283940daa6e88a285f5f2a67f2220203786dfa59b37e9"}, + {file = "MarkupSafe-2.1.5-cp37-cp37m-musllinux_1_1_i686.whl", hash = "sha256:2d2d793e36e230fd32babe143b04cec8a8b3eb8a3122d2aceb4a371e6b09b8df"}, + {file = "MarkupSafe-2.1.5-cp37-cp37m-musllinux_1_1_x86_64.whl", hash = 
"sha256:ce409136744f6521e39fd8e2a24c53fa18ad67aa5bc7c2cf83645cce5b5c4e50"}, + {file = "MarkupSafe-2.1.5-cp37-cp37m-win32.whl", hash = "sha256:4096e9de5c6fdf43fb4f04c26fb114f61ef0bf2e5604b6ee3019d51b69e8c371"}, + {file = "MarkupSafe-2.1.5-cp37-cp37m-win_amd64.whl", hash = "sha256:4275d846e41ecefa46e2015117a9f491e57a71ddd59bbead77e904dc02b1bed2"}, + {file = "MarkupSafe-2.1.5-cp38-cp38-macosx_10_9_universal2.whl", hash = "sha256:656f7526c69fac7f600bd1f400991cc282b417d17539a1b228617081106feb4a"}, + {file = "MarkupSafe-2.1.5-cp38-cp38-macosx_10_9_x86_64.whl", hash = "sha256:97cafb1f3cbcd3fd2b6fbfb99ae11cdb14deea0736fc2b0952ee177f2b813a46"}, + {file = "MarkupSafe-2.1.5-cp38-cp38-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:1f3fbcb7ef1f16e48246f704ab79d79da8a46891e2da03f8783a5b6fa41a9532"}, + {file = "MarkupSafe-2.1.5-cp38-cp38-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:fa9db3f79de01457b03d4f01b34cf91bc0048eb2c3846ff26f66687c2f6d16ab"}, + {file = "MarkupSafe-2.1.5-cp38-cp38-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:ffee1f21e5ef0d712f9033568f8344d5da8cc2869dbd08d87c84656e6a2d2f68"}, + {file = "MarkupSafe-2.1.5-cp38-cp38-musllinux_1_1_aarch64.whl", hash = "sha256:5dedb4db619ba5a2787a94d877bc8ffc0566f92a01c0ef214865e54ecc9ee5e0"}, + {file = "MarkupSafe-2.1.5-cp38-cp38-musllinux_1_1_i686.whl", hash = "sha256:30b600cf0a7ac9234b2638fbc0fb6158ba5bdcdf46aeb631ead21248b9affbc4"}, + {file = "MarkupSafe-2.1.5-cp38-cp38-musllinux_1_1_x86_64.whl", hash = "sha256:8dd717634f5a044f860435c1d8c16a270ddf0ef8588d4887037c5028b859b0c3"}, + {file = "MarkupSafe-2.1.5-cp38-cp38-win32.whl", hash = "sha256:daa4ee5a243f0f20d528d939d06670a298dd39b1ad5f8a72a4275124a7819eff"}, + {file = "MarkupSafe-2.1.5-cp38-cp38-win_amd64.whl", hash = "sha256:619bc166c4f2de5caa5a633b8b7326fbe98e0ccbfacabd87268a2b15ff73a029"}, + {file = "MarkupSafe-2.1.5-cp39-cp39-macosx_10_9_universal2.whl", hash = 
"sha256:7a68b554d356a91cce1236aa7682dc01df0edba8d043fd1ce607c49dd3c1edcf"}, + {file = "MarkupSafe-2.1.5-cp39-cp39-macosx_10_9_x86_64.whl", hash = "sha256:db0b55e0f3cc0be60c1f19efdde9a637c32740486004f20d1cff53c3c0ece4d2"}, + {file = "MarkupSafe-2.1.5-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:3e53af139f8579a6d5f7b76549125f0d94d7e630761a2111bc431fd820e163b8"}, + {file = "MarkupSafe-2.1.5-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:17b950fccb810b3293638215058e432159d2b71005c74371d784862b7e4683f3"}, + {file = "MarkupSafe-2.1.5-cp39-cp39-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:4c31f53cdae6ecfa91a77820e8b151dba54ab528ba65dfd235c80b086d68a465"}, + {file = "MarkupSafe-2.1.5-cp39-cp39-musllinux_1_1_aarch64.whl", hash = "sha256:bff1b4290a66b490a2f4719358c0cdcd9bafb6b8f061e45c7a2460866bf50c2e"}, + {file = "MarkupSafe-2.1.5-cp39-cp39-musllinux_1_1_i686.whl", hash = "sha256:bc1667f8b83f48511b94671e0e441401371dfd0f0a795c7daa4a3cd1dde55bea"}, + {file = "MarkupSafe-2.1.5-cp39-cp39-musllinux_1_1_x86_64.whl", hash = "sha256:5049256f536511ee3f7e1b3f87d1d1209d327e818e6ae1365e8653d7e3abb6a6"}, + {file = "MarkupSafe-2.1.5-cp39-cp39-win32.whl", hash = "sha256:00e046b6dd71aa03a41079792f8473dc494d564611a8f89bbbd7cb93295ebdcf"}, + {file = "MarkupSafe-2.1.5-cp39-cp39-win_amd64.whl", hash = "sha256:fa173ec60341d6bb97a89f5ea19c85c5643c1e7dedebc22f5181eb73573142c5"}, + {file = "MarkupSafe-2.1.5.tar.gz", hash = "sha256:d283d37a890ba4c1ae73ffadf8046435c76e7bc2247bbb63c00bd1a709c6544b"}, +] + +[[package]] +name = "matplotlib-inline" +version = "0.1.7" +description = "Inline Matplotlib backend for Jupyter" +optional = false +python-versions = ">=3.8" +files = [ + {file = "matplotlib_inline-0.1.7-py3-none-any.whl", hash = "sha256:df192d39a4ff8f21b1895d72e6a13f5fcc5099f00fa84384e0ea28c2cc0653ca"}, + {file = "matplotlib_inline-0.1.7.tar.gz", hash = 
"sha256:8423b23ec666be3d16e16b60bdd8ac4e86e840ebd1dd11a30b9f117f2fa0ab90"}, +] + +[package.dependencies] +traitlets = "*" + +[[package]] +name = "mccabe" +version = "0.7.0" +description = "McCabe checker, plugin for flake8" +optional = false +python-versions = ">=3.6" +files = [ + {file = "mccabe-0.7.0-py2.py3-none-any.whl", hash = "sha256:6c2d30ab6be0e4a46919781807b4f0d834ebdd6c6e3dca0bda5a15f863427b6e"}, + {file = "mccabe-0.7.0.tar.gz", hash = "sha256:348e0240c33b60bbdf4e523192ef919f28cb2c3d7d5c7794f74009290f236325"}, +] + +[[package]] +name = "mistune" +version = "3.0.2" +description = "A sane and fast Markdown parser with useful plugins and renderers" +optional = false +python-versions = ">=3.7" +files = [ + {file = "mistune-3.0.2-py3-none-any.whl", hash = "sha256:71481854c30fdbc938963d3605b72501f5c10a9320ecd412c121c163a1c7d205"}, + {file = "mistune-3.0.2.tar.gz", hash = "sha256:fc7f93ded930c92394ef2cb6f04a8aabab4117a91449e72dcc8dfa646a508be8"}, +] + +[[package]] +name = "mypy" +version = "1.10.0" +description = "Optional static typing for Python" +optional = false +python-versions = ">=3.8" +files = [ + {file = "mypy-1.10.0-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:da1cbf08fb3b851ab3b9523a884c232774008267b1f83371ace57f412fe308c2"}, + {file = "mypy-1.10.0-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:12b6bfc1b1a66095ab413160a6e520e1dc076a28f3e22f7fb25ba3b000b4ef99"}, + {file = "mypy-1.10.0-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:9e36fb078cce9904c7989b9693e41cb9711e0600139ce3970c6ef814b6ebc2b2"}, + {file = "mypy-1.10.0-cp310-cp310-musllinux_1_1_x86_64.whl", hash = "sha256:2b0695d605ddcd3eb2f736cd8b4e388288c21e7de85001e9f85df9187f2b50f9"}, + {file = "mypy-1.10.0-cp310-cp310-win_amd64.whl", hash = "sha256:cd777b780312ddb135bceb9bc8722a73ec95e042f911cc279e2ec3c667076051"}, + {file = "mypy-1.10.0-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:3be66771aa5c97602f382230165b856c231d1277c511c9a8dd058be4784472e1"}, + 
{file = "mypy-1.10.0-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:8b2cbaca148d0754a54d44121b5825ae71868c7592a53b7292eeb0f3fdae95ee"}, + {file = "mypy-1.10.0-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:1ec404a7cbe9fc0e92cb0e67f55ce0c025014e26d33e54d9e506a0f2d07fe5de"}, + {file = "mypy-1.10.0-cp311-cp311-musllinux_1_1_x86_64.whl", hash = "sha256:e22e1527dc3d4aa94311d246b59e47f6455b8729f4968765ac1eacf9a4760bc7"}, + {file = "mypy-1.10.0-cp311-cp311-win_amd64.whl", hash = "sha256:a87dbfa85971e8d59c9cc1fcf534efe664d8949e4c0b6b44e8ca548e746a8d53"}, + {file = "mypy-1.10.0-cp312-cp312-macosx_10_9_x86_64.whl", hash = "sha256:a781f6ad4bab20eef8b65174a57e5203f4be627b46291f4589879bf4e257b97b"}, + {file = "mypy-1.10.0-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:b808e12113505b97d9023b0b5e0c0705a90571c6feefc6f215c1df9381256e30"}, + {file = "mypy-1.10.0-cp312-cp312-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:8f55583b12156c399dce2df7d16f8a5095291354f1e839c252ec6c0611e86e2e"}, + {file = "mypy-1.10.0-cp312-cp312-musllinux_1_1_x86_64.whl", hash = "sha256:4cf18f9d0efa1b16478c4c129eabec36148032575391095f73cae2e722fcf9d5"}, + {file = "mypy-1.10.0-cp312-cp312-win_amd64.whl", hash = "sha256:bc6ac273b23c6b82da3bb25f4136c4fd42665f17f2cd850771cb600bdd2ebeda"}, + {file = "mypy-1.10.0-cp38-cp38-macosx_10_9_x86_64.whl", hash = "sha256:9fd50226364cd2737351c79807775136b0abe084433b55b2e29181a4c3c878c0"}, + {file = "mypy-1.10.0-cp38-cp38-macosx_11_0_arm64.whl", hash = "sha256:f90cff89eea89273727d8783fef5d4a934be2fdca11b47def50cf5d311aff727"}, + {file = "mypy-1.10.0-cp38-cp38-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:fcfc70599efde5c67862a07a1aaf50e55bce629ace26bb19dc17cece5dd31ca4"}, + {file = "mypy-1.10.0-cp38-cp38-musllinux_1_1_x86_64.whl", hash = "sha256:075cbf81f3e134eadaf247de187bd604748171d6b79736fa9b6c9685b4083061"}, + {file = "mypy-1.10.0-cp38-cp38-win_amd64.whl", hash = 
"sha256:3f298531bca95ff615b6e9f2fc0333aae27fa48052903a0ac90215021cdcfa4f"}, + {file = "mypy-1.10.0-cp39-cp39-macosx_10_9_x86_64.whl", hash = "sha256:fa7ef5244615a2523b56c034becde4e9e3f9b034854c93639adb667ec9ec2976"}, + {file = "mypy-1.10.0-cp39-cp39-macosx_11_0_arm64.whl", hash = "sha256:3236a4c8f535a0631f85f5fcdffba71c7feeef76a6002fcba7c1a8e57c8be1ec"}, + {file = "mypy-1.10.0-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:4a2b5cdbb5dd35aa08ea9114436e0d79aceb2f38e32c21684dcf8e24e1e92821"}, + {file = "mypy-1.10.0-cp39-cp39-musllinux_1_1_x86_64.whl", hash = "sha256:92f93b21c0fe73dc00abf91022234c79d793318b8a96faac147cd579c1671746"}, + {file = "mypy-1.10.0-cp39-cp39-win_amd64.whl", hash = "sha256:28d0e038361b45f099cc086d9dd99c15ff14d0188f44ac883010e172ce86c38a"}, + {file = "mypy-1.10.0-py3-none-any.whl", hash = "sha256:f8c083976eb530019175aabadb60921e73b4f45736760826aa1689dda8208aee"}, + {file = "mypy-1.10.0.tar.gz", hash = "sha256:3d087fcbec056c4ee34974da493a826ce316947485cef3901f511848e687c131"}, +] + +[package.dependencies] +mypy-extensions = ">=1.0.0" +tomli = {version = ">=1.1.0", markers = "python_version < \"3.11\""} +typing-extensions = ">=4.1.0" + +[package.extras] +dmypy = ["psutil (>=4.0)"] +install-types = ["pip"] +mypyc = ["setuptools (>=50)"] +reports = ["lxml"] + +[[package]] +name = "mypy-extensions" +version = "1.0.0" +description = "Type system extensions for programs checked with the mypy type checker." +optional = false +python-versions = ">=3.5" +files = [ + {file = "mypy_extensions-1.0.0-py3-none-any.whl", hash = "sha256:4392f6c0eb8a5668a69e23d168ffa70f0be9ccfd32b5cc2d26a34ae5b844552d"}, + {file = "mypy_extensions-1.0.0.tar.gz", hash = "sha256:75dbf8955dc00442a438fc4d0666508a9a97b6bd41aa2f0ffe9d2f2725af0782"}, +] + +[[package]] +name = "nbclient" +version = "0.10.0" +description = "A client library for executing notebooks. Formerly nbconvert's ExecutePreprocessor." 
+optional = false +python-versions = ">=3.8.0" +files = [ + {file = "nbclient-0.10.0-py3-none-any.whl", hash = "sha256:f13e3529332a1f1f81d82a53210322476a168bb7090a0289c795fe9cc11c9d3f"}, + {file = "nbclient-0.10.0.tar.gz", hash = "sha256:4b3f1b7dba531e498449c4db4f53da339c91d449dc11e9af3a43b4eb5c5abb09"}, +] + +[package.dependencies] +jupyter-client = ">=6.1.12" +jupyter-core = ">=4.12,<5.0.dev0 || >=5.1.dev0" +nbformat = ">=5.1" +traitlets = ">=5.4" + +[package.extras] +dev = ["pre-commit"] +docs = ["autodoc-traits", "mock", "moto", "myst-parser", "nbclient[test]", "sphinx (>=1.7)", "sphinx-book-theme", "sphinxcontrib-spelling"] +test = ["flaky", "ipykernel (>=6.19.3)", "ipython", "ipywidgets", "nbconvert (>=7.0.0)", "pytest (>=7.0,<8)", "pytest-asyncio", "pytest-cov (>=4.0)", "testpath", "xmltodict"] + +[[package]] +name = "nbconvert" +version = "7.16.4" +description = "Converting Jupyter Notebooks (.ipynb files) to other formats. Output formats include asciidoc, html, latex, markdown, pdf, py, rst, script. nbconvert can be used both as a Python library (`import nbconvert`) or as a command line tool (invoked as `jupyter nbconvert ...`)." 
+optional = false +python-versions = ">=3.8" +files = [ + {file = "nbconvert-7.16.4-py3-none-any.whl", hash = "sha256:05873c620fe520b6322bf8a5ad562692343fe3452abda5765c7a34b7d1aa3eb3"}, + {file = "nbconvert-7.16.4.tar.gz", hash = "sha256:86ca91ba266b0a448dc96fa6c5b9d98affabde2867b363258703536807f9f7f4"}, +] + +[package.dependencies] +beautifulsoup4 = "*" +bleach = "!=5.0.0" +defusedxml = "*" +jinja2 = ">=3.0" +jupyter-core = ">=4.7" +jupyterlab-pygments = "*" +markupsafe = ">=2.0" +mistune = ">=2.0.3,<4" +nbclient = ">=0.5.0" +nbformat = ">=5.7" +packaging = "*" +pandocfilters = ">=1.4.1" +pygments = ">=2.4.1" +tinycss2 = "*" +traitlets = ">=5.1" + +[package.extras] +all = ["flaky", "ipykernel", "ipython", "ipywidgets (>=7.5)", "myst-parser", "nbsphinx (>=0.2.12)", "playwright", "pydata-sphinx-theme", "pyqtwebengine (>=5.15)", "pytest (>=7)", "sphinx (==5.0.2)", "sphinxcontrib-spelling", "tornado (>=6.1)"] +docs = ["ipykernel", "ipython", "myst-parser", "nbsphinx (>=0.2.12)", "pydata-sphinx-theme", "sphinx (==5.0.2)", "sphinxcontrib-spelling"] +qtpdf = ["pyqtwebengine (>=5.15)"] +qtpng = ["pyqtwebengine (>=5.15)"] +serve = ["tornado (>=6.1)"] +test = ["flaky", "ipykernel", "ipywidgets (>=7.5)", "pytest (>=7)"] +webpdf = ["playwright"] + +[[package]] +name = "nbformat" +version = "5.10.4" +description = "The Jupyter Notebook format" +optional = false +python-versions = ">=3.8" +files = [ + {file = "nbformat-5.10.4-py3-none-any.whl", hash = "sha256:3b48d6c8fbca4b299bf3982ea7db1af21580e4fec269ad087b9e81588891200b"}, + {file = "nbformat-5.10.4.tar.gz", hash = "sha256:322168b14f937a5d11362988ecac2a4952d3d8e3a2cbeb2319584631226d5b3a"}, +] + +[package.dependencies] +fastjsonschema = ">=2.15" +jsonschema = ">=2.6" +jupyter-core = ">=4.12,<5.0.dev0 || >=5.1.dev0" +traitlets = ">=5.1" + +[package.extras] +docs = ["myst-parser", "pydata-sphinx-theme", "sphinx", "sphinxcontrib-github-alt", "sphinxcontrib-spelling"] +test = ["pep440", "pre-commit", "pytest", "testpath"] + 
+[[package]] +name = "nest-asyncio" +version = "1.6.0" +description = "Patch asyncio to allow nested event loops" +optional = false +python-versions = ">=3.5" +files = [ + {file = "nest_asyncio-1.6.0-py3-none-any.whl", hash = "sha256:87af6efd6b5e897c81050477ef65c62e2b2f35d51703cae01aff2905b1852e1c"}, + {file = "nest_asyncio-1.6.0.tar.gz", hash = "sha256:6f172d5449aca15afd6c646851f4e31e02c598d553a667e38cafa997cfec55fe"}, +] + +[[package]] +name = "notebook" +version = "7.1.3" +description = "Jupyter Notebook - A web-based notebook environment for interactive computing" +optional = false +python-versions = ">=3.8" +files = [ + {file = "notebook-7.1.3-py3-none-any.whl", hash = "sha256:919b911e59f41f6e3857ce93c9d93535ba66bb090059712770e5968c07e1004d"}, + {file = "notebook-7.1.3.tar.gz", hash = "sha256:41fcebff44cf7bb9377180808bcbae066629b55d8c7722f1ebbe75ca44f9cfc1"}, +] + +[package.dependencies] +jupyter-server = ">=2.4.0,<3" +jupyterlab = ">=4.1.1,<4.2" +jupyterlab-server = ">=2.22.1,<3" +notebook-shim = ">=0.2,<0.3" +tornado = ">=6.2.0" + +[package.extras] +dev = ["hatch", "pre-commit"] +docs = ["myst-parser", "nbsphinx", "pydata-sphinx-theme", "sphinx (>=1.3.6)", "sphinxcontrib-github-alt", "sphinxcontrib-spelling"] +test = ["importlib-resources (>=5.0)", "ipykernel", "jupyter-server[test] (>=2.4.0,<3)", "jupyterlab-server[test] (>=2.22.1,<3)", "nbval", "pytest (>=7.0)", "pytest-console-scripts", "pytest-timeout", "pytest-tornasync", "requests"] + +[[package]] +name = "notebook-shim" +version = "0.2.4" +description = "A shim layer for notebook traits and config" +optional = false +python-versions = ">=3.7" +files = [ + {file = "notebook_shim-0.2.4-py3-none-any.whl", hash = "sha256:411a5be4e9dc882a074ccbcae671eda64cceb068767e9a3419096986560e1cef"}, + {file = "notebook_shim-0.2.4.tar.gz", hash = "sha256:b4b2cfa1b65d98307ca24361f5b30fe785b53c3fd07b7a47e89acb5e6ac638cb"}, +] + +[package.dependencies] +jupyter-server = ">=1.8,<3" + +[package.extras] +test = ["pytest", 
"pytest-console-scripts", "pytest-jupyter", "pytest-tornasync"] + +[[package]] +name = "numpy" +version = "1.26.4" +description = "Fundamental package for array computing in Python" +optional = false +python-versions = ">=3.9" +files = [ + {file = "numpy-1.26.4-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:9ff0f4f29c51e2803569d7a51c2304de5554655a60c5d776e35b4a41413830d0"}, + {file = "numpy-1.26.4-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:2e4ee3380d6de9c9ec04745830fd9e2eccb3e6cf790d39d7b98ffd19b0dd754a"}, + {file = "numpy-1.26.4-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:d209d8969599b27ad20994c8e41936ee0964e6da07478d6c35016bc386b66ad4"}, + {file = "numpy-1.26.4-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:ffa75af20b44f8dba823498024771d5ac50620e6915abac414251bd971b4529f"}, + {file = "numpy-1.26.4-cp310-cp310-musllinux_1_1_aarch64.whl", hash = "sha256:62b8e4b1e28009ef2846b4c7852046736bab361f7aeadeb6a5b89ebec3c7055a"}, + {file = "numpy-1.26.4-cp310-cp310-musllinux_1_1_x86_64.whl", hash = "sha256:a4abb4f9001ad2858e7ac189089c42178fcce737e4169dc61321660f1a96c7d2"}, + {file = "numpy-1.26.4-cp310-cp310-win32.whl", hash = "sha256:bfe25acf8b437eb2a8b2d49d443800a5f18508cd811fea3181723922a8a82b07"}, + {file = "numpy-1.26.4-cp310-cp310-win_amd64.whl", hash = "sha256:b97fe8060236edf3662adfc2c633f56a08ae30560c56310562cb4f95500022d5"}, + {file = "numpy-1.26.4-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:4c66707fabe114439db9068ee468c26bbdf909cac0fb58686a42a24de1760c71"}, + {file = "numpy-1.26.4-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:edd8b5fe47dab091176d21bb6de568acdd906d1887a4584a15a9a96a1dca06ef"}, + {file = "numpy-1.26.4-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:7ab55401287bfec946ced39700c053796e7cc0e3acbef09993a9ad2adba6ca6e"}, + {file = "numpy-1.26.4-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = 
"sha256:666dbfb6ec68962c033a450943ded891bed2d54e6755e35e5835d63f4f6931d5"}, + {file = "numpy-1.26.4-cp311-cp311-musllinux_1_1_aarch64.whl", hash = "sha256:96ff0b2ad353d8f990b63294c8986f1ec3cb19d749234014f4e7eb0112ceba5a"}, + {file = "numpy-1.26.4-cp311-cp311-musllinux_1_1_x86_64.whl", hash = "sha256:60dedbb91afcbfdc9bc0b1f3f402804070deed7392c23eb7a7f07fa857868e8a"}, + {file = "numpy-1.26.4-cp311-cp311-win32.whl", hash = "sha256:1af303d6b2210eb850fcf03064d364652b7120803a0b872f5211f5234b399f20"}, + {file = "numpy-1.26.4-cp311-cp311-win_amd64.whl", hash = "sha256:cd25bcecc4974d09257ffcd1f098ee778f7834c3ad767fe5db785be9a4aa9cb2"}, + {file = "numpy-1.26.4-cp312-cp312-macosx_10_9_x86_64.whl", hash = "sha256:b3ce300f3644fb06443ee2222c2201dd3a89ea6040541412b8fa189341847218"}, + {file = "numpy-1.26.4-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:03a8c78d01d9781b28a6989f6fa1bb2c4f2d51201cf99d3dd875df6fbd96b23b"}, + {file = "numpy-1.26.4-cp312-cp312-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:9fad7dcb1aac3c7f0584a5a8133e3a43eeb2fe127f47e3632d43d677c66c102b"}, + {file = "numpy-1.26.4-cp312-cp312-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:675d61ffbfa78604709862923189bad94014bef562cc35cf61d3a07bba02a7ed"}, + {file = "numpy-1.26.4-cp312-cp312-musllinux_1_1_aarch64.whl", hash = "sha256:ab47dbe5cc8210f55aa58e4805fe224dac469cde56b9f731a4c098b91917159a"}, + {file = "numpy-1.26.4-cp312-cp312-musllinux_1_1_x86_64.whl", hash = "sha256:1dda2e7b4ec9dd512f84935c5f126c8bd8b9f2fc001e9f54af255e8c5f16b0e0"}, + {file = "numpy-1.26.4-cp312-cp312-win32.whl", hash = "sha256:50193e430acfc1346175fcbdaa28ffec49947a06918b7b92130744e81e640110"}, + {file = "numpy-1.26.4-cp312-cp312-win_amd64.whl", hash = "sha256:08beddf13648eb95f8d867350f6a018a4be2e5ad54c8d8caed89ebca558b2818"}, + {file = "numpy-1.26.4-cp39-cp39-macosx_10_9_x86_64.whl", hash = "sha256:7349ab0fa0c429c82442a27a9673fc802ffdb7c7775fad780226cb234965e53c"}, + {file = 
"numpy-1.26.4-cp39-cp39-macosx_11_0_arm64.whl", hash = "sha256:52b8b60467cd7dd1e9ed082188b4e6bb35aa5cdd01777621a1658910745b90be"}, + {file = "numpy-1.26.4-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:d5241e0a80d808d70546c697135da2c613f30e28251ff8307eb72ba696945764"}, + {file = "numpy-1.26.4-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:f870204a840a60da0b12273ef34f7051e98c3b5961b61b0c2c1be6dfd64fbcd3"}, + {file = "numpy-1.26.4-cp39-cp39-musllinux_1_1_aarch64.whl", hash = "sha256:679b0076f67ecc0138fd2ede3a8fd196dddc2ad3254069bcb9faf9a79b1cebcd"}, + {file = "numpy-1.26.4-cp39-cp39-musllinux_1_1_x86_64.whl", hash = "sha256:47711010ad8555514b434df65f7d7b076bb8261df1ca9bb78f53d3b2db02e95c"}, + {file = "numpy-1.26.4-cp39-cp39-win32.whl", hash = "sha256:a354325ee03388678242a4d7ebcd08b5c727033fcff3b2f536aea978e15ee9e6"}, + {file = "numpy-1.26.4-cp39-cp39-win_amd64.whl", hash = "sha256:3373d5d70a5fe74a2c1bb6d2cfd9609ecf686d47a2d7b1d37a8f3b6bf6003aea"}, + {file = "numpy-1.26.4-pp39-pypy39_pp73-macosx_10_9_x86_64.whl", hash = "sha256:afedb719a9dcfc7eaf2287b839d8198e06dcd4cb5d276a3df279231138e83d30"}, + {file = "numpy-1.26.4-pp39-pypy39_pp73-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:95a7476c59002f2f6c590b9b7b998306fba6a5aa646b1e22ddfeaf8f78c3a29c"}, + {file = "numpy-1.26.4-pp39-pypy39_pp73-win_amd64.whl", hash = "sha256:7e50d0a0cc3189f9cb0aeb3a6a6af18c16f59f004b866cd2be1c14b36134a4a0"}, + {file = "numpy-1.26.4.tar.gz", hash = "sha256:2a02aba9ed12e4ac4eb3ea9421c420301a0c6460d9830d74a9df87efa4912010"}, +] + +[[package]] +name = "openai" +version = "1.25.0" +description = "The official Python library for the openai API" +optional = false +python-versions = ">=3.7.1" +files = [ + {file = "openai-1.25.0-py3-none-any.whl", hash = "sha256:d0cfdf6afb31a5dabf3b95966cb31f3c757a0edaf3228715409cb404b9933de0"}, + {file = "openai-1.25.0.tar.gz", hash = 
"sha256:22c35b26b8281cd2759b1a4c05ac99e2f2b26a9df71f90a0b4ddb75aa27adc81"}, +] + +[package.dependencies] +anyio = ">=3.5.0,<5" +distro = ">=1.7.0,<2" +httpx = ">=0.23.0,<1" +pydantic = ">=1.9.0,<3" +sniffio = "*" +tqdm = ">4" +typing-extensions = ">=4.7,<5" + +[package.extras] +datalib = ["numpy (>=1)", "pandas (>=1.2.3)", "pandas-stubs (>=1.1.0.11)"] + +[[package]] +name = "overrides" +version = "7.7.0" +description = "A decorator to automatically detect mismatch when overriding a method." +optional = false +python-versions = ">=3.6" +files = [ + {file = "overrides-7.7.0-py3-none-any.whl", hash = "sha256:c7ed9d062f78b8e4c1a7b70bd8796b35ead4d9f510227ef9c5dc7626c60d7e49"}, + {file = "overrides-7.7.0.tar.gz", hash = "sha256:55158fa3d93b98cc75299b1e67078ad9003ca27945c76162c1c0766d6f91820a"}, +] + +[[package]] +name = "packaging" +version = "24.0" +description = "Core utilities for Python packages" +optional = false +python-versions = ">=3.7" +files = [ + {file = "packaging-24.0-py3-none-any.whl", hash = "sha256:2ddfb553fdf02fb784c234c7ba6ccc288296ceabec964ad2eae3777778130bc5"}, + {file = "packaging-24.0.tar.gz", hash = "sha256:eb82c5e3e56209074766e6885bb04b8c38a0c015d0a30036ebe7ece34c9989e9"}, +] + +[[package]] +name = "pandocfilters" +version = "1.5.1" +description = "Utilities for writing pandoc filters in python" +optional = false +python-versions = ">=2.7, !=3.0.*, !=3.1.*, !=3.2.*, !=3.3.*" +files = [ + {file = "pandocfilters-1.5.1-py2.py3-none-any.whl", hash = "sha256:93be382804a9cdb0a7267585f157e5d1731bbe5545a85b268d6f5fe6232de2bc"}, + {file = "pandocfilters-1.5.1.tar.gz", hash = "sha256:002b4a555ee4ebc03f8b66307e287fa492e4a77b4ea14d3f934328297bb4939e"}, +] + +[[package]] +name = "parso" +version = "0.8.4" +description = "A Python Parser" +optional = false +python-versions = ">=3.6" +files = [ + {file = "parso-0.8.4-py2.py3-none-any.whl", hash = "sha256:a418670a20291dacd2dddc80c377c5c3791378ee1e8d12bffc35420643d43f18"}, + {file = "parso-0.8.4.tar.gz", hash = 
"sha256:eb3a7b58240fb99099a345571deecc0f9540ea5f4dd2fe14c2a99d6b281ab92d"}, +] + +[package.extras] +qa = ["flake8 (==5.0.4)", "mypy (==0.971)", "types-setuptools (==67.2.0.1)"] +testing = ["docopt", "pytest"] + +[[package]] +name = "pathspec" +version = "0.12.1" +description = "Utility library for gitignore style pattern matching of file paths." +optional = false +python-versions = ">=3.8" +files = [ + {file = "pathspec-0.12.1-py3-none-any.whl", hash = "sha256:a0d503e138a4c123b27490a4f7beda6a01c6f288df0e4a8b79c7eb0dc7b4cc08"}, + {file = "pathspec-0.12.1.tar.gz", hash = "sha256:a482d51503a1ab33b1c67a6c3813a26953dbdc71c31dacaef9a838c4e29f5712"}, +] + +[[package]] +name = "pexpect" +version = "4.9.0" +description = "Pexpect allows easy control of interactive console applications." +optional = false +python-versions = "*" +files = [ + {file = "pexpect-4.9.0-py2.py3-none-any.whl", hash = "sha256:7236d1e080e4936be2dc3e326cec0af72acf9212a7e1d060210e70a47e253523"}, + {file = "pexpect-4.9.0.tar.gz", hash = "sha256:ee7d41123f3c9911050ea2c2dac107568dc43b2d3b0c7557a33212c398ead30f"}, +] + +[package.dependencies] +ptyprocess = ">=0.5" + +[[package]] +name = "platformdirs" +version = "4.2.1" +description = "A small Python package for determining appropriate platform-specific dirs, e.g. a `user data dir`." 
+optional = false +python-versions = ">=3.8" +files = [ + {file = "platformdirs-4.2.1-py3-none-any.whl", hash = "sha256:17d5a1161b3fd67b390023cb2d3b026bbd40abde6fdb052dfbd3a29c3ba22ee1"}, + {file = "platformdirs-4.2.1.tar.gz", hash = "sha256:031cd18d4ec63ec53e82dceaac0417d218a6863f7745dfcc9efe7793b7039bdf"}, +] + +[package.extras] +docs = ["furo (>=2023.9.10)", "proselint (>=0.13)", "sphinx (>=7.2.6)", "sphinx-autodoc-typehints (>=1.25.2)"] +test = ["appdirs (==1.4.4)", "covdefaults (>=2.3)", "pytest (>=7.4.3)", "pytest-cov (>=4.1)", "pytest-mock (>=3.12)"] +type = ["mypy (>=1.8)"] + +[[package]] +name = "pluggy" +version = "1.5.0" +description = "plugin and hook calling mechanisms for python" +optional = false +python-versions = ">=3.8" +files = [ + {file = "pluggy-1.5.0-py3-none-any.whl", hash = "sha256:44e1ad92c8ca002de6377e165f3e0f1be63266ab4d554740532335b9d75ea669"}, + {file = "pluggy-1.5.0.tar.gz", hash = "sha256:2cffa88e94fdc978c4c574f15f9e59b7f4201d439195c3715ca9e2486f1d0cf1"}, +] + +[package.extras] +dev = ["pre-commit", "tox"] +testing = ["pytest", "pytest-benchmark"] + +[[package]] +name = "prometheus-client" +version = "0.20.0" +description = "Python client for the Prometheus monitoring system." 
+optional = false +python-versions = ">=3.8" +files = [ + {file = "prometheus_client-0.20.0-py3-none-any.whl", hash = "sha256:cde524a85bce83ca359cc837f28b8c0db5cac7aa653a588fd7e84ba061c329e7"}, + {file = "prometheus_client-0.20.0.tar.gz", hash = "sha256:287629d00b147a32dcb2be0b9df905da599b2d82f80377083ec8463309a4bb89"}, +] + +[package.extras] +twisted = ["twisted"] + +[[package]] +name = "prompt-toolkit" +version = "3.0.43" +description = "Library for building powerful interactive command lines in Python" +optional = false +python-versions = ">=3.7.0" +files = [ + {file = "prompt_toolkit-3.0.43-py3-none-any.whl", hash = "sha256:a11a29cb3bf0a28a387fe5122cdb649816a957cd9261dcedf8c9f1fef33eacf6"}, + {file = "prompt_toolkit-3.0.43.tar.gz", hash = "sha256:3527b7af26106cbc65a040bcc84839a3566ec1b051bb0bfe953631e704b0ff7d"}, +] + +[package.dependencies] +wcwidth = "*" + +[[package]] +name = "proto-plus" +version = "1.23.0" +description = "Beautiful, Pythonic protocol buffers." +optional = false +python-versions = ">=3.6" +files = [ + {file = "proto-plus-1.23.0.tar.gz", hash = "sha256:89075171ef11988b3fa157f5dbd8b9cf09d65fffee97e29ce403cd8defba19d2"}, + {file = "proto_plus-1.23.0-py3-none-any.whl", hash = "sha256:a829c79e619e1cf632de091013a4173deed13a55f326ef84f05af6f50ff4c82c"}, +] + +[package.dependencies] +protobuf = ">=3.19.0,<5.0.0dev" + +[package.extras] +testing = ["google-api-core[grpc] (>=1.31.5)"] + +[[package]] +name = "protobuf" +version = "4.25.3" +description = "" +optional = false +python-versions = ">=3.8" +files = [ + {file = "protobuf-4.25.3-cp310-abi3-win32.whl", hash = "sha256:d4198877797a83cbfe9bffa3803602bbe1625dc30d8a097365dbc762e5790faa"}, + {file = "protobuf-4.25.3-cp310-abi3-win_amd64.whl", hash = "sha256:209ba4cc916bab46f64e56b85b090607a676f66b473e6b762e6f1d9d591eb2e8"}, + {file = "protobuf-4.25.3-cp37-abi3-macosx_10_9_universal2.whl", hash = "sha256:f1279ab38ecbfae7e456a108c5c0681e4956d5b1090027c1de0f934dfdb4b35c"}, + {file = 
"protobuf-4.25.3-cp37-abi3-manylinux2014_aarch64.whl", hash = "sha256:e7cb0ae90dd83727f0c0718634ed56837bfeeee29a5f82a7514c03ee1364c019"}, + {file = "protobuf-4.25.3-cp37-abi3-manylinux2014_x86_64.whl", hash = "sha256:7c8daa26095f82482307bc717364e7c13f4f1c99659be82890dcfc215194554d"}, + {file = "protobuf-4.25.3-cp38-cp38-win32.whl", hash = "sha256:f4f118245c4a087776e0a8408be33cf09f6c547442c00395fbfb116fac2f8ac2"}, + {file = "protobuf-4.25.3-cp38-cp38-win_amd64.whl", hash = "sha256:c053062984e61144385022e53678fbded7aea14ebb3e0305ae3592fb219ccfa4"}, + {file = "protobuf-4.25.3-cp39-cp39-win32.whl", hash = "sha256:19b270aeaa0099f16d3ca02628546b8baefe2955bbe23224aaf856134eccf1e4"}, + {file = "protobuf-4.25.3-cp39-cp39-win_amd64.whl", hash = "sha256:e3c97a1555fd6388f857770ff8b9703083de6bf1f9274a002a332d65fbb56c8c"}, + {file = "protobuf-4.25.3-py3-none-any.whl", hash = "sha256:f0700d54bcf45424477e46a9f0944155b46fb0639d69728739c0e47bab83f2b9"}, + {file = "protobuf-4.25.3.tar.gz", hash = "sha256:25b5d0b42fd000320bd7830b349e3b696435f3b329810427a6bcce6a5492cc5c"}, +] + +[[package]] +name = "psutil" +version = "5.9.8" +description = "Cross-platform lib for process and system monitoring in Python." 
+optional = false +python-versions = ">=2.7, !=3.0.*, !=3.1.*, !=3.2.*, !=3.3.*, !=3.4.*, !=3.5.*" +files = [ + {file = "psutil-5.9.8-cp27-cp27m-macosx_10_9_x86_64.whl", hash = "sha256:26bd09967ae00920df88e0352a91cff1a78f8d69b3ecabbfe733610c0af486c8"}, + {file = "psutil-5.9.8-cp27-cp27m-manylinux2010_i686.whl", hash = "sha256:05806de88103b25903dff19bb6692bd2e714ccf9e668d050d144012055cbca73"}, + {file = "psutil-5.9.8-cp27-cp27m-manylinux2010_x86_64.whl", hash = "sha256:611052c4bc70432ec770d5d54f64206aa7203a101ec273a0cd82418c86503bb7"}, + {file = "psutil-5.9.8-cp27-cp27mu-manylinux2010_i686.whl", hash = "sha256:50187900d73c1381ba1454cf40308c2bf6f34268518b3f36a9b663ca87e65e36"}, + {file = "psutil-5.9.8-cp27-cp27mu-manylinux2010_x86_64.whl", hash = "sha256:02615ed8c5ea222323408ceba16c60e99c3f91639b07da6373fb7e6539abc56d"}, + {file = "psutil-5.9.8-cp27-none-win32.whl", hash = "sha256:36f435891adb138ed3c9e58c6af3e2e6ca9ac2f365efe1f9cfef2794e6c93b4e"}, + {file = "psutil-5.9.8-cp27-none-win_amd64.whl", hash = "sha256:bd1184ceb3f87651a67b2708d4c3338e9b10c5df903f2e3776b62303b26cb631"}, + {file = "psutil-5.9.8-cp36-abi3-macosx_10_9_x86_64.whl", hash = "sha256:aee678c8720623dc456fa20659af736241f575d79429a0e5e9cf88ae0605cc81"}, + {file = "psutil-5.9.8-cp36-abi3-manylinux_2_12_i686.manylinux2010_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:8cb6403ce6d8e047495a701dc7c5bd788add903f8986d523e3e20b98b733e421"}, + {file = "psutil-5.9.8-cp36-abi3-manylinux_2_12_x86_64.manylinux2010_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:d06016f7f8625a1825ba3732081d77c94589dca78b7a3fc072194851e88461a4"}, + {file = "psutil-5.9.8-cp36-cp36m-win32.whl", hash = "sha256:7d79560ad97af658a0f6adfef8b834b53f64746d45b403f225b85c5c2c140eee"}, + {file = "psutil-5.9.8-cp36-cp36m-win_amd64.whl", hash = "sha256:27cc40c3493bb10de1be4b3f07cae4c010ce715290a5be22b98493509c6299e2"}, + {file = "psutil-5.9.8-cp37-abi3-win32.whl", hash = 
"sha256:bc56c2a1b0d15aa3eaa5a60c9f3f8e3e565303b465dbf57a1b730e7a2b9844e0"}, + {file = "psutil-5.9.8-cp37-abi3-win_amd64.whl", hash = "sha256:8db4c1b57507eef143a15a6884ca10f7c73876cdf5d51e713151c1236a0e68cf"}, + {file = "psutil-5.9.8-cp38-abi3-macosx_11_0_arm64.whl", hash = "sha256:d16bbddf0693323b8c6123dd804100241da461e41d6e332fb0ba6058f630f8c8"}, + {file = "psutil-5.9.8.tar.gz", hash = "sha256:6be126e3225486dff286a8fb9a06246a5253f4c7c53b475ea5f5ac934e64194c"}, +] + +[package.extras] +test = ["enum34", "ipaddress", "mock", "pywin32", "wmi"] + +[[package]] +name = "ptyprocess" +version = "0.7.0" +description = "Run a subprocess in a pseudo terminal" +optional = false +python-versions = "*" +files = [ + {file = "ptyprocess-0.7.0-py2.py3-none-any.whl", hash = "sha256:4b41f3967fce3af57cc7e94b888626c18bf37a083e3651ca8feeb66d492fef35"}, + {file = "ptyprocess-0.7.0.tar.gz", hash = "sha256:5c5d0a3b48ceee0b48485e0c26037c0acd7d29765ca3fbb5cb3831d347423220"}, +] + +[[package]] +name = "pure-eval" +version = "0.2.2" +description = "Safely evaluate AST nodes without side effects" +optional = false +python-versions = "*" +files = [ + {file = "pure_eval-0.2.2-py3-none-any.whl", hash = "sha256:01eaab343580944bc56080ebe0a674b39ec44a945e6d09ba7db3cb8cec289350"}, + {file = "pure_eval-0.2.2.tar.gz", hash = "sha256:2b45320af6dfaa1750f543d714b6d1c520a1688dec6fd24d339063ce0aaa9ac3"}, +] + +[package.extras] +tests = ["pytest"] + +[[package]] +name = "pyasn1" +version = "0.6.0" +description = "Pure-Python implementation of ASN.1 types and DER/BER/CER codecs (X.208)" +optional = false +python-versions = ">=3.8" +files = [ + {file = "pyasn1-0.6.0-py2.py3-none-any.whl", hash = "sha256:cca4bb0f2df5504f02f6f8a775b6e416ff9b0b3b16f7ee80b5a3153d9b804473"}, + {file = "pyasn1-0.6.0.tar.gz", hash = "sha256:3a35ab2c4b5ef98e17dfdec8ab074046fbda76e281c5a706ccd82328cfc8f64c"}, +] + +[[package]] +name = "pyasn1-modules" +version = "0.4.0" +description = "A collection of ASN.1-based protocols modules" 
+optional = false +python-versions = ">=3.8" +files = [ + {file = "pyasn1_modules-0.4.0-py3-none-any.whl", hash = "sha256:be04f15b66c206eed667e0bb5ab27e2b1855ea54a842e5037738099e8ca4ae0b"}, + {file = "pyasn1_modules-0.4.0.tar.gz", hash = "sha256:831dbcea1b177b28c9baddf4c6d1013c24c3accd14a1873fffaa6a2e905f17b6"}, +] + +[package.dependencies] +pyasn1 = ">=0.4.6,<0.7.0" + +[[package]] +name = "pycodestyle" +version = "2.11.1" +description = "Python style guide checker" +optional = false +python-versions = ">=3.8" +files = [ + {file = "pycodestyle-2.11.1-py2.py3-none-any.whl", hash = "sha256:44fe31000b2d866f2e41841b18528a505fbd7fef9017b04eff4e2648a0fadc67"}, + {file = "pycodestyle-2.11.1.tar.gz", hash = "sha256:41ba0e7afc9752dfb53ced5489e89f8186be00e599e712660695b7a75ff2663f"}, +] + +[[package]] +name = "pycparser" +version = "2.22" +description = "C parser in Python" +optional = false +python-versions = ">=3.8" +files = [ + {file = "pycparser-2.22-py3-none-any.whl", hash = "sha256:c3702b6d3dd8c7abc1afa565d7e63d53a1d0bd86cdc24edd75470f4de499cfcc"}, + {file = "pycparser-2.22.tar.gz", hash = "sha256:491c8be9c040f5390f5bf44a5b07752bd07f56edf992381b05c701439eec10f6"}, +] + +[[package]] +name = "pydantic" +version = "2.7.1" +description = "Data validation using Python type hints" +optional = false +python-versions = ">=3.8" +files = [ + {file = "pydantic-2.7.1-py3-none-any.whl", hash = "sha256:e029badca45266732a9a79898a15ae2e8b14840b1eabbb25844be28f0b33f3d5"}, + {file = "pydantic-2.7.1.tar.gz", hash = "sha256:e9dbb5eada8abe4d9ae5f46b9939aead650cd2b68f249bb3a8139dbe125803cc"}, +] + +[package.dependencies] +annotated-types = ">=0.4.0" +pydantic-core = "2.18.2" +typing-extensions = ">=4.6.1" + +[package.extras] +email = ["email-validator (>=2.0.0)"] + +[[package]] +name = "pydantic-core" +version = "2.18.2" +description = "Core functionality for Pydantic validation and serialization" +optional = false +python-versions = ">=3.8" +files = [ + {file = 
"pydantic_core-2.18.2-cp310-cp310-macosx_10_12_x86_64.whl", hash = "sha256:9e08e867b306f525802df7cd16c44ff5ebbe747ff0ca6cf3fde7f36c05a59a81"}, + {file = "pydantic_core-2.18.2-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:f0a21cbaa69900cbe1a2e7cad2aa74ac3cf21b10c3efb0fa0b80305274c0e8a2"}, + {file = "pydantic_core-2.18.2-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:0680b1f1f11fda801397de52c36ce38ef1c1dc841a0927a94f226dea29c3ae3d"}, + {file = "pydantic_core-2.18.2-cp310-cp310-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:95b9d5e72481d3780ba3442eac863eae92ae43a5f3adb5b4d0a1de89d42bb250"}, + {file = "pydantic_core-2.18.2-cp310-cp310-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:c4fcf5cd9c4b655ad666ca332b9a081112cd7a58a8b5a6ca7a3104bc950f2038"}, + {file = "pydantic_core-2.18.2-cp310-cp310-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:9b5155ff768083cb1d62f3e143b49a8a3432e6789a3abee8acd005c3c7af1c74"}, + {file = "pydantic_core-2.18.2-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:553ef617b6836fc7e4df130bb851e32fe357ce36336d897fd6646d6058d980af"}, + {file = "pydantic_core-2.18.2-cp310-cp310-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:b89ed9eb7d616ef5714e5590e6cf7f23b02d0d539767d33561e3675d6f9e3857"}, + {file = "pydantic_core-2.18.2-cp310-cp310-musllinux_1_1_aarch64.whl", hash = "sha256:75f7e9488238e920ab6204399ded280dc4c307d034f3924cd7f90a38b1829563"}, + {file = "pydantic_core-2.18.2-cp310-cp310-musllinux_1_1_x86_64.whl", hash = "sha256:ef26c9e94a8c04a1b2924149a9cb081836913818e55681722d7f29af88fe7b38"}, + {file = "pydantic_core-2.18.2-cp310-none-win32.whl", hash = "sha256:182245ff6b0039e82b6bb585ed55a64d7c81c560715d1bad0cbad6dfa07b4027"}, + {file = "pydantic_core-2.18.2-cp310-none-win_amd64.whl", hash = "sha256:e23ec367a948b6d812301afc1b13f8094ab7b2c280af66ef450efc357d2ae543"}, + {file = 
"pydantic_core-2.18.2-cp311-cp311-macosx_10_12_x86_64.whl", hash = "sha256:219da3f096d50a157f33645a1cf31c0ad1fe829a92181dd1311022f986e5fbe3"}, + {file = "pydantic_core-2.18.2-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:cc1cfd88a64e012b74e94cd00bbe0f9c6df57049c97f02bb07d39e9c852e19a4"}, + {file = "pydantic_core-2.18.2-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:05b7133a6e6aeb8df37d6f413f7705a37ab4031597f64ab56384c94d98fa0e90"}, + {file = "pydantic_core-2.18.2-cp311-cp311-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:224c421235f6102e8737032483f43c1a8cfb1d2f45740c44166219599358c2cd"}, + {file = "pydantic_core-2.18.2-cp311-cp311-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:b14d82cdb934e99dda6d9d60dc84a24379820176cc4a0d123f88df319ae9c150"}, + {file = "pydantic_core-2.18.2-cp311-cp311-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:2728b01246a3bba6de144f9e3115b532ee44bd6cf39795194fb75491824a1413"}, + {file = "pydantic_core-2.18.2-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:470b94480bb5ee929f5acba6995251ada5e059a5ef3e0dfc63cca287283ebfa6"}, + {file = "pydantic_core-2.18.2-cp311-cp311-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:997abc4df705d1295a42f95b4eec4950a37ad8ae46d913caeee117b6b198811c"}, + {file = "pydantic_core-2.18.2-cp311-cp311-musllinux_1_1_aarch64.whl", hash = "sha256:75250dbc5290e3f1a0f4618db35e51a165186f9034eff158f3d490b3fed9f8a0"}, + {file = "pydantic_core-2.18.2-cp311-cp311-musllinux_1_1_x86_64.whl", hash = "sha256:4456f2dca97c425231d7315737d45239b2b51a50dc2b6f0c2bb181fce6207664"}, + {file = "pydantic_core-2.18.2-cp311-none-win32.whl", hash = "sha256:269322dcc3d8bdb69f054681edff86276b2ff972447863cf34c8b860f5188e2e"}, + {file = "pydantic_core-2.18.2-cp311-none-win_amd64.whl", hash = "sha256:800d60565aec896f25bc3cfa56d2277d52d5182af08162f7954f938c06dc4ee3"}, + {file = "pydantic_core-2.18.2-cp311-none-win_arm64.whl", hash = 
"sha256:1404c69d6a676245199767ba4f633cce5f4ad4181f9d0ccb0577e1f66cf4c46d"}, + {file = "pydantic_core-2.18.2-cp312-cp312-macosx_10_12_x86_64.whl", hash = "sha256:fb2bd7be70c0fe4dfd32c951bc813d9fe6ebcbfdd15a07527796c8204bd36242"}, + {file = "pydantic_core-2.18.2-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:6132dd3bd52838acddca05a72aafb6eab6536aa145e923bb50f45e78b7251043"}, + {file = "pydantic_core-2.18.2-cp312-cp312-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:d7d904828195733c183d20a54230c0df0eb46ec746ea1a666730787353e87182"}, + {file = "pydantic_core-2.18.2-cp312-cp312-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:c9bd70772c720142be1020eac55f8143a34ec9f82d75a8e7a07852023e46617f"}, + {file = "pydantic_core-2.18.2-cp312-cp312-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:2b8ed04b3582771764538f7ee7001b02e1170223cf9b75dff0bc698fadb00cf3"}, + {file = "pydantic_core-2.18.2-cp312-cp312-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:e6dac87ddb34aaec85f873d737e9d06a3555a1cc1a8e0c44b7f8d5daeb89d86f"}, + {file = "pydantic_core-2.18.2-cp312-cp312-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:7ca4ae5a27ad7a4ee5170aebce1574b375de390bc01284f87b18d43a3984df72"}, + {file = "pydantic_core-2.18.2-cp312-cp312-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:886eec03591b7cf058467a70a87733b35f44707bd86cf64a615584fd72488b7c"}, + {file = "pydantic_core-2.18.2-cp312-cp312-musllinux_1_1_aarch64.whl", hash = "sha256:ca7b0c1f1c983e064caa85f3792dd2fe3526b3505378874afa84baf662e12241"}, + {file = "pydantic_core-2.18.2-cp312-cp312-musllinux_1_1_x86_64.whl", hash = "sha256:4b4356d3538c3649337df4074e81b85f0616b79731fe22dd11b99499b2ebbdf3"}, + {file = "pydantic_core-2.18.2-cp312-none-win32.whl", hash = "sha256:8b172601454f2d7701121bbec3425dd71efcb787a027edf49724c9cefc14c038"}, + {file = "pydantic_core-2.18.2-cp312-none-win_amd64.whl", hash = 
"sha256:b1bd7e47b1558ea872bd16c8502c414f9e90dcf12f1395129d7bb42a09a95438"}, + {file = "pydantic_core-2.18.2-cp312-none-win_arm64.whl", hash = "sha256:98758d627ff397e752bc339272c14c98199c613f922d4a384ddc07526c86a2ec"}, + {file = "pydantic_core-2.18.2-cp38-cp38-macosx_10_12_x86_64.whl", hash = "sha256:9fdad8e35f278b2c3eb77cbdc5c0a49dada440657bf738d6905ce106dc1de439"}, + {file = "pydantic_core-2.18.2-cp38-cp38-macosx_11_0_arm64.whl", hash = "sha256:1d90c3265ae107f91a4f279f4d6f6f1d4907ac76c6868b27dc7fb33688cfb347"}, + {file = "pydantic_core-2.18.2-cp38-cp38-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:390193c770399861d8df9670fb0d1874f330c79caaca4642332df7c682bf6b91"}, + {file = "pydantic_core-2.18.2-cp38-cp38-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:82d5d4d78e4448683cb467897fe24e2b74bb7b973a541ea1dcfec1d3cbce39fb"}, + {file = "pydantic_core-2.18.2-cp38-cp38-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:4774f3184d2ef3e14e8693194f661dea5a4d6ca4e3dc8e39786d33a94865cefd"}, + {file = "pydantic_core-2.18.2-cp38-cp38-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:d4d938ec0adf5167cb335acb25a4ee69a8107e4984f8fbd2e897021d9e4ca21b"}, + {file = "pydantic_core-2.18.2-cp38-cp38-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:e0e8b1be28239fc64a88a8189d1df7fad8be8c1ae47fcc33e43d4be15f99cc70"}, + {file = "pydantic_core-2.18.2-cp38-cp38-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:868649da93e5a3d5eacc2b5b3b9235c98ccdbfd443832f31e075f54419e1b96b"}, + {file = "pydantic_core-2.18.2-cp38-cp38-musllinux_1_1_aarch64.whl", hash = "sha256:78363590ef93d5d226ba21a90a03ea89a20738ee5b7da83d771d283fd8a56761"}, + {file = "pydantic_core-2.18.2-cp38-cp38-musllinux_1_1_x86_64.whl", hash = "sha256:852e966fbd035a6468fc0a3496589b45e2208ec7ca95c26470a54daed82a0788"}, + {file = "pydantic_core-2.18.2-cp38-none-win32.whl", hash = "sha256:6a46e22a707e7ad4484ac9ee9f290f9d501df45954184e23fc29408dfad61350"}, + 
{file = "pydantic_core-2.18.2-cp38-none-win_amd64.whl", hash = "sha256:d91cb5ea8b11607cc757675051f61b3d93f15eca3cefb3e6c704a5d6e8440f4e"}, + {file = "pydantic_core-2.18.2-cp39-cp39-macosx_10_12_x86_64.whl", hash = "sha256:ae0a8a797a5e56c053610fa7be147993fe50960fa43609ff2a9552b0e07013e8"}, + {file = "pydantic_core-2.18.2-cp39-cp39-macosx_11_0_arm64.whl", hash = "sha256:042473b6280246b1dbf530559246f6842b56119c2926d1e52b631bdc46075f2a"}, + {file = "pydantic_core-2.18.2-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:1a388a77e629b9ec814c1b1e6b3b595fe521d2cdc625fcca26fbc2d44c816804"}, + {file = "pydantic_core-2.18.2-cp39-cp39-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:e25add29b8f3b233ae90ccef2d902d0ae0432eb0d45370fe315d1a5cf231004b"}, + {file = "pydantic_core-2.18.2-cp39-cp39-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:f459a5ce8434614dfd39bbebf1041952ae01da6bed9855008cb33b875cb024c0"}, + {file = "pydantic_core-2.18.2-cp39-cp39-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:eff2de745698eb46eeb51193a9f41d67d834d50e424aef27df2fcdee1b153845"}, + {file = "pydantic_core-2.18.2-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:a8309f67285bdfe65c372ea3722b7a5642680f3dba538566340a9d36e920b5f0"}, + {file = "pydantic_core-2.18.2-cp39-cp39-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:f93a8a2e3938ff656a7c1bc57193b1319960ac015b6e87d76c76bf14fe0244b4"}, + {file = "pydantic_core-2.18.2-cp39-cp39-musllinux_1_1_aarch64.whl", hash = "sha256:22057013c8c1e272eb8d0eebc796701167d8377441ec894a8fed1af64a0bf399"}, + {file = "pydantic_core-2.18.2-cp39-cp39-musllinux_1_1_x86_64.whl", hash = "sha256:cfeecd1ac6cc1fb2692c3d5110781c965aabd4ec5d32799773ca7b1456ac636b"}, + {file = "pydantic_core-2.18.2-cp39-none-win32.whl", hash = "sha256:0d69b4c2f6bb3e130dba60d34c0845ba31b69babdd3f78f7c0c8fae5021a253e"}, + {file = "pydantic_core-2.18.2-cp39-none-win_amd64.whl", hash = 
"sha256:d9319e499827271b09b4e411905b24a426b8fb69464dfa1696258f53a3334641"}, + {file = "pydantic_core-2.18.2-pp310-pypy310_pp73-macosx_10_12_x86_64.whl", hash = "sha256:a1874c6dd4113308bd0eb568418e6114b252afe44319ead2b4081e9b9521fe75"}, + {file = "pydantic_core-2.18.2-pp310-pypy310_pp73-macosx_11_0_arm64.whl", hash = "sha256:ccdd111c03bfd3666bd2472b674c6899550e09e9f298954cfc896ab92b5b0e6d"}, + {file = "pydantic_core-2.18.2-pp310-pypy310_pp73-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:e18609ceaa6eed63753037fc06ebb16041d17d28199ae5aba0052c51449650a9"}, + {file = "pydantic_core-2.18.2-pp310-pypy310_pp73-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:6e5c584d357c4e2baf0ff7baf44f4994be121e16a2c88918a5817331fc7599d7"}, + {file = "pydantic_core-2.18.2-pp310-pypy310_pp73-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:43f0f463cf89ace478de71a318b1b4f05ebc456a9b9300d027b4b57c1a2064fb"}, + {file = "pydantic_core-2.18.2-pp310-pypy310_pp73-musllinux_1_1_aarch64.whl", hash = "sha256:e1b395e58b10b73b07b7cf740d728dd4ff9365ac46c18751bf8b3d8cca8f625a"}, + {file = "pydantic_core-2.18.2-pp310-pypy310_pp73-musllinux_1_1_x86_64.whl", hash = "sha256:0098300eebb1c837271d3d1a2cd2911e7c11b396eac9661655ee524a7f10587b"}, + {file = "pydantic_core-2.18.2-pp310-pypy310_pp73-win_amd64.whl", hash = "sha256:36789b70d613fbac0a25bb07ab3d9dba4d2e38af609c020cf4d888d165ee0bf3"}, + {file = "pydantic_core-2.18.2-pp39-pypy39_pp73-macosx_10_12_x86_64.whl", hash = "sha256:3f9a801e7c8f1ef8718da265bba008fa121243dfe37c1cea17840b0944dfd72c"}, + {file = "pydantic_core-2.18.2-pp39-pypy39_pp73-macosx_11_0_arm64.whl", hash = "sha256:3a6515ebc6e69d85502b4951d89131ca4e036078ea35533bb76327f8424531ce"}, + {file = "pydantic_core-2.18.2-pp39-pypy39_pp73-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:20aca1e2298c56ececfd8ed159ae4dde2df0781988c97ef77d5c16ff4bd5b400"}, + {file = 
"pydantic_core-2.18.2-pp39-pypy39_pp73-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:223ee893d77a310a0391dca6df00f70bbc2f36a71a895cecd9a0e762dc37b349"}, + {file = "pydantic_core-2.18.2-pp39-pypy39_pp73-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:2334ce8c673ee93a1d6a65bd90327588387ba073c17e61bf19b4fd97d688d63c"}, + {file = "pydantic_core-2.18.2-pp39-pypy39_pp73-musllinux_1_1_aarch64.whl", hash = "sha256:cbca948f2d14b09d20268cda7b0367723d79063f26c4ffc523af9042cad95592"}, + {file = "pydantic_core-2.18.2-pp39-pypy39_pp73-musllinux_1_1_x86_64.whl", hash = "sha256:b3ef08e20ec49e02d5c6717a91bb5af9b20f1805583cb0adfe9ba2c6b505b5ae"}, + {file = "pydantic_core-2.18.2-pp39-pypy39_pp73-win_amd64.whl", hash = "sha256:c6fdc8627910eed0c01aed6a390a252fe3ea6d472ee70fdde56273f198938374"}, + {file = "pydantic_core-2.18.2.tar.gz", hash = "sha256:2e29d20810dfc3043ee13ac7d9e25105799817683348823f305ab3f349b9386e"}, +] + +[package.dependencies] +typing-extensions = ">=4.6.0,<4.7.0 || >4.7.0" + +[[package]] +name = "pyflakes" +version = "3.1.0" +description = "passive checker of Python programs" +optional = false +python-versions = ">=3.8" +files = [ + {file = "pyflakes-3.1.0-py2.py3-none-any.whl", hash = "sha256:4132f6d49cb4dae6819e5379898f2b8cce3c5f23994194c24b77d5da2e36f774"}, + {file = "pyflakes-3.1.0.tar.gz", hash = "sha256:a0aae034c444db0071aa077972ba4768d40c830d9539fd45bf4cd3f8f6992efc"}, +] + +[[package]] +name = "pygments" +version = "2.17.2" +description = "Pygments is a syntax highlighting package written in Python." 
+optional = false +python-versions = ">=3.7" +files = [ + {file = "pygments-2.17.2-py3-none-any.whl", hash = "sha256:b27c2826c47d0f3219f29554824c30c5e8945175d888647acd804ddd04af846c"}, + {file = "pygments-2.17.2.tar.gz", hash = "sha256:da46cec9fd2de5be3a8a784f434e4c4ab670b4ff54d605c4c2717e9d49c4c367"}, +] + +[package.extras] +plugins = ["importlib-metadata"] +windows-terminal = ["colorama (>=0.4.6)"] + +[[package]] +name = "pyparsing" +version = "3.1.2" +description = "pyparsing module - Classes and methods to define and execute parsing grammars" +optional = false +python-versions = ">=3.6.8" +files = [ + {file = "pyparsing-3.1.2-py3-none-any.whl", hash = "sha256:f9db75911801ed778fe61bb643079ff86601aca99fcae6345aa67292038fb742"}, + {file = "pyparsing-3.1.2.tar.gz", hash = "sha256:a1bac0ce561155ecc3ed78ca94d3c9378656ad4c94c1270de543f621420f94ad"}, +] + +[package.extras] +diagrams = ["jinja2", "railroad-diagrams"] + +[[package]] +name = "pyproject-flake8" +version = "6.1.0" +description = "pyproject-flake8 (`pflake8`), a monkey patching wrapper to connect flake8 with pyproject.toml configuration" +optional = false +python-versions = ">=3.8.1" +files = [ + {file = "pyproject_flake8-6.1.0-py3-none-any.whl", hash = "sha256:86ea5559263c098e1aa4f866776aa2cf45362fd91a576b9fd8fbbbb55db12c4e"}, + {file = "pyproject_flake8-6.1.0.tar.gz", hash = "sha256:6da8e5a264395e0148bc11844c6fb50546f1fac83ac9210f7328664135f9e70f"}, +] + +[package.dependencies] +flake8 = "6.1.0" +tomli = {version = "*", markers = "python_version < \"3.11\""} + +[[package]] +name = "pytest" +version = "8.2.0" +description = "pytest: simple powerful testing with Python" +optional = false +python-versions = ">=3.8" +files = [ + {file = "pytest-8.2.0-py3-none-any.whl", hash = "sha256:1733f0620f6cda4095bbf0d9ff8022486e91892245bb9e7d5542c018f612f233"}, + {file = "pytest-8.2.0.tar.gz", hash = "sha256:d507d4482197eac0ba2bae2e9babf0672eb333017bcedaa5fb1a3d42c1174b3f"}, +] + +[package.dependencies] +colorama = 
{version = "*", markers = "sys_platform == \"win32\""} +exceptiongroup = {version = ">=1.0.0rc8", markers = "python_version < \"3.11\""} +iniconfig = "*" +packaging = "*" +pluggy = ">=1.5,<2.0" +tomli = {version = ">=1", markers = "python_version < \"3.11\""} + +[package.extras] +dev = ["argcomplete", "attrs (>=19.2)", "hypothesis (>=3.56)", "mock", "pygments (>=2.7.2)", "requests", "setuptools", "xmlschema"] + +[[package]] +name = "python-dateutil" +version = "2.9.0.post0" +description = "Extensions to the standard Python datetime module" +optional = false +python-versions = "!=3.0.*,!=3.1.*,!=3.2.*,>=2.7" +files = [ + {file = "python-dateutil-2.9.0.post0.tar.gz", hash = "sha256:37dd54208da7e1cd875388217d5e00ebd4179249f90fb72437e91a35459a0ad3"}, + {file = "python_dateutil-2.9.0.post0-py2.py3-none-any.whl", hash = "sha256:a8b2bc7bffae282281c8140a97d3aa9c14da0b136dfe83f850eea9a5f7470427"}, +] + +[package.dependencies] +six = ">=1.5" + +[[package]] +name = "python-dotenv" +version = "1.0.1" +description = "Read key-value pairs from a .env file and set them as environment variables" +optional = false +python-versions = ">=3.8" +files = [ + {file = "python-dotenv-1.0.1.tar.gz", hash = "sha256:e324ee90a023d808f1959c46bcbc04446a10ced277783dc6ee09987c37ec10ca"}, + {file = "python_dotenv-1.0.1-py3-none-any.whl", hash = "sha256:f7b63ef50f1b690dddf550d03497b66d609393b40b564ed0d674909a68ebf16a"}, +] + +[package.extras] +cli = ["click (>=5.0)"] + +[[package]] +name = "python-json-logger" +version = "2.0.7" +description = "A python library adding a json log formatter" +optional = false +python-versions = ">=3.6" +files = [ + {file = "python-json-logger-2.0.7.tar.gz", hash = "sha256:23e7ec02d34237c5aa1e29a070193a4ea87583bb4e7f8fd06d3de8264c4b2e1c"}, + {file = "python_json_logger-2.0.7-py3-none-any.whl", hash = "sha256:f380b826a991ebbe3de4d897aeec42760035ac760345e57b812938dc8b35e2bd"}, +] + +[[package]] +name = "pywin32" +version = "306" +description = "Python for Window 
Extensions" +optional = false +python-versions = "*" +files = [ + {file = "pywin32-306-cp310-cp310-win32.whl", hash = "sha256:06d3420a5155ba65f0b72f2699b5bacf3109f36acbe8923765c22938a69dfc8d"}, + {file = "pywin32-306-cp310-cp310-win_amd64.whl", hash = "sha256:84f4471dbca1887ea3803d8848a1616429ac94a4a8d05f4bc9c5dcfd42ca99c8"}, + {file = "pywin32-306-cp311-cp311-win32.whl", hash = "sha256:e65028133d15b64d2ed8f06dd9fbc268352478d4f9289e69c190ecd6818b6407"}, + {file = "pywin32-306-cp311-cp311-win_amd64.whl", hash = "sha256:a7639f51c184c0272e93f244eb24dafca9b1855707d94c192d4a0b4c01e1100e"}, + {file = "pywin32-306-cp311-cp311-win_arm64.whl", hash = "sha256:70dba0c913d19f942a2db25217d9a1b726c278f483a919f1abfed79c9cf64d3a"}, + {file = "pywin32-306-cp312-cp312-win32.whl", hash = "sha256:383229d515657f4e3ed1343da8be101000562bf514591ff383ae940cad65458b"}, + {file = "pywin32-306-cp312-cp312-win_amd64.whl", hash = "sha256:37257794c1ad39ee9be652da0462dc2e394c8159dfd913a8a4e8eb6fd346da0e"}, + {file = "pywin32-306-cp312-cp312-win_arm64.whl", hash = "sha256:5821ec52f6d321aa59e2db7e0a35b997de60c201943557d108af9d4ae1ec7040"}, + {file = "pywin32-306-cp37-cp37m-win32.whl", hash = "sha256:1c73ea9a0d2283d889001998059f5eaaba3b6238f767c9cf2833b13e6a685f65"}, + {file = "pywin32-306-cp37-cp37m-win_amd64.whl", hash = "sha256:72c5f621542d7bdd4fdb716227be0dd3f8565c11b280be6315b06ace35487d36"}, + {file = "pywin32-306-cp38-cp38-win32.whl", hash = "sha256:e4c092e2589b5cf0d365849e73e02c391c1349958c5ac3e9d5ccb9a28e017b3a"}, + {file = "pywin32-306-cp38-cp38-win_amd64.whl", hash = "sha256:e8ac1ae3601bee6ca9f7cb4b5363bf1c0badb935ef243c4733ff9a393b1690c0"}, + {file = "pywin32-306-cp39-cp39-win32.whl", hash = "sha256:e25fd5b485b55ac9c057f67d94bc203f3f6595078d1fb3b458c9c28b7153a802"}, + {file = "pywin32-306-cp39-cp39-win_amd64.whl", hash = "sha256:39b61c15272833b5c329a2989999dcae836b1eed650252ab1b7bfbe1d59f30f4"}, +] + +[[package]] +name = "pywinpty" +version = "2.0.13" +description = "Pseudo terminal 
support for Windows from Python." +optional = false +python-versions = ">=3.8" +files = [ + {file = "pywinpty-2.0.13-cp310-none-win_amd64.whl", hash = "sha256:697bff211fb5a6508fee2dc6ff174ce03f34a9a233df9d8b5fe9c8ce4d5eaf56"}, + {file = "pywinpty-2.0.13-cp311-none-win_amd64.whl", hash = "sha256:b96fb14698db1284db84ca38c79f15b4cfdc3172065b5137383910567591fa99"}, + {file = "pywinpty-2.0.13-cp312-none-win_amd64.whl", hash = "sha256:2fd876b82ca750bb1333236ce98488c1be96b08f4f7647cfdf4129dfad83c2d4"}, + {file = "pywinpty-2.0.13-cp38-none-win_amd64.whl", hash = "sha256:61d420c2116c0212808d31625611b51caf621fe67f8a6377e2e8b617ea1c1f7d"}, + {file = "pywinpty-2.0.13-cp39-none-win_amd64.whl", hash = "sha256:71cb613a9ee24174730ac7ae439fd179ca34ccb8c5349e8d7b72ab5dea2c6f4b"}, + {file = "pywinpty-2.0.13.tar.gz", hash = "sha256:c34e32351a3313ddd0d7da23d27f835c860d32fe4ac814d372a3ea9594f41dde"}, +] + +[[package]] +name = "pyyaml" +version = "6.0.1" +description = "YAML parser and emitter for Python" +optional = false +python-versions = ">=3.6" +files = [ + {file = "PyYAML-6.0.1-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:d858aa552c999bc8a8d57426ed01e40bef403cd8ccdd0fc5f6f04a00414cac2a"}, + {file = "PyYAML-6.0.1-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:fd66fc5d0da6d9815ba2cebeb4205f95818ff4b79c3ebe268e75d961704af52f"}, + {file = "PyYAML-6.0.1-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:69b023b2b4daa7548bcfbd4aa3da05b3a74b772db9e23b982788168117739938"}, + {file = "PyYAML-6.0.1-cp310-cp310-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:81e0b275a9ecc9c0c0c07b4b90ba548307583c125f54d5b6946cfee6360c733d"}, + {file = "PyYAML-6.0.1-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:ba336e390cd8e4d1739f42dfe9bb83a3cc2e80f567d8805e11b46f4a943f5515"}, + {file = "PyYAML-6.0.1-cp310-cp310-musllinux_1_1_x86_64.whl", hash = "sha256:326c013efe8048858a6d312ddd31d56e468118ad4cdeda36c719bf5bb6192290"}, + {file 
= "PyYAML-6.0.1-cp310-cp310-win32.whl", hash = "sha256:bd4af7373a854424dabd882decdc5579653d7868b8fb26dc7d0e99f823aa5924"}, + {file = "PyYAML-6.0.1-cp310-cp310-win_amd64.whl", hash = "sha256:fd1592b3fdf65fff2ad0004b5e363300ef59ced41c2e6b3a99d4089fa8c5435d"}, + {file = "PyYAML-6.0.1-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:6965a7bc3cf88e5a1c3bd2e0b5c22f8d677dc88a455344035f03399034eb3007"}, + {file = "PyYAML-6.0.1-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:f003ed9ad21d6a4713f0a9b5a7a0a79e08dd0f221aff4525a2be4c346ee60aab"}, + {file = "PyYAML-6.0.1-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:42f8152b8dbc4fe7d96729ec2b99c7097d656dc1213a3229ca5383f973a5ed6d"}, + {file = "PyYAML-6.0.1-cp311-cp311-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:062582fca9fabdd2c8b54a3ef1c978d786e0f6b3a1510e0ac93ef59e0ddae2bc"}, + {file = "PyYAML-6.0.1-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:d2b04aac4d386b172d5b9692e2d2da8de7bfb6c387fa4f801fbf6fb2e6ba4673"}, + {file = "PyYAML-6.0.1-cp311-cp311-musllinux_1_1_x86_64.whl", hash = "sha256:e7d73685e87afe9f3b36c799222440d6cf362062f78be1013661b00c5c6f678b"}, + {file = "PyYAML-6.0.1-cp311-cp311-win32.whl", hash = "sha256:1635fd110e8d85d55237ab316b5b011de701ea0f29d07611174a1b42f1444741"}, + {file = "PyYAML-6.0.1-cp311-cp311-win_amd64.whl", hash = "sha256:bf07ee2fef7014951eeb99f56f39c9bb4af143d8aa3c21b1677805985307da34"}, + {file = "PyYAML-6.0.1-cp312-cp312-macosx_10_9_x86_64.whl", hash = "sha256:855fb52b0dc35af121542a76b9a84f8d1cd886ea97c84703eaa6d88e37a2ad28"}, + {file = "PyYAML-6.0.1-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:40df9b996c2b73138957fe23a16a4f0ba614f4c0efce1e9406a184b6d07fa3a9"}, + {file = "PyYAML-6.0.1-cp312-cp312-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:a08c6f0fe150303c1c6b71ebcd7213c2858041a7e01975da3a99aed1e7a378ef"}, + {file = 
"PyYAML-6.0.1-cp312-cp312-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:6c22bec3fbe2524cde73d7ada88f6566758a8f7227bfbf93a408a9d86bcc12a0"}, + {file = "PyYAML-6.0.1-cp312-cp312-musllinux_1_1_x86_64.whl", hash = "sha256:8d4e9c88387b0f5c7d5f281e55304de64cf7f9c0021a3525bd3b1c542da3b0e4"}, + {file = "PyYAML-6.0.1-cp312-cp312-win32.whl", hash = "sha256:d483d2cdf104e7c9fa60c544d92981f12ad66a457afae824d146093b8c294c54"}, + {file = "PyYAML-6.0.1-cp312-cp312-win_amd64.whl", hash = "sha256:0d3304d8c0adc42be59c5f8a4d9e3d7379e6955ad754aa9d6ab7a398b59dd1df"}, + {file = "PyYAML-6.0.1-cp36-cp36m-macosx_10_9_x86_64.whl", hash = "sha256:50550eb667afee136e9a77d6dc71ae76a44df8b3e51e41b77f6de2932bfe0f47"}, + {file = "PyYAML-6.0.1-cp36-cp36m-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:1fe35611261b29bd1de0070f0b2f47cb6ff71fa6595c077e42bd0c419fa27b98"}, + {file = "PyYAML-6.0.1-cp36-cp36m-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:704219a11b772aea0d8ecd7058d0082713c3562b4e271b849ad7dc4a5c90c13c"}, + {file = "PyYAML-6.0.1-cp36-cp36m-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:afd7e57eddb1a54f0f1a974bc4391af8bcce0b444685d936840f125cf046d5bd"}, + {file = "PyYAML-6.0.1-cp36-cp36m-win32.whl", hash = "sha256:fca0e3a251908a499833aa292323f32437106001d436eca0e6e7833256674585"}, + {file = "PyYAML-6.0.1-cp36-cp36m-win_amd64.whl", hash = "sha256:f22ac1c3cac4dbc50079e965eba2c1058622631e526bd9afd45fedd49ba781fa"}, + {file = "PyYAML-6.0.1-cp37-cp37m-macosx_10_9_x86_64.whl", hash = "sha256:b1275ad35a5d18c62a7220633c913e1b42d44b46ee12554e5fd39c70a243d6a3"}, + {file = "PyYAML-6.0.1-cp37-cp37m-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:18aeb1bf9a78867dc38b259769503436b7c72f7a1f1f4c93ff9a17de54319b27"}, + {file = "PyYAML-6.0.1-cp37-cp37m-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:596106435fa6ad000c2991a98fa58eeb8656ef2325d7e158344fb33864ed87e3"}, + {file = 
"PyYAML-6.0.1-cp37-cp37m-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:baa90d3f661d43131ca170712d903e6295d1f7a0f595074f151c0aed377c9b9c"}, + {file = "PyYAML-6.0.1-cp37-cp37m-win32.whl", hash = "sha256:9046c58c4395dff28dd494285c82ba00b546adfc7ef001486fbf0324bc174fba"}, + {file = "PyYAML-6.0.1-cp37-cp37m-win_amd64.whl", hash = "sha256:4fb147e7a67ef577a588a0e2c17b6db51dda102c71de36f8549b6816a96e1867"}, + {file = "PyYAML-6.0.1-cp38-cp38-macosx_10_9_x86_64.whl", hash = "sha256:1d4c7e777c441b20e32f52bd377e0c409713e8bb1386e1099c2415f26e479595"}, + {file = "PyYAML-6.0.1-cp38-cp38-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:a0cd17c15d3bb3fa06978b4e8958dcdc6e0174ccea823003a106c7d4d7899ac5"}, + {file = "PyYAML-6.0.1-cp38-cp38-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:28c119d996beec18c05208a8bd78cbe4007878c6dd15091efb73a30e90539696"}, + {file = "PyYAML-6.0.1-cp38-cp38-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:7e07cbde391ba96ab58e532ff4803f79c4129397514e1413a7dc761ccd755735"}, + {file = "PyYAML-6.0.1-cp38-cp38-musllinux_1_1_x86_64.whl", hash = "sha256:49a183be227561de579b4a36efbb21b3eab9651dd81b1858589f796549873dd6"}, + {file = "PyYAML-6.0.1-cp38-cp38-win32.whl", hash = "sha256:184c5108a2aca3c5b3d3bf9395d50893a7ab82a38004c8f61c258d4428e80206"}, + {file = "PyYAML-6.0.1-cp38-cp38-win_amd64.whl", hash = "sha256:1e2722cc9fbb45d9b87631ac70924c11d3a401b2d7f410cc0e3bbf249f2dca62"}, + {file = "PyYAML-6.0.1-cp39-cp39-macosx_10_9_x86_64.whl", hash = "sha256:9eb6caa9a297fc2c2fb8862bc5370d0303ddba53ba97e71f08023b6cd73d16a8"}, + {file = "PyYAML-6.0.1-cp39-cp39-macosx_11_0_arm64.whl", hash = "sha256:c8098ddcc2a85b61647b2590f825f3db38891662cfc2fc776415143f599bb859"}, + {file = "PyYAML-6.0.1-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:5773183b6446b2c99bb77e77595dd486303b4faab2b086e7b17bc6bef28865f6"}, + {file = "PyYAML-6.0.1-cp39-cp39-manylinux_2_17_s390x.manylinux2014_s390x.whl", 
hash = "sha256:b786eecbdf8499b9ca1d697215862083bd6d2a99965554781d0d8d1ad31e13a0"}, + {file = "PyYAML-6.0.1-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:bc1bf2925a1ecd43da378f4db9e4f799775d6367bdb94671027b73b393a7c42c"}, + {file = "PyYAML-6.0.1-cp39-cp39-musllinux_1_1_x86_64.whl", hash = "sha256:04ac92ad1925b2cff1db0cfebffb6ffc43457495c9b3c39d3fcae417d7125dc5"}, + {file = "PyYAML-6.0.1-cp39-cp39-win32.whl", hash = "sha256:faca3bdcf85b2fc05d06ff3fbc1f83e1391b3e724afa3feba7d13eeab355484c"}, + {file = "PyYAML-6.0.1-cp39-cp39-win_amd64.whl", hash = "sha256:510c9deebc5c0225e8c96813043e62b680ba2f9c50a08d3724c7f28a747d1486"}, + {file = "PyYAML-6.0.1.tar.gz", hash = "sha256:bfdf460b1736c775f2ba9f6a92bca30bc2095067b8a9d77876d1fad6cc3b4a43"}, +] + +[[package]] +name = "pyzmq" +version = "26.0.3" +description = "Python bindings for 0MQ" +optional = false +python-versions = ">=3.7" +files = [ + {file = "pyzmq-26.0.3-cp310-cp310-macosx_10_15_universal2.whl", hash = "sha256:44dd6fc3034f1eaa72ece33588867df9e006a7303725a12d64c3dff92330f625"}, + {file = "pyzmq-26.0.3-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:acb704195a71ac5ea5ecf2811c9ee19ecdc62b91878528302dd0be1b9451cc90"}, + {file = "pyzmq-26.0.3-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:5dbb9c997932473a27afa93954bb77a9f9b786b4ccf718d903f35da3232317de"}, + {file = "pyzmq-26.0.3-cp310-cp310-manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:6bcb34f869d431799c3ee7d516554797f7760cb2198ecaa89c3f176f72d062be"}, + {file = "pyzmq-26.0.3-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:38ece17ec5f20d7d9b442e5174ae9f020365d01ba7c112205a4d59cf19dc38ee"}, + {file = "pyzmq-26.0.3-cp310-cp310-manylinux_2_28_x86_64.whl", hash = "sha256:ba6e5e6588e49139a0979d03a7deb9c734bde647b9a8808f26acf9c547cab1bf"}, + {file = "pyzmq-26.0.3-cp310-cp310-musllinux_1_1_aarch64.whl", hash = 
"sha256:3bf8b000a4e2967e6dfdd8656cd0757d18c7e5ce3d16339e550bd462f4857e59"}, + {file = "pyzmq-26.0.3-cp310-cp310-musllinux_1_1_i686.whl", hash = "sha256:2136f64fbb86451dbbf70223635a468272dd20075f988a102bf8a3f194a411dc"}, + {file = "pyzmq-26.0.3-cp310-cp310-musllinux_1_1_x86_64.whl", hash = "sha256:e8918973fbd34e7814f59143c5f600ecd38b8038161239fd1a3d33d5817a38b8"}, + {file = "pyzmq-26.0.3-cp310-cp310-win32.whl", hash = "sha256:0aaf982e68a7ac284377d051c742610220fd06d330dcd4c4dbb4cdd77c22a537"}, + {file = "pyzmq-26.0.3-cp310-cp310-win_amd64.whl", hash = "sha256:f1a9b7d00fdf60b4039f4455afd031fe85ee8305b019334b72dcf73c567edc47"}, + {file = "pyzmq-26.0.3-cp310-cp310-win_arm64.whl", hash = "sha256:80b12f25d805a919d53efc0a5ad7c0c0326f13b4eae981a5d7b7cc343318ebb7"}, + {file = "pyzmq-26.0.3-cp311-cp311-macosx_10_15_universal2.whl", hash = "sha256:a72a84570f84c374b4c287183debc776dc319d3e8ce6b6a0041ce2e400de3f32"}, + {file = "pyzmq-26.0.3-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:7ca684ee649b55fd8f378127ac8462fb6c85f251c2fb027eb3c887e8ee347bcd"}, + {file = "pyzmq-26.0.3-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:e222562dc0f38571c8b1ffdae9d7adb866363134299264a1958d077800b193b7"}, + {file = "pyzmq-26.0.3-cp311-cp311-manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:f17cde1db0754c35a91ac00b22b25c11da6eec5746431d6e5092f0cd31a3fea9"}, + {file = "pyzmq-26.0.3-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:4b7c0c0b3244bb2275abe255d4a30c050d541c6cb18b870975553f1fb6f37527"}, + {file = "pyzmq-26.0.3-cp311-cp311-manylinux_2_28_x86_64.whl", hash = "sha256:ac97a21de3712afe6a6c071abfad40a6224fd14fa6ff0ff8d0c6e6cd4e2f807a"}, + {file = "pyzmq-26.0.3-cp311-cp311-musllinux_1_1_aarch64.whl", hash = "sha256:88b88282e55fa39dd556d7fc04160bcf39dea015f78e0cecec8ff4f06c1fc2b5"}, + {file = "pyzmq-26.0.3-cp311-cp311-musllinux_1_1_i686.whl", hash = "sha256:72b67f966b57dbd18dcc7efbc1c7fc9f5f983e572db1877081f075004614fcdd"}, 
+ {file = "pyzmq-26.0.3-cp311-cp311-musllinux_1_1_x86_64.whl", hash = "sha256:f4b6cecbbf3b7380f3b61de3a7b93cb721125dc125c854c14ddc91225ba52f83"}, + {file = "pyzmq-26.0.3-cp311-cp311-win32.whl", hash = "sha256:eed56b6a39216d31ff8cd2f1d048b5bf1700e4b32a01b14379c3b6dde9ce3aa3"}, + {file = "pyzmq-26.0.3-cp311-cp311-win_amd64.whl", hash = "sha256:3191d312c73e3cfd0f0afdf51df8405aafeb0bad71e7ed8f68b24b63c4f36500"}, + {file = "pyzmq-26.0.3-cp311-cp311-win_arm64.whl", hash = "sha256:b6907da3017ef55139cf0e417c5123a84c7332520e73a6902ff1f79046cd3b94"}, + {file = "pyzmq-26.0.3-cp312-cp312-macosx_10_15_universal2.whl", hash = "sha256:068ca17214038ae986d68f4a7021f97e187ed278ab6dccb79f837d765a54d753"}, + {file = "pyzmq-26.0.3-cp312-cp312-macosx_10_9_x86_64.whl", hash = "sha256:7821d44fe07335bea256b9f1f41474a642ca55fa671dfd9f00af8d68a920c2d4"}, + {file = "pyzmq-26.0.3-cp312-cp312-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:eeb438a26d87c123bb318e5f2b3d86a36060b01f22fbdffd8cf247d52f7c9a2b"}, + {file = "pyzmq-26.0.3-cp312-cp312-manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:69ea9d6d9baa25a4dc9cef5e2b77b8537827b122214f210dd925132e34ae9b12"}, + {file = "pyzmq-26.0.3-cp312-cp312-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:7daa3e1369355766dea11f1d8ef829905c3b9da886ea3152788dc25ee6079e02"}, + {file = "pyzmq-26.0.3-cp312-cp312-manylinux_2_28_x86_64.whl", hash = "sha256:6ca7a9a06b52d0e38ccf6bca1aeff7be178917893f3883f37b75589d42c4ac20"}, + {file = "pyzmq-26.0.3-cp312-cp312-musllinux_1_1_aarch64.whl", hash = "sha256:1b7d0e124948daa4d9686d421ef5087c0516bc6179fdcf8828b8444f8e461a77"}, + {file = "pyzmq-26.0.3-cp312-cp312-musllinux_1_1_i686.whl", hash = "sha256:e746524418b70f38550f2190eeee834db8850088c834d4c8406fbb9bc1ae10b2"}, + {file = "pyzmq-26.0.3-cp312-cp312-musllinux_1_1_x86_64.whl", hash = "sha256:6b3146f9ae6af82c47a5282ac8803523d381b3b21caeae0327ed2f7ecb718798"}, + {file = "pyzmq-26.0.3-cp312-cp312-win32.whl", hash = 
"sha256:2b291d1230845871c00c8462c50565a9cd6026fe1228e77ca934470bb7d70ea0"}, + {file = "pyzmq-26.0.3-cp312-cp312-win_amd64.whl", hash = "sha256:926838a535c2c1ea21c903f909a9a54e675c2126728c21381a94ddf37c3cbddf"}, + {file = "pyzmq-26.0.3-cp312-cp312-win_arm64.whl", hash = "sha256:5bf6c237f8c681dfb91b17f8435b2735951f0d1fad10cc5dfd96db110243370b"}, + {file = "pyzmq-26.0.3-cp37-cp37m-macosx_10_9_x86_64.whl", hash = "sha256:0c0991f5a96a8e620f7691e61178cd8f457b49e17b7d9cfa2067e2a0a89fc1d5"}, + {file = "pyzmq-26.0.3-cp37-cp37m-manylinux_2_12_i686.manylinux2010_i686.whl", hash = "sha256:dbf012d8fcb9f2cf0643b65df3b355fdd74fc0035d70bb5c845e9e30a3a4654b"}, + {file = "pyzmq-26.0.3-cp37-cp37m-manylinux_2_12_x86_64.manylinux2010_x86_64.whl", hash = "sha256:01fbfbeb8249a68d257f601deb50c70c929dc2dfe683b754659569e502fbd3aa"}, + {file = "pyzmq-26.0.3-cp37-cp37m-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:1c8eb19abe87029c18f226d42b8a2c9efdd139d08f8bf6e085dd9075446db450"}, + {file = "pyzmq-26.0.3-cp37-cp37m-musllinux_1_1_aarch64.whl", hash = "sha256:5344b896e79800af86ad643408ca9aa303a017f6ebff8cee5a3163c1e9aec987"}, + {file = "pyzmq-26.0.3-cp37-cp37m-musllinux_1_1_i686.whl", hash = "sha256:204e0f176fd1d067671157d049466869b3ae1fc51e354708b0dc41cf94e23a3a"}, + {file = "pyzmq-26.0.3-cp37-cp37m-musllinux_1_1_x86_64.whl", hash = "sha256:a42db008d58530efa3b881eeee4991146de0b790e095f7ae43ba5cc612decbc5"}, + {file = "pyzmq-26.0.3-cp37-cp37m-win32.whl", hash = "sha256:8d7a498671ca87e32b54cb47c82a92b40130a26c5197d392720a1bce1b3c77cf"}, + {file = "pyzmq-26.0.3-cp37-cp37m-win_amd64.whl", hash = "sha256:3b4032a96410bdc760061b14ed6a33613ffb7f702181ba999df5d16fb96ba16a"}, + {file = "pyzmq-26.0.3-cp38-cp38-macosx_10_15_universal2.whl", hash = "sha256:2cc4e280098c1b192c42a849de8de2c8e0f3a84086a76ec5b07bfee29bda7d18"}, + {file = "pyzmq-26.0.3-cp38-cp38-macosx_10_9_x86_64.whl", hash = "sha256:5bde86a2ed3ce587fa2b207424ce15b9a83a9fa14422dcc1c5356a13aed3df9d"}, + {file = 
"pyzmq-26.0.3-cp38-cp38-manylinux_2_12_i686.manylinux2010_i686.whl", hash = "sha256:34106f68e20e6ff253c9f596ea50397dbd8699828d55e8fa18bd4323d8d966e6"}, + {file = "pyzmq-26.0.3-cp38-cp38-manylinux_2_12_x86_64.manylinux2010_x86_64.whl", hash = "sha256:ebbbd0e728af5db9b04e56389e2299a57ea8b9dd15c9759153ee2455b32be6ad"}, + {file = "pyzmq-26.0.3-cp38-cp38-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:f6b1d1c631e5940cac5a0b22c5379c86e8df6a4ec277c7a856b714021ab6cfad"}, + {file = "pyzmq-26.0.3-cp38-cp38-musllinux_1_1_aarch64.whl", hash = "sha256:e891ce81edd463b3b4c3b885c5603c00141151dd9c6936d98a680c8c72fe5c67"}, + {file = "pyzmq-26.0.3-cp38-cp38-musllinux_1_1_i686.whl", hash = "sha256:9b273ecfbc590a1b98f014ae41e5cf723932f3b53ba9367cfb676f838038b32c"}, + {file = "pyzmq-26.0.3-cp38-cp38-musllinux_1_1_x86_64.whl", hash = "sha256:b32bff85fb02a75ea0b68f21e2412255b5731f3f389ed9aecc13a6752f58ac97"}, + {file = "pyzmq-26.0.3-cp38-cp38-win32.whl", hash = "sha256:f6c21c00478a7bea93caaaef9e7629145d4153b15a8653e8bb4609d4bc70dbfc"}, + {file = "pyzmq-26.0.3-cp38-cp38-win_amd64.whl", hash = "sha256:3401613148d93ef0fd9aabdbddb212de3db7a4475367f49f590c837355343972"}, + {file = "pyzmq-26.0.3-cp39-cp39-macosx_10_15_universal2.whl", hash = "sha256:2ed8357f4c6e0daa4f3baf31832df8a33334e0fe5b020a61bc8b345a3db7a606"}, + {file = "pyzmq-26.0.3-cp39-cp39-macosx_10_9_x86_64.whl", hash = "sha256:c1c8f2a2ca45292084c75bb6d3a25545cff0ed931ed228d3a1810ae3758f975f"}, + {file = "pyzmq-26.0.3-cp39-cp39-manylinux_2_12_i686.manylinux2010_i686.whl", hash = "sha256:b63731993cdddcc8e087c64e9cf003f909262b359110070183d7f3025d1c56b5"}, + {file = "pyzmq-26.0.3-cp39-cp39-manylinux_2_12_x86_64.manylinux2010_x86_64.whl", hash = "sha256:b3cd31f859b662ac5d7f4226ec7d8bd60384fa037fc02aee6ff0b53ba29a3ba8"}, + {file = "pyzmq-26.0.3-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:115f8359402fa527cf47708d6f8a0f8234f0e9ca0cab7c18c9c189c194dbf620"}, + {file = 
"pyzmq-26.0.3-cp39-cp39-musllinux_1_1_aarch64.whl", hash = "sha256:715bdf952b9533ba13dfcf1f431a8f49e63cecc31d91d007bc1deb914f47d0e4"}, + {file = "pyzmq-26.0.3-cp39-cp39-musllinux_1_1_i686.whl", hash = "sha256:e1258c639e00bf5e8a522fec6c3eaa3e30cf1c23a2f21a586be7e04d50c9acab"}, + {file = "pyzmq-26.0.3-cp39-cp39-musllinux_1_1_x86_64.whl", hash = "sha256:15c59e780be8f30a60816a9adab900c12a58d79c1ac742b4a8df044ab2a6d920"}, + {file = "pyzmq-26.0.3-cp39-cp39-win32.whl", hash = "sha256:d0cdde3c78d8ab5b46595054e5def32a755fc028685add5ddc7403e9f6de9879"}, + {file = "pyzmq-26.0.3-cp39-cp39-win_amd64.whl", hash = "sha256:ce828058d482ef860746bf532822842e0ff484e27f540ef5c813d516dd8896d2"}, + {file = "pyzmq-26.0.3-cp39-cp39-win_arm64.whl", hash = "sha256:788f15721c64109cf720791714dc14afd0f449d63f3a5487724f024345067381"}, + {file = "pyzmq-26.0.3-pp310-pypy310_pp73-macosx_10_9_x86_64.whl", hash = "sha256:2c18645ef6294d99b256806e34653e86236eb266278c8ec8112622b61db255de"}, + {file = "pyzmq-26.0.3-pp310-pypy310_pp73-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:7e6bc96ebe49604df3ec2c6389cc3876cabe475e6bfc84ced1bf4e630662cb35"}, + {file = "pyzmq-26.0.3-pp310-pypy310_pp73-manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:971e8990c5cc4ddcff26e149398fc7b0f6a042306e82500f5e8db3b10ce69f84"}, + {file = "pyzmq-26.0.3-pp310-pypy310_pp73-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:d8416c23161abd94cc7da80c734ad7c9f5dbebdadfdaa77dad78244457448223"}, + {file = "pyzmq-26.0.3-pp310-pypy310_pp73-win_amd64.whl", hash = "sha256:082a2988364b60bb5de809373098361cf1dbb239623e39e46cb18bc035ed9c0c"}, + {file = "pyzmq-26.0.3-pp37-pypy37_pp73-macosx_10_9_x86_64.whl", hash = "sha256:d57dfbf9737763b3a60d26e6800e02e04284926329aee8fb01049635e957fe81"}, + {file = "pyzmq-26.0.3-pp37-pypy37_pp73-manylinux_2_12_i686.manylinux2010_i686.whl", hash = "sha256:77a85dca4c2430ac04dc2a2185c2deb3858a34fe7f403d0a946fa56970cf60a1"}, + {file = 
"pyzmq-26.0.3-pp37-pypy37_pp73-manylinux_2_12_x86_64.manylinux2010_x86_64.whl", hash = "sha256:4c82a6d952a1d555bf4be42b6532927d2a5686dd3c3e280e5f63225ab47ac1f5"}, + {file = "pyzmq-26.0.3-pp37-pypy37_pp73-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:4496b1282c70c442809fc1b151977c3d967bfb33e4e17cedbf226d97de18f709"}, + {file = "pyzmq-26.0.3-pp37-pypy37_pp73-win_amd64.whl", hash = "sha256:e4946d6bdb7ba972dfda282f9127e5756d4f299028b1566d1245fa0d438847e6"}, + {file = "pyzmq-26.0.3-pp38-pypy38_pp73-macosx_10_9_x86_64.whl", hash = "sha256:03c0ae165e700364b266876d712acb1ac02693acd920afa67da2ebb91a0b3c09"}, + {file = "pyzmq-26.0.3-pp38-pypy38_pp73-manylinux_2_12_i686.manylinux2010_i686.whl", hash = "sha256:3e3070e680f79887d60feeda051a58d0ac36622e1759f305a41059eff62c6da7"}, + {file = "pyzmq-26.0.3-pp38-pypy38_pp73-manylinux_2_12_x86_64.manylinux2010_x86_64.whl", hash = "sha256:6ca08b840fe95d1c2bd9ab92dac5685f949fc6f9ae820ec16193e5ddf603c3b2"}, + {file = "pyzmq-26.0.3-pp38-pypy38_pp73-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:e76654e9dbfb835b3518f9938e565c7806976c07b37c33526b574cc1a1050480"}, + {file = "pyzmq-26.0.3-pp38-pypy38_pp73-win_amd64.whl", hash = "sha256:871587bdadd1075b112e697173e946a07d722459d20716ceb3d1bd6c64bd08ce"}, + {file = "pyzmq-26.0.3-pp39-pypy39_pp73-macosx_10_9_x86_64.whl", hash = "sha256:d0a2d1bd63a4ad79483049b26514e70fa618ce6115220da9efdff63688808b17"}, + {file = "pyzmq-26.0.3-pp39-pypy39_pp73-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:0270b49b6847f0d106d64b5086e9ad5dc8a902413b5dbbb15d12b60f9c1747a4"}, + {file = "pyzmq-26.0.3-pp39-pypy39_pp73-manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:703c60b9910488d3d0954ca585c34f541e506a091a41930e663a098d3b794c67"}, + {file = "pyzmq-26.0.3-pp39-pypy39_pp73-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:74423631b6be371edfbf7eabb02ab995c2563fee60a80a30829176842e71722a"}, + {file = 
"pyzmq-26.0.3-pp39-pypy39_pp73-manylinux_2_28_x86_64.whl", hash = "sha256:4adfbb5451196842a88fda3612e2c0414134874bffb1c2ce83ab4242ec9e027d"}, + {file = "pyzmq-26.0.3-pp39-pypy39_pp73-win_amd64.whl", hash = "sha256:3516119f4f9b8671083a70b6afaa0a070f5683e431ab3dc26e9215620d7ca1ad"}, + {file = "pyzmq-26.0.3.tar.gz", hash = "sha256:dba7d9f2e047dfa2bca3b01f4f84aa5246725203d6284e3790f2ca15fba6b40a"}, +] + +[package.dependencies] +cffi = {version = "*", markers = "implementation_name == \"pypy\""} + +[[package]] +name = "qtconsole" +version = "5.5.1" +description = "Jupyter Qt console" +optional = false +python-versions = ">= 3.8" +files = [ + {file = "qtconsole-5.5.1-py3-none-any.whl", hash = "sha256:8c75fa3e9b4ed884880ff7cea90a1b67451219279ec33deaee1d59e3df1a5d2b"}, + {file = "qtconsole-5.5.1.tar.gz", hash = "sha256:a0e806c6951db9490628e4df80caec9669b65149c7ba40f9bf033c025a5b56bc"}, +] + +[package.dependencies] +ipykernel = ">=4.1" +jupyter-client = ">=4.1" +jupyter-core = "*" +packaging = "*" +pygments = "*" +pyzmq = ">=17.1" +qtpy = ">=2.4.0" +traitlets = "<5.2.1 || >5.2.1,<5.2.2 || >5.2.2" + +[package.extras] +doc = ["Sphinx (>=1.3)"] +test = ["flaky", "pytest", "pytest-qt"] + +[[package]] +name = "qtpy" +version = "2.4.1" +description = "Provides an abstraction layer on top of the various Qt bindings (PyQt5/6 and PySide2/6)." 
+optional = false +python-versions = ">=3.7" +files = [ + {file = "QtPy-2.4.1-py3-none-any.whl", hash = "sha256:1c1d8c4fa2c884ae742b069151b0abe15b3f70491f3972698c683b8e38de839b"}, + {file = "QtPy-2.4.1.tar.gz", hash = "sha256:a5a15ffd519550a1361bdc56ffc07fda56a6af7292f17c7b395d4083af632987"}, +] + +[package.dependencies] +packaging = "*" + +[package.extras] +test = ["pytest (>=6,!=7.0.0,!=7.0.1)", "pytest-cov (>=3.0.0)", "pytest-qt"] + +[[package]] +name = "referencing" +version = "0.35.1" +description = "JSON Referencing + Python" +optional = false +python-versions = ">=3.8" +files = [ + {file = "referencing-0.35.1-py3-none-any.whl", hash = "sha256:eda6d3234d62814d1c64e305c1331c9a3a6132da475ab6382eaa997b21ee75de"}, + {file = "referencing-0.35.1.tar.gz", hash = "sha256:25b42124a6c8b632a425174f24087783efb348a6f1e0008e63cd4466fedf703c"}, +] + +[package.dependencies] +attrs = ">=22.2.0" +rpds-py = ">=0.7.0" + +[[package]] +name = "regex" +version = "2024.5.15" +description = "Alternative regular expression module, to replace re." 
+optional = false +python-versions = ">=3.8" +files = [ + {file = "regex-2024.5.15-cp310-cp310-macosx_10_9_universal2.whl", hash = "sha256:a81e3cfbae20378d75185171587cbf756015ccb14840702944f014e0d93ea09f"}, + {file = "regex-2024.5.15-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:7b59138b219ffa8979013be7bc85bb60c6f7b7575df3d56dc1e403a438c7a3f6"}, + {file = "regex-2024.5.15-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:a0bd000c6e266927cb7a1bc39d55be95c4b4f65c5be53e659537537e019232b1"}, + {file = "regex-2024.5.15-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:5eaa7ddaf517aa095fa8da0b5015c44d03da83f5bd49c87961e3c997daed0de7"}, + {file = "regex-2024.5.15-cp310-cp310-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:ba68168daedb2c0bab7fd7e00ced5ba90aebf91024dea3c88ad5063c2a562cca"}, + {file = "regex-2024.5.15-cp310-cp310-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:6e8d717bca3a6e2064fc3a08df5cbe366369f4b052dcd21b7416e6d71620dca1"}, + {file = "regex-2024.5.15-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:1337b7dbef9b2f71121cdbf1e97e40de33ff114801263b275aafd75303bd62b5"}, + {file = "regex-2024.5.15-cp310-cp310-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:f9ebd0a36102fcad2f03696e8af4ae682793a5d30b46c647eaf280d6cfb32796"}, + {file = "regex-2024.5.15-cp310-cp310-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_12_x86_64.manylinux2010_x86_64.whl", hash = "sha256:9efa1a32ad3a3ea112224897cdaeb6aa00381627f567179c0314f7b65d354c62"}, + {file = "regex-2024.5.15-cp310-cp310-musllinux_1_2_aarch64.whl", hash = "sha256:1595f2d10dff3d805e054ebdc41c124753631b6a471b976963c7b28543cf13b0"}, + {file = "regex-2024.5.15-cp310-cp310-musllinux_1_2_i686.whl", hash = "sha256:b802512f3e1f480f41ab5f2cfc0e2f761f08a1f41092d6718868082fc0d27143"}, + {file = "regex-2024.5.15-cp310-cp310-musllinux_1_2_ppc64le.whl", hash = 
"sha256:a0981022dccabca811e8171f913de05720590c915b033b7e601f35ce4ea7019f"}, + {file = "regex-2024.5.15-cp310-cp310-musllinux_1_2_s390x.whl", hash = "sha256:19068a6a79cf99a19ccefa44610491e9ca02c2be3305c7760d3831d38a467a6f"}, + {file = "regex-2024.5.15-cp310-cp310-musllinux_1_2_x86_64.whl", hash = "sha256:1b5269484f6126eee5e687785e83c6b60aad7663dafe842b34691157e5083e53"}, + {file = "regex-2024.5.15-cp310-cp310-win32.whl", hash = "sha256:ada150c5adfa8fbcbf321c30c751dc67d2f12f15bd183ffe4ec7cde351d945b3"}, + {file = "regex-2024.5.15-cp310-cp310-win_amd64.whl", hash = "sha256:ac394ff680fc46b97487941f5e6ae49a9f30ea41c6c6804832063f14b2a5a145"}, + {file = "regex-2024.5.15-cp311-cp311-macosx_10_9_universal2.whl", hash = "sha256:f5b1dff3ad008dccf18e652283f5e5339d70bf8ba7c98bf848ac33db10f7bc7a"}, + {file = "regex-2024.5.15-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:c6a2b494a76983df8e3d3feea9b9ffdd558b247e60b92f877f93a1ff43d26656"}, + {file = "regex-2024.5.15-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:a32b96f15c8ab2e7d27655969a23895eb799de3665fa94349f3b2fbfd547236f"}, + {file = "regex-2024.5.15-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:10002e86e6068d9e1c91eae8295ef690f02f913c57db120b58fdd35a6bb1af35"}, + {file = "regex-2024.5.15-cp311-cp311-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:ec54d5afa89c19c6dd8541a133be51ee1017a38b412b1321ccb8d6ddbeb4cf7d"}, + {file = "regex-2024.5.15-cp311-cp311-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:10e4ce0dca9ae7a66e6089bb29355d4432caed736acae36fef0fdd7879f0b0cb"}, + {file = "regex-2024.5.15-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:3e507ff1e74373c4d3038195fdd2af30d297b4f0950eeda6f515ae3d84a1770f"}, + {file = "regex-2024.5.15-cp311-cp311-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:d1f059a4d795e646e1c37665b9d06062c62d0e8cc3c511fe01315973a6542e40"}, + {file = 
"regex-2024.5.15-cp311-cp311-musllinux_1_2_aarch64.whl", hash = "sha256:0721931ad5fe0dda45d07f9820b90b2148ccdd8e45bb9e9b42a146cb4f695649"}, + {file = "regex-2024.5.15-cp311-cp311-musllinux_1_2_i686.whl", hash = "sha256:833616ddc75ad595dee848ad984d067f2f31be645d603e4d158bba656bbf516c"}, + {file = "regex-2024.5.15-cp311-cp311-musllinux_1_2_ppc64le.whl", hash = "sha256:287eb7f54fc81546346207c533ad3c2c51a8d61075127d7f6d79aaf96cdee890"}, + {file = "regex-2024.5.15-cp311-cp311-musllinux_1_2_s390x.whl", hash = "sha256:19dfb1c504781a136a80ecd1fff9f16dddf5bb43cec6871778c8a907a085bb3d"}, + {file = "regex-2024.5.15-cp311-cp311-musllinux_1_2_x86_64.whl", hash = "sha256:119af6e56dce35e8dfb5222573b50c89e5508d94d55713c75126b753f834de68"}, + {file = "regex-2024.5.15-cp311-cp311-win32.whl", hash = "sha256:1c1c174d6ec38d6c8a7504087358ce9213d4332f6293a94fbf5249992ba54efa"}, + {file = "regex-2024.5.15-cp311-cp311-win_amd64.whl", hash = "sha256:9e717956dcfd656f5055cc70996ee2cc82ac5149517fc8e1b60261b907740201"}, + {file = "regex-2024.5.15-cp312-cp312-macosx_10_9_universal2.whl", hash = "sha256:632b01153e5248c134007209b5c6348a544ce96c46005d8456de1d552455b014"}, + {file = "regex-2024.5.15-cp312-cp312-macosx_10_9_x86_64.whl", hash = "sha256:e64198f6b856d48192bf921421fdd8ad8eb35e179086e99e99f711957ffedd6e"}, + {file = "regex-2024.5.15-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:68811ab14087b2f6e0fc0c2bae9ad689ea3584cad6917fc57be6a48bbd012c49"}, + {file = "regex-2024.5.15-cp312-cp312-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:f8ec0c2fea1e886a19c3bee0cd19d862b3aa75dcdfb42ebe8ed30708df64687a"}, + {file = "regex-2024.5.15-cp312-cp312-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:d0c0c0003c10f54a591d220997dd27d953cd9ccc1a7294b40a4be5312be8797b"}, + {file = "regex-2024.5.15-cp312-cp312-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:2431b9e263af1953c55abbd3e2efca67ca80a3de8a0437cb58e2421f8184717a"}, + {file = 
"regex-2024.5.15-cp312-cp312-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:4a605586358893b483976cffc1723fb0f83e526e8f14c6e6614e75919d9862cf"}, + {file = "regex-2024.5.15-cp312-cp312-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:391d7f7f1e409d192dba8bcd42d3e4cf9e598f3979cdaed6ab11288da88cb9f2"}, + {file = "regex-2024.5.15-cp312-cp312-musllinux_1_2_aarch64.whl", hash = "sha256:9ff11639a8d98969c863d4617595eb5425fd12f7c5ef6621a4b74b71ed8726d5"}, + {file = "regex-2024.5.15-cp312-cp312-musllinux_1_2_i686.whl", hash = "sha256:4eee78a04e6c67e8391edd4dad3279828dd66ac4b79570ec998e2155d2e59fd5"}, + {file = "regex-2024.5.15-cp312-cp312-musllinux_1_2_ppc64le.whl", hash = "sha256:8fe45aa3f4aa57faabbc9cb46a93363edd6197cbc43523daea044e9ff2fea83e"}, + {file = "regex-2024.5.15-cp312-cp312-musllinux_1_2_s390x.whl", hash = "sha256:d0a3d8d6acf0c78a1fff0e210d224b821081330b8524e3e2bc5a68ef6ab5803d"}, + {file = "regex-2024.5.15-cp312-cp312-musllinux_1_2_x86_64.whl", hash = "sha256:c486b4106066d502495b3025a0a7251bf37ea9540433940a23419461ab9f2a80"}, + {file = "regex-2024.5.15-cp312-cp312-win32.whl", hash = "sha256:c49e15eac7c149f3670b3e27f1f28a2c1ddeccd3a2812cba953e01be2ab9b5fe"}, + {file = "regex-2024.5.15-cp312-cp312-win_amd64.whl", hash = "sha256:673b5a6da4557b975c6c90198588181029c60793835ce02f497ea817ff647cb2"}, + {file = "regex-2024.5.15-cp38-cp38-macosx_10_9_universal2.whl", hash = "sha256:87e2a9c29e672fc65523fb47a90d429b70ef72b901b4e4b1bd42387caf0d6835"}, + {file = "regex-2024.5.15-cp38-cp38-macosx_10_9_x86_64.whl", hash = "sha256:c3bea0ba8b73b71b37ac833a7f3fd53825924165da6a924aec78c13032f20850"}, + {file = "regex-2024.5.15-cp38-cp38-macosx_11_0_arm64.whl", hash = "sha256:bfc4f82cabe54f1e7f206fd3d30fda143f84a63fe7d64a81558d6e5f2e5aaba9"}, + {file = "regex-2024.5.15-cp38-cp38-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:e5bb9425fe881d578aeca0b2b4b3d314ec88738706f66f219c194d67179337cb"}, + {file = 
"regex-2024.5.15-cp38-cp38-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:64c65783e96e563103d641760664125e91bd85d8e49566ee560ded4da0d3e704"}, + {file = "regex-2024.5.15-cp38-cp38-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:cf2430df4148b08fb4324b848672514b1385ae3807651f3567871f130a728cc3"}, + {file = "regex-2024.5.15-cp38-cp38-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:5397de3219a8b08ae9540c48f602996aa6b0b65d5a61683e233af8605c42b0f2"}, + {file = "regex-2024.5.15-cp38-cp38-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:455705d34b4154a80ead722f4f185b04c4237e8e8e33f265cd0798d0e44825fa"}, + {file = "regex-2024.5.15-cp38-cp38-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_12_x86_64.manylinux2010_x86_64.whl", hash = "sha256:b2b6f1b3bb6f640c1a92be3bbfbcb18657b125b99ecf141fb3310b5282c7d4ed"}, + {file = "regex-2024.5.15-cp38-cp38-musllinux_1_2_aarch64.whl", hash = "sha256:3ad070b823ca5890cab606c940522d05d3d22395d432f4aaaf9d5b1653e47ced"}, + {file = "regex-2024.5.15-cp38-cp38-musllinux_1_2_i686.whl", hash = "sha256:5b5467acbfc153847d5adb21e21e29847bcb5870e65c94c9206d20eb4e99a384"}, + {file = "regex-2024.5.15-cp38-cp38-musllinux_1_2_ppc64le.whl", hash = "sha256:e6662686aeb633ad65be2a42b4cb00178b3fbf7b91878f9446075c404ada552f"}, + {file = "regex-2024.5.15-cp38-cp38-musllinux_1_2_s390x.whl", hash = "sha256:2b4c884767504c0e2401babe8b5b7aea9148680d2e157fa28f01529d1f7fcf67"}, + {file = "regex-2024.5.15-cp38-cp38-musllinux_1_2_x86_64.whl", hash = "sha256:3cd7874d57f13bf70078f1ff02b8b0aa48d5b9ed25fc48547516c6aba36f5741"}, + {file = "regex-2024.5.15-cp38-cp38-win32.whl", hash = "sha256:e4682f5ba31f475d58884045c1a97a860a007d44938c4c0895f41d64481edbc9"}, + {file = "regex-2024.5.15-cp38-cp38-win_amd64.whl", hash = "sha256:d99ceffa25ac45d150e30bd9ed14ec6039f2aad0ffa6bb87a5936f5782fc1569"}, + {file = "regex-2024.5.15-cp39-cp39-macosx_10_9_universal2.whl", hash = 
"sha256:13cdaf31bed30a1e1c2453ef6015aa0983e1366fad2667657dbcac7b02f67133"}, + {file = "regex-2024.5.15-cp39-cp39-macosx_10_9_x86_64.whl", hash = "sha256:cac27dcaa821ca271855a32188aa61d12decb6fe45ffe3e722401fe61e323cd1"}, + {file = "regex-2024.5.15-cp39-cp39-macosx_11_0_arm64.whl", hash = "sha256:7dbe2467273b875ea2de38ded4eba86cbcbc9a1a6d0aa11dcf7bd2e67859c435"}, + {file = "regex-2024.5.15-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:64f18a9a3513a99c4bef0e3efd4c4a5b11228b48aa80743be822b71e132ae4f5"}, + {file = "regex-2024.5.15-cp39-cp39-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:d347a741ea871c2e278fde6c48f85136c96b8659b632fb57a7d1ce1872547600"}, + {file = "regex-2024.5.15-cp39-cp39-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:1878b8301ed011704aea4c806a3cadbd76f84dece1ec09cc9e4dc934cfa5d4da"}, + {file = "regex-2024.5.15-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:4babf07ad476aaf7830d77000874d7611704a7fcf68c9c2ad151f5d94ae4bfc4"}, + {file = "regex-2024.5.15-cp39-cp39-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:35cb514e137cb3488bce23352af3e12fb0dbedd1ee6e60da053c69fb1b29cc6c"}, + {file = "regex-2024.5.15-cp39-cp39-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_12_x86_64.manylinux2010_x86_64.whl", hash = "sha256:cdd09d47c0b2efee9378679f8510ee6955d329424c659ab3c5e3a6edea696294"}, + {file = "regex-2024.5.15-cp39-cp39-musllinux_1_2_aarch64.whl", hash = "sha256:72d7a99cd6b8f958e85fc6ca5b37c4303294954eac1376535b03c2a43eb72629"}, + {file = "regex-2024.5.15-cp39-cp39-musllinux_1_2_i686.whl", hash = "sha256:a094801d379ab20c2135529948cb84d417a2169b9bdceda2a36f5f10977ebc16"}, + {file = "regex-2024.5.15-cp39-cp39-musllinux_1_2_ppc64le.whl", hash = "sha256:c0c18345010870e58238790a6779a1219b4d97bd2e77e1140e8ee5d14df071aa"}, + {file = "regex-2024.5.15-cp39-cp39-musllinux_1_2_s390x.whl", hash = 
"sha256:16093f563098448ff6b1fa68170e4acbef94e6b6a4e25e10eae8598bb1694b5d"}, + {file = "regex-2024.5.15-cp39-cp39-musllinux_1_2_x86_64.whl", hash = "sha256:e38a7d4e8f633a33b4c7350fbd8bad3b70bf81439ac67ac38916c4a86b465456"}, + {file = "regex-2024.5.15-cp39-cp39-win32.whl", hash = "sha256:71a455a3c584a88f654b64feccc1e25876066c4f5ef26cd6dd711308aa538694"}, + {file = "regex-2024.5.15-cp39-cp39-win_amd64.whl", hash = "sha256:cab12877a9bdafde5500206d1020a584355a97884dfd388af3699e9137bf7388"}, + {file = "regex-2024.5.15.tar.gz", hash = "sha256:d3ee02d9e5f482cc8309134a91eeaacbdd2261ba111b0fef3748eeb4913e6a2c"}, +] + +[[package]] +name = "requests" +version = "2.31.0" +description = "Python HTTP for Humans." +optional = false +python-versions = ">=3.7" +files = [ + {file = "requests-2.31.0-py3-none-any.whl", hash = "sha256:58cd2187c01e70e6e26505bca751777aa9f2ee0b7f4300988b709f44e013003f"}, + {file = "requests-2.31.0.tar.gz", hash = "sha256:942c5a758f98d790eaed1a29cb6eefc7ffb0d1cf7af05c3d2791656dbd6ad1e1"}, +] + +[package.dependencies] +certifi = ">=2017.4.17" +charset-normalizer = ">=2,<4" +idna = ">=2.5,<4" +urllib3 = ">=1.21.1,<3" + +[package.extras] +socks = ["PySocks (>=1.5.6,!=1.5.7)"] +use-chardet-on-py3 = ["chardet (>=3.0.2,<6)"] + +[[package]] +name = "rfc3339-validator" +version = "0.1.4" +description = "A pure python RFC3339 validator" +optional = false +python-versions = ">=2.7, !=3.0.*, !=3.1.*, !=3.2.*, !=3.3.*, !=3.4.*" +files = [ + {file = "rfc3339_validator-0.1.4-py2.py3-none-any.whl", hash = "sha256:24f6ec1eda14ef823da9e36ec7113124b39c04d50a4d3d3a3c2859577e7791fa"}, + {file = "rfc3339_validator-0.1.4.tar.gz", hash = "sha256:138a2abdf93304ad60530167e51d2dfb9549521a836871b88d7f4695d0022f6b"}, +] + +[package.dependencies] +six = "*" + +[[package]] +name = "rfc3986-validator" +version = "0.1.1" +description = "Pure python rfc3986 validator" +optional = false +python-versions = ">=2.7, !=3.0.*, !=3.1.*, !=3.2.*, !=3.3.*, !=3.4.*" +files = [ + {file = 
"rfc3986_validator-0.1.1-py2.py3-none-any.whl", hash = "sha256:2f235c432ef459970b4306369336b9d5dbdda31b510ca1e327636e01f528bfa9"}, + {file = "rfc3986_validator-0.1.1.tar.gz", hash = "sha256:3d44bde7921b3b9ec3ae4e3adca370438eccebc676456449b145d533b240d055"}, +] + +[[package]] +name = "rpds-py" +version = "0.18.0" +description = "Python bindings to Rust's persistent data structures (rpds)" +optional = false +python-versions = ">=3.8" +files = [ + {file = "rpds_py-0.18.0-cp310-cp310-macosx_10_12_x86_64.whl", hash = "sha256:5b4e7d8d6c9b2e8ee2d55c90b59c707ca59bc30058269b3db7b1f8df5763557e"}, + {file = "rpds_py-0.18.0-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:c463ed05f9dfb9baebef68048aed8dcdc94411e4bf3d33a39ba97e271624f8f7"}, + {file = "rpds_py-0.18.0-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:01e36a39af54a30f28b73096dd39b6802eddd04c90dbe161c1b8dbe22353189f"}, + {file = "rpds_py-0.18.0-cp310-cp310-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:d62dec4976954a23d7f91f2f4530852b0c7608116c257833922a896101336c51"}, + {file = "rpds_py-0.18.0-cp310-cp310-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:dd18772815d5f008fa03d2b9a681ae38d5ae9f0e599f7dda233c439fcaa00d40"}, + {file = "rpds_py-0.18.0-cp310-cp310-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:923d39efa3cfb7279a0327e337a7958bff00cc447fd07a25cddb0a1cc9a6d2da"}, + {file = "rpds_py-0.18.0-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:39514da80f971362f9267c600b6d459bfbbc549cffc2cef8e47474fddc9b45b1"}, + {file = "rpds_py-0.18.0-cp310-cp310-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:a34d557a42aa28bd5c48a023c570219ba2593bcbbb8dc1b98d8cf5d529ab1434"}, + {file = "rpds_py-0.18.0-cp310-cp310-musllinux_1_2_aarch64.whl", hash = "sha256:93df1de2f7f7239dc9cc5a4a12408ee1598725036bd2dedadc14d94525192fc3"}, + {file = "rpds_py-0.18.0-cp310-cp310-musllinux_1_2_i686.whl", hash = 
"sha256:34b18ba135c687f4dac449aa5157d36e2cbb7c03cbea4ddbd88604e076aa836e"}, + {file = "rpds_py-0.18.0-cp310-cp310-musllinux_1_2_x86_64.whl", hash = "sha256:c0b5dcf9193625afd8ecc92312d6ed78781c46ecbf39af9ad4681fc9f464af88"}, + {file = "rpds_py-0.18.0-cp310-none-win32.whl", hash = "sha256:c4325ff0442a12113a6379af66978c3fe562f846763287ef66bdc1d57925d337"}, + {file = "rpds_py-0.18.0-cp310-none-win_amd64.whl", hash = "sha256:7223a2a5fe0d217e60a60cdae28d6949140dde9c3bcc714063c5b463065e3d66"}, + {file = "rpds_py-0.18.0-cp311-cp311-macosx_10_12_x86_64.whl", hash = "sha256:3a96e0c6a41dcdba3a0a581bbf6c44bb863f27c541547fb4b9711fd8cf0ffad4"}, + {file = "rpds_py-0.18.0-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:30f43887bbae0d49113cbaab729a112251a940e9b274536613097ab8b4899cf6"}, + {file = "rpds_py-0.18.0-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:fcb25daa9219b4cf3a0ab24b0eb9a5cc8949ed4dc72acb8fa16b7e1681aa3c58"}, + {file = "rpds_py-0.18.0-cp311-cp311-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:d68c93e381010662ab873fea609bf6c0f428b6d0bb00f2c6939782e0818d37bf"}, + {file = "rpds_py-0.18.0-cp311-cp311-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:b34b7aa8b261c1dbf7720b5d6f01f38243e9b9daf7e6b8bc1fd4657000062f2c"}, + {file = "rpds_py-0.18.0-cp311-cp311-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:2e6d75ab12b0bbab7215e5d40f1e5b738aa539598db27ef83b2ec46747df90e1"}, + {file = "rpds_py-0.18.0-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:0b8612cd233543a3781bc659c731b9d607de65890085098986dfd573fc2befe5"}, + {file = "rpds_py-0.18.0-cp311-cp311-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:aec493917dd45e3c69d00a8874e7cbed844efd935595ef78a0f25f14312e33c6"}, + {file = "rpds_py-0.18.0-cp311-cp311-musllinux_1_2_aarch64.whl", hash = "sha256:661d25cbffaf8cc42e971dd570d87cb29a665f49f4abe1f9e76be9a5182c4688"}, + {file = 
"rpds_py-0.18.0-cp311-cp311-musllinux_1_2_i686.whl", hash = "sha256:1df3659d26f539ac74fb3b0c481cdf9d725386e3552c6fa2974f4d33d78e544b"}, + {file = "rpds_py-0.18.0-cp311-cp311-musllinux_1_2_x86_64.whl", hash = "sha256:a1ce3ba137ed54f83e56fb983a5859a27d43a40188ba798993812fed73c70836"}, + {file = "rpds_py-0.18.0-cp311-none-win32.whl", hash = "sha256:69e64831e22a6b377772e7fb337533c365085b31619005802a79242fee620bc1"}, + {file = "rpds_py-0.18.0-cp311-none-win_amd64.whl", hash = "sha256:998e33ad22dc7ec7e030b3df701c43630b5bc0d8fbc2267653577e3fec279afa"}, + {file = "rpds_py-0.18.0-cp312-cp312-macosx_10_12_x86_64.whl", hash = "sha256:7f2facbd386dd60cbbf1a794181e6aa0bd429bd78bfdf775436020172e2a23f0"}, + {file = "rpds_py-0.18.0-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:1d9a5be316c15ffb2b3c405c4ff14448c36b4435be062a7f578ccd8b01f0c4d8"}, + {file = "rpds_py-0.18.0-cp312-cp312-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:cd5bf1af8efe569654bbef5a3e0a56eca45f87cfcffab31dd8dde70da5982475"}, + {file = "rpds_py-0.18.0-cp312-cp312-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:5417558f6887e9b6b65b4527232553c139b57ec42c64570569b155262ac0754f"}, + {file = "rpds_py-0.18.0-cp312-cp312-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:56a737287efecafc16f6d067c2ea0117abadcd078d58721f967952db329a3e5c"}, + {file = "rpds_py-0.18.0-cp312-cp312-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:8f03bccbd8586e9dd37219bce4d4e0d3ab492e6b3b533e973fa08a112cb2ffc9"}, + {file = "rpds_py-0.18.0-cp312-cp312-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:4457a94da0d5c53dc4b3e4de1158bdab077db23c53232f37a3cb7afdb053a4e3"}, + {file = "rpds_py-0.18.0-cp312-cp312-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:0ab39c1ba9023914297dd88ec3b3b3c3f33671baeb6acf82ad7ce883f6e8e157"}, + {file = "rpds_py-0.18.0-cp312-cp312-musllinux_1_2_aarch64.whl", hash = 
"sha256:9d54553c1136b50fd12cc17e5b11ad07374c316df307e4cfd6441bea5fb68496"}, + {file = "rpds_py-0.18.0-cp312-cp312-musllinux_1_2_i686.whl", hash = "sha256:0af039631b6de0397ab2ba16eaf2872e9f8fca391b44d3d8cac317860a700a3f"}, + {file = "rpds_py-0.18.0-cp312-cp312-musllinux_1_2_x86_64.whl", hash = "sha256:84ffab12db93b5f6bad84c712c92060a2d321b35c3c9960b43d08d0f639d60d7"}, + {file = "rpds_py-0.18.0-cp312-none-win32.whl", hash = "sha256:685537e07897f173abcf67258bee3c05c374fa6fff89d4c7e42fb391b0605e98"}, + {file = "rpds_py-0.18.0-cp312-none-win_amd64.whl", hash = "sha256:e003b002ec72c8d5a3e3da2989c7d6065b47d9eaa70cd8808b5384fbb970f4ec"}, + {file = "rpds_py-0.18.0-cp38-cp38-macosx_10_12_x86_64.whl", hash = "sha256:08f9ad53c3f31dfb4baa00da22f1e862900f45908383c062c27628754af2e88e"}, + {file = "rpds_py-0.18.0-cp38-cp38-macosx_11_0_arm64.whl", hash = "sha256:c0013fe6b46aa496a6749c77e00a3eb07952832ad6166bd481c74bda0dcb6d58"}, + {file = "rpds_py-0.18.0-cp38-cp38-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:e32a92116d4f2a80b629778280103d2a510a5b3f6314ceccd6e38006b5e92dcb"}, + {file = "rpds_py-0.18.0-cp38-cp38-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:e541ec6f2ec456934fd279a3120f856cd0aedd209fc3852eca563f81738f6861"}, + {file = "rpds_py-0.18.0-cp38-cp38-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:bed88b9a458e354014d662d47e7a5baafd7ff81c780fd91584a10d6ec842cb73"}, + {file = "rpds_py-0.18.0-cp38-cp38-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:2644e47de560eb7bd55c20fc59f6daa04682655c58d08185a9b95c1970fa1e07"}, + {file = "rpds_py-0.18.0-cp38-cp38-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:8e8916ae4c720529e18afa0b879473049e95949bf97042e938530e072fde061d"}, + {file = "rpds_py-0.18.0-cp38-cp38-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:465a3eb5659338cf2a9243e50ad9b2296fa15061736d6e26240e713522b6235c"}, + {file = "rpds_py-0.18.0-cp38-cp38-musllinux_1_2_aarch64.whl", hash = 
"sha256:ea7d4a99f3b38c37eac212dbd6ec42b7a5ec51e2c74b5d3223e43c811609e65f"}, + {file = "rpds_py-0.18.0-cp38-cp38-musllinux_1_2_i686.whl", hash = "sha256:67071a6171e92b6da534b8ae326505f7c18022c6f19072a81dcf40db2638767c"}, + {file = "rpds_py-0.18.0-cp38-cp38-musllinux_1_2_x86_64.whl", hash = "sha256:41ef53e7c58aa4ef281da975f62c258950f54b76ec8e45941e93a3d1d8580594"}, + {file = "rpds_py-0.18.0-cp38-none-win32.whl", hash = "sha256:fdea4952db2793c4ad0bdccd27c1d8fdd1423a92f04598bc39425bcc2b8ee46e"}, + {file = "rpds_py-0.18.0-cp38-none-win_amd64.whl", hash = "sha256:7cd863afe7336c62ec78d7d1349a2f34c007a3cc6c2369d667c65aeec412a5b1"}, + {file = "rpds_py-0.18.0-cp39-cp39-macosx_10_12_x86_64.whl", hash = "sha256:5307def11a35f5ae4581a0b658b0af8178c65c530e94893345bebf41cc139d33"}, + {file = "rpds_py-0.18.0-cp39-cp39-macosx_11_0_arm64.whl", hash = "sha256:77f195baa60a54ef9d2de16fbbfd3ff8b04edc0c0140a761b56c267ac11aa467"}, + {file = "rpds_py-0.18.0-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:39f5441553f1c2aed4de4377178ad8ff8f9d733723d6c66d983d75341de265ab"}, + {file = "rpds_py-0.18.0-cp39-cp39-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:9a00312dea9310d4cb7dbd7787e722d2e86a95c2db92fbd7d0155f97127bcb40"}, + {file = "rpds_py-0.18.0-cp39-cp39-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:8f2fc11e8fe034ee3c34d316d0ad8808f45bc3b9ce5857ff29d513f3ff2923a1"}, + {file = "rpds_py-0.18.0-cp39-cp39-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:586f8204935b9ec884500498ccc91aa869fc652c40c093bd9e1471fbcc25c022"}, + {file = "rpds_py-0.18.0-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:ddc2f4dfd396c7bfa18e6ce371cba60e4cf9d2e5cdb71376aa2da264605b60b9"}, + {file = "rpds_py-0.18.0-cp39-cp39-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:5ddcba87675b6d509139d1b521e0c8250e967e63b5909a7e8f8944d0f90ff36f"}, + {file = "rpds_py-0.18.0-cp39-cp39-musllinux_1_2_aarch64.whl", hash = 
"sha256:7bd339195d84439cbe5771546fe8a4e8a7a045417d8f9de9a368c434e42a721e"}, + {file = "rpds_py-0.18.0-cp39-cp39-musllinux_1_2_i686.whl", hash = "sha256:d7c36232a90d4755b720fbd76739d8891732b18cf240a9c645d75f00639a9024"}, + {file = "rpds_py-0.18.0-cp39-cp39-musllinux_1_2_x86_64.whl", hash = "sha256:6b0817e34942b2ca527b0e9298373e7cc75f429e8da2055607f4931fded23e20"}, + {file = "rpds_py-0.18.0-cp39-none-win32.whl", hash = "sha256:99f70b740dc04d09e6b2699b675874367885217a2e9f782bdf5395632ac663b7"}, + {file = "rpds_py-0.18.0-cp39-none-win_amd64.whl", hash = "sha256:6ef687afab047554a2d366e112dd187b62d261d49eb79b77e386f94644363294"}, + {file = "rpds_py-0.18.0-pp310-pypy310_pp73-macosx_10_12_x86_64.whl", hash = "sha256:ad36cfb355e24f1bd37cac88c112cd7730873f20fb0bdaf8ba59eedf8216079f"}, + {file = "rpds_py-0.18.0-pp310-pypy310_pp73-macosx_11_0_arm64.whl", hash = "sha256:36b3ee798c58ace201289024b52788161e1ea133e4ac93fba7d49da5fec0ef9e"}, + {file = "rpds_py-0.18.0-pp310-pypy310_pp73-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:f8a2f084546cc59ea99fda8e070be2fd140c3092dc11524a71aa8f0f3d5a55ca"}, + {file = "rpds_py-0.18.0-pp310-pypy310_pp73-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:e4461d0f003a0aa9be2bdd1b798a041f177189c1a0f7619fe8c95ad08d9a45d7"}, + {file = "rpds_py-0.18.0-pp310-pypy310_pp73-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:8db715ebe3bb7d86d77ac1826f7d67ec11a70dbd2376b7cc214199360517b641"}, + {file = "rpds_py-0.18.0-pp310-pypy310_pp73-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:793968759cd0d96cac1e367afd70c235867831983f876a53389ad869b043c948"}, + {file = "rpds_py-0.18.0-pp310-pypy310_pp73-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:66e6a3af5a75363d2c9a48b07cb27c4ea542938b1a2e93b15a503cdfa8490795"}, + {file = "rpds_py-0.18.0-pp310-pypy310_pp73-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:6ef0befbb5d79cf32d0266f5cff01545602344eda89480e1dd88aca964260b18"}, + 
{file = "rpds_py-0.18.0-pp310-pypy310_pp73-musllinux_1_2_aarch64.whl", hash = "sha256:1d4acf42190d449d5e89654d5c1ed3a4f17925eec71f05e2a41414689cda02d1"}, + {file = "rpds_py-0.18.0-pp310-pypy310_pp73-musllinux_1_2_i686.whl", hash = "sha256:a5f446dd5055667aabaee78487f2b5ab72e244f9bc0b2ffebfeec79051679984"}, + {file = "rpds_py-0.18.0-pp310-pypy310_pp73-musllinux_1_2_x86_64.whl", hash = "sha256:9dbbeb27f4e70bfd9eec1be5477517365afe05a9b2c441a0b21929ee61048124"}, + {file = "rpds_py-0.18.0-pp38-pypy38_pp73-macosx_10_12_x86_64.whl", hash = "sha256:22806714311a69fd0af9b35b7be97c18a0fc2826e6827dbb3a8c94eac6cf7eeb"}, + {file = "rpds_py-0.18.0-pp38-pypy38_pp73-macosx_11_0_arm64.whl", hash = "sha256:b34ae4636dfc4e76a438ab826a0d1eed2589ca7d9a1b2d5bb546978ac6485461"}, + {file = "rpds_py-0.18.0-pp38-pypy38_pp73-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:8c8370641f1a7f0e0669ddccca22f1da893cef7628396431eb445d46d893e5cd"}, + {file = "rpds_py-0.18.0-pp38-pypy38_pp73-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:c8362467a0fdeccd47935f22c256bec5e6abe543bf0d66e3d3d57a8fb5731863"}, + {file = "rpds_py-0.18.0-pp38-pypy38_pp73-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:11a8c85ef4a07a7638180bf04fe189d12757c696eb41f310d2426895356dcf05"}, + {file = "rpds_py-0.18.0-pp38-pypy38_pp73-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:b316144e85316da2723f9d8dc75bada12fa58489a527091fa1d5a612643d1a0e"}, + {file = "rpds_py-0.18.0-pp38-pypy38_pp73-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:cf1ea2e34868f6fbf070e1af291c8180480310173de0b0c43fc38a02929fc0e3"}, + {file = "rpds_py-0.18.0-pp38-pypy38_pp73-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:e546e768d08ad55b20b11dbb78a745151acbd938f8f00d0cfbabe8b0199b9880"}, + {file = "rpds_py-0.18.0-pp38-pypy38_pp73-musllinux_1_2_aarch64.whl", hash = "sha256:4901165d170a5fde6f589acb90a6b33629ad1ec976d4529e769c6f3d885e3e80"}, + {file = 
"rpds_py-0.18.0-pp38-pypy38_pp73-musllinux_1_2_i686.whl", hash = "sha256:618a3d6cae6ef8ec88bb76dd80b83cfe415ad4f1d942ca2a903bf6b6ff97a2da"}, + {file = "rpds_py-0.18.0-pp38-pypy38_pp73-musllinux_1_2_x86_64.whl", hash = "sha256:ed4eb745efbff0a8e9587d22a84be94a5eb7d2d99c02dacf7bd0911713ed14dd"}, + {file = "rpds_py-0.18.0-pp39-pypy39_pp73-macosx_10_12_x86_64.whl", hash = "sha256:6c81e5f372cd0dc5dc4809553d34f832f60a46034a5f187756d9b90586c2c307"}, + {file = "rpds_py-0.18.0-pp39-pypy39_pp73-macosx_11_0_arm64.whl", hash = "sha256:43fbac5f22e25bee1d482c97474f930a353542855f05c1161fd804c9dc74a09d"}, + {file = "rpds_py-0.18.0-pp39-pypy39_pp73-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:6d7faa6f14017c0b1e69f5e2c357b998731ea75a442ab3841c0dbbbfe902d2c4"}, + {file = "rpds_py-0.18.0-pp39-pypy39_pp73-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:08231ac30a842bd04daabc4d71fddd7e6d26189406d5a69535638e4dcb88fe76"}, + {file = "rpds_py-0.18.0-pp39-pypy39_pp73-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:044a3e61a7c2dafacae99d1e722cc2d4c05280790ec5a05031b3876809d89a5c"}, + {file = "rpds_py-0.18.0-pp39-pypy39_pp73-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:3f26b5bd1079acdb0c7a5645e350fe54d16b17bfc5e71f371c449383d3342e17"}, + {file = "rpds_py-0.18.0-pp39-pypy39_pp73-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:482103aed1dfe2f3b71a58eff35ba105289b8d862551ea576bd15479aba01f66"}, + {file = "rpds_py-0.18.0-pp39-pypy39_pp73-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:1374f4129f9bcca53a1bba0bb86bf78325a0374577cf7e9e4cd046b1e6f20e24"}, + {file = "rpds_py-0.18.0-pp39-pypy39_pp73-musllinux_1_2_aarch64.whl", hash = "sha256:635dc434ff724b178cb192c70016cc0ad25a275228f749ee0daf0eddbc8183b1"}, + {file = "rpds_py-0.18.0-pp39-pypy39_pp73-musllinux_1_2_i686.whl", hash = "sha256:bc362ee4e314870a70f4ae88772d72d877246537d9f8cb8f7eacf10884862432"}, + {file = 
"rpds_py-0.18.0-pp39-pypy39_pp73-musllinux_1_2_x86_64.whl", hash = "sha256:4832d7d380477521a8c1644bbab6588dfedea5e30a7d967b5fb75977c45fd77f"}, + {file = "rpds_py-0.18.0.tar.gz", hash = "sha256:42821446ee7a76f5d9f71f9e33a4fb2ffd724bb3e7f93386150b61a43115788d"}, +] + +[[package]] +name = "rsa" +version = "4.9" +description = "Pure-Python RSA implementation" +optional = false +python-versions = ">=3.6,<4" +files = [ + {file = "rsa-4.9-py3-none-any.whl", hash = "sha256:90260d9058e514786967344d0ef75fa8727eed8a7d2e43ce9f4bcf1b536174f7"}, + {file = "rsa-4.9.tar.gz", hash = "sha256:e38464a49c6c85d7f1351b0126661487a7e0a14a50f1675ec50eb34d4f20ef21"}, +] + +[package.dependencies] +pyasn1 = ">=0.1.3" + +[[package]] +name = "send2trash" +version = "1.8.3" +description = "Send file to trash natively under Mac OS X, Windows and Linux" +optional = false +python-versions = "!=3.0.*,!=3.1.*,!=3.2.*,!=3.3.*,!=3.4.*,!=3.5.*,>=2.7" +files = [ + {file = "Send2Trash-1.8.3-py3-none-any.whl", hash = "sha256:0c31227e0bd08961c7665474a3d1ef7193929fedda4233843689baa056be46c9"}, + {file = "Send2Trash-1.8.3.tar.gz", hash = "sha256:b18e7a3966d99871aefeb00cfbcfdced55ce4871194810fc71f4aa484b953abf"}, +] + +[package.extras] +nativelib = ["pyobjc-framework-Cocoa", "pywin32"] +objc = ["pyobjc-framework-Cocoa"] +win32 = ["pywin32"] + +[[package]] +name = "shapely" +version = "2.0.4" +description = "Manipulation and analysis of geometric objects" +optional = false +python-versions = ">=3.7" +files = [ + {file = "shapely-2.0.4-cp310-cp310-macosx_10_9_universal2.whl", hash = "sha256:011b77153906030b795791f2fdfa2d68f1a8d7e40bce78b029782ade3afe4f2f"}, + {file = "shapely-2.0.4-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:9831816a5d34d5170aa9ed32a64982c3d6f4332e7ecfe62dc97767e163cb0b17"}, + {file = "shapely-2.0.4-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:5c4849916f71dc44e19ed370421518c0d86cf73b26e8656192fcfcda08218fbd"}, + {file = 
"shapely-2.0.4-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:841f93a0e31e4c64d62ea570d81c35de0f6cea224568b2430d832967536308e6"}, + {file = "shapely-2.0.4-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:d2b4431f522b277c79c34b65da128029a9955e4481462cbf7ebec23aab61fc58"}, + {file = "shapely-2.0.4-cp310-cp310-win32.whl", hash = "sha256:92a41d936f7d6743f343be265ace93b7c57f5b231e21b9605716f5a47c2879e7"}, + {file = "shapely-2.0.4-cp310-cp310-win_amd64.whl", hash = "sha256:30982f79f21bb0ff7d7d4a4e531e3fcaa39b778584c2ce81a147f95be1cd58c9"}, + {file = "shapely-2.0.4-cp311-cp311-macosx_10_9_universal2.whl", hash = "sha256:de0205cb21ad5ddaef607cda9a3191eadd1e7a62a756ea3a356369675230ac35"}, + {file = "shapely-2.0.4-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:7d56ce3e2a6a556b59a288771cf9d091470116867e578bebced8bfc4147fbfd7"}, + {file = "shapely-2.0.4-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:58b0ecc505bbe49a99551eea3f2e8a9b3b24b3edd2a4de1ac0dc17bc75c9ec07"}, + {file = "shapely-2.0.4-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:790a168a808bd00ee42786b8ba883307c0e3684ebb292e0e20009588c426da47"}, + {file = "shapely-2.0.4-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:4310b5494271e18580d61022c0857eb85d30510d88606fa3b8314790df7f367d"}, + {file = "shapely-2.0.4-cp311-cp311-win32.whl", hash = "sha256:63f3a80daf4f867bd80f5c97fbe03314348ac1b3b70fb1c0ad255a69e3749879"}, + {file = "shapely-2.0.4-cp311-cp311-win_amd64.whl", hash = "sha256:c52ed79f683f721b69a10fb9e3d940a468203f5054927215586c5d49a072de8d"}, + {file = "shapely-2.0.4-cp312-cp312-macosx_10_9_universal2.whl", hash = "sha256:5bbd974193e2cc274312da16b189b38f5f128410f3377721cadb76b1e8ca5328"}, + {file = "shapely-2.0.4-cp312-cp312-macosx_10_9_x86_64.whl", hash = "sha256:41388321a73ba1a84edd90d86ecc8bfed55e6a1e51882eafb019f45895ec0f65"}, + {file = "shapely-2.0.4-cp312-cp312-macosx_11_0_arm64.whl", 
hash = "sha256:0776c92d584f72f1e584d2e43cfc5542c2f3dd19d53f70df0900fda643f4bae6"}, + {file = "shapely-2.0.4-cp312-cp312-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:c75c98380b1ede1cae9a252c6dc247e6279403fae38c77060a5e6186c95073ac"}, + {file = "shapely-2.0.4-cp312-cp312-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:c3e700abf4a37b7b8b90532fa6ed5c38a9bfc777098bc9fbae5ec8e618ac8f30"}, + {file = "shapely-2.0.4-cp312-cp312-win32.whl", hash = "sha256:4f2ab0faf8188b9f99e6a273b24b97662194160cc8ca17cf9d1fb6f18d7fb93f"}, + {file = "shapely-2.0.4-cp312-cp312-win_amd64.whl", hash = "sha256:03152442d311a5e85ac73b39680dd64a9892fa42bb08fd83b3bab4fe6999bfa0"}, + {file = "shapely-2.0.4-cp37-cp37m-macosx_10_9_x86_64.whl", hash = "sha256:994c244e004bc3cfbea96257b883c90a86e8cbd76e069718eb4c6b222a56f78b"}, + {file = "shapely-2.0.4-cp37-cp37m-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:05ffd6491e9e8958b742b0e2e7c346635033d0a5f1a0ea083547fcc854e5d5cf"}, + {file = "shapely-2.0.4-cp37-cp37m-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:2fbdc1140a7d08faa748256438291394967aa54b40009f54e8d9825e75ef6113"}, + {file = "shapely-2.0.4-cp37-cp37m-win32.whl", hash = "sha256:5af4cd0d8cf2912bd95f33586600cac9c4b7c5053a036422b97cfe4728d2eb53"}, + {file = "shapely-2.0.4-cp37-cp37m-win_amd64.whl", hash = "sha256:464157509ce4efa5ff285c646a38b49f8c5ef8d4b340f722685b09bb033c5ccf"}, + {file = "shapely-2.0.4-cp38-cp38-macosx_10_9_universal2.whl", hash = "sha256:489c19152ec1f0e5c5e525356bcbf7e532f311bff630c9b6bc2db6f04da6a8b9"}, + {file = "shapely-2.0.4-cp38-cp38-macosx_10_9_x86_64.whl", hash = "sha256:b79bbd648664aa6f44ef018474ff958b6b296fed5c2d42db60078de3cffbc8aa"}, + {file = "shapely-2.0.4-cp38-cp38-macosx_11_0_arm64.whl", hash = "sha256:674d7baf0015a6037d5758496d550fc1946f34bfc89c1bf247cabdc415d7747e"}, + {file = "shapely-2.0.4-cp38-cp38-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = 
"sha256:6cd4ccecc5ea5abd06deeaab52fcdba372f649728050c6143cc405ee0c166679"}, + {file = "shapely-2.0.4-cp38-cp38-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:fb5cdcbbe3080181498931b52a91a21a781a35dcb859da741c0345c6402bf00c"}, + {file = "shapely-2.0.4-cp38-cp38-win32.whl", hash = "sha256:55a38dcd1cee2f298d8c2ebc60fc7d39f3b4535684a1e9e2f39a80ae88b0cea7"}, + {file = "shapely-2.0.4-cp38-cp38-win_amd64.whl", hash = "sha256:ec555c9d0db12d7fd777ba3f8b75044c73e576c720a851667432fabb7057da6c"}, + {file = "shapely-2.0.4-cp39-cp39-macosx_10_9_universal2.whl", hash = "sha256:3f9103abd1678cb1b5f7e8e1af565a652e036844166c91ec031eeb25c5ca8af0"}, + {file = "shapely-2.0.4-cp39-cp39-macosx_10_9_x86_64.whl", hash = "sha256:263bcf0c24d7a57c80991e64ab57cba7a3906e31d2e21b455f493d4aab534aaa"}, + {file = "shapely-2.0.4-cp39-cp39-macosx_11_0_arm64.whl", hash = "sha256:ddf4a9bfaac643e62702ed662afc36f6abed2a88a21270e891038f9a19bc08fc"}, + {file = "shapely-2.0.4-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:485246fcdb93336105c29a5cfbff8a226949db37b7473c89caa26c9bae52a242"}, + {file = "shapely-2.0.4-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:8de4578e838a9409b5b134a18ee820730e507b2d21700c14b71a2b0757396acc"}, + {file = "shapely-2.0.4-cp39-cp39-win32.whl", hash = "sha256:9dab4c98acfb5fb85f5a20548b5c0abe9b163ad3525ee28822ffecb5c40e724c"}, + {file = "shapely-2.0.4-cp39-cp39-win_amd64.whl", hash = "sha256:31c19a668b5a1eadab82ff070b5a260478ac6ddad3a5b62295095174a8d26398"}, + {file = "shapely-2.0.4.tar.gz", hash = "sha256:5dc736127fac70009b8d309a0eeb74f3e08979e530cf7017f2f507ef62e6cfb8"}, +] + +[package.dependencies] +numpy = ">=1.14,<3" + +[package.extras] +docs = ["matplotlib", "numpydoc (==1.1.*)", "sphinx", "sphinx-book-theme", "sphinx-remove-toctrees"] +test = ["pytest", "pytest-cov"] + +[[package]] +name = "six" +version = "1.16.0" +description = "Python 2 and 3 compatibility utilities" +optional = false 
+python-versions = ">=2.7, !=3.0.*, !=3.1.*, !=3.2.*" +files = [ + {file = "six-1.16.0-py2.py3-none-any.whl", hash = "sha256:8abb2f1d86890a2dfb989f9a77cfcfd3e47c2a354b01111771326f8aa26e0254"}, + {file = "six-1.16.0.tar.gz", hash = "sha256:1e61c37477a1626458e36f7b1d82aa5c9b094fa4802892072e49de9c60c4c926"}, +] + +[[package]] +name = "sniffio" +version = "1.3.1" +description = "Sniff out which async library your code is running under" +optional = false +python-versions = ">=3.7" +files = [ + {file = "sniffio-1.3.1-py3-none-any.whl", hash = "sha256:2f6da418d1f1e0fddd844478f41680e794e6051915791a034ff65e5f100525a2"}, + {file = "sniffio-1.3.1.tar.gz", hash = "sha256:f4324edc670a0f49750a81b895f35c3adb843cca46f0530f79fc1babb23789dc"}, +] + +[[package]] +name = "soupsieve" +version = "2.5" +description = "A modern CSS selector implementation for Beautiful Soup." +optional = false +python-versions = ">=3.8" +files = [ + {file = "soupsieve-2.5-py3-none-any.whl", hash = "sha256:eaa337ff55a1579b6549dc679565eac1e3d000563bcb1c8ab0d0fefbc0c2cdc7"}, + {file = "soupsieve-2.5.tar.gz", hash = "sha256:5663d5a7b3bfaeee0bc4372e7fc48f9cff4940b3eec54a6451cc5299f1097690"}, +] + +[[package]] +name = "stack-data" +version = "0.6.3" +description = "Extract data from python stack frames and tracebacks for informative displays" +optional = false +python-versions = "*" +files = [ + {file = "stack_data-0.6.3-py3-none-any.whl", hash = "sha256:d5558e0c25a4cb0853cddad3d77da9891a08cb85dd9f9f91b9f8cd66e511e695"}, + {file = "stack_data-0.6.3.tar.gz", hash = "sha256:836a778de4fec4dcd1dcd89ed8abff8a221f58308462e1c4aa2a3cf30148f0b9"}, +] + +[package.dependencies] +asttokens = ">=2.1.0" +executing = ">=1.2.0" +pure-eval = "*" + +[package.extras] +tests = ["cython", "littleutils", "pygments", "pytest", "typeguard"] + +[[package]] +name = "terminado" +version = "0.18.1" +description = "Tornado websocket backend for the Xterm.js Javascript terminal emulator library." 
+optional = false +python-versions = ">=3.8" +files = [ + {file = "terminado-0.18.1-py3-none-any.whl", hash = "sha256:a4468e1b37bb318f8a86514f65814e1afc977cf29b3992a4500d9dd305dcceb0"}, + {file = "terminado-0.18.1.tar.gz", hash = "sha256:de09f2c4b85de4765f7714688fff57d3e75bad1f909b589fde880460c753fd2e"}, +] + +[package.dependencies] +ptyprocess = {version = "*", markers = "os_name != \"nt\""} +pywinpty = {version = ">=1.1.0", markers = "os_name == \"nt\""} +tornado = ">=6.1.0" + +[package.extras] +docs = ["myst-parser", "pydata-sphinx-theme", "sphinx"] +test = ["pre-commit", "pytest (>=7.0)", "pytest-timeout"] +typing = ["mypy (>=1.6,<2.0)", "traitlets (>=5.11.1)"] + +[[package]] +name = "tiktoken" +version = "0.7.0" +description = "tiktoken is a fast BPE tokeniser for use with OpenAI's models" +optional = false +python-versions = ">=3.8" +files = [ + {file = "tiktoken-0.7.0-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:485f3cc6aba7c6b6ce388ba634fbba656d9ee27f766216f45146beb4ac18b25f"}, + {file = "tiktoken-0.7.0-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:e54be9a2cd2f6d6ffa3517b064983fb695c9a9d8aa7d574d1ef3c3f931a99225"}, + {file = "tiktoken-0.7.0-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:79383a6e2c654c6040e5f8506f3750db9ddd71b550c724e673203b4f6b4b4590"}, + {file = "tiktoken-0.7.0-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:5d4511c52caacf3c4981d1ae2df85908bd31853f33d30b345c8b6830763f769c"}, + {file = "tiktoken-0.7.0-cp310-cp310-musllinux_1_2_aarch64.whl", hash = "sha256:13c94efacdd3de9aff824a788353aa5749c0faee1fbe3816df365ea450b82311"}, + {file = "tiktoken-0.7.0-cp310-cp310-musllinux_1_2_x86_64.whl", hash = "sha256:8e58c7eb29d2ab35a7a8929cbeea60216a4ccdf42efa8974d8e176d50c9a3df5"}, + {file = "tiktoken-0.7.0-cp310-cp310-win_amd64.whl", hash = "sha256:21a20c3bd1dd3e55b91c1331bf25f4af522c525e771691adbc9a69336fa7f702"}, + {file = "tiktoken-0.7.0-cp311-cp311-macosx_10_9_x86_64.whl", hash = 
"sha256:10c7674f81e6e350fcbed7c09a65bca9356eaab27fb2dac65a1e440f2bcfe30f"}, + {file = "tiktoken-0.7.0-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:084cec29713bc9d4189a937f8a35dbdfa785bd1235a34c1124fe2323821ee93f"}, + {file = "tiktoken-0.7.0-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:811229fde1652fedcca7c6dfe76724d0908775b353556d8a71ed74d866f73f7b"}, + {file = "tiktoken-0.7.0-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:86b6e7dc2e7ad1b3757e8a24597415bafcfb454cebf9a33a01f2e6ba2e663992"}, + {file = "tiktoken-0.7.0-cp311-cp311-musllinux_1_2_aarch64.whl", hash = "sha256:1063c5748be36344c7e18c7913c53e2cca116764c2080177e57d62c7ad4576d1"}, + {file = "tiktoken-0.7.0-cp311-cp311-musllinux_1_2_x86_64.whl", hash = "sha256:20295d21419bfcca092644f7e2f2138ff947a6eb8cfc732c09cc7d76988d4a89"}, + {file = "tiktoken-0.7.0-cp311-cp311-win_amd64.whl", hash = "sha256:959d993749b083acc57a317cbc643fb85c014d055b2119b739487288f4e5d1cb"}, + {file = "tiktoken-0.7.0-cp312-cp312-macosx_10_9_x86_64.whl", hash = "sha256:71c55d066388c55a9c00f61d2c456a6086673ab7dec22dd739c23f77195b1908"}, + {file = "tiktoken-0.7.0-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:09ed925bccaa8043e34c519fbb2f99110bd07c6fd67714793c21ac298e449410"}, + {file = "tiktoken-0.7.0-cp312-cp312-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:03c6c40ff1db0f48a7b4d2dafeae73a5607aacb472fa11f125e7baf9dce73704"}, + {file = "tiktoken-0.7.0-cp312-cp312-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:d20b5c6af30e621b4aca094ee61777a44118f52d886dbe4f02b70dfe05c15350"}, + {file = "tiktoken-0.7.0-cp312-cp312-musllinux_1_2_aarch64.whl", hash = "sha256:d427614c3e074004efa2f2411e16c826f9df427d3c70a54725cae860f09e4bf4"}, + {file = "tiktoken-0.7.0-cp312-cp312-musllinux_1_2_x86_64.whl", hash = "sha256:8c46d7af7b8c6987fac9b9f61041b452afe92eb087d29c9ce54951280f899a97"}, + {file = "tiktoken-0.7.0-cp312-cp312-win_amd64.whl", hash = 
"sha256:0bc603c30b9e371e7c4c7935aba02af5994a909fc3c0fe66e7004070858d3f8f"}, + {file = "tiktoken-0.7.0-cp38-cp38-macosx_10_9_x86_64.whl", hash = "sha256:2398fecd38c921bcd68418675a6d155fad5f5e14c2e92fcf5fe566fa5485a858"}, + {file = "tiktoken-0.7.0-cp38-cp38-macosx_11_0_arm64.whl", hash = "sha256:8f5f6afb52fb8a7ea1c811e435e4188f2bef81b5e0f7a8635cc79b0eef0193d6"}, + {file = "tiktoken-0.7.0-cp38-cp38-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:861f9ee616766d736be4147abac500732b505bf7013cfaf019b85892637f235e"}, + {file = "tiktoken-0.7.0-cp38-cp38-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:54031f95c6939f6b78122c0aa03a93273a96365103793a22e1793ee86da31685"}, + {file = "tiktoken-0.7.0-cp38-cp38-musllinux_1_2_aarch64.whl", hash = "sha256:fffdcb319b614cf14f04d02a52e26b1d1ae14a570f90e9b55461a72672f7b13d"}, + {file = "tiktoken-0.7.0-cp38-cp38-musllinux_1_2_x86_64.whl", hash = "sha256:c72baaeaefa03ff9ba9688624143c858d1f6b755bb85d456d59e529e17234769"}, + {file = "tiktoken-0.7.0-cp38-cp38-win_amd64.whl", hash = "sha256:131b8aeb043a8f112aad9f46011dced25d62629091e51d9dc1adbf4a1cc6aa98"}, + {file = "tiktoken-0.7.0-cp39-cp39-macosx_10_9_x86_64.whl", hash = "sha256:cabc6dc77460df44ec5b879e68692c63551ae4fae7460dd4ff17181df75f1db7"}, + {file = "tiktoken-0.7.0-cp39-cp39-macosx_11_0_arm64.whl", hash = "sha256:8d57f29171255f74c0aeacd0651e29aa47dff6f070cb9f35ebc14c82278f3b25"}, + {file = "tiktoken-0.7.0-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:2ee92776fdbb3efa02a83f968c19d4997a55c8e9ce7be821ceee04a1d1ee149c"}, + {file = "tiktoken-0.7.0-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:e215292e99cb41fbc96988ef62ea63bb0ce1e15f2c147a61acc319f8b4cbe5bf"}, + {file = "tiktoken-0.7.0-cp39-cp39-musllinux_1_2_aarch64.whl", hash = "sha256:8a81bac94769cab437dd3ab0b8a4bc4e0f9cf6835bcaa88de71f39af1791727a"}, + {file = "tiktoken-0.7.0-cp39-cp39-musllinux_1_2_x86_64.whl", hash = 
"sha256:d6d73ea93e91d5ca771256dfc9d1d29f5a554b83821a1dc0891987636e0ae226"}, + {file = "tiktoken-0.7.0-cp39-cp39-win_amd64.whl", hash = "sha256:2bcb28ddf79ffa424f171dfeef9a4daff61a94c631ca6813f43967cb263b83b9"}, + {file = "tiktoken-0.7.0.tar.gz", hash = "sha256:1077266e949c24e0291f6c350433c6f0971365ece2b173a23bc3b9f9defef6b6"}, +] + +[package.dependencies] +regex = ">=2022.1.18" +requests = ">=2.26.0" + +[package.extras] +blobfile = ["blobfile (>=2)"] + +[[package]] +name = "tinycss2" +version = "1.3.0" +description = "A tiny CSS parser" +optional = false +python-versions = ">=3.8" +files = [ + {file = "tinycss2-1.3.0-py3-none-any.whl", hash = "sha256:54a8dbdffb334d536851be0226030e9505965bb2f30f21a4a82c55fb2a80fae7"}, + {file = "tinycss2-1.3.0.tar.gz", hash = "sha256:152f9acabd296a8375fbca5b84c961ff95971fcfc32e79550c8df8e29118c54d"}, +] + +[package.dependencies] +webencodings = ">=0.4" + +[package.extras] +doc = ["sphinx", "sphinx_rtd_theme"] +test = ["pytest", "ruff"] + +[[package]] +name = "tokenizers" +version = "0.19.1" +description = "" +optional = false +python-versions = ">=3.7" +files = [ + {file = "tokenizers-0.19.1-cp310-cp310-macosx_10_12_x86_64.whl", hash = "sha256:952078130b3d101e05ecfc7fc3640282d74ed26bcf691400f872563fca15ac97"}, + {file = "tokenizers-0.19.1-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:82c8b8063de6c0468f08e82c4e198763e7b97aabfe573fd4cf7b33930ca4df77"}, + {file = "tokenizers-0.19.1-cp310-cp310-manylinux_2_12_i686.manylinux2010_i686.whl", hash = "sha256:f03727225feaf340ceeb7e00604825addef622d551cbd46b7b775ac834c1e1c4"}, + {file = "tokenizers-0.19.1-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:453e4422efdfc9c6b6bf2eae00d5e323f263fff62b29a8c9cd526c5003f3f642"}, + {file = "tokenizers-0.19.1-cp310-cp310-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:02e81bf089ebf0e7f4df34fa0207519f07e66d8491d963618252f2e0729e0b46"}, + {file = 
"tokenizers-0.19.1-cp310-cp310-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:b07c538ba956843833fee1190cf769c60dc62e1cf934ed50d77d5502194d63b1"}, + {file = "tokenizers-0.19.1-cp310-cp310-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:e28cab1582e0eec38b1f38c1c1fb2e56bce5dc180acb1724574fc5f47da2a4fe"}, + {file = "tokenizers-0.19.1-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:8b01afb7193d47439f091cd8f070a1ced347ad0f9144952a30a41836902fe09e"}, + {file = "tokenizers-0.19.1-cp310-cp310-musllinux_1_1_aarch64.whl", hash = "sha256:7fb297edec6c6841ab2e4e8f357209519188e4a59b557ea4fafcf4691d1b4c98"}, + {file = "tokenizers-0.19.1-cp310-cp310-musllinux_1_1_x86_64.whl", hash = "sha256:2e8a3dd055e515df7054378dc9d6fa8c8c34e1f32777fb9a01fea81496b3f9d3"}, + {file = "tokenizers-0.19.1-cp310-none-win32.whl", hash = "sha256:7ff898780a155ea053f5d934925f3902be2ed1f4d916461e1a93019cc7250837"}, + {file = "tokenizers-0.19.1-cp310-none-win_amd64.whl", hash = "sha256:bea6f9947e9419c2fda21ae6c32871e3d398cba549b93f4a65a2d369662d9403"}, + {file = "tokenizers-0.19.1-cp311-cp311-macosx_10_12_x86_64.whl", hash = "sha256:5c88d1481f1882c2e53e6bb06491e474e420d9ac7bdff172610c4f9ad3898059"}, + {file = "tokenizers-0.19.1-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:ddf672ed719b4ed82b51499100f5417d7d9f6fb05a65e232249268f35de5ed14"}, + {file = "tokenizers-0.19.1-cp311-cp311-manylinux_2_12_i686.manylinux2010_i686.whl", hash = "sha256:dadc509cc8a9fe460bd274c0e16ac4184d0958117cf026e0ea8b32b438171594"}, + {file = "tokenizers-0.19.1-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:dfedf31824ca4915b511b03441784ff640378191918264268e6923da48104acc"}, + {file = "tokenizers-0.19.1-cp311-cp311-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:ac11016d0a04aa6487b1513a3a36e7bee7eec0e5d30057c9c0408067345c48d2"}, + {file = "tokenizers-0.19.1-cp311-cp311-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = 
"sha256:76951121890fea8330d3a0df9a954b3f2a37e3ec20e5b0530e9a0044ca2e11fe"}, + {file = "tokenizers-0.19.1-cp311-cp311-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:b342d2ce8fc8d00f376af068e3274e2e8649562e3bc6ae4a67784ded6b99428d"}, + {file = "tokenizers-0.19.1-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:d16ff18907f4909dca9b076b9c2d899114dd6abceeb074eca0c93e2353f943aa"}, + {file = "tokenizers-0.19.1-cp311-cp311-musllinux_1_1_aarch64.whl", hash = "sha256:706a37cc5332f85f26efbe2bdc9ef8a9b372b77e4645331a405073e4b3a8c1c6"}, + {file = "tokenizers-0.19.1-cp311-cp311-musllinux_1_1_x86_64.whl", hash = "sha256:16baac68651701364b0289979ecec728546133e8e8fe38f66fe48ad07996b88b"}, + {file = "tokenizers-0.19.1-cp311-none-win32.whl", hash = "sha256:9ed240c56b4403e22b9584ee37d87b8bfa14865134e3e1c3fb4b2c42fafd3256"}, + {file = "tokenizers-0.19.1-cp311-none-win_amd64.whl", hash = "sha256:ad57d59341710b94a7d9dbea13f5c1e7d76fd8d9bcd944a7a6ab0b0da6e0cc66"}, + {file = "tokenizers-0.19.1-cp312-cp312-macosx_10_12_x86_64.whl", hash = "sha256:621d670e1b1c281a1c9698ed89451395d318802ff88d1fc1accff0867a06f153"}, + {file = "tokenizers-0.19.1-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:d924204a3dbe50b75630bd16f821ebda6a5f729928df30f582fb5aade90c818a"}, + {file = "tokenizers-0.19.1-cp312-cp312-manylinux_2_12_i686.manylinux2010_i686.whl", hash = "sha256:4f3fefdc0446b1a1e6d81cd4c07088ac015665d2e812f6dbba4a06267d1a2c95"}, + {file = "tokenizers-0.19.1-cp312-cp312-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:9620b78e0b2d52ef07b0d428323fb34e8ea1219c5eac98c2596311f20f1f9266"}, + {file = "tokenizers-0.19.1-cp312-cp312-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:04ce49e82d100594715ac1b2ce87d1a36e61891a91de774755f743babcd0dd52"}, + {file = "tokenizers-0.19.1-cp312-cp312-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:c5c2ff13d157afe413bf7e25789879dd463e5a4abfb529a2d8f8473d8042e28f"}, + {file = 
"tokenizers-0.19.1-cp312-cp312-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:3174c76efd9d08f836bfccaca7cfec3f4d1c0a4cf3acbc7236ad577cc423c840"}, + {file = "tokenizers-0.19.1-cp312-cp312-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:7c9d5b6c0e7a1e979bec10ff960fae925e947aab95619a6fdb4c1d8ff3708ce3"}, + {file = "tokenizers-0.19.1-cp312-cp312-musllinux_1_1_aarch64.whl", hash = "sha256:a179856d1caee06577220ebcfa332af046d576fb73454b8f4d4b0ba8324423ea"}, + {file = "tokenizers-0.19.1-cp312-cp312-musllinux_1_1_x86_64.whl", hash = "sha256:952b80dac1a6492170f8c2429bd11fcaa14377e097d12a1dbe0ef2fb2241e16c"}, + {file = "tokenizers-0.19.1-cp312-none-win32.whl", hash = "sha256:01d62812454c188306755c94755465505836fd616f75067abcae529c35edeb57"}, + {file = "tokenizers-0.19.1-cp312-none-win_amd64.whl", hash = "sha256:b70bfbe3a82d3e3fb2a5e9b22a39f8d1740c96c68b6ace0086b39074f08ab89a"}, + {file = "tokenizers-0.19.1-cp37-cp37m-macosx_10_12_x86_64.whl", hash = "sha256:bb9dfe7dae85bc6119d705a76dc068c062b8b575abe3595e3c6276480e67e3f1"}, + {file = "tokenizers-0.19.1-cp37-cp37m-macosx_11_0_arm64.whl", hash = "sha256:1f0360cbea28ea99944ac089c00de7b2e3e1c58f479fb8613b6d8d511ce98267"}, + {file = "tokenizers-0.19.1-cp37-cp37m-manylinux_2_12_i686.manylinux2010_i686.whl", hash = "sha256:71e3ec71f0e78780851fef28c2a9babe20270404c921b756d7c532d280349214"}, + {file = "tokenizers-0.19.1-cp37-cp37m-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:b82931fa619dbad979c0ee8e54dd5278acc418209cc897e42fac041f5366d626"}, + {file = "tokenizers-0.19.1-cp37-cp37m-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:e8ff5b90eabdcdaa19af697885f70fe0b714ce16709cf43d4952f1f85299e73a"}, + {file = "tokenizers-0.19.1-cp37-cp37m-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:e742d76ad84acbdb1a8e4694f915fe59ff6edc381c97d6dfdd054954e3478ad4"}, + {file = "tokenizers-0.19.1-cp37-cp37m-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = 
"sha256:d8c5d59d7b59885eab559d5bc082b2985555a54cda04dda4c65528d90ad252ad"}, + {file = "tokenizers-0.19.1-cp37-cp37m-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:6b2da5c32ed869bebd990c9420df49813709e953674c0722ff471a116d97b22d"}, + {file = "tokenizers-0.19.1-cp37-cp37m-musllinux_1_1_aarch64.whl", hash = "sha256:638e43936cc8b2cbb9f9d8dde0fe5e7e30766a3318d2342999ae27f68fdc9bd6"}, + {file = "tokenizers-0.19.1-cp37-cp37m-musllinux_1_1_x86_64.whl", hash = "sha256:78e769eb3b2c79687d9cb0f89ef77223e8e279b75c0a968e637ca7043a84463f"}, + {file = "tokenizers-0.19.1-cp37-none-win32.whl", hash = "sha256:72791f9bb1ca78e3ae525d4782e85272c63faaef9940d92142aa3eb79f3407a3"}, + {file = "tokenizers-0.19.1-cp37-none-win_amd64.whl", hash = "sha256:f3bbb7a0c5fcb692950b041ae11067ac54826204318922da754f908d95619fbc"}, + {file = "tokenizers-0.19.1-cp38-cp38-macosx_10_12_x86_64.whl", hash = "sha256:07f9295349bbbcedae8cefdbcfa7f686aa420be8aca5d4f7d1ae6016c128c0c5"}, + {file = "tokenizers-0.19.1-cp38-cp38-macosx_11_0_arm64.whl", hash = "sha256:10a707cc6c4b6b183ec5dbfc5c34f3064e18cf62b4a938cb41699e33a99e03c1"}, + {file = "tokenizers-0.19.1-cp38-cp38-manylinux_2_12_i686.manylinux2010_i686.whl", hash = "sha256:6309271f57b397aa0aff0cbbe632ca9d70430839ca3178bf0f06f825924eca22"}, + {file = "tokenizers-0.19.1-cp38-cp38-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:4ad23d37d68cf00d54af184586d79b84075ada495e7c5c0f601f051b162112dc"}, + {file = "tokenizers-0.19.1-cp38-cp38-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:427c4f0f3df9109314d4f75b8d1f65d9477033e67ffaec4bca53293d3aca286d"}, + {file = "tokenizers-0.19.1-cp38-cp38-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:e83a31c9cf181a0a3ef0abad2b5f6b43399faf5da7e696196ddd110d332519ee"}, + {file = "tokenizers-0.19.1-cp38-cp38-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:c27b99889bd58b7e301468c0838c5ed75e60c66df0d4db80c08f43462f82e0d3"}, + {file = 
"tokenizers-0.19.1-cp38-cp38-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:bac0b0eb952412b0b196ca7a40e7dce4ed6f6926489313414010f2e6b9ec2adf"}, + {file = "tokenizers-0.19.1-cp38-cp38-musllinux_1_1_aarch64.whl", hash = "sha256:8a6298bde623725ca31c9035a04bf2ef63208d266acd2bed8c2cb7d2b7d53ce6"}, + {file = "tokenizers-0.19.1-cp38-cp38-musllinux_1_1_x86_64.whl", hash = "sha256:08a44864e42fa6d7d76d7be4bec62c9982f6f6248b4aa42f7302aa01e0abfd26"}, + {file = "tokenizers-0.19.1-cp38-none-win32.whl", hash = "sha256:1de5bc8652252d9357a666e609cb1453d4f8e160eb1fb2830ee369dd658e8975"}, + {file = "tokenizers-0.19.1-cp38-none-win_amd64.whl", hash = "sha256:0bcce02bf1ad9882345b34d5bd25ed4949a480cf0e656bbd468f4d8986f7a3f1"}, + {file = "tokenizers-0.19.1-cp39-cp39-macosx_10_12_x86_64.whl", hash = "sha256:0b9394bd204842a2a1fd37fe29935353742be4a3460b6ccbaefa93f58a8df43d"}, + {file = "tokenizers-0.19.1-cp39-cp39-macosx_11_0_arm64.whl", hash = "sha256:4692ab92f91b87769d950ca14dbb61f8a9ef36a62f94bad6c82cc84a51f76f6a"}, + {file = "tokenizers-0.19.1-cp39-cp39-manylinux_2_12_i686.manylinux2010_i686.whl", hash = "sha256:6258c2ef6f06259f70a682491c78561d492e885adeaf9f64f5389f78aa49a051"}, + {file = "tokenizers-0.19.1-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:c85cf76561fbd01e0d9ea2d1cbe711a65400092bc52b5242b16cfd22e51f0c58"}, + {file = "tokenizers-0.19.1-cp39-cp39-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:670b802d4d82bbbb832ddb0d41df7015b3e549714c0e77f9bed3e74d42400fbe"}, + {file = "tokenizers-0.19.1-cp39-cp39-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:85aa3ab4b03d5e99fdd31660872249df5e855334b6c333e0bc13032ff4469c4a"}, + {file = "tokenizers-0.19.1-cp39-cp39-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:cbf001afbbed111a79ca47d75941e9e5361297a87d186cbfc11ed45e30b5daba"}, + {file = "tokenizers-0.19.1-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = 
"sha256:b4c89aa46c269e4e70c4d4f9d6bc644fcc39bb409cb2a81227923404dd6f5227"}, + {file = "tokenizers-0.19.1-cp39-cp39-musllinux_1_1_aarch64.whl", hash = "sha256:39c1ec76ea1027438fafe16ecb0fb84795e62e9d643444c1090179e63808c69d"}, + {file = "tokenizers-0.19.1-cp39-cp39-musllinux_1_1_x86_64.whl", hash = "sha256:c2a0d47a89b48d7daa241e004e71fb5a50533718897a4cd6235cb846d511a478"}, + {file = "tokenizers-0.19.1-cp39-none-win32.whl", hash = "sha256:61b7fe8886f2e104d4caf9218b157b106207e0f2a4905c9c7ac98890688aabeb"}, + {file = "tokenizers-0.19.1-cp39-none-win_amd64.whl", hash = "sha256:f97660f6c43efd3e0bfd3f2e3e5615bf215680bad6ee3d469df6454b8c6e8256"}, + {file = "tokenizers-0.19.1-pp310-pypy310_pp73-macosx_10_12_x86_64.whl", hash = "sha256:3b11853f17b54c2fe47742c56d8a33bf49ce31caf531e87ac0d7d13d327c9334"}, + {file = "tokenizers-0.19.1-pp310-pypy310_pp73-macosx_11_0_arm64.whl", hash = "sha256:d26194ef6c13302f446d39972aaa36a1dda6450bc8949f5eb4c27f51191375bd"}, + {file = "tokenizers-0.19.1-pp310-pypy310_pp73-manylinux_2_12_i686.manylinux2010_i686.whl", hash = "sha256:e8d1ed93beda54bbd6131a2cb363a576eac746d5c26ba5b7556bc6f964425594"}, + {file = "tokenizers-0.19.1-pp310-pypy310_pp73-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:ca407133536f19bdec44b3da117ef0d12e43f6d4b56ac4c765f37eca501c7bda"}, + {file = "tokenizers-0.19.1-pp310-pypy310_pp73-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:ce05fde79d2bc2e46ac08aacbc142bead21614d937aac950be88dc79f9db9022"}, + {file = "tokenizers-0.19.1-pp310-pypy310_pp73-musllinux_1_1_aarch64.whl", hash = "sha256:35583cd46d16f07c054efd18b5d46af4a2f070a2dd0a47914e66f3ff5efb2b1e"}, + {file = "tokenizers-0.19.1-pp310-pypy310_pp73-musllinux_1_1_x86_64.whl", hash = "sha256:43350270bfc16b06ad3f6f07eab21f089adb835544417afda0f83256a8bf8b75"}, + {file = "tokenizers-0.19.1-pp37-pypy37_pp73-macosx_10_12_x86_64.whl", hash = "sha256:b4399b59d1af5645bcee2072a463318114c39b8547437a7c2d6a186a1b5a0e2d"}, + {file = 
"tokenizers-0.19.1-pp37-pypy37_pp73-manylinux_2_12_i686.manylinux2010_i686.whl", hash = "sha256:6852c5b2a853b8b0ddc5993cd4f33bfffdca4fcc5d52f89dd4b8eada99379285"}, + {file = "tokenizers-0.19.1-pp37-pypy37_pp73-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:bcd266ae85c3d39df2f7e7d0e07f6c41a55e9a3123bb11f854412952deacd828"}, + {file = "tokenizers-0.19.1-pp37-pypy37_pp73-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:ecb2651956eea2aa0a2d099434134b1b68f1c31f9a5084d6d53f08ed43d45ff2"}, + {file = "tokenizers-0.19.1-pp37-pypy37_pp73-musllinux_1_1_aarch64.whl", hash = "sha256:b279ab506ec4445166ac476fb4d3cc383accde1ea152998509a94d82547c8e2a"}, + {file = "tokenizers-0.19.1-pp37-pypy37_pp73-musllinux_1_1_x86_64.whl", hash = "sha256:89183e55fb86e61d848ff83753f64cded119f5d6e1f553d14ffee3700d0a4a49"}, + {file = "tokenizers-0.19.1-pp38-pypy38_pp73-macosx_10_12_x86_64.whl", hash = "sha256:b2edbc75744235eea94d595a8b70fe279dd42f3296f76d5a86dde1d46e35f574"}, + {file = "tokenizers-0.19.1-pp38-pypy38_pp73-macosx_11_0_arm64.whl", hash = "sha256:0e64bfde9a723274e9a71630c3e9494ed7b4c0f76a1faacf7fe294cd26f7ae7c"}, + {file = "tokenizers-0.19.1-pp38-pypy38_pp73-manylinux_2_12_i686.manylinux2010_i686.whl", hash = "sha256:0b5ca92bfa717759c052e345770792d02d1f43b06f9e790ca0a1db62838816f3"}, + {file = "tokenizers-0.19.1-pp38-pypy38_pp73-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:6f8a20266e695ec9d7a946a019c1d5ca4eddb6613d4f466888eee04f16eedb85"}, + {file = "tokenizers-0.19.1-pp38-pypy38_pp73-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:63c38f45d8f2a2ec0f3a20073cccb335b9f99f73b3c69483cd52ebc75369d8a1"}, + {file = "tokenizers-0.19.1-pp38-pypy38_pp73-musllinux_1_1_aarch64.whl", hash = "sha256:dd26e3afe8a7b61422df3176e06664503d3f5973b94f45d5c45987e1cb711876"}, + {file = "tokenizers-0.19.1-pp38-pypy38_pp73-musllinux_1_1_x86_64.whl", hash = "sha256:eddd5783a4a6309ce23432353cdb36220e25cbb779bfa9122320666508b44b88"}, + {file = 
"tokenizers-0.19.1-pp39-pypy39_pp73-macosx_10_12_x86_64.whl", hash = "sha256:56ae39d4036b753994476a1b935584071093b55c7a72e3b8288e68c313ca26e7"}, + {file = "tokenizers-0.19.1-pp39-pypy39_pp73-macosx_11_0_arm64.whl", hash = "sha256:f9939ca7e58c2758c01b40324a59c034ce0cebad18e0d4563a9b1beab3018243"}, + {file = "tokenizers-0.19.1-pp39-pypy39_pp73-manylinux_2_12_i686.manylinux2010_i686.whl", hash = "sha256:6c330c0eb815d212893c67a032e9dc1b38a803eccb32f3e8172c19cc69fbb439"}, + {file = "tokenizers-0.19.1-pp39-pypy39_pp73-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:ec11802450a2487cdf0e634b750a04cbdc1c4d066b97d94ce7dd2cb51ebb325b"}, + {file = "tokenizers-0.19.1-pp39-pypy39_pp73-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:a2b718f316b596f36e1dae097a7d5b91fc5b85e90bf08b01ff139bd8953b25af"}, + {file = "tokenizers-0.19.1-pp39-pypy39_pp73-musllinux_1_1_aarch64.whl", hash = "sha256:ed69af290c2b65169f0ba9034d1dc39a5db9459b32f1dd8b5f3f32a3fcf06eab"}, + {file = "tokenizers-0.19.1-pp39-pypy39_pp73-musllinux_1_1_x86_64.whl", hash = "sha256:f8a9c828277133af13f3859d1b6bf1c3cb6e9e1637df0e45312e6b7c2e622b1f"}, + {file = "tokenizers-0.19.1.tar.gz", hash = "sha256:ee59e6680ed0fdbe6b724cf38bd70400a0c1dd623b07ac729087270caeac88e3"}, +] + +[package.dependencies] +huggingface-hub = ">=0.16.4,<1.0" + +[package.extras] +dev = ["tokenizers[testing]"] +docs = ["setuptools-rust", "sphinx", "sphinx-rtd-theme"] +testing = ["black (==22.3)", "datasets", "numpy", "pytest", "requests", "ruff"] + +[[package]] +name = "tomli" +version = "2.0.1" +description = "A lil' TOML parser" +optional = false +python-versions = ">=3.7" +files = [ + {file = "tomli-2.0.1-py3-none-any.whl", hash = "sha256:939de3e7a6161af0c887ef91b7d41a53e7c5a1ca976325f429cb46ea9bc30ecc"}, + {file = "tomli-2.0.1.tar.gz", hash = "sha256:de526c12914f0c550d15924c62d72abc48d6fe7364aa87328337a31007fe8a4f"}, +] + +[[package]] +name = "tornado" +version = "6.4" +description = "Tornado is a Python web 
framework and asynchronous networking library, originally developed at FriendFeed." +optional = false +python-versions = ">= 3.8" +files = [ + {file = "tornado-6.4-cp38-abi3-macosx_10_9_universal2.whl", hash = "sha256:02ccefc7d8211e5a7f9e8bc3f9e5b0ad6262ba2fbb683a6443ecc804e5224ce0"}, + {file = "tornado-6.4-cp38-abi3-macosx_10_9_x86_64.whl", hash = "sha256:27787de946a9cffd63ce5814c33f734c627a87072ec7eed71f7fc4417bb16263"}, + {file = "tornado-6.4-cp38-abi3-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:f7894c581ecdcf91666a0912f18ce5e757213999e183ebfc2c3fdbf4d5bd764e"}, + {file = "tornado-6.4-cp38-abi3-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:e43bc2e5370a6a8e413e1e1cd0c91bedc5bd62a74a532371042a18ef19e10579"}, + {file = "tornado-6.4-cp38-abi3-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:f0251554cdd50b4b44362f73ad5ba7126fc5b2c2895cc62b14a1c2d7ea32f212"}, + {file = "tornado-6.4-cp38-abi3-musllinux_1_1_aarch64.whl", hash = "sha256:fd03192e287fbd0899dd8f81c6fb9cbbc69194d2074b38f384cb6fa72b80e9c2"}, + {file = "tornado-6.4-cp38-abi3-musllinux_1_1_i686.whl", hash = "sha256:88b84956273fbd73420e6d4b8d5ccbe913c65d31351b4c004ae362eba06e1f78"}, + {file = "tornado-6.4-cp38-abi3-musllinux_1_1_x86_64.whl", hash = "sha256:71ddfc23a0e03ef2df1c1397d859868d158c8276a0603b96cf86892bff58149f"}, + {file = "tornado-6.4-cp38-abi3-win32.whl", hash = "sha256:6f8a6c77900f5ae93d8b4ae1196472d0ccc2775cc1dfdc9e7727889145c45052"}, + {file = "tornado-6.4-cp38-abi3-win_amd64.whl", hash = "sha256:10aeaa8006333433da48dec9fe417877f8bcc21f48dda8d661ae79da357b2a63"}, + {file = "tornado-6.4.tar.gz", hash = "sha256:72291fa6e6bc84e626589f1c29d90a5a6d593ef5ae68052ee2ef000dfd273dee"}, +] + +[[package]] +name = "tqdm" +version = "4.66.2" +description = "Fast, Extensible Progress Meter" +optional = false +python-versions = ">=3.7" +files = [ + {file = "tqdm-4.66.2-py3-none-any.whl", 
hash = "sha256:1ee4f8a893eb9bef51c6e35730cebf234d5d0b6bd112b0271e10ed7c24a02bd9"}, + {file = "tqdm-4.66.2.tar.gz", hash = "sha256:6cd52cdf0fef0e0f543299cfc96fec90d7b8a7e88745f411ec33eb44d5ed3531"}, +] + +[package.dependencies] +colorama = {version = "*", markers = "platform_system == \"Windows\""} + +[package.extras] +dev = ["pytest (>=6)", "pytest-cov", "pytest-timeout", "pytest-xdist"] +notebook = ["ipywidgets (>=6)"] +slack = ["slack-sdk"] +telegram = ["requests"] + +[[package]] +name = "traitlets" +version = "5.14.3" +description = "Traitlets Python configuration system" +optional = false +python-versions = ">=3.8" +files = [ + {file = "traitlets-5.14.3-py3-none-any.whl", hash = "sha256:b74e89e397b1ed28cc831db7aea759ba6640cb3de13090ca145426688ff1ac4f"}, + {file = "traitlets-5.14.3.tar.gz", hash = "sha256:9ed0579d3502c94b4b3732ac120375cda96f923114522847de4b3bb98b96b6b7"}, +] + +[package.extras] +docs = ["myst-parser", "pydata-sphinx-theme", "sphinx"] +test = ["argcomplete (>=3.0.3)", "mypy (>=1.7.0)", "pre-commit", "pytest (>=7.0,<8.2)", "pytest-mock", "pytest-mypy-testing"] + +[[package]] +name = "types-python-dateutil" +version = "2.9.0.20240316" +description = "Typing stubs for python-dateutil" +optional = false +python-versions = ">=3.8" +files = [ + {file = "types-python-dateutil-2.9.0.20240316.tar.gz", hash = "sha256:5d2f2e240b86905e40944dd787db6da9263f0deabef1076ddaed797351ec0202"}, + {file = "types_python_dateutil-2.9.0.20240316-py3-none-any.whl", hash = "sha256:6b8cb66d960771ce5ff974e9dd45e38facb81718cc1e208b10b1baccbfdbee3b"}, +] + +[[package]] +name = "typing-extensions" +version = "4.11.0" +description = "Backported and Experimental Type Hints for Python 3.8+" +optional = false +python-versions = ">=3.8" +files = [ + {file = "typing_extensions-4.11.0-py3-none-any.whl", hash = "sha256:c1f94d72897edaf4ce775bb7558d5b79d8126906a14ea5ed1635921406c0387a"}, + {file = "typing_extensions-4.11.0.tar.gz", hash = 
"sha256:83f085bd5ca59c80295fc2a82ab5dac679cbe02b9f33f7d83af68e241bea51b0"}, +] + +[[package]] +name = "uri-template" +version = "1.3.0" +description = "RFC 6570 URI Template Processor" +optional = false +python-versions = ">=3.7" +files = [ + {file = "uri-template-1.3.0.tar.gz", hash = "sha256:0e00f8eb65e18c7de20d595a14336e9f337ead580c70934141624b6d1ffdacc7"}, + {file = "uri_template-1.3.0-py3-none-any.whl", hash = "sha256:a44a133ea12d44a0c0f06d7d42a52d71282e77e2f937d8abd5655b8d56fc1363"}, +] + +[package.extras] +dev = ["flake8", "flake8-annotations", "flake8-bandit", "flake8-bugbear", "flake8-commas", "flake8-comprehensions", "flake8-continuation", "flake8-datetimez", "flake8-docstrings", "flake8-import-order", "flake8-literal", "flake8-modern-annotations", "flake8-noqa", "flake8-pyproject", "flake8-requirements", "flake8-typechecking-import", "flake8-use-fstring", "mypy", "pep8-naming", "types-PyYAML"] + +[[package]] +name = "uritemplate" +version = "4.1.1" +description = "Implementation of RFC 6570 URI Templates" +optional = false +python-versions = ">=3.6" +files = [ + {file = "uritemplate-4.1.1-py2.py3-none-any.whl", hash = "sha256:830c08b8d99bdd312ea4ead05994a38e8936266f84b9a7878232db50b044e02e"}, + {file = "uritemplate-4.1.1.tar.gz", hash = "sha256:4346edfc5c3b79f694bccd6d6099a322bbeb628dbf2cd86eea55a456ce5124f0"}, +] + +[[package]] +name = "urllib3" +version = "2.2.1" +description = "HTTP library with thread-safe connection pooling, file post, and more." 
+optional = false +python-versions = ">=3.8" +files = [ + {file = "urllib3-2.2.1-py3-none-any.whl", hash = "sha256:450b20ec296a467077128bff42b73080516e71b56ff59a60a02bef2232c4fa9d"}, + {file = "urllib3-2.2.1.tar.gz", hash = "sha256:d0570876c61ab9e520d776c38acbbb5b05a776d3f9ff98a5c8fd5162a444cf19"}, +] + +[package.extras] +brotli = ["brotli (>=1.0.9)", "brotlicffi (>=0.8.0)"] +h2 = ["h2 (>=4,<5)"] +socks = ["pysocks (>=1.5.6,!=1.5.7,<2.0)"] +zstd = ["zstandard (>=0.18.0)"] + +[[package]] +name = "wcwidth" +version = "0.2.13" +description = "Measures the displayed width of unicode strings in a terminal" +optional = false +python-versions = "*" +files = [ + {file = "wcwidth-0.2.13-py2.py3-none-any.whl", hash = "sha256:3da69048e4540d84af32131829ff948f1e022c1c6bdb8d6102117aac784f6859"}, + {file = "wcwidth-0.2.13.tar.gz", hash = "sha256:72ea0c06399eb286d978fdedb6923a9eb47e1c486ce63e9b4e64fc18303972b5"}, +] + +[[package]] +name = "webcolors" +version = "1.13" +description = "A library for working with the color formats defined by HTML and CSS." 
+optional = false +python-versions = ">=3.7" +files = [ + {file = "webcolors-1.13-py3-none-any.whl", hash = "sha256:29bc7e8752c0a1bd4a1f03c14d6e6a72e93d82193738fa860cbff59d0fcc11bf"}, + {file = "webcolors-1.13.tar.gz", hash = "sha256:c225b674c83fa923be93d235330ce0300373d02885cef23238813b0d5668304a"}, +] + +[package.extras] +docs = ["furo", "sphinx", "sphinx-copybutton", "sphinx-inline-tabs", "sphinx-notfound-page", "sphinxext-opengraph"] +tests = ["pytest", "pytest-cov"] + +[[package]] +name = "webencodings" +version = "0.5.1" +description = "Character encoding aliases for legacy web content" +optional = false +python-versions = "*" +files = [ + {file = "webencodings-0.5.1-py2.py3-none-any.whl", hash = "sha256:a0af1213f3c2226497a97e2b3aa01a7e4bee4f403f95be16fc9acd2947514a78"}, + {file = "webencodings-0.5.1.tar.gz", hash = "sha256:b36a1c245f2d304965eb4e0a82848379241dc04b865afcc4aab16748587e1923"}, +] + +[[package]] +name = "websocket-client" +version = "1.8.0" +description = "WebSocket client for Python with low level API options" +optional = false +python-versions = ">=3.8" +files = [ + {file = "websocket_client-1.8.0-py3-none-any.whl", hash = "sha256:17b44cc997f5c498e809b22cdf2d9c7a9e71c02c8cc2b6c56e7c2d1239bfa526"}, + {file = "websocket_client-1.8.0.tar.gz", hash = "sha256:3239df9f44da632f96012472805d40a23281a991027ce11d2f45a6f24ac4c3da"}, +] + +[package.extras] +docs = ["Sphinx (>=6.0)", "myst-parser (>=2.0.0)", "sphinx-rtd-theme (>=1.1.0)"] +optional = ["python-socks", "wsaccel"] +test = ["websockets"] + +[[package]] +name = "widgetsnbextension" +version = "4.0.10" +description = "Jupyter interactive widgets for Jupyter Notebook" +optional = false +python-versions = ">=3.7" +files = [ + {file = "widgetsnbextension-4.0.10-py3-none-any.whl", hash = "sha256:d37c3724ec32d8c48400a435ecfa7d3e259995201fbefa37163124a9fcb393cc"}, + {file = "widgetsnbextension-4.0.10.tar.gz", hash = "sha256:64196c5ff3b9a9183a8e699a4227fb0b7002f252c814098e66c4d1cd0644688f"}, +] + 
+[metadata] +lock-version = "2.0" +python-versions = "^3.10" +content-hash = "0ca2ed53f95e448bafc005b51038d4afd9b175552267cd2199be42a04b387e40" diff --git a/project/.env.template b/project/.env.template new file mode 100644 index 0000000000000000000000000000000000000000..67bff8ac66db7349b007ce439387f868c3bc6256 --- /dev/null +++ b/project/.env.template @@ -0,0 +1,24 @@ +LLM_PLATFORM=azure + +# OpenAIkey +OPENAI_API_KEY=sk-XXX + +# Azure OpenAIkey +AZURE_OPENAI_API_KEY=XXX # AZURE_OPENAI_AD_TOKEN=YYY +AZURE_OPENAI_ENDPOINT=https://neoai-pjname.openai.azure.com/ # (not-recomended): AZURE_API_BASE +OPENAI_API_VERSION=2024-02-01 # (not-recomended): AZURE_API_VERSION + +# ENGINE +# 1106 ---------------------------------------------------------- +AZURE_ENGINE_GPT35T_1106=xxx +AZURE_ENGINE_GPT4T_1106=xxx +# 0613 ---------------------------------------------------------- +AZURE_ENGINE_GPT35T_0613=neoai-pjname-gpt-35 # (not-recomended): AZURE_ENGINE_GPT35, AZURE_ENGINE_GPT35_0613 +AZURE_ENGINE_GPT35T_16K_0613=neoai-pjname-gpt-35-16k # (not-recomended): AZURE_ENGINE_GPT35_16k, AZURE_ENGINE_GPT35_16K_0613 +AZURE_ENGINE_GPT4_0613=neoai-pjname-gpt4 # (not-recomended): AZURE_ENGINE_GPT4 +AZURE_ENGINE_GPT4_32K_0613=neoai-pjname-gpt4-32k # (not recomended): AZURE_ENGINE_GPT4_32k + +# Anthropic +ANTHROPIC_API_KEY=xxx + +# GCP diff --git a/project/ex_module/ex_profile_extractor.py b/project/ex_module/ex_profile_extractor.py new file mode 100644 index 0000000000000000000000000000000000000000..7d7b09ad8edd4f0e3ae1564cf7f55e06871ebad8 --- /dev/null +++ b/project/ex_module/ex_profile_extractor.py @@ -0,0 +1,113 @@ +from typing import Any, Literal, TypedDict + +from neollm import MyLLM +from neollm.types import Functions +from neollm.utils.postprocess import json2dict +from neollm.utils.preprocess import optimize_token + + +class ProfileExtractorInputType(TypedDict): + text: str + + +class ProfileExtractorOuputType(TypedDict): + name: str + birth_year: int + domain: str + lang: 
Literal["ENG", "JPN"] + + +class ProfileExtractor(MyLLM): + """情報を抽出するMyLLM + + Notes: + inputs: + >>> {"text": str} + outpus: + >>> {"text_translated": str | None(うまくいかなかった場合)} + """ + + def _preprocess(self, inputs: ProfileExtractorInputType): + system_prompt = "より情報を抽出する。存在しない場合nullとする" + user_prompt = "\n" f"'''{inputs['text'].strip()}'''" + messages = [ + {"role": "system", "content": optimize_token(system_prompt)}, + {"role": "user", "content": optimize_token(user_prompt)}, + ] + return messages + + def _check_input( + self, inputs: ProfileExtractorInputType, messages + ) -> tuple[bool, ProfileExtractorOuputType | None]: + # 入力がない場合の処理 + if inputs["text"].strip() == "": + # requestしない, ルールベースのoutput + return False, {"name": "", "birth_year": -1, "domain": "", "lang": "JPN"} + # 入力が多い時に16kを使う + if self.llm.count_tokens(messages) >= 1600: + self.model = "gpt-3.5-turbo-16k" + else: + self.model = "gpt-3.5-turbo" + # requestする, _ + return True, None + + def _postprocess(self, response) -> ProfileExtractorOuputType: + if dict(response["choices"][0]["message"]).get("function_call"): + try: + extracted_data = json2dict(response["choices"][0]["message"]["function_call"]["arguments"]) + except Exception: + extracted_data = {} + else: + extracted_data = {} + + lang_ = extracted_data.get("lang") + if lang_ in {"ENG", "JPN"}: + lang = lang_ + else: + lang = "JPN" + + outputs: ProfileExtractorOuputType = { + "name": str(extracted_data.get("name") or ""), + "birth_year": int(extracted_data.get("birth_year") or -1), + "domain": str(extracted_data.get("domain") or ""), + "lang": lang, + } + return outputs + + # Function Callingを使う場合必要 + def _add_functions(self, inputs: Any) -> Functions | None: + functions: Functions = [ + { + "name": "extract_profile", + "description": "extract profile of a person", + "parameters": { + "type": "object", + "properties": { + "name": { + "type": "string", + "description": "名前", + }, + "domain": { + "type": "string", + "description": "研究ドメイン 
カンマ区切り", + }, + "birth_year": { + "type": "integer", + "description": "the year of the birth YYYY", + }, + "lang": { + "type": "string", + "description": "the language of the text", + "enum": ["ENG", "JPN"], + }, + }, + "required": ["name", "birth_year", "domain", "lang"], + }, + } + ] + return functions + + # 型定義のために必要 + def __call__(self, inputs: ProfileExtractorInputType) -> ProfileExtractorOuputType: + outputs: ProfileExtractorOuputType = super().__call__(inputs) + return outputs diff --git a/project/ex_module/ex_translated_profile_extractor.py b/project/ex_module/ex_translated_profile_extractor.py new file mode 100644 index 0000000000000000000000000000000000000000..4b285060399b9d2deb6598c130b13f9a3a0f96f2 --- /dev/null +++ b/project/ex_module/ex_translated_profile_extractor.py @@ -0,0 +1,49 @@ +from typing import TypedDict + +from ex_profile_extractor import ProfileExtractor, ProfileExtractorInputType +from ex_translator import Translator + +from neollm import MyL3M2 + + +class TranslatedProfileExtractorOutputType(TypedDict): + name_ENG: str + name_JPN: str + domain_ENG: str + domain_JPN: str + birth_year: int + + +class TranslatedProfileExtractor(MyL3M2): + def _link(self, inputs: ProfileExtractorInputType) -> TranslatedProfileExtractorOutputType: + # Profile Extract + profile_extractor = ProfileExtractor(parent=self, silent_list=["llm_settings", "inputs", "messages"]) + profile = profile_extractor(inputs) + # Translator name + translator_name = Translator(parent=self, silent_list=["llm_settings", "inputs", "messages"]) + translated_name = translator_name(inputs={"text": profile["name"]})["text_translated"] + # Translate domain + translator_domain = Translator(parent=self, silent_list=["llm_settings", "inputs", "messages"]) + translated_domain = translator_domain(inputs={"text": profile["domain"]})["text_translated"] + + outputs: TranslatedProfileExtractorOutputType = { + "name_ENG": profile["name"], + "name_JPN": profile["name"], + "domain_ENG": 
profile["domain"], + "domain_JPN": profile["domain"], + "birth_year": profile["birth_year"], + } + + if profile["lang"] == "ENG": + outputs["name_JPN"] = translated_name + outputs["domain_JPN"] = translated_domain + else: + outputs["name_ENG"] = translated_name + outputs["domain_ENG"] = translated_domain + + return outputs + + # 型定義のために必要 + def __call__(self, inputs: ProfileExtractorInputType) -> TranslatedProfileExtractorOutputType: + outputs: TranslatedProfileExtractorOutputType = super().__call__(inputs) + return outputs diff --git a/project/ex_module/ex_translator.py b/project/ex_module/ex_translator.py new file mode 100644 index 0000000000000000000000000000000000000000..38850f3820ca86c7cf405de8088519e403429869 --- /dev/null +++ b/project/ex_module/ex_translator.py @@ -0,0 +1,62 @@ +from typing import TypedDict + +from neollm import MyLLM +from neollm.types import Messages, OpenAIResponse +from neollm.utils.postprocess import strip_string +from neollm.utils.preprocess import optimize_token + + +class TranslatorInputType(TypedDict): + text: str + + +class TranslatorOuputType(TypedDict): + text_translated: str + + +class Translator(MyLLM): + """情報を抽出するMyLLM + + Notes: + inputs: + >>> {"text": str} + outpus: + >>> {"text_translated": str | None(うまくいかなかった場合)} + """ + + def _preprocess(self, inputs: TranslatorInputType) -> Messages: + system_prompt = ( + "You are a good translator. 
Translate Japanese into English or English into Japanese.\n" + "# output_format:\n\n{translated text in English or Japanese}" + ) + user_prompt = "\n" f"'''{inputs['text'].strip()}'''" + messages: Messages = [ + {"role": "system", "content": optimize_token(system_prompt)}, + {"role": "user", "content": optimize_token(user_prompt)}, + ] + return messages + + def _ruleprocess(self, inputs: TranslatorInputType) -> None | TranslatorOuputType: + # 入力がない場合の処理 + if inputs["text"].strip() == "": + return {"text_translated": ""} + return None + + def _update_settings(self) -> None: + # 入力が多い時に16kを使う + if self.messages is not None: + if self.llm.count_tokens(self.messages) >= 1600: + self.model = "gpt-3.5-turbo-16k" + else: + self.model = "gpt-3.5-turbo" + + def _postprocess(self, response: OpenAIResponse) -> TranslatorOuputType: + text_translated: str = str(response.choices[0].message["content"]) + text_translated = strip_string(text=text_translated, first_character=["", ""]) + outputs: TranslatorOuputType = {"text_translated": text_translated} + return outputs + + # 型定義のために必要 + def __call__(self, inputs: TranslatorInputType) -> TranslatorOuputType: + outputs: TranslatorOuputType = super().__call__(inputs) + return outputs diff --git a/project/neollm-tutorial.ipynb b/project/neollm-tutorial.ipynb new file mode 100644 index 0000000000000000000000000000000000000000..0c431594052dfb1bbcf3297b6e60bbb275061dfe --- /dev/null +++ b/project/neollm-tutorial.ipynb @@ -0,0 +1,713 @@ +{ + "cells": [ + { + "cell_type": "markdown", + "metadata": {}, + "source": [ + "# settings\n" + ] + }, + { + "cell_type": "markdown", + "metadata": {}, + "source": [ + "### 1. 
install neollm\n", + "\n", + "[Document インストール方法](https://www.notion.so/c760d96f1b4240e6880a32bee96bba35)\n" + ] + }, + { + "cell_type": "code", + "execution_count": null, + "metadata": {}, + "outputs": [], + "source": [ + "# githubのssh接続してね\n", + "# versionは適宜変更してね\n", + "%pip install git+https://github.com/neoAI-inc/neo-llm-module.git@v1.2.6\n" + ] + }, + { + "cell_type": "markdown", + "metadata": {}, + "source": [ + "### 2 環境変数の設定方法\n", + "\n", + "[Document env ファイルの作り方](https://www.notion.so/env-32ebb04105684a77bbc730c39865df34)\n" + ] + }, + { + "cell_type": "code", + "execution_count": 1, + "metadata": {}, + "outputs": [ + { + "name": "stdout", + "output_type": "stream", + "text": [ + "環境変数読み込み成功\n" + ] + } + ], + "source": [ + "from dotenv import load_dotenv\n", + "\n", + "env_path = \".env\" # .envのpath 適宜変更\n", + "if load_dotenv(env_path):\n", + " print(\"環境変数読み込み成功\")\n", + "else:\n", + " print(\"path違うよ〜\")" + ] + }, + { + "cell_type": "markdown", + "metadata": {}, + "source": [ + "# neoLLM  使い方\n" + ] + }, + { + "cell_type": "markdown", + "metadata": {}, + "source": [ + "neollm は、前処理・LLM のリクエスト・後処理を 1 つのクラスにした、Pytorch 的な記法で書ける neoAI の LLM 統一ライブラリ。\n", + "\n", + "大きく 2 種類のクラスがあり、MyLLM は 1 つのリクエスト、MyL3M2 は複数のリクエストを受け持つことができる。\n", + "\n", + "![概観図](../asset/external_view.png)\n" + ] + }, + { + "cell_type": "markdown", + "metadata": {}, + "source": [ + "##### モデルの定義\n" + ] + }, + { + "cell_type": "code", + "execution_count": 2, + "metadata": {}, + "outputs": [ + { + "name": "stdout", + "output_type": "stream", + "text": [ + "\u001b[43mWARNING: AZURE_API_BASEではなく、AZURE_OPENAI_ENDPOINTにしてね\u001b[0m\n", + "\u001b[43mWARNING: AZURE_API_VERSIONではなく、OPENAI_API_VERSIONにしてね\u001b[0m\n" + ] + } + ], + "source": [ + "from neollm import MyLLM\n", + "\n", + "# 例: 翻訳をするclass\n", + "# _preprocess, _postprocessを必ず書く\n", + "\n", + "\n", + "class Translator(MyLLM):\n", + " # _preprocessは、前処理をしてMessageを作る関数\n", + " def _preprocess(self, inputs: str):\n", + " messages = [\n", 
+ " {\"role\": \"system\", \"content\": \"英語を日本語に翻訳するAIです。\"},\n", + " {\"role\": \"user\", \"content\": inputs},\n", + " ]\n", + " return messages\n", + "\n", + " # _postprocessは、APIのResponseを後処理をして、欲しいものを返す関数\n", + " def _postprocess(self, response):\n", + " text_translated: str = str(response.choices[0].message.content)\n", + " return text_translated" + ] + }, + { + "cell_type": "markdown", + "metadata": {}, + "source": [ + "##### モデルの呼び出し\n" + ] + }, + { + "cell_type": "code", + "execution_count": 16, + "metadata": {}, + "outputs": [ + { + "name": "stdout", + "output_type": "stream", + "text": [ + "\u001b[41mPARENT\u001b[0m\n", + "MyLLM(Translator) ----------------------------------------------------------------------------------\n", + "\u001b[34m[inputs]\u001b[0m\n", + "\"Hello, We are neoAI.\"\n", + "\u001b[34m[messages]\u001b[0m\n", + " \u001b[32msystem\u001b[0m\n", + " 英語を日本語に翻訳するAIです。\n", + " \u001b[32muser\u001b[0m\n", + " Hello, We are neoAI.\n", + " \u001b[32massistant\u001b[0m\n", + " こんにちは、私たちはneoAIです。\n", + "\u001b[34m[outputs]\u001b[0m\n", + "\"こんにちは、私たちはneoAIです。\"\n", + "\u001b[34m[client_settings]\u001b[0m -\n", + "\u001b[34m[llm_settings]\u001b[0m {'platform': 'azure', 'temperature': 1, 'model': 'gpt-3.5-turbo-0613', 'engine': 'neoai-free-swd-gpt-35-0613'}\n", + "\u001b[34m[metadata]\u001b[0m 1.6s; 45(36+9)tokens; $6.8e-05; ¥0.0095\n", + "----------------------------------------------------------------------------------------------------\n", + "こんにちは、私たちはneoAIです。\n" + ] + } + ], + "source": [ + "# 初期化 (platformやmodelなど設定をしておく)\n", + "# 詳細: https://www.notion.so/neollm-MyLLM-581cd7562df9473b91c981d88469c452?pvs=4#ac5361a5e3fa46a48441fdd538858fee\n", + "translator = Translator(\n", + " platform=\"azure\", # azure or openai\n", + " model=\"gpt-3.5-turbo-0613\", # gpt-3.5-turbo-1106, gpt-4-turbo-1106\n", + " llm_settings={\"temperature\": 1}, # llmの設定 dictで渡す\n", + ")\n", + "\n", + "# 呼び出し\n", + "# 
preprocessでinputsとしたものを入力として、postprocessで処理したものを出力とする。\n", + "translated_text = translator(inputs=\"Hello, We are neoAI.\")\n", + "print(translated_text)" + ] + }, + { + "cell_type": "code", + "execution_count": 17, + "metadata": {}, + "outputs": [ + { + "name": "stdout", + "output_type": "stream", + "text": [ + "時間 1.5658628940582275\n", + "token数 TokenInfo(input=36, output=9, total=45)\n", + "token数合計 45\n", + "値段(USD) PriceInfo(input=5.4e-05, output=1.8e-05, total=6.75e-05)\n", + "値段数合計(USD) 6.75e-05\n" + ] + } + ], + "source": [ + "# 処理時間\n", + "print(\"時間\", translator.time)\n", + "# トークン数\n", + "print(\"token数\", translator.token)\n", + "print(\"token数合計\", translator.token.total)\n", + "# 値段の取得\n", + "print(\"値段(USD)\", translator.price)\n", + "print(\"値段数合計(USD)\", translator.price.total)" + ] + }, + { + "cell_type": "code", + "execution_count": 20, + "metadata": {}, + "outputs": [ + { + "name": "stdout", + "output_type": "stream", + "text": [ + "inputs Hello, We are neoAI.\n", + "messages [{'role': 'system', 'content': '英語を日本語に翻訳するAIです。'}, {'role': 'user', 'content': 'Hello, We are neoAI.'}]\n", + "response ChatCompletion(id='chatcmpl-8T5MkidV9bhqewdzcUwO1PioHOSHi', choices=[Choice(finish_reason='stop', index=0, message=ChatCompletionMessage(content='こんにちは、私たちはneoAIです。', role='assistant', function_call=None, tool_calls=None), content_filter_results={'hate': {'filtered': False, 'severity': 'safe'}, 'self_harm': {'filtered': False, 'severity': 'safe'}, 'sexual': {'filtered': False, 'severity': 'safe'}, 'violence': {'filtered': False, 'severity': 'safe'}})], created=1701942830, model='gpt-35-turbo', object='chat.completion', system_fingerprint=None, usage=CompletionUsage(completion_tokens=9, prompt_tokens=36, total_tokens=45), prompt_filter_results=[{'prompt_index': 0, 'content_filter_results': {'hate': {'filtered': False, 'severity': 'safe'}, 'self_harm': {'filtered': False, 'severity': 'safe'}, 'sexual': {'filtered': False, 'severity': 'safe'}, 'violence': 
{'filtered': False, 'severity': 'safe'}}}])\n", + "outputs こんにちは、私たちはneoAIです。\n", + "chat_history [{'role': 'system', 'content': '英語を日本語に翻訳するAIです。'}, {'role': 'user', 'content': 'Hello, We are neoAI.'}, {'content': 'こんにちは、私たちはneoAIです。', 'role': 'assistant'}]\n" + ] + } + ], + "source": [ + "# その他property\n", + "print(\"inputs\", translator.inputs)\n", + "print(\"messages\", translator.messages)\n", + "print(\"response\", translator.response)\n", + "print(\"outputs\", translator.outputs)\n", + "\n", + "print(\"chat_history\", translator.chat_history)" + ] + }, + { + "cell_type": "markdown", + "metadata": {}, + "source": [ + "# neoLLM  例\n" + ] + }, + { + "cell_type": "markdown", + "metadata": {}, + "source": [ + "### 1-1 MyLLM (ex. 翻訳)\n" + ] + }, + { + "cell_type": "code", + "execution_count": 21, + "metadata": {}, + "outputs": [], + "source": [ + "from neollm import MyLLM\n", + "from neollm.utils.preprocess import optimize_token\n", + "from neollm.utils.postprocess import strip_string\n", + "\n", + "\n", + "class Translator(MyLLM):\n", + " def _preprocess(self, inputs):\n", + " system_prompt = (\n", + " \"You are a good translator. 
Translate Japanese into English or English into Japanese.\\n\"\n", + " \"# output_format:\\n\\n{translated text in English or Japanese}\"\n", + " )\n", + " user_prompt = \"\\n\" f\"'''{inputs['text'].strip()}'''\"\n", + " messages = [\n", + " {\"role\": \"system\", \"content\": optimize_token(system_prompt)},\n", + " {\"role\": \"user\", \"content\": optimize_token(user_prompt)},\n", + " ]\n", + " return messages\n", + "\n", + " def _ruleprocess(self, inputs):\n", + " # 例外処理\n", + " if inputs[\"text\"].strip() == \"\":\n", + " return {\"text_translated\": \"\"}\n", + " # APIリクエストを送る場合はNone\n", + " return None\n", + "\n", + " def _update_settings(self):\n", + " # 入力によってAPIの設定を変更する\n", + "\n", + " # トークン数: self.llm.count_tokens(self.messsage)\n", + "\n", + " # モデル変更: self.model = \"gpt-3.5-turbo-16k\"\n", + "\n", + " # パラメータ変更: self.llm_settings = {\"temperature\": 0.2}\n", + "\n", + " # 入力が多い時に16kを使う(1106の場合はやらなくていい)\n", + " if self.messages is not None:\n", + " if self.llm.count_tokens(self.messages) >= 1600:\n", + " self.model = \"gpt-3.5-turbo-16k-0613\"\n", + " else:\n", + " self.model = \"gpt-3.5-turbo-0613\"\n", + "\n", + " def _postprocess(self, response):\n", + " text_translated: str = str(response.choices[0].message.content)\n", + " text_translated = strip_string(text=text_translated, first_character=[\"\", \"\"])\n", + " outputs = {\"text_translated\": text_translated}\n", + " return outputs" + ] + }, + { + "cell_type": "code", + "execution_count": 23, + "metadata": {}, + "outputs": [ + { + "name": "stdout", + "output_type": "stream", + "text": [ + "\u001b[41mPARENT\u001b[0m\n", + "MyLLM(Translator) ----------------------------------------------------------------------------------\n", + "\u001b[34m[inputs]\u001b[0m\n", + "{\n", + " \"text\": \"大規模LLMモデル\"\n", + "}\n", + "\u001b[34m[messages]\u001b[0m\n", + " \u001b[32msystem\u001b[0m\n", + " You are a good translator. 
Translate Japanese into English or English into Japanese.\n", + " # output_format:\n", + " \n", + " {translated text in English or Japanese}\n", + " \u001b[32muser\u001b[0m\n", + " \n", + " '''大規模LLMモデル'''\n", + " \u001b[32massistant\u001b[0m\n", + " \n", + " \"Large-Scale LLM Model\"\n", + "\u001b[34m[outputs]\u001b[0m\n", + "{\n", + " \"text_translated\": \"Large-Scale LLM Model\"\n", + "}\n", + "\u001b[34m[client_settings]\u001b[0m -\n", + "\u001b[34m[llm_settings]\u001b[0m {'platform': 'azure', 'temperature': 1, 'model': 'gpt-3.5-turbo-0613', 'engine': 'neoai-free-swd-gpt-35-0613'}\n", + "\u001b[34m[metadata]\u001b[0m 1.5s; 66(55+11)tokens; $9.9e-05; ¥0.014\n", + "----------------------------------------------------------------------------------------------------\n", + "{'text_translated': 'Large-Scale LLM Model'}\n" + ] + } + ], + "source": [ + "translator = Translator(\n", + " llm_settings={\"temperature\": 1}, # defaultは、{\"temperature\": 0}\n", + " model=\"gpt-3.5-turbo-0613\", # defaultは、DEFAULT_MODEL_NAME\n", + " platform=\"azure\", # defaultは、LLM_PLATFORM\n", + " verbose=True,\n", + " silent_list=[], # 表示しないもの\n", + ")\n", + "output_1 = translator(inputs={\"text\": \"大規模LLMモデル\"})\n", + "print(output_1)" + ] + }, + { + "cell_type": "code", + "execution_count": 25, + "metadata": {}, + "outputs": [ + { + "name": "stdout", + "output_type": "stream", + "text": [ + "\u001b[43mWARNING: model_nameに日付を指定してください\u001b[0m\n", + "model_name: gpt-3.5-turbo -> gpt-3.5-turbo-0613\n", + "\u001b[41mPARENT\u001b[0m\n", + "MyLLM(Translator) ----------------------------------------------------------------------------------\n", + "\u001b[34m[inputs]\u001b[0m\n", + "{\n", + " \"text\": \"Large LLM Model\"\n", + "}\n", + "\u001b[34m[messages]\u001b[0m\n", + " \u001b[32msystem\u001b[0m\n", + " You are a good translator. 
Translate Japanese into English or English into Japanese.\n", + " # output_format:\n", + " \n", + " {translated text in English or Japanese}\n", + " \u001b[32muser\u001b[0m\n", + " \n", + " '''Large LLM Model'''\n", + "\u001b[43mWARNING: model_nameに日付を指定してください\u001b[0m\n", + "model_name: gpt-3.5-turbo -> gpt-3.5-turbo-0613\n", + " \u001b[32massistant\u001b[0m\n", + " \n", + " 大きなLLMモデル\n", + "\u001b[34m[outputs]\u001b[0m\n", + "{\n", + " \"text_translated\": \"大きなLLMモデル\"\n", + "}\n", + "\u001b[34m[client_settings]\u001b[0m -\n", + "\u001b[34m[llm_settings]\u001b[0m {'platform': 'openai', 'temperature': 0, 'model': 'gpt-3.5-turbo-0613'}\n", + "\u001b[34m[metadata]\u001b[0m 0.9s; 61(49+12)tokens; $9.2e-05; ¥0.013\n", + "----------------------------------------------------------------------------------------------------\n", + "{'text_translated': '大きなLLMモデル'}\n" + ] + } + ], + "source": [ + "translator = Translator(\n", + " platform=\"openai\", # <- 変えてみる\n", + " verbose=True,\n", + ")\n", + "output_1 = translator(inputs={\"text\": \"Large LLM Model\"})\n", + "print(output_1)" + ] + }, + { + "cell_type": "code", + "execution_count": 26, + "metadata": {}, + "outputs": [ + { + "name": "stdout", + "output_type": "stream", + "text": [ + "\u001b[43mWARNING: model_nameに日付を指定してください\u001b[0m\n", + "model_name: gpt-3.5-turbo -> gpt-3.5-turbo-0613\n", + "\u001b[41mPARENT\u001b[0m\n", + "MyLLM(Translator) ----------------------------------------------------------------------------------\n", + "\u001b[34m[inputs]\u001b[0m\n", + "{\n", + " \"text\": \"\"\n", + "}\n", + "\u001b[34m[outputs]\u001b[0m\n", + "{\n", + " \"text_translated\": \"\"\n", + "}\n", + "\u001b[34m[client_settings]\u001b[0m -\n", + "\u001b[34m[llm_settings]\u001b[0m {'platform': 'azure', 'temperature': 0, 'model': 'gpt-3.5-turbo-0613', 'engine': 'neoai-free-swd-gpt-35-0613'}\n", + "\u001b[34m[metadata]\u001b[0m 0.0s; 0(0+0)tokens; $0; ¥0\n", + 
"----------------------------------------------------------------------------------------------------\n" + ] + }, + { + "data": { + "text/plain": [ + "{'text_translated': ''}" + ] + }, + "execution_count": 26, + "metadata": {}, + "output_type": "execute_result" + } + ], + "source": [ + "# ルールベースが起動\n", + "data = {\"text\": \"\"}\n", + "translator = Translator(verbose=True)\n", + "translator(data)" + ] + }, + { + "cell_type": "code", + "execution_count": 27, + "metadata": {}, + "outputs": [ + { + "name": "stdout", + "output_type": "stream", + "text": [ + "\u001b[43mWARNING: model_nameに日付を指定してください\u001b[0m\n", + "model_name: gpt-3.5-turbo -> gpt-3.5-turbo-0613\n", + "\u001b[41mPARENT\u001b[0m\n", + "MyLLM(Translator) ----------------------------------------------------------------------------------\n", + "\u001b[34m[inputs]\u001b[0m\n", + "{\n", + " \"text\": \"こんにちは!!\\nこんにちは?こんにちは?\"\n", + "}\n", + "\u001b[34m[messages]\u001b[0m\n", + " \u001b[32msystem\u001b[0m\n", + " You are a good translator. Translate Japanese into English or English into Japanese.\n", + " # output_format:\n", + " \n", + " {translated text in English or Japanese}\n", + " \u001b[32muser\u001b[0m\n", + " \n", + " '''こんにちは!!\n", + " こんにちは?こんにちは?'''\n", + "\u001b[43mWARNING: model_nameに日付を指定してください\u001b[0m\n", + "model_name: gpt-3.5-turbo -> gpt-3.5-turbo-0613\n", + " \u001b[32massistant\u001b[0m\n", + " \n", + " Hello!!\n", + " Hello? Hello?\n", + "\u001b[34m[outputs]\u001b[0m\n", + "{\n", + " \"text_translated\": \"Hello!!\\nHello? 
Hello?\"\n", + "}\n", + "\u001b[34m[client_settings]\u001b[0m -\n", + "\u001b[34m[llm_settings]\u001b[0m {'platform': 'azure', 'temperature': 0, 'model': 'gpt-3.5-turbo-0613', 'engine': 'neoai-free-swd-gpt-35-0613'}\n", + "\u001b[34m[metadata]\u001b[0m 1.4s; 60(51+9)tokens; $9e-05; ¥0.013\n", + "----------------------------------------------------------------------------------------------------\n" + ] + }, + { + "data": { + "text/plain": [ + "{'text_translated': 'Hello!!\\nHello? Hello?'}" + ] + }, + "execution_count": 27, + "metadata": {}, + "output_type": "execute_result" + } + ], + "source": [ + "data = {\"text\": \"こんにちは!!\\nこんにちは?こんにちは?\"}\n", + "translator = Translator(verbose=True)\n", + "translator(data)" + ] + }, + { + "cell_type": "markdown", + "metadata": {}, + "source": [ + "## 情報抽出\n" + ] + }, + { + "cell_type": "code", + "execution_count": 50, + "metadata": {}, + "outputs": [], + "source": [ + "from neollm import MyLLM\n", + "from neollm.utils.preprocess import optimize_token, dict2json\n", + "from neollm.utils.postprocess import json2dict\n", + "\n", + "\n", + "class Extractor(MyLLM):\n", + " def _preprocess(self, inputs):\n", + " system_prompt = \"から、にしたがって、情報を抽出しなさい。\"\n", + " output_format = {\"date\": \"yy-mm-dd形式 日付\", \"event\": \"起きたことを簡潔に。\"}\n", + " user_prompt = (\n", + " \"\\n\"\n", + " \"```\\n\"\n", + " f\"{inputs['info'].strip()}\\n\"\n", + " \"```\\n\"\n", + " \"\\n\"\n", + " \"\\n\"\n", + " \"```json\\n\"\n", + " f\"{dict2json(output_format)}\\n\"\n", + " \"```\"\n", + " )\n", + "\n", + " messages = [\n", + " {\"role\": \"system\", \"content\": optimize_token(system_prompt)},\n", + " {\"role\": \"user\", \"content\": optimize_token(user_prompt)},\n", + " ]\n", + " return messages\n", + "\n", + " def _ruleprocess(self, inputs):\n", + " # 例外処理\n", + " if inputs[\"info\"].strip() == \"\":\n", + " return {\"date\": \"\", \"event\": \"\"}\n", + " # APIリクエストを送る場合はNone\n", + " return None\n", + "\n", + " def _postprocess(self, response):\n", 
+ " return json2dict(response.choices[0].message.content)" + ] + }, + { + "cell_type": "code", + "execution_count": 51, + "metadata": {}, + "outputs": [ + { + "name": "stdout", + "output_type": "stream", + "text": [ + "\u001b[41mPARENT\u001b[0m\n", + "MyLLM(Extractor) -----------------------------------------------------------------------------------\n", + "\u001b[34m[inputs]\u001b[0m\n", + "{\n", + " \"info\": \"2021年6月13日に、neoAIのサービスが始まりました。\"\n", + "}\n", + "\u001b[34m[messages]\u001b[0m\n", + " \u001b[32msystem\u001b[0m\n", + " から、にしたがって、情報を抽出しなさい。\n", + " \u001b[32muser\u001b[0m\n", + " \n", + " ```\n", + " 2021年6月13日に、neoAIのサービスが始まりました。\n", + " ```\n", + " \n", + " \n", + " ```json\n", + " {\n", + " \"date\": \"yy-mm-dd形式 日付\",\n", + " \"event\": \"起きたことを簡潔に。\"\n", + " }\n", + " ```\n", + " \u001b[32massistant\u001b[0m\n", + " ```json\n", + " {\n", + " \"date\": \"2021-06-13\",\n", + " \"event\": \"neoAIのサービスが始まりました。\"\n", + " }\n", + " ```\n", + "\u001b[34m[outputs]\u001b[0m\n", + "{\n", + " \"date\": \"2021-06-13\",\n", + " \"event\": \"neoAIのサービスが始まりました。\"\n", + "}\n", + "\u001b[34m[client_settings]\u001b[0m -\n", + "\u001b[34m[llm_settings]\u001b[0m {'platform': 'azure', 'temperature': 0, 'model': 'gpt-3.5-turbo-0613', 'engine': 'neoai-free-swd-gpt-35-0613'}\n", + "\u001b[34m[metadata]\u001b[0m 1.6s; 143(106+37)tokens; $0.00021; ¥0.03\n", + "----------------------------------------------------------------------------------------------------\n" + ] + }, + { + "data": { + "text/plain": [ + "{'date': '2021-06-13', 'event': 'neoAIのサービスが始まりました。'}" + ] + }, + "execution_count": 51, + "metadata": {}, + "output_type": "execute_result" + } + ], + "source": [ + "extractor = Extractor(model=\"gpt-3.5-turbo-0613\")\n", + "\n", + "extractor(inputs={\"info\": \"2021年6月13日に、neoAIのサービスが始まりました。\"})" + ] + }, + { + "cell_type": "code", + "execution_count": 52, + "metadata": {}, + "outputs": [ + { + "name": "stdout", + "output_type": "stream", + "text": [ + 
"\u001b[41mPARENT\u001b[0m\n", + "MyLLM(Extractor) -----------------------------------------------------------------------------------\n", + "\u001b[34m[inputs]\u001b[0m\n", + "{\n", + " \"info\": \"1998年4月1日に、neoAI大学が設立されました。\"\n", + "}\n", + "\u001b[34m[messages]\u001b[0m\n", + " \u001b[32msystem\u001b[0m\n", + " から、にしたがって、情報を抽出しなさい。\n", + " \u001b[32muser\u001b[0m\n", + " \n", + " ```\n", + " 1998年4月1日に、neoAI大学が設立されました。\n", + " ```\n", + " \n", + " \n", + " ```json\n", + " {\n", + " \"date\": \"yy-mm-dd形式 日付\",\n", + " \"event\": \"起きたことを簡潔に。\"\n", + " }\n", + " ```\n", + " \u001b[32massistant\u001b[0m\n", + " \n", + " ```json\n", + " {\n", + " \"date\": \"1998-04-01\",\n", + " \"event\": \"neoAI大学の設立\"\n", + " }\n", + " ```\n", + "\u001b[34m[outputs]\u001b[0m\n", + "{\n", + " \"date\": \"1998-04-01\",\n", + " \"event\": \"neoAI大学の設立\"\n", + "}\n", + "\u001b[34m[client_settings]\u001b[0m -\n", + "\u001b[34m[llm_settings]\u001b[0m {'platform': 'azure', 'temperature': 0, 'model': 'gpt-3.5-turbo-0613', 'engine': 'neoai-free-swd-gpt-35-0613'}\n", + "\u001b[34m[metadata]\u001b[0m 1.6s; 139(104+35)tokens; $0.00021; ¥0.029\n", + "----------------------------------------------------------------------------------------------------\n" + ] + }, + { + "data": { + "text/plain": [ + "{'date': '1998-04-01', 'event': 'neoAI大学の設立'}" + ] + }, + "execution_count": 52, + "metadata": {}, + "output_type": "execute_result" + } + ], + "source": [ + "extractor = Extractor(model=\"gpt-3.5-turbo-0613\")\n", + "\n", + "extractor(inputs={\"info\": \"1998年4月1日に、neoAI大学が設立されました。\"})" + ] + } + ], + "metadata": { + "kernelspec": { + "display_name": "Python 3", + "language": "python", + "name": "python3" + }, + "language_info": { + "codemirror_mode": { + "name": "ipython", + "version": 3 + }, + "file_extension": ".py", + "mimetype": "text/x-python", + "name": "python", + "nbconvert_exporter": "python", + "pygments_lexer": "ipython3", + "version": "3.10.11" + }, + "orig_nbformat": 4 + }, + 
"nbformat": 4, + "nbformat_minor": 2 +} diff --git a/pyproject.toml b/pyproject.toml new file mode 100644 index 0000000000000000000000000000000000000000..b1f690006203b2902b11d9e0239af432701d18eb --- /dev/null +++ b/pyproject.toml @@ -0,0 +1,81 @@ +[tool.poetry] +name = "neollm" +version = "1.3.3" +description = "neo LLM Module for Python 3.10" +authors = ["KoshiroTerasawa "] +readme = "README.md" +packages = [{ include = "neollm" }] + +[tool.poetry.dependencies] +python = "^3.10" +python-dotenv = "^1.0.0" +pydantic = "^2.4.2" +openai = "^1.1.1" +google-cloud-aiplatform = "^1.48.0" +anthropic = { version = "^0.18.1", extras = ["vertex"] } +typing-extensions = "^4.8.0" +google-generativeai = "0.5.2" +tiktoken = "0.7.0" + + +[tool.poetry.group.dev.dependencies] +isort = "^5.12.0" +black = "24.3.0" +mypy = "^1.8.0" +pyproject-flake8 = "^6.1.0" +ipykernel = "^6.26.0" +jupyter = "^1.0.0" +jupyter-client = "^8.6.0" +pytest = "^8.1.1" + +[build-system] +requires = ["poetry-core"] +build-backend = "poetry.core.masonry.api" + +[tool.black] +line-length = 119 +exclude = ''' +/( + \venv + | \.git + | \.hg + | __pycache__ + | \.mypy_cache +)/ +''' + +[tool.isort] +profile = "black" +multi_line_output = 3 + +[tool.flake8] +max-line-length = 119 +extend-ignore = ["E203", "W503", "E501", "E704"] +exclude = [".venv", ".git", "__pycache__", ".mypy_cache", ".hg"] +max-complexity = 15 + +[tool.mypy] +ignore_missing_imports = true +# follow_imports = normal +disallow_any_unimported = false +disallow_any_expr = false # 式でのAny禁止 +disallow_any_decorated = false +disallow_any_explicit = false # 変数でAny禁止 +disallow_any_generics = true # ジェネリックで書かないの禁止 +disallow_subclassing_any = true # Anyのサブクラス禁止 + +disallow_untyped_calls = true # 型なし関数呼び出し禁止 `a: int = f()` +disallow_untyped_defs = true # 型なし関数定義禁止 `def f(a: int) -> int` +disallow_incomplete_defs = true # 一部の型定義を禁止 `def f(a: int, b)` +check_untyped_defs = true +disallow_untyped_decorators = true +no_implicit_optional = true + 
+warn_redundant_casts = true +warn_unused_ignores = true +warn_return_any = true +warn_unreachable = true # 辿りつかないコードの検出 +allow_redefinition = false # 変数の再定義を禁止 + +show_error_context = true +show_column_numbers = true diff --git a/test/llm/claude/test_claude_llm.py b/test/llm/claude/test_claude_llm.py new file mode 100644 index 0000000000000000000000000000000000000000..d8faa7fa7d117d7ce10ccc76b283201ce85f0e38 --- /dev/null +++ b/test/llm/claude/test_claude_llm.py @@ -0,0 +1,37 @@ +# from neollm.llm.gpt.azure_llm import ( +# AzureGPT4_0613, +# AzureGPT4T_0125, +# AzureGPT4T_1106, +# AzureGPT4T_20240409, +# AzureGPT4VT_1106, +# AzureGPT35FT, +# AzureGPT35T16k_0613, +# AzureGPT35T_0125, +# AzureGPT35T_0613, +# AzureGPT35T_1106, +# AzureGPT432k_0613, +# ) +# from neollm.types.info import APIPricing + + +# def test_check_price() -> None: +# # https://azure.microsoft.com/ja-jp/pricing/details/cognitive-services/openai-service/ + +# # これからのモデル -------------------------------------------------------- +# assert AzureGPT4T_20240409.dollar_per_ktoken == APIPricing(input=0.01, output=0.03) +# # Updated -------------------------------------------------------- +# # GPT3.5T +# assert AzureGPT35T_0125.dollar_per_ktoken == APIPricing(input=0.0005, output=0.0015) +# # GPT4 +# assert AzureGPT4T_0125.dollar_per_ktoken == APIPricing(input=0.01, output=0.03) +# assert AzureGPT4VT_1106.dollar_per_ktoken == APIPricing(input=0.01, output=0.03) +# assert AzureGPT4T_1106.dollar_per_ktoken == APIPricing(input=0.01, output=0.03) +# assert AzureGPT4_0613.dollar_per_ktoken == APIPricing(input=0.03, output=0.06) +# assert AzureGPT432k_0613.dollar_per_ktoken == APIPricing(input=0.06, output=0.12) +# # FT +# assert AzureGPT35FT.dollar_per_ktoken == APIPricing(input=0.0005, output=0.0015) +# # Legacy --------------------------------------------------------- +# # AzureGPT35T_0301 なし +# assert AzureGPT35T_0613.dollar_per_ktoken == APIPricing(input=0.0015, output=0.002) +# assert 
AzureGPT35T16k_0613.dollar_per_ktoken == APIPricing(input=0.003, output=0.004) +# assert AzureGPT35T_1106.dollar_per_ktoken == APIPricing(input=0.001, output=0.002) diff --git a/test/llm/gpt/test_azure_llm.py b/test/llm/gpt/test_azure_llm.py new file mode 100644 index 0000000000000000000000000000000000000000..5517a6157812aff7f2aae3d88f99ec7cb259f9f1 --- /dev/null +++ b/test/llm/gpt/test_azure_llm.py @@ -0,0 +1,92 @@ +from neollm.llm.gpt.azure_llm import ( + AzureGPT4_0613, + AzureGPT4O_20240513, + AzureGPT4T_0125, + AzureGPT4T_1106, + AzureGPT4T_20240409, + AzureGPT4VT_1106, + AzureGPT35FT, + AzureGPT35T16k_0613, + AzureGPT35T_0125, + AzureGPT35T_0613, + AzureGPT35T_1106, + AzureGPT432k_0613, + get_azure_llm, +) +from neollm.types.info import APIPricing + + +def test_get_azure_llm() -> None: + + # no date + assert get_azure_llm("gpt-3.5-turbo", {}).__class__ == AzureGPT35T_0613 + assert get_azure_llm("gpt-35-turbo", {}).__class__ == AzureGPT35T_0613 + assert get_azure_llm("gpt-3.5-turbo-16k", {}).__class__ == AzureGPT35T16k_0613 + assert get_azure_llm("gpt-35-turbo-16k", {}).__class__ == AzureGPT35T16k_0613 + assert get_azure_llm("gpt-4", {}).__class__ == AzureGPT4_0613 + assert get_azure_llm("gpt-4-32k", {}).__class__ == AzureGPT432k_0613 + assert get_azure_llm("gpt-4-turbo", {}).__class__ == AzureGPT4T_1106 + assert get_azure_llm("gpt-4v-turbo", {}).__class__ == AzureGPT4VT_1106 + assert get_azure_llm("gpt-4o", {}).__class__ == AzureGPT4O_20240513 + # with date + assert get_azure_llm("gpt-4o-2024-05-13", {}).__class__ == AzureGPT4O_20240513 + assert get_azure_llm("gpt-4-turbo-2024-04-09", {}).__class__ == AzureGPT4T_20240409 + assert get_azure_llm("gpt-3.5-turbo-0125", {}).__class__ == AzureGPT35T_0125 + assert get_azure_llm("gpt-35-turbo-0125", {}).__class__ == AzureGPT35T_0125 + assert get_azure_llm("gpt-4-turbo-0125", {}).__class__ == AzureGPT4T_0125 + assert get_azure_llm("gpt-3.5-turbo-1106", {}).__class__ == AzureGPT35T_1106 + assert 
get_azure_llm("gpt-35-turbo-1106", {}).__class__ == AzureGPT35T_1106 + assert get_azure_llm("gpt-4-turbo-1106", {}).__class__ == AzureGPT4T_1106 + assert get_azure_llm("gpt-4v-turbo-1106", {}).__class__ == AzureGPT4VT_1106 + assert get_azure_llm("gpt-3.5-turbo-0613", {}).__class__ == AzureGPT35T_0613 + assert get_azure_llm("gpt-35-turbo-0613", {}).__class__ == AzureGPT35T_0613 + assert get_azure_llm("gpt-3.5-turbo-16k-0613", {}).__class__ == AzureGPT35T16k_0613 + assert get_azure_llm("gpt-35-turbo-16k-0613", {}).__class__ == AzureGPT35T16k_0613 + assert get_azure_llm("gpt-4-0613", {}).__class__ == AzureGPT4_0613 + assert get_azure_llm("gpt-4-32k-0613", {}).__class__ == AzureGPT432k_0613 + # ft + assert get_azure_llm("ft:gpt-3.5-turbo-1106-XXXX", {}).__class__ == AzureGPT35FT + + +def test_check_price() -> None: + # https://azure.microsoft.com/ja-jp/pricing/details/cognitive-services/openai-service/ + + # これからのモデル -------------------------------------------------------- + assert AzureGPT4T_20240409.dollar_per_ktoken == APIPricing(input=0.01, output=0.03) + # Updated -------------------------------------------------------- + # GPT3.5T + assert AzureGPT35T_0125.dollar_per_ktoken == APIPricing(input=0.0005, output=0.0015) + # GPT4 + assert AzureGPT4O_20240513.dollar_per_ktoken == APIPricing(input=0.005, output=0.015) + assert AzureGPT4T_0125.dollar_per_ktoken == APIPricing(input=0.01, output=0.03) + assert AzureGPT4VT_1106.dollar_per_ktoken == APIPricing(input=0.01, output=0.03) + assert AzureGPT4T_1106.dollar_per_ktoken == APIPricing(input=0.01, output=0.03) + assert AzureGPT4_0613.dollar_per_ktoken == APIPricing(input=0.03, output=0.06) + assert AzureGPT432k_0613.dollar_per_ktoken == APIPricing(input=0.06, output=0.12) + # FT + assert AzureGPT35FT.dollar_per_ktoken == APIPricing(input=0.0005, output=0.0015) + # Legacy --------------------------------------------------------- + # AzureGPT35T_0301 なし + assert AzureGPT35T_0613.dollar_per_ktoken == 
APIPricing(input=0.0015, output=0.002) + assert AzureGPT35T16k_0613.dollar_per_ktoken == APIPricing(input=0.003, output=0.004) + assert AzureGPT35T_1106.dollar_per_ktoken == APIPricing(input=0.001, output=0.002) + + +def test_check_context_window() -> None: + # https://learn.microsoft.com/ja-jp/azure/ai-services/openai/concepts/models#gpt-4-and-gpt-4-turbo-preview-models + assert AzureGPT4T_20240409.context_window == 128_000 + + assert AzureGPT4T_0125.context_window == 128_000 + assert AzureGPT35T_0125.context_window == 16_385 + + assert AzureGPT4O_20240513.context_window == 128_000 + assert AzureGPT4T_1106.context_window == 128_000 + assert AzureGPT4VT_1106.context_window == 128_000 + assert AzureGPT35T_1106.context_window == 16_385 + + assert AzureGPT35T_0613.context_window == 4_096 + assert AzureGPT4_0613.context_window == 8_192 + assert AzureGPT35T16k_0613.context_window == 16_385 + assert AzureGPT432k_0613.context_window == 32_768 + + assert AzureGPT35FT.context_window == 4_096 diff --git a/test/llm/gpt/test_openai_llm.py b/test/llm/gpt/test_openai_llm.py new file mode 100644 index 0000000000000000000000000000000000000000..d8faa7fa7d117d7ce10ccc76b283201ce85f0e38 --- /dev/null +++ b/test/llm/gpt/test_openai_llm.py @@ -0,0 +1,37 @@ +# from neollm.llm.gpt.azure_llm import ( +# AzureGPT4_0613, +# AzureGPT4T_0125, +# AzureGPT4T_1106, +# AzureGPT4T_20240409, +# AzureGPT4VT_1106, +# AzureGPT35FT, +# AzureGPT35T16k_0613, +# AzureGPT35T_0125, +# AzureGPT35T_0613, +# AzureGPT35T_1106, +# AzureGPT432k_0613, +# ) +# from neollm.types.info import APIPricing + + +# def test_check_price() -> None: +# # https://azure.microsoft.com/ja-jp/pricing/details/cognitive-services/openai-service/ + +# # これからのモデル -------------------------------------------------------- +# assert AzureGPT4T_20240409.dollar_per_ktoken == APIPricing(input=0.01, output=0.03) +# # Updated -------------------------------------------------------- +# # GPT3.5T +# assert AzureGPT35T_0125.dollar_per_ktoken == 
APIPricing(input=0.0005, output=0.0015) +# # GPT4 +# assert AzureGPT4T_0125.dollar_per_ktoken == APIPricing(input=0.01, output=0.03) +# assert AzureGPT4VT_1106.dollar_per_ktoken == APIPricing(input=0.01, output=0.03) +# assert AzureGPT4T_1106.dollar_per_ktoken == APIPricing(input=0.01, output=0.03) +# assert AzureGPT4_0613.dollar_per_ktoken == APIPricing(input=0.03, output=0.06) +# assert AzureGPT432k_0613.dollar_per_ktoken == APIPricing(input=0.06, output=0.12) +# # FT +# assert AzureGPT35FT.dollar_per_ktoken == APIPricing(input=0.0005, output=0.0015) +# # Legacy --------------------------------------------------------- +# # AzureGPT35T_0301 なし +# assert AzureGPT35T_0613.dollar_per_ktoken == APIPricing(input=0.0015, output=0.002) +# assert AzureGPT35T16k_0613.dollar_per_ktoken == APIPricing(input=0.003, output=0.004) +# assert AzureGPT35T_1106.dollar_per_ktoken == APIPricing(input=0.001, output=0.002) diff --git a/test/llm/platform.py b/test/llm/platform.py new file mode 100644 index 0000000000000000000000000000000000000000..3024a1a79d6776d028d59563c6ab4cf0a11900ed --- /dev/null +++ b/test/llm/platform.py @@ -0,0 +1,32 @@ +import pytest + +from neollm.llm.platform import Platform + + +class TestPlatform: + def test_str(self) -> None: + assert Platform.AZURE == "azure" # type: ignore + assert Platform.OPENAI == "openai" # type: ignore + assert Platform.ANTHROPIC == "anthropic" # type: ignore + assert Platform.GCP == "gcp" # type: ignore + + def test_init(self) -> None: + assert Platform("azure") == Platform.AZURE + assert Platform("openai") == Platform.OPENAI + assert Platform("anthropic") == Platform.ANTHROPIC + assert Platform("gcp") == Platform.GCP + + assert Platform("Azure ") == Platform.AZURE + assert Platform(" OpenAI") == Platform.OPENAI + assert Platform("Anthropic ") == Platform.ANTHROPIC + assert Platform("GcP") == Platform.GCP + + def test_from_string(self) -> None: + assert Platform.from_string("azure") == Platform.AZURE + assert 
Platform.from_string("openai") == Platform.OPENAI + assert Platform.from_string("anthropic") == Platform.ANTHROPIC + assert Platform.from_string("gcp") == Platform.GCP + + def test_from_string_error(self) -> None: + with pytest.raises(ValueError): + Platform.from_string("error") diff --git a/test/myllm/test_neollm.py b/test/myllm/test_neollm.py new file mode 100644 index 0000000000000000000000000000000000000000..5057278f91b03fc708015691fb887f95cf48833d --- /dev/null +++ b/test/myllm/test_neollm.py @@ -0,0 +1,116 @@ +import pytest # noqa F401 +from dotenv import load_dotenv + +from neollm import MyLLM +from neollm.types import Messages, Response +from neollm.utils.inference import execute_parallel + +env_file_path = "project/.env" +assert load_dotenv(env_file_path, override=True) + + +class SampleMyLLM(MyLLM[str, str]): + def _preprocess(self, inputs: str) -> Messages: + messages: Messages = [ + {"role": "system", "content": ("1+1={int}")}, + {"role": "user", "content": str(inputs) + "\n{int}"}, + ] + return messages + + def _postprocess(self, response: Response) -> str: + return response.choices[0].message.content or "" + + +class TestMyLLM: + def call_myllm(self, model: str, platform: str) -> None: + myllm = SampleMyLLM(model=model, platform=platform, llm_settings={"max_tokens": 1}) + myllm("1+1") + + def call_stream_myllm(self, model: str, platform: str) -> None: + myllm = SampleMyLLM(model=model, platform=platform) + myllm.call_stream("1+1") + + def test_azure(self) -> None: + kwargs = [ + {"model": model, "platform": "azure"} + for model in [ + # "gpt-4o-2024-05-13", # 2024/05/15時点、AOAIで使用不可(リージョン不明) + # "gpt-4-turbo-2024-04-09", # スウェーデンにない + # "gpt-3.5-turbo-0125",# スウェーデンにない + # "gpt-4-turbo-0125",# スウェーデンにない + "gpt-3.5-turbo-1106", + "gpt-4-turbo-1106", + # "gpt-4v-turbo-1106", # スウェーデンにない + "gpt-3.5-turbo-0613", + "gpt-3.5-turbo-16k-0613", + "gpt-4-0613", + "gpt-4-32k-0613", + # w/o date + "gpt-4-turbo", + "gpt-3.5-turbo", + # "gpt-4v-turbo", # スウェーデンにない + 
"gpt-3.5-turbo-16k", + "gpt-4", + "gpt-4-32k", + ] + ] + execute_parallel(self.call_myllm, kwargs, max_workers=10) + execute_parallel(self.call_stream_myllm, kwargs, max_workers=10) + + def test_openai(self) -> None: + kwargs = [ + {"model": model, "platform": "openai"} + for model in [ + "gpt-4o-2024-05-13", + "gpt-4-turbo-2024-04-09", + "gpt-3.5-turbo-0125", + "gpt-4-turbo-0125", + "gpt-3.5-turbo-1106", + "gpt-4-turbo-1106", + "gpt-4v-turbo-1106", + "gpt-3.5-turbo-0613", + "gpt-3.5-turbo-16k-0613", + "gpt-4-0613", + # "gpt-4-32k-0613", # 使えなくなってる + # w/o date + "gpt-4-turbo", + "gpt-3.5-turbo", + "gpt-4v-turbo", + "gpt-3.5-turbo-16k", + "gpt-4", + # "gpt-4-32k", # 使えなくなってる + ] + ] + execute_parallel(self.call_myllm, kwargs, max_workers=10) + execute_parallel(self.call_stream_myllm, kwargs, max_workers=10) + + def test_anthropic(self) -> None: + kwargs = [ + {"model": model, "platform": "anthropic"} + for model in [ + "claude-3-opus", + "claude-3-sonnet", + "claude-3-haiku", + "claude-3-opus-20240229", + "claude-3-sonnet-20240229", + "claude-3-haiku-20240307", + ] + ] + execute_parallel(self.call_myllm, kwargs, max_workers=10) + execute_parallel(self.call_stream_myllm, kwargs, max_workers=10) + + # TODO! 
google + def _test_gcp(self) -> None: + kwargs = [ + {"model": model, "platform": "gcp"} + for model in [ + "claude-3-opus", + "claude-3-sonnet", + "claude-3-haiku", + "claude-3-opus@20240229", + "claude-3-sonnet@20240229", + "claude-3-haiku@20240307", + ] + ] + execute_parallel(self.call_myllm, kwargs, max_workers=10) + execute_parallel(self.call_stream_myllm, kwargs, max_workers=10) diff --git a/test/types/test_info.py b/test/types/test_info.py new file mode 100644 index 0000000000000000000000000000000000000000..abaaca523e6ff183436a73a9ac4dc66b9676ed25 --- /dev/null +++ b/test/types/test_info.py @@ -0,0 +1,82 @@ +import pytest # noqa F401 + +from neollm.types import APIPricing, PriceInfo, TimeInfo, TokenInfo + + +class TestTimeInfo: + def test_init_default(self): + time_info = TimeInfo() + assert time_info.total == 0.0 + assert time_info.preprocess == 0.0 + assert time_info.main == 0.0 + assert time_info.postprocess == 0.0 + + def test_init(self): + time_info = TimeInfo(total=1.0, preprocess=0.5, main=0.3, postprocess=0.2) + assert time_info.total == 1.0 + assert time_info.preprocess == 0.5 + assert time_info.main == 0.3 + assert time_info.postprocess == 0.2 + + def test_repr(self): + time_info = TimeInfo(total=1.000001, preprocess=0.5, main=0.3, postprocess=0.2) + assert repr(time_info) == "TimeInfo(total=1.000, preprocess=0.500, main=0.300, postprocess=0.200)" + + +class TestTokenInfo: + def test_init(self): + token_info = TokenInfo(input=100, output=200, total=300) + assert token_info.input == 100 + assert token_info.output == 200 + assert token_info.total == 300 + + def test_add(self): + token_info1 = TokenInfo(input=100, output=200, total=300) + token_info2 = TokenInfo(input=10, output=20, total=30) + token_info3 = token_info1 + token_info2 + assert token_info3.input == 110 + assert token_info3.output == 220 + assert token_info3.total == 330 + + def test_iadd(self): + token_info1 = TokenInfo(input=100, output=200, total=300) + token_info2 = 
TokenInfo(input=10, output=20, total=30) + token_info1 += token_info2 + assert token_info1.input == 110 + assert token_info1.output == 220 + assert token_info1.total == 330 + + +class TestPriceInfo: + def test_init(self): + price_info = PriceInfo(input=0.001, output=0.002, total=0.003) + assert price_info.input == 0.001 + assert price_info.output == 0.002 + assert price_info.total == 0.003 + + def test_add(self): + price_info1 = PriceInfo(input=0.001, output=0.002, total=0.003) + price_info2 = PriceInfo(input=0.0001, output=0.0002, total=0.0003) + price_info3 = price_info1 + price_info2 + assert price_info3.input == 0.0011 + assert price_info3.output == 0.0022 + assert price_info3.total == 0.0033 + + def test_iadd(self): + price_info1 = PriceInfo(input=0.001, output=0.002, total=0.003) + price_info2 = PriceInfo(input=0.0001, output=0.0002, total=0.0003) + price_info1 += price_info2 + assert price_info1.input == 0.0011 + assert price_info1.output == 0.0022 + assert price_info1.total == 0.0033 + + def test_repr(self): + price_info = PriceInfo(input=0.0010001, output=0.002, total=0.003) + assert repr(price_info) == "PriceInfo(input=0.001, output=0.002, total=0.003)" + + +class TestAPIPricing: + def test_init(self): + api_pricing = APIPricing(input=0.0010, output=0.0020) + assert api_pricing.input == 0.0010 + assert api_pricing.output == 0.0020 diff --git a/test/types/test_model.py b/test/types/test_model.py new file mode 100644 index 0000000000000000000000000000000000000000..e82d36669e60852da01c4ff80fd9432d2afbbc54 --- /dev/null +++ b/test/types/test_model.py @@ -0,0 +1,16 @@ +import pytest # noqa F401 + +from neollm.types._model import DictableBaseModel + + +class TestDictableBaseModel: + def test_getitem(self): + class TestModel(DictableBaseModel): + a: int + b: str + + model = TestModel(a=1, b="test") + assert model["a"] == 1 + assert model["b"] == "test" + assert model.a == 1 + assert model.b == "test" diff --git a/test/utils/test_preprocess.py 
b/test/utils/test_preprocess.py new file mode 100644 index 0000000000000000000000000000000000000000..9a3473af5c2f438caec14506196b84955829cc27 --- /dev/null +++ b/test/utils/test_preprocess.py @@ -0,0 +1,12 @@ +import pytest # noqa E402 + +from neollm.utils.preprocess import dict2json, optimize_token + + +def test_dict2json() -> None: + assert dict2json({"a": 1, "b": 2.1}) == '{\n "a": 1,\n "b": 2.1\n}' + assert dict2json({"あいうえお": False, "漢字": None}) == '{\n "あいうえお": false,\n "漢字": null\n}' + + +def test_optimize_token() -> None: + assert optimize_token("\na\n\n\nb \n") == "a\n\nb"