Skip to content

Enforce modern Python code using pre-commit running pyupgrade --py39-plus #1329

New issue

Have a question about this project? Sign up for a free GitHub account to open an issue and contact its maintainers and the community.

By clicking “Sign up for GitHub”, you agree to our terms of service and privacy statement. We’ll occasionally send you account-related emails.

Already on GitHub? Sign in to your account

Open
wants to merge 1 commit into
base: main
Choose a base branch
from
Open
Show file tree
Hide file tree
Changes from all commits
Commits
File filter

Filter by extension

Filter by extension

Conversations
Failed to load comments.
Loading
Jump to
Jump to file
Failed to load files.
Loading
Diff view
Diff view
7 changes: 6 additions & 1 deletion .pre-commit-config.yaml
Original file line number Diff line number Diff line change
Expand Up @@ -17,6 +17,11 @@ repos:
- id: check-builtin-literals
- id: trailing-whitespace

- repo: https://github.com/PyCQA/autoflake
rev: v2.3.1
hooks:
- id: autoflake

- repo: https://github.com/psf/black
rev: 25.1.0
hooks:
Expand All @@ -33,7 +38,7 @@ repos:
rev: v3.19.1
hooks:
- id: pyupgrade
args: [--py37-plus]
args: [--py39-plus]

- repo: https://github.com/pycqa/flake8
rev: 7.1.1
Expand Down
Original file line number Diff line number Diff line change
@@ -1,5 +1,3 @@
from typing import Dict

from .models.completion import InlineCompletionRequest


Expand All @@ -9,7 +7,7 @@ def token_from_request(request: InlineCompletionRequest, suggestion: int):
return f"t{request.number}s{suggestion}"


def template_inputs_from_request(request: InlineCompletionRequest) -> Dict:
def template_inputs_from_request(request: InlineCompletionRequest) -> dict:
suffix = request.suffix.strip()
filename = request.path.split("/")[-1] if request.path else "untitled"

Expand Down
Original file line number Diff line number Diff line change
@@ -1,4 +1,4 @@
from typing import ClassVar, List, Optional
from typing import ClassVar, Optional

from jupyter_ai_magics.providers import (
AuthStrategy,
Expand Down Expand Up @@ -27,7 +27,7 @@ class BaseEmbeddingsProvider(BaseModel):
name: ClassVar[str] = ...
"""User-facing name of this provider."""

models: ClassVar[List[str]] = ...
models: ClassVar[list[str]] = ...
"""List of supported models by their IDs. For registry providers, this will
be just ["*"]."""

Expand All @@ -38,7 +38,7 @@ class BaseEmbeddingsProvider(BaseModel):
model_id_key: ClassVar[str] = ...
"""Kwarg expected by the upstream LangChain provider."""

pypi_package_deps: ClassVar[List[str]] = []
pypi_package_deps: ClassVar[list[str]] = []
"""List of PyPi package dependencies."""

auth_strategy: ClassVar[AuthStrategy] = None
Expand All @@ -50,7 +50,7 @@ class BaseEmbeddingsProvider(BaseModel):
registry: ClassVar[bool] = False
"""Whether this provider is a registry provider."""

fields: ClassVar[List[Field]] = []
fields: ClassVar[list[Field]] = []
"""Fields expected by this provider in its constructor. Each `Field` `f`
should be passed as a keyword argument, keyed by `f.key`."""

Expand Down
2 changes: 0 additions & 2 deletions packages/jupyter-ai-magics/jupyter_ai_magics/exception.py
Original file line number Diff line number Diff line change
@@ -1,5 +1,3 @@
import traceback

from IPython.core.magic import register_line_magic
from IPython.core.ultratb import ListTB

Expand Down
Original file line number Diff line number Diff line change
@@ -1,4 +1,4 @@
from typing import List, Literal, Optional
from typing import Literal, Optional

from pydantic import BaseModel

Expand Down Expand Up @@ -50,7 +50,7 @@ class CompletionError(BaseModel):
class InlineCompletionList(BaseModel):
"""Reflection of JupyterLab's `IInlineCompletionList`."""

items: List[InlineCompletionItem]
items: list[InlineCompletionItem]


class InlineCompletionReply(BaseModel):
Expand Down
Original file line number Diff line number Diff line change
@@ -1,6 +1,7 @@
import copy
import json
from typing import Any, Coroutine, Dict
from collections.abc import Coroutine
from typing import Any

from jsonpath_ng import parse
from langchain_aws import BedrockEmbeddings, BedrockLLM, ChatBedrock, SagemakerEndpoint
Expand Down Expand Up @@ -155,7 +156,7 @@ def __init__(self, request_schema, response_path):
self.response_path = response_path
self.response_parser = parse(response_path)

def replace_values(self, old_val, new_val, d: Dict[str, Any]):
def replace_values(self, old_val, new_val, d: dict[str, Any]):
"""Replaces values of a dictionary recursively."""
for key, val in d.items():
if val == old_val:
Expand All @@ -165,7 +166,7 @@ def replace_values(self, old_val, new_val, d: Dict[str, Any]):

return d

def transform_input(self, prompt: str, model_kwargs: Dict) -> bytes:
def transform_input(self, prompt: str, model_kwargs: dict) -> bytes:
request_obj = copy.deepcopy(self.request_schema)
self.replace_values("<prompt>", prompt, request_obj)
request = json.dumps(request_obj).encode("utf-8")
Expand Down
Original file line number Diff line number Diff line change
@@ -1,5 +1,3 @@
from typing import Dict

from jupyter_ai_magics import BaseProvider
from jupyter_ai_magics.providers import EnvAuthStrategy, TextField
from langchain_core.utils import get_from_dict_or_env
Expand All @@ -8,7 +6,7 @@

class ChatOpenRouter(ChatOpenAI):
@property
def lc_secrets(self) -> Dict[str, str]:
def lc_secrets(self) -> dict[str, str]:
return {"openai_api_key": "OPENROUTER_API_KEY"}


Expand Down
17 changes: 7 additions & 10 deletions packages/jupyter-ai-magics/jupyter_ai_magics/providers.py
Original file line number Diff line number Diff line change
Expand Up @@ -3,15 +3,12 @@
import functools
import io
import json
from collections.abc import AsyncIterator, Coroutine
from concurrent.futures import ThreadPoolExecutor
from types import MappingProxyType
from typing import (
Any,
AsyncIterator,
ClassVar,
Coroutine,
Dict,
List,
Literal,
Optional,
Union,
Expand Down Expand Up @@ -127,7 +124,7 @@ class MultiEnvAuthStrategy(BaseModel):
"""Require multiple auth tokens via multiple environment variables."""

type: Literal["multienv"] = "multienv"
names: List[str]
names: list[str]


class AwsAuthStrategy(BaseModel):
Expand Down Expand Up @@ -183,7 +180,7 @@ class BaseProvider(BaseModel):
name: ClassVar[str] = ...
"""User-facing name of this provider."""

models: ClassVar[List[str]] = ...
models: ClassVar[list[str]] = ...
"""List of supported models by their IDs. For registry providers, this will
be just ["*"]."""

Expand All @@ -207,7 +204,7 @@ class BaseProvider(BaseModel):
If unset, the label shown in the UI defaults to "Model ID".
"""

pypi_package_deps: ClassVar[List[str]] = []
pypi_package_deps: ClassVar[list[str]] = []
"""List of PyPi package dependencies."""

auth_strategy: ClassVar[AuthStrategy] = None
Expand All @@ -217,7 +214,7 @@ class BaseProvider(BaseModel):
registry: ClassVar[bool] = False
"""Whether this provider is a registry provider."""

fields: ClassVar[List[Field]] = []
fields: ClassVar[list[Field]] = []
"""User inputs expected by this provider when initializing it. Each `Field` `f`
should be passed in the constructor as a keyword argument, keyed by `f.key`."""

Expand Down Expand Up @@ -266,7 +263,7 @@ def completion_models(self):
# instance attrs
#
model_id: str
prompt_templates: Dict[str, PromptTemplate]
prompt_templates: dict[str, PromptTemplate]
"""Prompt templates for each output type. Can be overridden with
`update_prompt_template`. The function `prompt_template`, in the base class,
refers to this."""
Expand Down Expand Up @@ -599,7 +596,7 @@ class HfHubProvider(BaseProvider, HuggingFaceEndpoint):

# Handle text and image outputs
def _call(
self, prompt: str, stop: Optional[List[str]] = None, **kwargs: Any
self, prompt: str, stop: Optional[list[str]] = None, **kwargs: Any
) -> str:
"""Call out to Hugging Face Hub's inference endpoint.

Expand Down
Original file line number Diff line number Diff line change
Expand Up @@ -119,5 +119,5 @@ def test_reset(ip):
ip.extension_manager.load_extension("jupyter_ai_magics")
ai_magics = ip.magics_manager.registry["AiMagics"]
ai_magics.transcript = [AI1, H1, AI2, H2, AI3]
result = ip.run_line_magic("ai", "reset")
ip.run_line_magic("ai", "reset")
assert ai_magics.transcript == []
20 changes: 10 additions & 10 deletions packages/jupyter-ai-magics/jupyter_ai_magics/utils.py
Original file line number Diff line number Diff line change
@@ -1,18 +1,18 @@
import logging
from typing import Dict, List, Literal, Optional, Tuple, Type, Union
from typing import Literal, Optional, Union

from importlib_metadata import entry_points
from jupyter_ai_magics.aliases import MODEL_ID_ALIASES
from jupyter_ai_magics.embedding_providers import BaseEmbeddingsProvider
from jupyter_ai_magics.providers import BaseProvider

Logger = Union[logging.Logger, logging.LoggerAdapter]
LmProvidersDict = Dict[str, BaseProvider]
EmProvidersDict = Dict[str, BaseEmbeddingsProvider]
LmProvidersDict = dict[str, BaseProvider]
EmProvidersDict = dict[str, BaseEmbeddingsProvider]
AnyProvider = Union[BaseProvider, BaseEmbeddingsProvider]
ProviderDict = Dict[str, AnyProvider]
ProviderRestrictions = Dict[
Literal["allowed_providers", "blocked_providers"], Optional[List[str]]
ProviderDict = dict[str, AnyProvider]
ProviderRestrictions = dict[
Literal["allowed_providers", "blocked_providers"], Optional[list[str]]
]


Expand Down Expand Up @@ -80,8 +80,8 @@ def get_em_providers(


def decompose_model_id(
model_id: str, providers: Dict[str, BaseProvider]
) -> Tuple[str, str]:
model_id: str, providers: dict[str, BaseProvider]
) -> tuple[str, str]:
"""Breaks down a model ID into a two-tuple (provider_id, local_model_id). Returns (None, None) if indeterminate."""
if model_id in MODEL_ID_ALIASES:
model_id = MODEL_ID_ALIASES[model_id]
Expand All @@ -104,15 +104,15 @@ def decompose_model_id(

def get_lm_provider(
model_id: str, lm_providers: LmProvidersDict
) -> Tuple[str, Type[BaseProvider]]:
) -> tuple[str, type[BaseProvider]]:
"""Gets a two-tuple (<local-model-id>, <provider-class>) specified by a
global model ID."""
return _get_provider(model_id, lm_providers)


def get_em_provider(
model_id: str, em_providers: EmProvidersDict
) -> Tuple[str, Type[BaseEmbeddingsProvider]]:
) -> tuple[str, type[BaseEmbeddingsProvider]]:
"""Gets a two-tuple (<local-model-id>, <provider-class>) specified by a
global model ID."""
return _get_provider(model_id, em_providers)
Expand Down
Original file line number Diff line number Diff line change
@@ -1,4 +1,4 @@
from typing import Any, List, Optional
from typing import Any, Optional

from langchain_core.callbacks.manager import CallbackManagerForLLMRun
from langchain_core.language_models.llms import LLM
Expand All @@ -14,7 +14,7 @@ def _llm_type(self) -> str:
def _call(
self,
prompt: str,
stop: Optional[List[str]] = None,
stop: Optional[list[str]] = None,
run_manager: Optional[CallbackManagerForLLMRun] = None,
**kwargs: Any,
) -> str:
Expand Down
Original file line number Diff line number Diff line change
@@ -1,4 +1,4 @@
from typing import ClassVar, List
from typing import ClassVar

from jupyter_ai import AuthStrategy, BaseProvider, Field

Expand Down Expand Up @@ -41,7 +41,7 @@ class TestModelProvider(BaseProvider, <langchain-llm-class>):
name: ClassVar[str] = "Test Provider"
"""User-facing name of this provider."""

models: ClassVar[List[str]] = ["test-model-1"]
models: ClassVar[list[str]] = ["test-model-1"]
"""List of supported models by their IDs. For registry providers, this will
be just ["*"]."""

Expand All @@ -55,7 +55,7 @@ class TestModelProvider(BaseProvider, <langchain-llm-class>):
model_id_label: ClassVar[str] = "Model ID"
"""Human-readable label of the model ID."""

pypi_package_deps: ClassVar[List[str]] = []
pypi_package_deps: ClassVar[list[str]] = []
"""List of PyPi package dependencies."""

auth_strategy: ClassVar[AuthStrategy] = None
Expand All @@ -65,6 +65,6 @@ class TestModelProvider(BaseProvider, <langchain-llm-class>):
registry: ClassVar[bool] = False
"""Whether this provider is a registry provider."""

fields: ClassVar[List[Field]] = []
fields: ClassVar[list[Field]] = []
"""User inputs expected by this provider when initializing it. Each `Field` `f`
should be passed in the constructor as a keyword argument, keyed by `f.key`."""
9 changes: 5 additions & 4 deletions packages/jupyter-ai-test/jupyter_ai_test/test_llms.py
Original file line number Diff line number Diff line change
@@ -1,5 +1,6 @@
import time
from typing import Any, Iterator, List, Optional
from collections.abc import Iterator
from typing import Any, Optional

from langchain_core.callbacks.manager import CallbackManagerForLLMRun
from langchain_core.language_models.llms import LLM
Expand All @@ -16,7 +17,7 @@ def _llm_type(self) -> str:
def _call(
self,
prompt: str,
stop: Optional[List[str]] = None,
stop: Optional[list[str]] = None,
run_manager: Optional[CallbackManagerForLLMRun] = None,
**kwargs: Any,
) -> str:
Expand All @@ -34,7 +35,7 @@ def _llm_type(self) -> str:
def _call(
self,
prompt: str,
stop: Optional[List[str]] = None,
stop: Optional[list[str]] = None,
run_manager: Optional[CallbackManagerForLLMRun] = None,
**kwargs: Any,
) -> str:
Expand All @@ -44,7 +45,7 @@ def _call(
def _stream(
self,
prompt: str,
stop: Optional[List[str]] = None,
stop: Optional[list[str]] = None,
run_manager: Optional[CallbackManagerForLLMRun] = None,
**kwargs: Any,
) -> Iterator[GenerationChunk]:
Expand Down
Loading
Loading