Skip to content
Closed
Show file tree
Hide file tree
Changes from all commits
Commits
File filter

Filter by extension

Filter by extension


Conversations
Failed to load comments.
Loading
Jump to
Jump to file
Failed to load files.
Loading
Diff view
Diff view
2 changes: 1 addition & 1 deletion .github/workflows/reusable-test.yaml
Original file line number Diff line number Diff line change
Expand Up @@ -15,7 +15,7 @@ jobs:
fail-fast: false
matrix:
os: [ ubuntu-latest ]
python-version: [ "3.10", "3.11", "3.12", "3.13", ]
python-version: [ "3.10", "3.11", "3.12", "3.13", "3.14", ]
include:
- os: windows-latest
python-version: "3.10"
Expand Down
9 changes: 7 additions & 2 deletions docs/source/conf.py
Original file line number Diff line number Diff line change
Expand Up @@ -5,21 +5,26 @@

# -- Project information -----------------------------------------------------
# https://www.sphinx-doc.org/en/master/usage/configuration.html#project-information
from __future__ import annotations

import os
import sys
from importlib.metadata import version
from pathlib import Path

from sphinx.application import Sphinx
from typing import TYPE_CHECKING

conf_dir = os.path.dirname(os.path.abspath(__file__)) # noqa: PTH100, PTH120

sys.path.insert(0, conf_dir)


from docs_utils.skip_members import skip_member # noqa: E402
from docs_utils.tutorials import generate_tutorial_links_for_notebook_creation # noqa: E402
from docs_utils.versions_generator import generate_versions_json # noqa: E402

if TYPE_CHECKING:
from sphinx.application import Sphinx

project = "AutoIntent"
copyright = "2025, DeepPavlov"
author = "DeepPavlov"
Expand Down
8 changes: 6 additions & 2 deletions docs/source/docs_utils/notebook.py
Original file line number Diff line number Diff line change
@@ -1,11 +1,15 @@
from __future__ import annotations

import abc
import re
from typing import ClassVar, Literal
from typing import TYPE_CHECKING, ClassVar, Literal

import nbformat
from jupytext import jupytext
from pydantic import BaseModel

if TYPE_CHECKING:
import nbformat


class ReplacePattern(BaseModel, abc.ABC):
"""
Expand Down
9 changes: 6 additions & 3 deletions docs/source/docs_utils/skip_members.py
Original file line number Diff line number Diff line change
@@ -1,7 +1,10 @@
from typing import Any
from __future__ import annotations

from sphinx.application import Sphinx
from sphinx.ext.autodoc import Options
from typing import TYPE_CHECKING, Any

if TYPE_CHECKING:
from sphinx.application import Sphinx
from sphinx.ext.autodoc import Options


def skip_member(app: Sphinx, what: str, name: str, obj: Any, skip: bool, options: Options) -> bool | None: # noqa: ANN401, ARG001
Expand Down
7 changes: 6 additions & 1 deletion docs/source/docs_utils/versions_generator.py
Original file line number Diff line number Diff line change
@@ -1,9 +1,14 @@
from __future__ import annotations

import json
import logging
import os
import re
import subprocess
from pathlib import Path
from typing import TYPE_CHECKING

if TYPE_CHECKING:
from pathlib import Path

logging.basicConfig(level=logging.INFO)
logger = logging.getLogger(__name__)
Expand Down
23 changes: 21 additions & 2 deletions pyproject.toml
Original file line number Diff line number Diff line change
Expand Up @@ -29,7 +29,7 @@ classifiers=[
'Framework :: Sphinx',
'Typing :: Typed',
]
requires-python = ">=3.10,<3.14"
requires-python = ">=3.10,<3.15"
dependencies = [
"sentence-transformers (>=3,<6)",
"scikit-learn (>=1.5,<2.0)",
Expand All @@ -45,13 +45,15 @@ dependencies = [
"python-dotenv (>=1.0.1,<2.0.0)",
"transformers[torch] (>=4.49.0,<6.0.0)",
"peft (>= 0.10.0, !=0.15.0, !=0.15.1, <1.0.0)",
"catboost (>=1.2.8,<2.0.0)",
"aiometer (>=1.0.0,<2.0.0)",
"aiofiles (>=24.1.0,<25.0.0)",
"threadpoolctl (>=3.0.0,<4.0.0)",
]

[project.optional-dependencies]
catboost = [
"catboost (>=1.2.8,<2.0.0)",
]
dspy = [
"dspy (>=2.6.5,<3.0.0)",
]
Expand Down Expand Up @@ -158,6 +160,7 @@ ignore = [
"RUF001", # ambiguous unicode character
"PLC0415", # import outside top-level
]
future-annotations = true # For TC rules

[tool.ruff.lint.per-file-ignores]
"__init__.py" = ["F401", "D104"]
Expand All @@ -176,6 +179,22 @@ max-args = 10
[tool.ruff.lint.pydocstyle]
convention = "google"

[tool.ruff.lint.flake8-type-checking]
strict = true
runtime-evaluated-base-classes = ["pydantic.BaseModel"]
exempt-modules = [
"typing",
"typing_extensions",
    # required for config validation
"autointent.Context",
"autointent.custom_types",
"autointent.configs",
"pydantic",
]

[tool.ruff.lint.flake8-unused-arguments]
ignore-variadic-names = true

[tool.pytest.ini_options]
minversion = "8.0"
testpaths = [
Expand Down
8 changes: 6 additions & 2 deletions src/autointent/_callbacks/base.py
Original file line number Diff line number Diff line change
@@ -1,8 +1,12 @@
"""Base class for reporters (W&B, TensorBoard, etc)."""

from __future__ import annotations

from abc import ABC, abstractmethod
from pathlib import Path
from typing import Any
from typing import TYPE_CHECKING, Any

if TYPE_CHECKING:
from pathlib import Path


class OptimizerCallback(ABC):
Expand Down
8 changes: 6 additions & 2 deletions src/autointent/_callbacks/callback_handler.py
Original file line number Diff line number Diff line change
@@ -1,8 +1,12 @@
from pathlib import Path
from typing import Any
from __future__ import annotations

from typing import TYPE_CHECKING, Any

from autointent._callbacks.base import OptimizerCallback

if TYPE_CHECKING:
from pathlib import Path


class CallbackHandler(OptimizerCallback):
"""Internal class that just calls the list of callbacks in order."""
Expand Down
8 changes: 6 additions & 2 deletions src/autointent/_callbacks/emissions_tracker.py
Original file line number Diff line number Diff line change
@@ -1,12 +1,16 @@
"""Emissions tracking functionality for monitoring energy consumption and carbon emissions."""

from __future__ import annotations

import json
import logging
from pathlib import Path
from typing import Any
from typing import TYPE_CHECKING, Any

from autointent._callbacks import OptimizerCallback

if TYPE_CHECKING:
from pathlib import Path

logger = logging.getLogger(__name__)


Expand Down
8 changes: 6 additions & 2 deletions src/autointent/_callbacks/wandb.py
Original file line number Diff line number Diff line change
@@ -1,10 +1,14 @@
from __future__ import annotations

import logging
import os
from pathlib import Path
from typing import Any
from typing import TYPE_CHECKING, Any

from autointent._callbacks.base import OptimizerCallback

if TYPE_CHECKING:
from pathlib import Path

logger = logging.getLogger(__name__)


Expand Down
22 changes: 13 additions & 9 deletions src/autointent/_dataset/_dataset.py
Original file line number Diff line number Diff line change
@@ -1,17 +1,23 @@
"""Defines the Dataset class and related utilities for handling datasets."""

from __future__ import annotations

import json
import logging
from collections import defaultdict
from functools import cached_property
from pathlib import Path
from typing import Any, TypedDict
from typing import TYPE_CHECKING, Any, TypedDict

from datasets import Dataset as HFDataset
from datasets import Sequence, get_dataset_config_names, load_dataset

from autointent.custom_types import LabelWithOOS, Split
from autointent.schemas import Intent, Tag
from autointent.custom_types import Split
from autointent.schemas import Tag

if TYPE_CHECKING:
from autointent.custom_types import LabelWithOOS
from autointent.schemas import Intent

logger = logging.getLogger(__name__)

Expand Down Expand Up @@ -72,7 +78,7 @@ def n_classes(self) -> int:
return len(self.intents)

@classmethod
def from_dict(cls, mapping: dict[str, Any]) -> "Dataset":
def from_dict(cls, mapping: dict[str, Any]) -> Dataset:
"""Creates a dataset from a dictionary mapping.

Args:
Expand All @@ -83,7 +89,7 @@ def from_dict(cls, mapping: dict[str, Any]) -> "Dataset":
return DictReader().read(mapping)

@classmethod
def from_json(cls, filepath: str | Path) -> "Dataset":
def from_json(cls, filepath: str | Path) -> Dataset:
"""Loads a dataset from a JSON file.

Args:
Expand All @@ -94,9 +100,7 @@ def from_json(cls, filepath: str | Path) -> "Dataset":
return JsonReader().read(filepath)

@classmethod
def from_hub(
cls, repo_name: str, data_split: str = "default", intent_subset_name: str = Split.INTENTS
) -> "Dataset":
def from_hub(cls, repo_name: str, data_split: str = "default", intent_subset_name: str = Split.INTENTS) -> Dataset:
"""Loads a dataset from the Hugging Face Hub.

Args:
Expand All @@ -113,7 +117,7 @@ def from_hub(

return DictReader().read(mapping)

def to_multilabel(self) -> "Dataset":
def to_multilabel(self) -> Dataset:
"""Converts dataset labels to multilabel format."""
for split_name, split in self.items():
self[split_name] = split.map(self._to_multilabel)
Expand Down
8 changes: 5 additions & 3 deletions src/autointent/_dataset/_validation.py
Original file line number Diff line number Diff line change
@@ -1,5 +1,7 @@
"""File containing definitions of DatasetReader and DatasetValidator for handling dataset operations."""

from __future__ import annotations

from pydantic import BaseModel, ConfigDict, model_validator

from autointent.schemas import Intent, Sample
Expand Down Expand Up @@ -31,7 +33,7 @@ class DatasetReader(BaseModel):
model_config = ConfigDict(extra="forbid")

@model_validator(mode="after")
def validate_dataset(self) -> "DatasetReader":
def validate_dataset(self) -> DatasetReader:
"""Validates dataset integrity by ensuring consistent data splits and intent mappings.

Raises:
Expand Down Expand Up @@ -119,7 +121,7 @@ def _validate_classes(self, splits: list[list[Sample]]) -> int:
raise ValueError(message)
return n_classes[0]

def _validate_intents(self, n_classes: int) -> "DatasetReader":
def _validate_intents(self, n_classes: int) -> DatasetReader:
"""Ensures intent IDs are sequential and match the number of classes.

Args:
Expand All @@ -143,7 +145,7 @@ def _validate_intents(self, n_classes: int) -> "DatasetReader":
raise ValueError(message)
return self

def _validate_split(self, split: list[Sample]) -> "DatasetReader":
def _validate_split(self, split: list[Sample]) -> DatasetReader:
"""Validate a dataset split to ensure all sample labels reference valid intent IDs.

Args:
Expand Down
8 changes: 6 additions & 2 deletions src/autointent/_dump_tools/base.py
Original file line number Diff line number Diff line change
@@ -1,7 +1,8 @@
from __future__ import annotations

import logging
from abc import ABC, abstractmethod
from pathlib import Path
from typing import Any, Generic, TypeAlias, TypeVar
from typing import TYPE_CHECKING, Any, Generic, TypeAlias, TypeVar

import numpy as np
import numpy.typing as npt
Expand All @@ -11,6 +12,9 @@
from autointent._wrappers import BaseTorchModuleWithVocab
from autointent.schemas import TagsList

if TYPE_CHECKING:
from pathlib import Path

ModuleSimpleAttributes = None | str | int | float | bool | list # type: ignore[type-arg]

ModuleAttributes: TypeAlias = (
Expand Down
10 changes: 7 additions & 3 deletions src/autointent/_dump_tools/generator_dumper.py
Original file line number Diff line number Diff line change
@@ -1,12 +1,16 @@
"""Separate file to fix circular import error."""

from pathlib import Path
from typing import Any
from __future__ import annotations

from typing import TYPE_CHECKING, Any

from autointent.generation import Generator

from .base import BaseObjectDumper

if TYPE_CHECKING:
from pathlib import Path


class GeneratorDumper(BaseObjectDumper[Generator]):
dir_or_file_name = "generators"
Expand All @@ -16,7 +20,7 @@ def dump(obj: Generator, path: Path, exists_ok: bool) -> None:
obj.dump(path, exist_ok=exists_ok)

@staticmethod
def load(path: Path, **kwargs: Any) -> Generator: # noqa: ANN401, ARG004
def load(path: Path, **kwargs: Any) -> Generator: # noqa: ANN401
return Generator.load(path)

@classmethod
Expand Down
18 changes: 13 additions & 5 deletions src/autointent/_dump_tools/main.py
Original file line number Diff line number Diff line change
@@ -1,16 +1,15 @@
from __future__ import annotations

import logging
from pathlib import Path
from typing import Any, ClassVar, TypeVar
from typing import TYPE_CHECKING, Any, ClassVar, TypeVar

import numpy as np
import numpy.typing as npt
import torch

from autointent.configs import CrossEncoderConfig, EmbedderConfig
from autointent.context.optimization_info import Artifact
from autointent.schemas import TagsList

from .base import BaseObjectDumper, ModuleAttributes, ModuleSimpleAttributes
from .base import ModuleSimpleAttributes
from .generator_dumper import GeneratorDumper
from .unit_dumpers import (
ArraysDumper,
Expand All @@ -28,6 +27,15 @@
VectorIndexDumper,
)

if TYPE_CHECKING:
from pathlib import Path

import numpy.typing as npt

from autointent.configs import CrossEncoderConfig, EmbedderConfig

from .base import BaseObjectDumper, ModuleAttributes

T = TypeVar("T")
logger = logging.getLogger(__name__)

Expand Down
Loading
Loading