File size: 3,670 Bytes
ed4d993
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
1
2
3
4
5
6
7
8
9
10
11
12
13
14
15
16
17
18
19
20
21
22
23
24
25
26
27
28
29
30
31
32
33
34
35
36
37
38
39
40
41
42
43
44
45
46
47
48
49
50
51
52
53
54
55
56
57
58
59
60
61
62
63
64
65
66
67
68
69
70
71
72
73
74
75
76
77
78
79
80
81
82
83
84
85
86
87
88
89
90
91
92
93
94
95
96
97
98
99
100
101
102
103
104
105
106
107
108
109
110
111
112
113
"""A unit test meant to catch accidental introduction of non-optional dependencies."""

from pathlib import Path
from typing import Any, Dict, Mapping

import pytest
import toml

HERE = Path(__file__).parent

PYPROJECT_TOML = HERE / "../../pyproject.toml"


@pytest.fixture()
def poetry_conf() -> Dict[str, Any]:
    """Parse pyproject.toml and return the [tool.poetry] configuration table."""
    with PYPROJECT_TOML.open() as config_file:
        parsed = toml.load(config_file)
    return parsed["tool"]["poetry"]


def test_required_dependencies(poetry_conf: Mapping[str, Any]) -> None:
    """A test that checks if a new non-optional dependency is being introduced.

    If this test is triggered, it means that a contributor is trying to introduce a new
    required dependency. This should be avoided in most situations.
    """
    # Get the dependencies from the [tool.poetry.dependencies] section
    dependencies = poetry_conf["dependencies"]

    is_required = {
        package_name: isinstance(requirements, str)
        or not requirements.get("optional", False)
        for package_name, requirements in dependencies.items()
    }
    required_dependencies = [
        package_name for package_name, required in is_required.items() if required
    ]

    assert sorted(required_dependencies) == sorted(
        [
            "PyYAML",
            "SQLAlchemy",
            "aiohttp",
            "dataclasses-json",
            "langchain-core",
            "langsmith",
            "numpy",
            "python",
            "requests",
            "tenacity",
            "langchain",
        ]
    )

    unrequired_dependencies = [
        package_name for package_name, required in is_required.items() if not required
    ]
    in_extras = [
        dep for group in poetry_conf.get("extras", {}).values() for dep in group
    ]
    assert set(unrequired_dependencies) == set(in_extras)


def test_test_group_dependencies(poetry_conf: Mapping[str, Any]) -> None:
    """Check if someone is attempting to add additional test dependencies.

    Only dependencies associated with test running infrastructure should be added
    to the test group; e.g., pytest, pytest-cov etc.

    Examples of dependencies that should NOT be included: boto3, azure, postgres, etc.
    """

    test_group_deps = sorted(poetry_conf["group"]["test"]["dependencies"])

    assert test_group_deps == sorted(
        [
            "duckdb-engine",
            "freezegun",
            "langchain-core",
            "langchain",
            "lark",
            "pandas",
            "pytest",
            "pytest-asyncio",
            "pytest-cov",
            "pytest-dotenv",
            "pytest-mock",
            "pytest-socket",
            "pytest-watcher",
            "responses",
            "syrupy",
            "requests-mock",
        ]
    )


def test_imports() -> None:
    """Test that you can import all top level things okay."""
    # Smoke test: import one representative public name from each major
    # namespace. A failure here usually means a broken re-export or an
    # import-time error somewhere inside that subpackage. The imports are
    # deliberately unused (hence the noqa: F401 markers).
    from langchain_core.prompts import BasePromptTemplate  # noqa: F401

    from langchain_community.callbacks import OpenAICallbackHandler  # noqa: F401
    from langchain_community.chat_models import ChatOpenAI  # noqa: F401
    from langchain_community.document_loaders import BSHTMLLoader  # noqa: F401
    from langchain_community.embeddings import OpenAIEmbeddings  # noqa: F401
    from langchain_community.llms import OpenAI  # noqa: F401
    from langchain_community.retrievers import VespaRetriever  # noqa: F401
    from langchain_community.tools import DuckDuckGoSearchResults  # noqa: F401
    from langchain_community.utilities import (
        SearchApiAPIWrapper,  # noqa: F401
        SerpAPIWrapper,  # noqa: F401
    )
    from langchain_community.vectorstores import FAISS  # noqa: F401