diff --git a/.astro/config.yaml b/.astro/config.yaml deleted file mode 100644 index 72839af..0000000 --- a/.astro/config.yaml +++ /dev/null @@ -1,2 +0,0 @@ -project: - name: airflow-mcp-server diff --git a/.astro/dag_integrity_exceptions.txt b/.astro/dag_integrity_exceptions.txt deleted file mode 100644 index c9a2a63..0000000 --- a/.astro/dag_integrity_exceptions.txt +++ /dev/null @@ -1 +0,0 @@ -# Add dag files to exempt from parse test below. ex: dags/ \ No newline at end of file diff --git a/.astro/test_dag_integrity_default.py b/.astro/test_dag_integrity_default.py deleted file mode 100644 index e433703..0000000 --- a/.astro/test_dag_integrity_default.py +++ /dev/null @@ -1,141 +0,0 @@ -"""Test the validity of all DAGs. **USED BY DEV PARSE COMMAND DO NOT EDIT**""" - -from contextlib import contextmanager -import logging -import os - -import pytest - -from airflow.models import DagBag, Variable, Connection -from airflow.hooks.base import BaseHook -from airflow.utils.db import initdb - -# init airflow database -initdb() - -# The following code patches errors caused by missing OS Variables, Airflow Connections, and Airflow Variables - - -# =========== MONKEYPATCH BaseHook.get_connection() =========== -def basehook_get_connection_monkeypatch(key: str, *args, **kwargs): - print( - f"Attempted to fetch connection during parse returning an empty Connection object for {key}" - ) - return Connection(key) - - -BaseHook.get_connection = basehook_get_connection_monkeypatch -# # =========== /MONKEYPATCH BASEHOOK.GET_CONNECTION() =========== - - -# =========== MONKEYPATCH OS.GETENV() =========== -def os_getenv_monkeypatch(key: str, *args, **kwargs): - default = None - if args: - default = args[0] # os.getenv should get at most 1 arg after the key - if kwargs: - default = kwargs.get( - "default", None - ) # and sometimes kwarg if people are using the sig - - env_value = os.environ.get(key, None) - - if env_value: - return env_value # if the env_value is set, return it - if ( - key == "JENKINS_HOME" and default is None - ): # fix https://github.com/astronomer/astro-cli/issues/601 - return None - if default: - return default # otherwise return whatever default has been passed - return f"MOCKED_{key.upper()}_VALUE" # if absolutely nothing has been passed - return the mocked value - - -os.getenv = os_getenv_monkeypatch -# # =========== /MONKEYPATCH OS.GETENV() =========== - -# =========== MONKEYPATCH VARIABLE.GET() =========== - - -class magic_dict(dict): - def __init__(self, *args, **kwargs): - self.update(*args, **kwargs) - - def __getitem__(self, key): - return {}.get(key, "MOCKED_KEY_VALUE") - - -_no_default = object() # allow falsey defaults - - -def variable_get_monkeypatch(key: str, default_var=_no_default, deserialize_json=False): - print( - f"Attempted to get Variable value during parse, returning a mocked value for {key}" - ) - - if default_var is not _no_default: - return default_var - if deserialize_json: - return magic_dict() - return "NON_DEFAULT_MOCKED_VARIABLE_VALUE" - - -Variable.get = variable_get_monkeypatch -# # =========== /MONKEYPATCH VARIABLE.GET() =========== - - -@contextmanager -def suppress_logging(namespace): - """ - Suppress logging within a specific namespace to keep tests "clean" during build - """ - logger = logging.getLogger(namespace) - old_value = logger.disabled - logger.disabled = True - try: - yield - finally: - logger.disabled = old_value - - -def get_import_errors(): - """ - Generate a tuple for import errors in the dag bag, and include DAGs without errors. 
- """ - with suppress_logging("airflow"): - dag_bag = DagBag(include_examples=False) - - def strip_path_prefix(path): - return os.path.relpath(path, os.environ.get("AIRFLOW_HOME")) - - # Initialize an empty list to store the tuples - result = [] - - # Iterate over the items in import_errors - for k, v in dag_bag.import_errors.items(): - result.append((strip_path_prefix(k), v.strip())) - - # Check if there are DAGs without errors - for file_path in dag_bag.dags: - # Check if the file_path is not in import_errors, meaning no errors - if file_path not in dag_bag.import_errors: - result.append((strip_path_prefix(file_path), "No import errors")) - - return result - - -@pytest.mark.parametrize( - "rel_path, rv", get_import_errors(), ids=[x[0] for x in get_import_errors()] -) -def test_file_imports(rel_path, rv): - """Test for import errors on a file""" - if os.path.exists(".astro/dag_integrity_exceptions.txt"): - with open(".astro/dag_integrity_exceptions.txt", "r") as f: - exceptions = f.readlines() - print(f"Exceptions: {exceptions}") - if (rv != "No import errors") and rel_path not in exceptions: - # If rv is not "No import errors," consider it a failed test - raise Exception(f"{rel_path} failed to import with message \n {rv}") - else: - # If rv is "No import errors," consider it a passed test - print(f"{rel_path} passed the import test") diff --git a/.dockerignore b/.dockerignore deleted file mode 100644 index 9ec1580..0000000 --- a/.dockerignore +++ /dev/null @@ -1,12 +0,0 @@ -astro -.git -.env -airflow_settings.yaml -logs/ -.venv -airflow.db -airflow.cfg -airflow-mcp-server/ -resources/ -assets/ -README.md diff --git a/.github/workflows/python-publish.yml b/.github/workflows/python-publish.yml index 355ebcf..d5eb3e2 100644 --- a/.github/workflows/python-publish.yml +++ b/.github/workflows/python-publish.yml @@ -31,7 +31,6 @@ jobs: pip install uv - name: Build release distributions - working-directory: airflow-mcp-server run: | uv pip install --system build python -m build @@ -40,7 +39,7 @@ jobs: uses: actions/upload-artifact@v4 with: name: release-dists - path: airflow-mcp-server/dist/ + path: dist/ pypi-publish: runs-on: ubuntu-latest diff --git a/Dockerfile b/Dockerfile index 7eca009..84b935e 100644 --- a/Dockerfile +++ b/Dockerfile @@ -1 +1,27 @@ -FROM quay.io/astronomer/astro-runtime:12.6.0 +# Use a Python image with uv pre-installed +FROM ghcr.io/astral-sh/uv:python3.12-bookworm-slim AS uv + +WORKDIR /app + +ENV UV_COMPILE_BYTECODE=1 +ENV UV_LINK_MODE=copy + +RUN --mount=type=cache,target=/root/.cache/uv \ + --mount=type=bind,source=uv.lock,target=uv.lock \ + --mount=type=bind,source=pyproject.toml,target=pyproject.toml \ + uv sync --frozen --no-install-project --no-dev --no-editable + +ADD . 
/app +RUN --mount=type=cache,target=/root/.cache/uv \ + uv sync --frozen --no-dev --no-editable + +FROM python:3.12-slim-bookworm + +WORKDIR /app + +COPY --from=uv /root/.local /root/.local +COPY --from=uv --chown=app:app /app/.venv /app/.venv + +ENV PATH="/app/.venv/bin:$PATH" + +ENTRYPOINT ["airflow-mcp-server"] diff --git a/LICENSE b/LICENSE index 8036231..b17cff9 100644 --- a/LICENSE +++ b/LICENSE @@ -1,6 +1,6 @@ MIT License -Copyright (c) 2025 Abhishek +Copyright (c) 2025 Abhishek Bhakat Permission is hereby granted, free of charge, to any person obtaining a copy of this software and associated documentation files (the "Software"), to deal diff --git a/README.md b/README.md index 35e694a..fa81cf0 100644 --- a/README.md +++ b/README.md @@ -8,12 +8,13 @@ ## Overview -A Model Context Protocol server for controlling Airflow via Airflow APIs. +A [Model Context Protocol](https://modelcontextprotocol.io/) server for controlling Airflow via Airflow APIs. ## Demo Video https://github.com/user-attachments/assets/f3e60fff-8680-4dd9-b08e-fa7db655a705 + ## Setup ### Usage with Claude Desktop @@ -35,13 +36,41 @@ https://github.com/user-attachments/assets/f3e60fff-8680-4dd9-b08e-fa7db655a705 } ``` +### Operation Modes -# Scope +The server supports two operation modes: -2 different streams in which Airflow MCP Server can be used: -- Adding Airflow to AI (_complete access to an Airflow deployment_) - - This will enable AI to be able to write DAGs and just do things in a schedule on its own. - - Use command `airflow-mcp-server` or `airflow-mcp-server --unsafe`. -- Adding AI to Airflow (_read-only access using Airflow Plugin_) - - This stream can enable Users to be able to get a better understanding about their deployment. Specially in cases where teams have hundreds, if not thousands of dags. - - Use command `airflow-mcp-server --safe`. +- **Safe Mode** (`--safe`): Only allows read-only operations (GET requests). This is useful when you want to prevent any modifications to your Airflow instance. +- **Unsafe Mode** (`--unsafe`): Allows all operations including modifications. This is the default mode. + +To start in safe mode: +```bash +airflow-mcp-server --safe +``` + +To explicitly start in unsafe mode (though this is default): +```bash +airflow-mcp-server --unsafe +``` + +### Considerations + +The MCP Server expects environment variables to be set: +- `AIRFLOW_BASE_URL`: The base URL of the Airflow API +- `AUTH_TOKEN`: The token to use for authorization (_This should be base64 encoded username:password_) +- `OPENAPI_SPEC`: The path to the OpenAPI spec file (_Optional_) (_defaults to latest stable release_) + +*Currently, only Basic Auth is supported.* + +**Page Limit** + +The default is 100 items, but you can change it using `maximum_page_limit` option in [api] section in the `airflow.cfg` file. + +## Tasks + +- [x] First API +- [x] Parse OpenAPI Spec +- [x] Safe/Unsafe mode implementation +- [ ] Parse proper description with list_tools. 
+- [ ] Airflow config fetch (_specifically for page limit_) +- [ ] Env variables optional (_env variables might not be ideal for airflow plugins_) diff --git a/airflow-mcp-server/Dockerfile b/airflow-mcp-server/Dockerfile deleted file mode 100644 index 84b935e..0000000 --- a/airflow-mcp-server/Dockerfile +++ /dev/null @@ -1,27 +0,0 @@ -# Use a Python image with uv pre-installed -FROM ghcr.io/astral-sh/uv:python3.12-bookworm-slim AS uv - -WORKDIR /app - -ENV UV_COMPILE_BYTECODE=1 -ENV UV_LINK_MODE=copy - -RUN --mount=type=cache,target=/root/.cache/uv \ - --mount=type=bind,source=uv.lock,target=uv.lock \ - --mount=type=bind,source=pyproject.toml,target=pyproject.toml \ - uv sync --frozen --no-install-project --no-dev --no-editable - -ADD . /app -RUN --mount=type=cache,target=/root/.cache/uv \ - uv sync --frozen --no-dev --no-editable - -FROM python:3.12-slim-bookworm - -WORKDIR /app - -COPY --from=uv /root/.local /root/.local -COPY --from=uv --chown=app:app /app/.venv /app/.venv - -ENV PATH="/app/.venv/bin:$PATH" - -ENTRYPOINT ["airflow-mcp-server"] diff --git a/airflow-mcp-server/README.md b/airflow-mcp-server/README.md deleted file mode 100644 index 722735c..0000000 --- a/airflow-mcp-server/README.md +++ /dev/null @@ -1,66 +0,0 @@ -# airflow-mcp-server: An MCP Server for controlling Airflow - - -## Overview -A [Model Context Protocol](https://modelcontextprotocol.io/) server for controlling Airflow via Airflow APIs. - - -## Setup - -### Usage with Claude Desktop - -```json -{ - "mcpServers": { - "airflow-mcp-server": { - "command": "uvx", - "args": [ - "airflow-mcp-server" - ], - "env": { - "AIRFLOW_BASE_URL": "http:///api/v1", - "AUTH_TOKEN": "" - } - } - } -} -``` - -### Operation Modes - -The server supports two operation modes: - -- **Safe Mode** (`--safe`): Only allows read-only operations (GET requests). This is useful when you want to prevent any modifications to your Airflow instance. -- **Unsafe Mode** (`--unsafe`): Allows all operations including modifications. This is the default mode. - -To start in safe mode: -```bash -airflow-mcp-server --safe -``` - -To explicitly start in unsafe mode (though this is default): -```bash -airflow-mcp-server --unsafe -``` - -### Considerations - -The MCP Server expects environment variables to be set: -- `AIRFLOW_BASE_URL`: The base URL of the Airflow API -- `AUTH_TOKEN`: The token to use for authorization (_This should be base64 encoded username:password_) -- `OPENAPI_SPEC`: The path to the OpenAPI spec file (_Optional_) (_defaults to latest stable release_) - -*Currently, only Basic Auth is supported.* - -**Page Limit** - -The default is 100 items, but you can change it using `maximum_page_limit` option in [api] section in the `airflow.cfg` file. - -## Tasks - -- [x] First API -- [x] Parse OpenAPI Spec -- [x] Safe/Unsafe mode implementation -- [ ] Parse proper description with list_tools. -- [ ] Airflow config fetch (_specifically for page limit_) -- [ ] Env variables optional (_env variables might not be ideal for airflow plugins_) diff --git a/airflow_settings.yaml b/airflow_settings.yaml deleted file mode 100644 index 1c16dc0..0000000 --- a/airflow_settings.yaml +++ /dev/null @@ -1,25 +0,0 @@ -# This file allows you to configure Airflow Connections, Pools, and Variables in a single place for local development only. -# NOTE: json dicts can be added to the conn_extra field as yaml key value pairs. See the example below. 
- -# For more information, refer to our docs: https://www.astronomer.io/docs/astro/cli/develop-project#configure-airflow_settingsyaml-local-development-only -# For questions, reach out to: https://support.astronomer.io -# For issues create an issue ticket here: https://github.com/astronomer/astro-cli/issues - -airflow: - connections: - - conn_id: - conn_type: - conn_host: - conn_schema: - conn_login: - conn_password: - conn_port: - conn_extra: - example_extra_field: example-value - pools: - - pool_name: - pool_slot: - pool_description: - variables: - - variable_name: - variable_value: diff --git a/dags/.airflowignore b/dags/.airflowignore deleted file mode 100644 index e69de29..0000000 diff --git a/dags/exampledag.py b/dags/exampledag.py deleted file mode 100644 index 8b08b7b..0000000 --- a/dags/exampledag.py +++ /dev/null @@ -1,100 +0,0 @@ -""" -## Astronaut ETL example DAG - -This DAG queries the list of astronauts currently in space from the -Open Notify API and prints each astronaut's name and flying craft. - -There are two tasks, one to get the data from the API and save the results, -and another to print the results. Both tasks are written in Python using -Airflow's TaskFlow API, which allows you to easily turn Python functions into -Airflow tasks, and automatically infer dependencies and pass data. - -The second task uses dynamic task mapping to create a copy of the task for -each Astronaut in the list retrieved from the API. This list will change -depending on how many Astronauts are in space, and the DAG will adjust -accordingly each time it runs. - -For more explanation and getting started instructions, see our Write your -first DAG tutorial: https://www.astronomer.io/docs/learn/get-started-with-airflow - -![Picture of the ISS](https://www.esa.int/var/esa/storage/images/esa_multimedia/images/2010/02/space_station_over_earth/10293696-3-eng-GB/Space_Station_over_Earth_card_full.jpg) -""" - -from airflow import Dataset -from airflow.decorators import dag, task -from pendulum import datetime -import requests - - -# Define the basic parameters of the DAG, like schedule and start_date -@dag( - start_date=datetime(2024, 1, 1), - schedule="@daily", - catchup=False, - doc_md=__doc__, - default_args={"owner": "Astro", "retries": 3}, - tags=["example"], -) -def example_astronauts(): - # Define tasks - @task( - # Define a dataset outlet for the task. This can be used to schedule downstream DAGs when this task has run. - outlets=[Dataset("current_astronauts")] - ) # Define that this task updates the `current_astronauts` Dataset - def get_astronauts(**context) -> list[dict]: - """ - This task uses the requests library to retrieve a list of Astronauts - currently in space. The results are pushed to XCom with a specific key - so they can be used in a downstream pipeline. The task returns a list - of Astronauts to be used in the next task. 
- """ - try: - r = requests.get("http://api.open-notify.org/astros.json") - r.raise_for_status() - number_of_people_in_space = r.json()["number"] - list_of_people_in_space = r.json()["people"] - except: - print("API currently not available, using hardcoded data instead.") - number_of_people_in_space = 12 - list_of_people_in_space = [ - {"craft": "ISS", "name": "Oleg Kononenko"}, - {"craft": "ISS", "name": "Nikolai Chub"}, - {"craft": "ISS", "name": "Tracy Caldwell Dyson"}, - {"craft": "ISS", "name": "Matthew Dominick"}, - {"craft": "ISS", "name": "Michael Barratt"}, - {"craft": "ISS", "name": "Jeanette Epps"}, - {"craft": "ISS", "name": "Alexander Grebenkin"}, - {"craft": "ISS", "name": "Butch Wilmore"}, - {"craft": "ISS", "name": "Sunita Williams"}, - {"craft": "Tiangong", "name": "Li Guangsu"}, - {"craft": "Tiangong", "name": "Li Cong"}, - {"craft": "Tiangong", "name": "Ye Guangfu"}, - ] - - context["ti"].xcom_push( - key="number_of_people_in_space", value=number_of_people_in_space - ) - return list_of_people_in_space - - @task - def print_astronaut_craft(greeting: str, person_in_space: dict) -> None: - """ - This task creates a print statement with the name of an - Astronaut in space and the craft they are flying on from - the API request results of the previous task, along with a - greeting which is hard-coded in this example. - """ - craft = person_in_space["craft"] - name = person_in_space["name"] - - print(f"{name} is currently in space flying on the {craft}! {greeting}") - - # Use dynamic task mapping to run the print_astronaut_craft task for each - # Astronaut in space - print_astronaut_craft.partial(greeting="Hello! :)").expand( - person_in_space=get_astronauts() # Define dependencies using TaskFlow API syntax - ) - - -# Instantiate the DAG -example_astronauts() diff --git a/packages.txt b/packages.txt deleted file mode 100644 index e69de29..0000000 diff --git a/airflow-mcp-server/pyproject.toml b/pyproject.toml similarity index 74% rename from airflow-mcp-server/pyproject.toml rename to pyproject.toml index 671a747..e3f292a 100644 --- a/airflow-mcp-server/pyproject.toml +++ b/pyproject.toml @@ -1,8 +1,12 @@ [project] name = "airflow-mcp-server" -version = "0.2.0" +version = "0.3.0" description = "MCP Server for Airflow" +readme = "README.md" requires-python = ">=3.11" +authors = [ + {name = "Abhishek Bhakat", email = "abhishek.bhakat@hotmail.com"} +] dependencies = [ "aiofiles>=24.1.0", "aiohttp>=3.11.11", @@ -13,6 +17,19 @@ dependencies = [ "pydantic>=2.10.5", "pyyaml>=6.0.0", ] +classifiers = [ + "Development Status :: 3 - Alpha", + "Operating System :: OS Independent", + "Programming Language :: Python :: 3.12", + "Programming Language :: Python :: 3.11", + "Programming Language :: Python :: 3.10", +] +license = "MIT" +license-files = ["LICEN[CS]E*"] + +[project.urls] +GitHub = "https://github.com/abhishekbhakat/airflow-mcp-server" +Issues = "https://github.com/abhishekbhakat/airflow-mcp-server/issues" [project.scripts] airflow-mcp-server = "airflow_mcp_server.__main__:main" @@ -40,11 +57,10 @@ exclude = [ [tool.hatch.build.targets.wheel] packages = ["src/airflow_mcp_server"] +package-data = {"airflow_mcp_server"= ["*.yaml"]} [tool.hatch.build.targets.wheel.sources] -"src/airflow_mcp_server" = [ - "*.yaml", -] +"src/airflow_mcp_server" = "airflow_mcp_server" [tool.pytest.ini_options] pythonpath = ["src"] diff --git a/requirements.txt b/requirements.txt deleted file mode 100644 index 1bb359b..0000000 --- a/requirements.txt +++ /dev/null @@ -1 +0,0 @@ -# Astro Runtime includes 
the following pre-installed providers packages: https://www.astronomer.io/docs/astro/runtime-image-architecture#provider-packages diff --git a/airflow-mcp-server/src/airflow_mcp_server/__init__.py b/src/airflow_mcp_server/__init__.py similarity index 100% rename from airflow-mcp-server/src/airflow_mcp_server/__init__.py rename to src/airflow_mcp_server/__init__.py diff --git a/airflow-mcp-server/src/airflow_mcp_server/__main__.py b/src/airflow_mcp_server/__main__.py similarity index 100% rename from airflow-mcp-server/src/airflow_mcp_server/__main__.py rename to src/airflow_mcp_server/__main__.py diff --git a/airflow-mcp-server/src/airflow_mcp_server/client/__init__.py b/src/airflow_mcp_server/client/__init__.py similarity index 100% rename from airflow-mcp-server/src/airflow_mcp_server/client/__init__.py rename to src/airflow_mcp_server/client/__init__.py diff --git a/airflow-mcp-server/src/airflow_mcp_server/client/airflow_client.py b/src/airflow_mcp_server/client/airflow_client.py similarity index 100% rename from airflow-mcp-server/src/airflow_mcp_server/client/airflow_client.py rename to src/airflow_mcp_server/client/airflow_client.py diff --git a/airflow-mcp-server/src/airflow_mcp_server/parser/__init__.py b/src/airflow_mcp_server/parser/__init__.py similarity index 100% rename from airflow-mcp-server/src/airflow_mcp_server/parser/__init__.py rename to src/airflow_mcp_server/parser/__init__.py diff --git a/airflow-mcp-server/src/airflow_mcp_server/parser/operation_parser.py b/src/airflow_mcp_server/parser/operation_parser.py similarity index 100% rename from airflow-mcp-server/src/airflow_mcp_server/parser/operation_parser.py rename to src/airflow_mcp_server/parser/operation_parser.py diff --git a/airflow-mcp-server/src/airflow_mcp_server/resources/v1.yaml b/src/airflow_mcp_server/resources/v1.yaml similarity index 100% rename from airflow-mcp-server/src/airflow_mcp_server/resources/v1.yaml rename to src/airflow_mcp_server/resources/v1.yaml diff --git a/airflow-mcp-server/src/airflow_mcp_server/server.py b/src/airflow_mcp_server/server.py similarity index 100% rename from airflow-mcp-server/src/airflow_mcp_server/server.py rename to src/airflow_mcp_server/server.py diff --git a/airflow-mcp-server/src/airflow_mcp_server/server_safe.py b/src/airflow_mcp_server/server_safe.py similarity index 100% rename from airflow-mcp-server/src/airflow_mcp_server/server_safe.py rename to src/airflow_mcp_server/server_safe.py diff --git a/airflow-mcp-server/src/airflow_mcp_server/server_unsafe.py b/src/airflow_mcp_server/server_unsafe.py similarity index 100% rename from airflow-mcp-server/src/airflow_mcp_server/server_unsafe.py rename to src/airflow_mcp_server/server_unsafe.py diff --git a/airflow-mcp-server/src/airflow_mcp_server/tools/__init__.py b/src/airflow_mcp_server/tools/__init__.py similarity index 100% rename from airflow-mcp-server/src/airflow_mcp_server/tools/__init__.py rename to src/airflow_mcp_server/tools/__init__.py diff --git a/airflow-mcp-server/src/airflow_mcp_server/tools/airflow_tool.py b/src/airflow_mcp_server/tools/airflow_tool.py similarity index 100% rename from airflow-mcp-server/src/airflow_mcp_server/tools/airflow_tool.py rename to src/airflow_mcp_server/tools/airflow_tool.py diff --git a/airflow-mcp-server/src/airflow_mcp_server/tools/base_tools.py b/src/airflow_mcp_server/tools/base_tools.py similarity index 100% rename from airflow-mcp-server/src/airflow_mcp_server/tools/base_tools.py rename to src/airflow_mcp_server/tools/base_tools.py diff --git 
a/airflow-mcp-server/src/airflow_mcp_server/tools/tool_manager.py b/src/airflow_mcp_server/tools/tool_manager.py similarity index 100% rename from airflow-mcp-server/src/airflow_mcp_server/tools/tool_manager.py rename to src/airflow_mcp_server/tools/tool_manager.py diff --git a/airflow-mcp-server/tests/__init__.py b/tests/__init__.py similarity index 100% rename from airflow-mcp-server/tests/__init__.py rename to tests/__init__.py diff --git a/airflow-mcp-server/tests/client/test_airflow_client.py b/tests/client/test_airflow_client.py similarity index 100% rename from airflow-mcp-server/tests/client/test_airflow_client.py rename to tests/client/test_airflow_client.py diff --git a/airflow-mcp-server/tests/conftest.py b/tests/conftest.py similarity index 100% rename from airflow-mcp-server/tests/conftest.py rename to tests/conftest.py diff --git a/tests/dags/test_dag_example.py b/tests/dags/test_dag_example.py deleted file mode 100644 index 6ff3552..0000000 --- a/tests/dags/test_dag_example.py +++ /dev/null @@ -1,83 +0,0 @@ -"""Example DAGs test. This test ensures that all Dags have tags, retries set to two, and no import errors. This is an example pytest and may not be fit the context of your DAGs. Feel free to add and remove tests.""" - -import os -import logging -from contextlib import contextmanager -import pytest -from airflow.models import DagBag - - -@contextmanager -def suppress_logging(namespace): - logger = logging.getLogger(namespace) - old_value = logger.disabled - logger.disabled = True - try: - yield - finally: - logger.disabled = old_value - - -def get_import_errors(): - """ - Generate a tuple for import errors in the dag bag - """ - with suppress_logging("airflow"): - dag_bag = DagBag(include_examples=False) - - def strip_path_prefix(path): - return os.path.relpath(path, os.environ.get("AIRFLOW_HOME")) - - # prepend "(None,None)" to ensure that a test object is always created even if it's a no op. - return [(None, None)] + [ - (strip_path_prefix(k), v.strip()) for k, v in dag_bag.import_errors.items() - ] - - -def get_dags(): - """ - Generate a tuple of dag_id, in the DagBag - """ - with suppress_logging("airflow"): - dag_bag = DagBag(include_examples=False) - - def strip_path_prefix(path): - return os.path.relpath(path, os.environ.get("AIRFLOW_HOME")) - - return [(k, v, strip_path_prefix(v.fileloc)) for k, v in dag_bag.dags.items()] - - -@pytest.mark.parametrize( - "rel_path,rv", get_import_errors(), ids=[x[0] for x in get_import_errors()] -) -def test_file_imports(rel_path, rv): - """Test for import errors on a file""" - if rel_path and rv: - raise Exception(f"{rel_path} failed to import with message \n {rv}") - - -APPROVED_TAGS = {} - - -@pytest.mark.parametrize( - "dag_id,dag,fileloc", get_dags(), ids=[x[2] for x in get_dags()] -) -def test_dag_tags(dag_id, dag, fileloc): - """ - test if a DAG is tagged and if those TAGs are in the approved list - """ - assert dag.tags, f"{dag_id} in {fileloc} has no tags" - if APPROVED_TAGS: - assert not set(dag.tags) - APPROVED_TAGS - - -@pytest.mark.parametrize( - "dag_id,dag, fileloc", get_dags(), ids=[x[2] for x in get_dags()] -) -def test_dag_retries(dag_id, dag, fileloc): - """ - test if a DAG has retries set - """ - assert ( - dag.default_args.get("retries", None) >= 2 - ), f"{dag_id} in {fileloc} must have task retries >= 2." 
diff --git a/airflow-mcp-server/tests/parser/test_operation_parser.py b/tests/parser/test_operation_parser.py similarity index 100% rename from airflow-mcp-server/tests/parser/test_operation_parser.py rename to tests/parser/test_operation_parser.py diff --git a/airflow-mcp-server/tests/tools/__init__.py b/tests/tools/__init__.py similarity index 100% rename from airflow-mcp-server/tests/tools/__init__.py rename to tests/tools/__init__.py diff --git a/airflow-mcp-server/tests/tools/test_airflow_tool.py b/tests/tools/test_airflow_tool.py similarity index 100% rename from airflow-mcp-server/tests/tools/test_airflow_tool.py rename to tests/tools/test_airflow_tool.py diff --git a/airflow-mcp-server/tests/tools/test_models.py b/tests/tools/test_models.py similarity index 100% rename from airflow-mcp-server/tests/tools/test_models.py rename to tests/tools/test_models.py diff --git a/airflow-mcp-server/tests/tools/test_tool_manager.py b/tests/tools/test_tool_manager.py similarity index 100% rename from airflow-mcp-server/tests/tools/test_tool_manager.py rename to tests/tools/test_tool_manager.py diff --git a/airflow-mcp-server/uv.lock b/uv.lock similarity index 100% rename from airflow-mcp-server/uv.lock rename to uv.lock
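For reference, a minimal sketch of exercising the relocated package with the environment variables described in the updated README above. The credentials, host URL, and image tag are placeholders, not values from this diff; per the README, only Basic Auth is supported and `AUTH_TOKEN` is the base64-encoded `username:password`.

```bash
# Placeholder credentials and host -- substitute your own deployment's values.
export AUTH_TOKEN="$(printf 'admin:admin' | base64)"
export AIRFLOW_BASE_URL="http://localhost:8080/api/v1"

# Read-only mode (GET requests only); omit --safe, or pass --unsafe, for full access.
uvx airflow-mcp-server --safe

# Or build and run the new top-level Dockerfile; the image tag here is hypothetical,
# and the args after the image name are passed to the airflow-mcp-server ENTRYPOINT.
docker build -t airflow-mcp-server .
docker run -i -e AIRFLOW_BASE_URL -e AUTH_TOKEN airflow-mcp-server --safe
```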