.astro/config.yaml (deleted)
@@ -1,2 +0,0 @@
project:
  name: airflow-mcp-server

.astro/dag_integrity_exceptions.txt (deleted)
@@ -1 +0,0 @@
# Add dag files to exempt from parse test below. ex: dags/<test-file>

.astro/test_dag_integrity_default.py (deleted)
@@ -1,141 +0,0 @@
"""Test the validity of all DAGs. **USED BY DEV PARSE COMMAND DO NOT EDIT**"""

from contextlib import contextmanager
import logging
import os

import pytest

from airflow.models import DagBag, Variable, Connection
from airflow.hooks.base import BaseHook
from airflow.utils.db import initdb

# init airflow database
initdb()

# The following code patches errors caused by missing OS Variables, Airflow Connections, and Airflow Variables


# =========== MONKEYPATCH BaseHook.get_connection() ===========
def basehook_get_connection_monkeypatch(key: str, *args, **kwargs):
    print(
        f"Attempted to fetch connection during parse returning an empty Connection object for {key}"
    )
    return Connection(key)


BaseHook.get_connection = basehook_get_connection_monkeypatch
# # =========== /MONKEYPATCH BASEHOOK.GET_CONNECTION() ===========


# =========== MONKEYPATCH OS.GETENV() ===========
def os_getenv_monkeypatch(key: str, *args, **kwargs):
    default = None
    if args:
        default = args[0]  # os.getenv should get at most 1 arg after the key
    if kwargs:
        default = kwargs.get(
            "default", None
        )  # and sometimes kwarg if people are using the sig

    env_value = os.environ.get(key, None)

    if env_value:
        return env_value  # if the env_value is set, return it
    if (
        key == "JENKINS_HOME" and default is None
    ):  # fix https://github.com/astronomer/astro-cli/issues/601
        return None
    if default:
        return default  # otherwise return whatever default has been passed
    return f"MOCKED_{key.upper()}_VALUE"  # if absolutely nothing has been passed - return the mocked value


os.getenv = os_getenv_monkeypatch
# # =========== /MONKEYPATCH OS.GETENV() ===========


# =========== MONKEYPATCH VARIABLE.GET() ===========
class magic_dict(dict):
    def __init__(self, *args, **kwargs):
        self.update(*args, **kwargs)

    def __getitem__(self, key):
        return {}.get(key, "MOCKED_KEY_VALUE")


_no_default = object()  # allow falsey defaults


def variable_get_monkeypatch(key: str, default_var=_no_default, deserialize_json=False):
    print(
        f"Attempted to get Variable value during parse, returning a mocked value for {key}"
    )

    if default_var is not _no_default:
        return default_var
    if deserialize_json:
        return magic_dict()
    return "NON_DEFAULT_MOCKED_VARIABLE_VALUE"


Variable.get = variable_get_monkeypatch
# # =========== /MONKEYPATCH VARIABLE.GET() ===========


@contextmanager
def suppress_logging(namespace):
    """
    Suppress logging within a specific namespace to keep tests "clean" during build
    """
    logger = logging.getLogger(namespace)
    old_value = logger.disabled
    logger.disabled = True
    try:
        yield
    finally:
        logger.disabled = old_value


def get_import_errors():
    """
    Generate a tuple for import errors in the dag bag, and include DAGs without errors.
    """
    with suppress_logging("airflow"):
        dag_bag = DagBag(include_examples=False)

        def strip_path_prefix(path):
            return os.path.relpath(path, os.environ.get("AIRFLOW_HOME"))

        # Initialize an empty list to store the tuples
        result = []

        # Iterate over the items in import_errors
        for k, v in dag_bag.import_errors.items():
            result.append((strip_path_prefix(k), v.strip()))

        # Check if there are DAGs without errors
        for file_path in dag_bag.dags:
            # Check if the file_path is not in import_errors, meaning no errors
            if file_path not in dag_bag.import_errors:
                result.append((strip_path_prefix(file_path), "No import errors"))

        return result


@pytest.mark.parametrize(
    "rel_path, rv", get_import_errors(), ids=[x[0] for x in get_import_errors()]
)
def test_file_imports(rel_path, rv):
    """Test for import errors on a file"""
    if os.path.exists(".astro/dag_integrity_exceptions.txt"):
        with open(".astro/dag_integrity_exceptions.txt", "r") as f:
            exceptions = f.readlines()
    print(f"Exceptions: {exceptions}")
    if (rv != "No import errors") and rel_path not in exceptions:
        # If rv is not "No import errors," consider it a failed test
        raise Exception(f"{rel_path} failed to import with message \n {rv}")
    else:
        # If rv is "No import errors," consider it a passed test
        print(f"{rel_path} passed the import test")

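The deleted shim above exists so that the dev parse command can import every DAG file without a real environment: connection, env-var, and Variable lookups are patched to return mocked values instead of raising. A minimal illustration of what the patched lookups return (assuming Airflow is installed and the patches above have been applied, e.g. by importing that module):

```python
# Illustration only: behavior of the patched lookups from the shim above.
import os

from airflow.models import Variable

print(os.getenv("SOME_MISSING_KEY"))                  # "MOCKED_SOME_MISSING_KEY_VALUE"
print(os.getenv("SOME_MISSING_KEY", "fallback"))      # "fallback" -- explicit defaults win
print(Variable.get("missing_var", default_var=None))  # None -- falsey defaults honored via _no_default
print(Variable.get("missing_var", deserialize_json=True)["any_key"])  # "MOCKED_KEY_VALUE"
```
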
.dockerignore (deleted)
@@ -1,12 +0,0 @@
astro
.git
.env
airflow_settings.yaml
logs/
.venv
airflow.db
airflow.cfg
airflow-mcp-server/
resources/
assets/
README.md

.github/workflows/python-publish.yml (vendored)
@@ -31,7 +31,6 @@ jobs:
           pip install uv
 
       - name: Build release distributions
-        working-directory: airflow-mcp-server
         run: |
           uv pip install --system build
           python -m build
@@ -40,7 +39,7 @@ jobs:
         uses: actions/upload-artifact@v4
         with:
           name: release-dists
-          path: airflow-mcp-server/dist/
+          path: dist/
 
   pypi-publish:
     runs-on: ubuntu-latest

Dockerfile
@@ -1 +1,27 @@
-FROM quay.io/astronomer/astro-runtime:12.6.0
+# Use a Python image with uv pre-installed
+FROM ghcr.io/astral-sh/uv:python3.12-bookworm-slim AS uv
+
+WORKDIR /app
+
+ENV UV_COMPILE_BYTECODE=1
+ENV UV_LINK_MODE=copy
+
+RUN --mount=type=cache,target=/root/.cache/uv \
+    --mount=type=bind,source=uv.lock,target=uv.lock \
+    --mount=type=bind,source=pyproject.toml,target=pyproject.toml \
+    uv sync --frozen --no-install-project --no-dev --no-editable
+
+ADD . /app
+RUN --mount=type=cache,target=/root/.cache/uv \
+    uv sync --frozen --no-dev --no-editable
+
+FROM python:3.12-slim-bookworm
+
+WORKDIR /app
+
+COPY --from=uv /root/.local /root/.local
+COPY --from=uv --chown=app:app /app/.venv /app/.venv
+
+ENV PATH="/app/.venv/bin:$PATH"
+
+ENTRYPOINT ["airflow-mcp-server"]

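The new `ENTRYPOINT` runs the `airflow-mcp-server` console script that `pyproject.toml` maps to `airflow_mcp_server.__main__:main` (see the pyproject diff below). That module is not part of this diff; purely as a hypothetical sketch of such an entrypoint, with the `--safe`/`--unsafe` flags taken from the README and invented `serve_*` stand-ins for the real server loops:

```python
# Hypothetical sketch -- the real airflow_mcp_server.__main__ is not shown in this diff.
import argparse


def serve_safe() -> None:
    print("starting in safe (read-only) mode")  # stand-in for the real safe-mode server


def serve_unsafe() -> None:
    print("starting in unsafe mode (default)")  # stand-in for the real default server


def main() -> None:
    parser = argparse.ArgumentParser(prog="airflow-mcp-server")
    mode = parser.add_mutually_exclusive_group()
    mode.add_argument("--safe", action="store_true", help="read-only (GET) operations only")
    mode.add_argument("--unsafe", action="store_true", help="allow all operations (default)")
    args = parser.parse_args()

    serve_safe() if args.safe else serve_unsafe()


if __name__ == "__main__":
    main()
```
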
LICENSE
@@ -1,6 +1,6 @@
 MIT License
 
-Copyright (c) 2025 Abhishek
+Copyright (c) 2025 Abhishek Bhakat
 
 Permission is hereby granted, free of charge, to any person obtaining a copy
 of this software and associated documentation files (the "Software"), to deal

README.md
@@ -8,12 +8,13 @@
 
 ## Overview
-A Model Context Protocol server for controlling Airflow via Airflow APIs.
+A [Model Context Protocol](https://modelcontextprotocol.io/) server for controlling Airflow via Airflow APIs.
 
 ## Demo Video
 
 https://github.com/user-attachments/assets/f3e60fff-8680-4dd9-b08e-fa7db655a705
 
 
 ## Setup
 
 ### Usage with Claude Desktop
@@ -35,13 +36,41 @@
 }
 ```
 
-# Scope
-
-2 different streams in which Airflow MCP Server can be used:
-- Adding Airflow to AI (_complete access to an Airflow deployment_)
-  - This will enable AI to write DAGs and do things on a schedule on its own.
-  - Use command `airflow-mcp-server` or `airflow-mcp-server --unsafe`.
-- Adding AI to Airflow (_read-only access using Airflow Plugin_)
-  - This stream can give users a better understanding of their deployment, especially where teams have hundreds, if not thousands, of DAGs.
-  - Use command `airflow-mcp-server --safe`.
+### Operation Modes
+
+The server supports two operation modes:
+
+- **Safe Mode** (`--safe`): Only allows read-only operations (GET requests). This is useful when you want to prevent any modifications to your Airflow instance.
+- **Unsafe Mode** (`--unsafe`): Allows all operations, including modifications. This is the default mode.
+
+To start in safe mode:
+```bash
+airflow-mcp-server --safe
+```
+
+To explicitly start in unsafe mode (though this is the default):
+```bash
+airflow-mcp-server --unsafe
+```
+
+### Considerations
+
+The MCP Server expects the following environment variables to be set:
+- `AIRFLOW_BASE_URL`: The base URL of the Airflow API
+- `AUTH_TOKEN`: The token to use for authorization (_base64-encoded `username:password`_)
+- `OPENAPI_SPEC`: The path to the OpenAPI spec file (_optional; defaults to the latest stable release_)
+
+*Currently, only Basic Auth is supported.*
+
+**Page Limit**
+
+The default page limit is 100 items; you can change it with the `maximum_page_limit` option in the `[api]` section of `airflow.cfg`.
+
+## Tasks
+
+- [x] First API
+- [x] Parse OpenAPI Spec
+- [x] Safe/Unsafe mode implementation
+- [ ] Parse proper descriptions with list_tools.
+- [ ] Airflow config fetch (_specifically for page limit_)
+- [ ] Make env variables optional (_env variables might not be ideal for airflow plugins_)

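One practical note on the `AUTH_TOKEN` value required above: it is the base64 encoding of `username:password` for Basic Auth. A quick way to generate it with only the standard library (`admin:admin` is a placeholder, not a real credential from this repo):

```python
import base64

username, password = "admin", "admin"  # placeholders -- use your Airflow credentials
token = base64.b64encode(f"{username}:{password}".encode()).decode()
print(token)  # "YWRtaW46YWRtaW4=" -- export this as AUTH_TOKEN
```
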
airflow-mcp-server/Dockerfile (deleted; content moved to the root Dockerfile above)
@@ -1,27 +0,0 @@
# Use a Python image with uv pre-installed
FROM ghcr.io/astral-sh/uv:python3.12-bookworm-slim AS uv

WORKDIR /app

ENV UV_COMPILE_BYTECODE=1
ENV UV_LINK_MODE=copy

RUN --mount=type=cache,target=/root/.cache/uv \
    --mount=type=bind,source=uv.lock,target=uv.lock \
    --mount=type=bind,source=pyproject.toml,target=pyproject.toml \
    uv sync --frozen --no-install-project --no-dev --no-editable

ADD . /app
RUN --mount=type=cache,target=/root/.cache/uv \
    uv sync --frozen --no-dev --no-editable

FROM python:3.12-slim-bookworm

WORKDIR /app

COPY --from=uv /root/.local /root/.local
COPY --from=uv --chown=app:app /app/.venv /app/.venv

ENV PATH="/app/.venv/bin:$PATH"

ENTRYPOINT ["airflow-mcp-server"]

airflow-mcp-server/README.md (deleted; content moved to the root README.md above)
@@ -1,66 +0,0 @@
# airflow-mcp-server: An MCP Server for controlling Airflow

## Overview
A [Model Context Protocol](https://modelcontextprotocol.io/) server for controlling Airflow via Airflow APIs.

## Setup

### Usage with Claude Desktop

```json
{
  "mcpServers": {
    "airflow-mcp-server": {
      "command": "uvx",
      "args": [
        "airflow-mcp-server"
      ],
      "env": {
        "AIRFLOW_BASE_URL": "http://<host:port>/api/v1",
        "AUTH_TOKEN": "<base64_encoded_username_password>"
      }
    }
  }
}
```

### Operation Modes

The server supports two operation modes:

- **Safe Mode** (`--safe`): Only allows read-only operations (GET requests). This is useful when you want to prevent any modifications to your Airflow instance.
- **Unsafe Mode** (`--unsafe`): Allows all operations, including modifications. This is the default mode.

To start in safe mode:
```bash
airflow-mcp-server --safe
```

To explicitly start in unsafe mode (though this is the default):
```bash
airflow-mcp-server --unsafe
```

### Considerations

The MCP Server expects the following environment variables to be set:
- `AIRFLOW_BASE_URL`: The base URL of the Airflow API
- `AUTH_TOKEN`: The token to use for authorization (_base64-encoded `username:password`_)
- `OPENAPI_SPEC`: The path to the OpenAPI spec file (_optional; defaults to the latest stable release_)

*Currently, only Basic Auth is supported.*

**Page Limit**

The default page limit is 100 items; you can change it with the `maximum_page_limit` option in the `[api]` section of `airflow.cfg`.

## Tasks

- [x] First API
- [x] Parse OpenAPI Spec
- [x] Safe/Unsafe mode implementation
- [ ] Parse proper descriptions with list_tools.
- [ ] Airflow config fetch (_specifically for page limit_)
- [ ] Make env variables optional (_env variables might not be ideal for airflow plugins_)

airflow_settings.yaml (deleted)
@@ -1,25 +0,0 @@
# This file allows you to configure Airflow Connections, Pools, and Variables in a single place for local development only.
# NOTE: json dicts can be added to the conn_extra field as yaml key value pairs. See the example below.

# For more information, refer to our docs: https://www.astronomer.io/docs/astro/cli/develop-project#configure-airflow_settingsyaml-local-development-only
# For questions, reach out to: https://support.astronomer.io
# For issues create an issue ticket here: https://github.com/astronomer/astro-cli/issues

airflow:
  connections:
    - conn_id:
      conn_type:
      conn_host:
      conn_schema:
      conn_login:
      conn_password:
      conn_port:
      conn_extra:
        example_extra_field: example-value
  pools:
    - pool_name:
      pool_slot:
      pool_description:
  variables:
    - variable_name:
      variable_value:

dags/exampledag.py (deleted)
@@ -1,100 +0,0 @@
"""
## Astronaut ETL example DAG

This DAG queries the list of astronauts currently in space from the
Open Notify API and prints each astronaut's name and flying craft.

There are two tasks, one to get the data from the API and save the results,
and another to print the results. Both tasks are written in Python using
Airflow's TaskFlow API, which allows you to easily turn Python functions into
Airflow tasks, and automatically infer dependencies and pass data.

The second task uses dynamic task mapping to create a copy of the task for
each Astronaut in the list retrieved from the API. This list will change
depending on how many Astronauts are in space, and the DAG will adjust
accordingly each time it runs.

For more explanation and getting started instructions, see our Write your
first DAG tutorial: https://www.astronomer.io/docs/learn/get-started-with-airflow

![Picture of the ISS]()
"""

from airflow import Dataset
from airflow.decorators import dag, task
from pendulum import datetime
import requests


# Define the basic parameters of the DAG, like schedule and start_date
@dag(
    start_date=datetime(2024, 1, 1),
    schedule="@daily",
    catchup=False,
    doc_md=__doc__,
    default_args={"owner": "Astro", "retries": 3},
    tags=["example"],
)
def example_astronauts():
    # Define tasks
    @task(
        # Define a dataset outlet for the task. This can be used to schedule downstream DAGs when this task has run.
        outlets=[Dataset("current_astronauts")]
    )  # Define that this task updates the `current_astronauts` Dataset
    def get_astronauts(**context) -> list[dict]:
        """
        This task uses the requests library to retrieve a list of Astronauts
        currently in space. The results are pushed to XCom with a specific key
        so they can be used in a downstream pipeline. The task returns a list
        of Astronauts to be used in the next task.
        """
        try:
            r = requests.get("http://api.open-notify.org/astros.json")
            r.raise_for_status()
            number_of_people_in_space = r.json()["number"]
            list_of_people_in_space = r.json()["people"]
        except:
            print("API currently not available, using hardcoded data instead.")
            number_of_people_in_space = 12
            list_of_people_in_space = [
                {"craft": "ISS", "name": "Oleg Kononenko"},
                {"craft": "ISS", "name": "Nikolai Chub"},
                {"craft": "ISS", "name": "Tracy Caldwell Dyson"},
                {"craft": "ISS", "name": "Matthew Dominick"},
                {"craft": "ISS", "name": "Michael Barratt"},
                {"craft": "ISS", "name": "Jeanette Epps"},
                {"craft": "ISS", "name": "Alexander Grebenkin"},
                {"craft": "ISS", "name": "Butch Wilmore"},
                {"craft": "ISS", "name": "Sunita Williams"},
                {"craft": "Tiangong", "name": "Li Guangsu"},
                {"craft": "Tiangong", "name": "Li Cong"},
                {"craft": "Tiangong", "name": "Ye Guangfu"},
            ]

        context["ti"].xcom_push(
            key="number_of_people_in_space", value=number_of_people_in_space
        )
        return list_of_people_in_space

    @task
    def print_astronaut_craft(greeting: str, person_in_space: dict) -> None:
        """
        This task creates a print statement with the name of an
        Astronaut in space and the craft they are flying on from
        the API request results of the previous task, along with a
        greeting which is hard-coded in this example.
        """
        craft = person_in_space["craft"]
        name = person_in_space["name"]

        print(f"{name} is currently in space flying on the {craft}! {greeting}")

    # Use dynamic task mapping to run the print_astronaut_craft task for each
    # Astronaut in space
    print_astronaut_craft.partial(greeting="Hello! :)").expand(
        person_in_space=get_astronauts()  # Define dependencies using TaskFlow API syntax
    )


# Instantiate the DAG
example_astronauts()

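The deleted example DAG's core pattern is dynamic task mapping via `partial().expand()`: one mapped task instance per element of a list produced at runtime. A minimal sketch of the same pattern, stripped of the astronaut specifics (assumes Airflow 2.4+ with the TaskFlow API):

```python
from airflow.decorators import dag, task
from pendulum import datetime


@dag(start_date=datetime(2024, 1, 1), schedule=None, catchup=False)
def mapping_sketch():
    @task
    def make_items() -> list[int]:
        return [1, 2, 3]  # length is only known at runtime

    @task
    def report(prefix: str, item: int) -> None:
        print(f"{prefix}{item}")

    # one mapped `report` instance per element; `prefix` stays constant via partial()
    report.partial(prefix="item: ").expand(item=make_items())


mapping_sketch()
```
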
pyproject.toml
@@ -1,8 +1,12 @@
 [project]
 name = "airflow-mcp-server"
-version = "0.2.0"
+version = "0.3.0"
 description = "MCP Server for Airflow"
+readme = "README.md"
 requires-python = ">=3.11"
+authors = [
+    {name = "Abhishek Bhakat", email = "abhishek.bhakat@hotmail.com"}
+]
 dependencies = [
     "aiofiles>=24.1.0",
     "aiohttp>=3.11.11",
@@ -13,6 +17,19 @@ dependencies = [
     "pydantic>=2.10.5",
     "pyyaml>=6.0.0",
 ]
+classifiers = [
+    "Development Status :: 3 - Alpha",
+    "Operating System :: OS Independent",
+    "Programming Language :: Python :: 3.12",
+    "Programming Language :: Python :: 3.11",
+    "Programming Language :: Python :: 3.10",
+]
+license = "MIT"
+license-files = ["LICEN[CS]E*"]
 
+[project.urls]
+GitHub = "https://github.com/abhishekbhakat/airflow-mcp-server"
+Issues = "https://github.com/abhishekbhakat/airflow-mcp-server/issues"
+
 [project.scripts]
 airflow-mcp-server = "airflow_mcp_server.__main__:main"
@@ -40,11 +57,10 @@ exclude = [
 
 [tool.hatch.build.targets.wheel]
 packages = ["src/airflow_mcp_server"]
+package-data = {"airflow_mcp_server"= ["*.yaml"]}
 
 [tool.hatch.build.targets.wheel.sources]
-"src/airflow_mcp_server" = [
-    "*.yaml",
-]
+"src/airflow_mcp_server" = "airflow_mcp_server"
 
 [tool.pytest.ini_options]
 pythonpath = ["src"]

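The `package-data` addition bundles the package's `*.yaml` files (presumably the OpenAPI spec that `OPENAPI_SPEC` falls back to) into the wheel. As a hedged sketch, such bundled data can be located at runtime with the standard library; the filename `v1.yaml` here is invented, and the server's actual lookup is not shown in this diff:

```python
# Hypothetical: read a YAML file bundled with the installed package.
from importlib.resources import files

spec = files("airflow_mcp_server") / "v1.yaml"  # "v1.yaml" is an invented example name
print(spec.read_text()[:200])
```
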
requirements.txt (deleted)
@@ -1 +0,0 @@
# Astro Runtime includes the following pre-installed providers packages: https://www.astronomer.io/docs/astro/runtime-image-architecture#provider-packages

tests/dags/test_dag_example.py (deleted)
@@ -1,83 +0,0 @@
"""Example DAGs test. This test ensures that all Dags have tags, retries set to two, and no import errors. This is an example pytest and may not fit the context of your DAGs. Feel free to add and remove tests."""

import os
import logging
from contextlib import contextmanager
import pytest
from airflow.models import DagBag


@contextmanager
def suppress_logging(namespace):
    logger = logging.getLogger(namespace)
    old_value = logger.disabled
    logger.disabled = True
    try:
        yield
    finally:
        logger.disabled = old_value


def get_import_errors():
    """
    Generate a tuple for import errors in the dag bag
    """
    with suppress_logging("airflow"):
        dag_bag = DagBag(include_examples=False)

        def strip_path_prefix(path):
            return os.path.relpath(path, os.environ.get("AIRFLOW_HOME"))

        # prepend "(None,None)" to ensure that a test object is always created even if it's a no op.
        return [(None, None)] + [
            (strip_path_prefix(k), v.strip()) for k, v in dag_bag.import_errors.items()
        ]


def get_dags():
    """
    Generate a tuple of dag_id, <DAG objects> in the DagBag
    """
    with suppress_logging("airflow"):
        dag_bag = DagBag(include_examples=False)

    def strip_path_prefix(path):
        return os.path.relpath(path, os.environ.get("AIRFLOW_HOME"))

    return [(k, v, strip_path_prefix(v.fileloc)) for k, v in dag_bag.dags.items()]


@pytest.mark.parametrize(
    "rel_path,rv", get_import_errors(), ids=[x[0] for x in get_import_errors()]
)
def test_file_imports(rel_path, rv):
    """Test for import errors on a file"""
    if rel_path and rv:
        raise Exception(f"{rel_path} failed to import with message \n {rv}")


APPROVED_TAGS = {}


@pytest.mark.parametrize(
    "dag_id,dag,fileloc", get_dags(), ids=[x[2] for x in get_dags()]
)
def test_dag_tags(dag_id, dag, fileloc):
    """
    test if a DAG is tagged and if those TAGs are in the approved list
    """
    assert dag.tags, f"{dag_id} in {fileloc} has no tags"
    if APPROVED_TAGS:
        assert not set(dag.tags) - APPROVED_TAGS


@pytest.mark.parametrize(
    "dag_id,dag,fileloc", get_dags(), ids=[x[2] for x in get_dags()]
)
def test_dag_retries(dag_id, dag, fileloc):
    """
    test if a DAG has retries set
    """
    assert (
        dag.default_args.get("retries", None) >= 2
    ), f"{dag_id} in {fileloc} must have task retries >= 2."

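`APPROVED_TAGS` ships empty, so `test_dag_tags` only asserts that each DAG has *some* tag. Populating the set turns on the allow-list check; for example (sample tags, not from this repo):

```python
APPROVED_TAGS = {"example", "etl"}  # sample allow-list -- choose your team's own tags

dag_tags = ["example", "adhoc"]     # a hypothetical DAG's tags
offending = set(dag_tags) - APPROVED_TAGS
print(offending)                    # {'adhoc'} -> test_dag_tags would fail this DAG
```
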
airflow-mcp-server/uv.lock → uv.lock (generated; renamed with no content changes)