Compare commits

...

10 Commits

Author SHA1 Message Date
b0b663b7d8 dev: add dev tools, pre-commit, Makefile, contributing, changelog, CI, and fix python version 2025-06-18 06:01:01 +02:00
2557aacd5d Refactor DockerComposeManager usage in basic_example and enhance error handling for service, network, config, and secret management 2025-06-18 05:05:29 +02:00
c8e262a736 Remove copyright header and ensure flexibility in VolumeConfig by allowing extra fields 2025-06-18 04:47:48 +02:00
6a70beb156 Enhance configuration classes with detailed docstrings and improve YAML saving functionality in DockerComposeManager 2025-06-18 04:44:14 +02:00
2996016aee Add logging to basic_example and update PostgreSQL database name 2025-06-18 04:41:49 +02:00
2e000017e4 Enhance DockerComposeManager with detailed service, volume, and network configurations using Pydantic models 2025-06-18 04:41:32 +02:00
f0ee0bcac6 Implement volume and network management methods in DockerComposeManager and enhance service creation with additional parameters 2025-06-18 04:14:51 +02:00
be43c468a4 Refactor pyproject.toml and enhance DockerComposeManager with type hints
- Updated dependencies and added Ruff configuration for linting in pyproject.toml.
- Improved type hinting in DockerComposeManager methods for better clarity.
- Created an empty __init__.py file in tests directory.
2025-06-18 03:49:26 +02:00
63160d682f Add initial implementation of Docker Compose manager and example usage
- Introduced DockerComposeManager class for programmatically creating and managing Docker Compose YAML files.
- Added example script demonstrating usage of DockerComposeManager.
- Created tests for service creation, modification, and removal.
- Included project metadata in pyproject.toml and added linting instructions in copilot-instructions.md.
2025-06-18 03:45:48 +02:00
49e72e82a0 Update .gitignore 2025-06-18 03:45:23 +02:00
15 changed files with 1115 additions and 5 deletions

7
.github/copilot-instructions.md vendored Normal file

@@ -0,0 +1,7 @@
This Python library is used to create Docker compose.yaml files from Python classes.
It is designed to simplify the process of defining and managing Docker Compose configurations programmatically.
Uses uv for Python dependency management. `uv sync`, `uv add <package>`.
Ruff is used for linting and formatting. Use `ruff check .` to check the code and `ruff format .` to format it.
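For orientation, here is a minimal sketch of what the library added in this changeset does, using the `DockerComposeManager` API from `src/compose/__init__.py` below (the file path and service values are illustrative):

```python
from compose import DockerComposeManager

# Build a compose file programmatically; the context manager saves on exit.
with DockerComposeManager("docker-compose.yaml") as manager:
    manager.add_volume("db_data")
    manager.create_service(
        name="db",
        image="postgres:15-alpine",
        environment={"POSTGRES_PASSWORD": "password"},
        volumes=["db_data:/var/lib/postgresql/data"],
    )
```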

88
.github/workflows/ci.yml vendored Normal file

@@ -0,0 +1,88 @@
name: CI
on:
push:
branches: [ main, develop ]
pull_request:
branches: [ main ]
jobs:
test:
runs-on: ubuntu-latest
strategy:
matrix:
python-version: ["3.9", "3.10", "3.11", "3.12", "3.13"]
steps:
- uses: actions/checkout@v4
- name: Set up Python ${{ matrix.python-version }}
uses: actions/setup-python@v4
with:
python-version: ${{ matrix.python-version }}
- name: Install uv
uses: astral-sh/setup-uv@v3
with:
version: "latest"
- name: Cache dependencies
uses: actions/cache@v3
with:
path: |
.venv
.cache/uv
key: ${{ runner.os }}-uv-${{ hashFiles('**/pyproject.toml') }}
restore-keys: |
${{ runner.os }}-uv-
- name: Install dependencies
run: |
uv pip install -e ".[dev]"
- name: Run linting
run: |
uv run ruff check src/ tests/ example/
- name: Run formatting check
run: |
uv run ruff format --check src/ tests/ example/
- name: Run type checking
run: |
uv run mypy src/
- name: Run tests
run: |
uv run pytest --cov=src --cov-report=xml
- name: Upload coverage to Codecov
uses: codecov/codecov-action@v3
with:
file: ./coverage.xml
flags: unittests
name: codecov-umbrella
fail_ci_if_error: false
pre-commit:
runs-on: ubuntu-latest
steps:
- uses: actions/checkout@v4
- name: Set up Python
uses: actions/setup-python@v4
with:
python-version: "3.13"
- name: Install uv
uses: astral-sh/setup-uv@v3
with:
version: "latest"
- name: Install dependencies
run: |
uv pip install -e ".[dev]"
- name: Run pre-commit
run: |
uv run pre-commit run --all-files

9
.gitignore vendored

@@ -86,27 +86,27 @@ ipython_config.py
# pyenv
# For a library or package, you might want to ignore these files since the code is
# intended to run in multiple environments; otherwise, check them in:
# .python-version
.python-version
# pipenv
# According to pypa/pipenv#598, it is recommended to include Pipfile.lock in version control.
# However, in case of collaboration, if having platform-specific dependencies or dependencies
# having no cross-platform support, pipenv may install dependencies that don't work, or not
# install all needed dependencies.
#Pipfile.lock
Pipfile.lock
# UV
# Similar to Pipfile.lock, it is generally recommended to include uv.lock in version control.
# This is especially recommended for binary packages to ensure reproducibility, and is more
# commonly ignored for libraries.
#uv.lock
uv.lock
# poetry
# Similar to Pipfile.lock, it is generally recommended to include poetry.lock in version control.
# This is especially recommended for binary packages to ensure reproducibility, and is more
# commonly ignored for libraries.
# https://python-poetry.org/docs/basic-usage/#commit-your-poetrylock-file-to-version-control
#poetry.lock
poetry.lock
# pdm
# Similar to Pipfile.lock, it is generally recommended to include pdm.lock in version control.
@@ -173,4 +173,3 @@ cython_debug/
# PyPI configuration file
.pypirc

36
.pre-commit-config.yaml Normal file

@@ -0,0 +1,36 @@
repos:
- repo: https://github.com/pre-commit/pre-commit-hooks
rev: v4.5.0
hooks:
- id: trailing-whitespace
- id: end-of-file-fixer
- id: check-yaml
- id: check-added-large-files
- id: check-merge-conflict
- id: check-case-conflict
- id: check-docstring-first
- id: debug-statements
- repo: https://github.com/astral-sh/ruff-pre-commit
rev: v0.12.0
hooks:
- id: ruff
args: [--fix]
- id: ruff-format
- repo: https://github.com/pre-commit/mirrors-mypy
rev: v1.8.0
hooks:
- id: mypy
additional_dependencies: [types-PyYAML]
args: [--ignore-missing-imports]
- repo: local
hooks:
- id: pytest
name: pytest
entry: pytest
language: system
pass_filenames: false
always_run: true
stages: [manual]

12
.vscode/settings.json vendored Normal file

@@ -0,0 +1,12 @@
{
"cSpell.words": [
"docstrings",
"healthcheck",
"isort",
"pycodestyle",
"pydantic",
"pydocstyle",
"pytest",
"pyyaml"
]
}

42
CHANGELOG.md Normal file

@@ -0,0 +1,42 @@
# Changelog
All notable changes to this project will be documented in this file.
The format is based on [Keep a Changelog](https://keepachangelog.com/en/1.0.0/),
and this project adheres to [Semantic Versioning](https://semver.org/spec/v2.0.0.html).
## [Unreleased]
### Added
- Initial project structure
- DockerComposeManager class for programmatic Docker Compose file management
- Support for services, volumes, networks, configs, and secrets
- Basic example demonstrating usage
- Comprehensive test suite
- Development tools configuration (Ruff, MyPy, Pre-commit)
- Makefile for common development commands
- Contributing guidelines
### Changed
- Lowered Python version requirement from 3.13+ to 3.9+ for broader compatibility
### Fixed
- None yet
## [0.1.0] - 2024-01-XX
### Added
- Initial release
- Core DockerComposeManager functionality
- Support for basic Docker Compose features:
- Services with full configuration options
- Volumes
- Networks
- Configs
- Secrets
- Context manager support for auto-saving
- Pydantic models for type safety
- YAML file generation and parsing
[Unreleased]: https://github.com/yourusername/compose/compare/v0.1.0...HEAD
[0.1.0]: https://github.com/yourusername/compose/releases/tag/v0.1.0

146
CONTRIBUTING.md Normal file

@@ -0,0 +1,146 @@
# Contributing to Compose
Thank you for your interest in contributing to the Compose project! This document provides guidelines and information for contributors.
## Development Setup
1. **Clone the repository:**
```bash
git clone <repository-url>
cd compose
```
2. **Install development dependencies:**
```bash
uv pip install -e ".[dev]"
```
3. **Install pre-commit hooks:**
```bash
make pre-commit-install
```
## Development Workflow
### Code Quality
We use several tools to maintain code quality:
- **Ruff**: For linting and formatting
- **MyPy**: For type checking
- **Pre-commit**: For automated checks on commit
### Running Checks
```bash
# Run all checks
make check-all
# Run individual checks
make lint # Linting
make format # Code formatting
make type-check # Type checking
make test # Tests
make test-cov # Tests with coverage
```
### Pre-commit Hooks
Pre-commit hooks will automatically run on each commit. To run them manually:
```bash
make pre-commit-run
```
## Testing
### Running Tests
```bash
# Run all tests
make test
# Run tests with coverage
make test-cov
# Run specific test file
uv run pytest tests/test_specific.py
# Run tests with verbose output
uv run pytest -v
```
### Writing Tests
- Place all tests in the `tests/` directory
- Use descriptive test names
- Follow the existing test patterns
- Include both positive and negative test cases
- Test edge cases and error conditions (see the example below)
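A minimal sketch of such a test, reusing the `DockerComposeManager` API and the `tmp_path` fixture already used in `tests/` (the test name is illustrative):

```python
from __future__ import annotations

from pathlib import Path

import pytest

from compose import DockerComposeManager


def test_remove_missing_volume_raises(tmp_path: Path) -> None:
    """Negative case: removing a volume that was never added raises KeyError."""
    manager = DockerComposeManager(str(tmp_path / "docker-compose.yml"))
    with pytest.raises(KeyError):
        manager.remove_volume("does_not_exist")
```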
## Code Style
### Python Code
- Follow PEP 8 style guidelines
- Use type hints for all function parameters and return values
- Write docstrings for all public functions and classes
- Use Google-style docstrings (see the example below)
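For reference, a Google-style docstring looks like this (the function is purely illustrative):

```python
def split_port_mapping(mapping: str) -> tuple[str, str]:
    """Split a port mapping into its host and container parts.

    Args:
        mapping: A mapping in "host:container" form, e.g. "8080:80".

    Returns:
        A (host_port, container_port) tuple.

    Raises:
        ValueError: If the mapping is not in "host:container" form.
    """
    host, sep, container = mapping.partition(":")
    if not sep or not host or not container:
        raise ValueError(f"Expected 'host:container', got {mapping!r}")
    return host, container
```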
### Commit Messages
- Use clear, descriptive commit messages
- Start with a verb in present tense (e.g., "Add", "Fix", "Update")
- Keep the first line under 50 characters
- Add more details in the body if needed
Example:
```
Add support for Docker Compose volumes
- Implement VolumeConfig class
- Add add_volume and remove_volume methods
- Include comprehensive tests
```
## Pull Request Process
1. **Create a feature branch** from the main branch
2. **Make your changes** following the coding guidelines
3. **Write tests** for new functionality
4. **Run all checks** to ensure code quality
5. **Update documentation** if needed
6. **Submit a pull request** with a clear description
### Pull Request Checklist
- [ ] Code follows the project's style guidelines
- [ ] Tests pass and coverage is maintained
- [ ] Documentation is updated
- [ ] Pre-commit hooks pass
- [ ] Type checking passes
- [ ] Linting passes
## Reporting Issues
When reporting issues, please include:
- A clear description of the problem
- Steps to reproduce the issue
- Expected behavior
- Actual behavior
- Environment details (Python version, OS, etc.)
- Any relevant error messages or logs
## Getting Help
If you need help or have questions:
- Check the existing documentation
- Look at existing issues and pull requests
- Create a new issue for bugs or feature requests
- Ask questions in discussions
## License
By contributing to this project, you agree that your contributions will be licensed under the same license as the project.

49
Makefile Normal file

@@ -0,0 +1,49 @@
.PHONY: help install install-dev test test-cov lint format type-check clean build publish
help: ## Show this help message
@echo "Available commands:"
@grep -E '^[a-zA-Z_-]+:.*?## .*$$' $(MAKEFILE_LIST) | sort | awk 'BEGIN {FS = ":.*?## "}; {printf "\033[36m%-20s\033[0m %s\n", $$1, $$2}'
install: ## Install the package in development mode
uv pip install -e .
install-dev: ## Install the package with development dependencies
uv pip install -e ".[dev]"
test: ## Run tests
uv run pytest
test-cov: ## Run tests with coverage
uv run pytest --cov=src --cov-report=html --cov-report=term-missing
lint: ## Run linting checks
uv run ruff check src/ tests/ example/
format: ## Format code
uv run ruff format src/ tests/ example/
type-check: ## Run type checking
uv run mypy src/
clean: ## Clean up build artifacts
rm -rf build/
rm -rf dist/
rm -rf *.egg-info/
rm -rf .pytest_cache/
rm -rf .coverage
rm -rf htmlcov/
rm -rf .mypy_cache/
build: ## Build the package
uv run python -m build
publish: ## Publish to PyPI (requires authentication)
uv run python -m twine upload dist/*
pre-commit-install: ## Install pre-commit hooks
uv run pre-commit install
pre-commit-run: ## Run pre-commit on all files
uv run pre-commit run --all-files
check-all: lint type-check test ## Run all checks (lint, type-check, test)

0
example/__init__.py Normal file

58
example/basic_example.py Normal file

@@ -0,0 +1,58 @@
"""Example usage of DockerComposeManager to generate a docker-compose.yaml file."""
from __future__ import annotations
import logging
from compose import DockerComposeManager
logging.basicConfig(level=logging.INFO)
logger: logging.Logger = logging.getLogger("docker-compose-example")
if __name__ == "__main__":
# Path to the compose file to generate
compose_path = "docker-compose.yaml"
# Using DockerComposeManager as a context manager
with DockerComposeManager(compose_path) as manager:
# Add top-level networks, volumes, configs, and secrets
manager.add_network("my_network")
manager.add_volume("db_data")
manager.add_config("my_config", config={"file": "./config.json"})
manager.add_secret("my_secret", config={"file": "./secret.txt"})
# Add a simple web service
manager.create_service(
name="web",
image="nginx:alpine",
ports=["8080:80"],
environment={"NGINX_HOST": "localhost"},
networks=["my_network"],
)
# Add a database service that depends on the web service
manager.create_service(
name="db",
image="postgres:15-alpine",
environment={
"POSTGRES_USER": "user",
"POSTGRES_PASSWORD": "password",
"POSTGRES_DB": "example_db",
},
ports=["5432:5432"],
volumes=["db_data:/var/lib/postgresql/data"],
networks=["my_network"],
depends_on={"web": {"condition": "service_started"}},
)
# Modify the web service
manager.modify_service("web", ports=["8081:80"])
# Add another service and then remove it
manager.create_service("temp_service", image="alpine:latest")
manager.remove_service("temp_service")
# Remove a network
manager.remove_network("my_network")
logger.info("docker-compose.yaml generated at %s", compose_path)

128
pyproject.toml Normal file

@@ -0,0 +1,128 @@
[project]
name = "compose"
version = "0.1.0"
description = "A simple Python package for managing Docker Compose files"
readme = "README.md"
requires-python = ">=3.9"
dependencies = [
"pydantic>=2.11.7",
"pytest>=8.4.0",
"pyyaml>=6.0.2",
"ruff>=0.12.0",
]
[project.optional-dependencies]
dev = [
"mypy>=1.8.0",
"pre-commit>=3.6.0",
"pytest-cov>=4.1.0",
"pytest-mock>=3.12.0",
"hypothesis>=6.98.0",
"types-PyYAML>=6.0.12",
]
[build-system]
requires = ["hatchling"]
build-backend = "hatchling.build"
[tool.ruff]
preview = true
line-length = 120
lint.select = ["ALL"]
lint.pydocstyle.convention = "google"
lint.isort.required-imports = ["from __future__ import annotations"]
lint.pycodestyle.ignore-overlong-task-comments = true
lint.ignore = [
"ANN201", # Checks that public functions and methods have return type annotations.
"ARG001", # Checks for the presence of unused arguments in function definitions.
"B008", # Allow Form() as a default value
"CPY001", # Missing copyright notice at top of file
"D100", # Checks for undocumented public module definitions.
"D101", # Checks for undocumented public class definitions.
"D102", # Checks for undocumented public method definitions.
"D104", # Missing docstring in public package.
"D105", # Missing docstring in magic method.
"D105", # pydocstyle - missing docstring in magic method
"D106", # Checks for undocumented public class definitions, for nested classes.
"ERA001", # Found commented-out code
"FBT003", # Checks for boolean positional arguments in function calls.
"FIX002", # Line contains TODO
"G002", # Allow % in logging
"PGH003", # Check for type: ignore annotations that suppress all type warnings, as opposed to targeting specific type warnings.
"PLR6301", # Checks for the presence of unused self parameter in methods definitions.
"RUF029", # Checks for functions declared async that do not await or otherwise use features requiring the function to be declared async.
"TD003", # Checks that a TODO comment is associated with a link to a relevant issue or ticket.
"PLR0913", # Checks for function definitions that include too many arguments.
"PLR0917", # Checks for function definitions that include too many positional arguments.
# Conflicting lint rules when using Ruff's formatter
# https://docs.astral.sh/ruff/formatter/#conflicting-lint-rules
"COM812", # Checks for the absence of trailing commas.
"COM819", # Checks for the presence of prohibited trailing commas.
"D206", # Checks for docstrings that are indented with tabs.
"D300", # Checks for docstrings that use '''triple single quotes''' instead of """triple double quotes""".
"E111", # Checks for indentation with a non-multiple of 4 spaces.
"E114", # Checks for indentation of comments with a non-multiple of 4 spaces.
"E117", # Checks for over-indented code.
"ISC001", # Checks for implicitly concatenated strings on a single line.
"ISC002", # Checks for implicitly concatenated strings that span multiple lines.
"Q000", # Checks for inline strings that use single quotes or double quotes, depending on the value of the lint.flake8-quotes.inline-quotes option.
"Q001", # Checks for multiline strings that use single quotes or double quotes, depending on the value of the lint.flake8-quotes.multiline-quotes setting.
"Q002", # Checks for docstrings that use single quotes or double quotes, depending on the value of the lint.flake8-quotes.docstring-quotes setting.
"Q003", # Checks for strings that include escaped quotes, and suggests changing the quote style to avoid the need to escape them.
"W191", # Checks for indentation that uses tabs.
]
[tool.ruff.lint.per-file-ignores]
"tests/*" = ["S101", "D103", "PLR2004"]
[tool.mypy]
python_version = "3.9"
warn_return_any = true
warn_unused_configs = true
disallow_untyped_defs = true
disallow_incomplete_defs = true
check_untyped_defs = true
disallow_untyped_decorators = true
no_implicit_optional = true
warn_redundant_casts = true
warn_unused_ignores = true
warn_no_return = true
warn_unreachable = true
strict_equality = true
[tool.pytest.ini_options]
testpaths = ["tests"]
python_files = ["test_*.py"]
python_classes = ["Test*"]
python_functions = ["test_*"]
addopts = [
"--strict-markers",
"--strict-config",
"--cov=src",
"--cov-report=term-missing",
"--cov-report=html",
"--cov-report=xml",
]
[tool.coverage.run]
source = ["src"]
omit = [
"*/tests/*",
"*/test_*",
]
[tool.coverage.report]
exclude_lines = [
"pragma: no cover",
"def __repr__",
"if self.debug:",
"if settings.DEBUG",
"raise AssertionError",
"raise NotImplementedError",
"if 0:",
"if __name__ == .__main__.:",
"class .*\\bProtocol\\):",
"@(abc\\.)?abstractmethod",
]

394
src/compose/__init__.py Normal file

@@ -0,0 +1,394 @@
"""Docker Compose YAML file generator.
This package provides utilities for programmatically creating and managing Docker Compose
configuration files through Python classes, simplifying the process of defining
and managing Docker Compose configurations.
"""
from __future__ import annotations
from pathlib import Path
from typing import TYPE_CHECKING, Any
import yaml
from pydantic import BaseModel, ValidationError
class ServiceConfig(BaseModel):
image: str = ""
ports: list[str] | None = None
environment: dict[str, str] | None = None
volumes: list[str] | None = None
networks: list[str] | None = None
command: str | list[str] | None = None
entrypoint: str | list[str] | None = None
build: dict[str, Any] | str | None = None
healthcheck: dict[str, Any] | None = None
restart: str | None = None
labels: dict[str, str] | list[str] | None = None
depends_on: list[str] | dict[str, dict[str, str]] | None = None
configs: list[dict[str, Any]] | None = None
secrets: list[dict[str, Any]] | None = None
deploy: dict[str, Any] | None = None
resources: dict[str, Any] | None = None
# Allow extra fields for flexibility and to support arbitrary Docker Compose extensions.
model_config = {"extra": "allow"}
class VolumeConfig(BaseModel):
"""Configuration for a Docker Compose volume.
Represents the configuration options for defining a volume in a Docker Compose file.
"""
driver: str | None = None
driver_opts: dict[str, Any] | None = None
external: bool | dict[str, Any] | None = None
labels: dict[str, str] | list[str] | None = None
name: str | None = None
# Allow extra fields for flexibility and to support arbitrary Docker Compose extensions.
model_config = {"extra": "allow"}
class NetworkConfig(BaseModel):
"""Represents a network configuration for a Docker Compose file."""
driver: str | None = None
driver_opts: dict[str, Any] | None = None
attachable: bool | None = None
external: bool | dict[str, Any] | None = None
labels: dict[str, str] | list[str] | None = None
name: str | None = None
# Allow extra fields for flexibility and to support arbitrary Docker Compose extensions.
model_config = {"extra": "allow"}
class ConfigConfig(BaseModel):
"""Represents a config configuration for a Docker Compose file."""
file: str | None = None
external: bool | None = None
name: str | None = None
# Allow extra fields for flexibility and to support arbitrary Docker Compose extensions.
model_config = {"extra": "allow"}
class SecretConfig(BaseModel):
"""Represents a secret configuration for a Docker Compose file."""
file: str | None = None
external: bool | None = None
name: str | None = None
# Allow extra fields for flexibility and to support arbitrary Docker Compose extensions.
model_config = {"extra": "allow"}
if TYPE_CHECKING:
from types import TracebackType
class DockerComposeManager:
"""A class to create and modify Docker Compose YAML files programmatically.
Supports context manager usage for auto-saving.
"""
def add_volume(self, name: str, config: VolumeConfig | dict[str, Any] | None = None) -> DockerComposeManager:
"""Add a top-level volume definition.
Returns:
DockerComposeManager: self (for chaining)
"""
if "volumes" not in self._data or not isinstance(self._data["volumes"], dict):
self._data["volumes"] = {}
if config is None:
self._data["volumes"][name] = {}
else:
if isinstance(config, dict):
config = VolumeConfig(**config)
self._data["volumes"][name] = config.model_dump(exclude_none=True)
self._dirty = True
return self
def remove_volume(self, name: str) -> DockerComposeManager:
"""Remove a top-level volume definition.
Returns:
DockerComposeManager: self (for chaining)
Raises:
KeyError: If the volume does not exist.
"""
if "volumes" not in self._data or name not in self._data["volumes"]:
msg = f"Volume '{name}' not found."
raise KeyError(msg)
del self._data["volumes"][name]
if not self._data["volumes"]:
del self._data["volumes"]
self._dirty = True
return self
def add_network(self, name: str, config: NetworkConfig | dict[str, Any] | None = None) -> DockerComposeManager:
"""Add a top-level network definition.
Returns:
DockerComposeManager: self (for chaining)
"""
if "networks" not in self._data or not isinstance(self._data["networks"], dict):
self._data["networks"] = {}
if config is None:
self._data["networks"][name] = {}
else:
if isinstance(config, dict):
config = NetworkConfig(**config)
self._data["networks"][name] = config.model_dump(exclude_none=True)
self._dirty = True
return self
def remove_network(self, name: str) -> DockerComposeManager:
"""Remove a top-level network definition.
Returns:
DockerComposeManager: self (for chaining)
Raises:
KeyError: If the network does not exist.
"""
if "networks" not in self._data or name not in self._data["networks"]:
msg = f"Network '{name}' not found."
raise KeyError(msg)
del self._data["networks"][name]
if not self._data["networks"]:
del self._data["networks"]
self._dirty = True
return self
def add_config(self, name: str, config: ConfigConfig | dict[str, Any] | None = None) -> DockerComposeManager:
"""Add a top-level config definition.
Returns:
DockerComposeManager: self (for chaining)
"""
if "configs" not in self._data or not isinstance(self._data["configs"], dict):
self._data["configs"] = {}
if config is None:
self._data["configs"][name] = {}
else:
if isinstance(config, dict):
config = ConfigConfig(**config)
self._data["configs"][name] = config.model_dump(exclude_none=True)
self._dirty = True
return self
def remove_config(self, name: str) -> DockerComposeManager:
"""Remove a top-level config definition.
Returns:
DockerComposeManager: self (for chaining)
Raises:
KeyError: If the config does not exist.
"""
if "configs" not in self._data or name not in self._data["configs"]:
msg = f"Config '{name}' not found."
raise KeyError(msg)
del self._data["configs"][name]
if not self._data["configs"]:
del self._data["configs"]
self._dirty = True
return self
def add_secret(self, name: str, config: SecretConfig | dict[str, Any] | None = None) -> DockerComposeManager:
"""Add a top-level secret definition.
Returns:
DockerComposeManager: self (for chaining)
"""
if "secrets" not in self._data or not isinstance(self._data["secrets"], dict):
self._data["secrets"] = {}
if config is None:
self._data["secrets"][name] = {}
else:
if isinstance(config, dict):
config = SecretConfig(**config)
self._data["secrets"][name] = config.model_dump(exclude_none=True)
self._dirty = True
return self
def remove_secret(self, name: str) -> DockerComposeManager:
"""Remove a top-level secret definition.
Returns:
DockerComposeManager: self (for chaining)
Raises:
KeyError: If the secret does not exist.
"""
if "secrets" not in self._data or name not in self._data["secrets"]:
msg = f"Secret '{name}' not found."
raise KeyError(msg)
del self._data["secrets"][name]
if not self._data["secrets"]:
del self._data["secrets"]
self._dirty = True
return self
def __init__(self, path: str, version: str = "3.8") -> None:
"""Initialize the manager with a YAML file path. Loads existing file or creates a new one."""
self.path: str = path
self.version: str = version
self._data: dict[str, Any] = {}
self._dirty: bool = False
self._load()
def _load(self) -> None:
if Path(self.path).exists():
with Path(self.path).open("r", encoding="utf-8") as f:
self._data = yaml.safe_load(f) or {}
if not self._data:
self._data = {"version": self.version, "services": {}}
if "services" not in self._data:
self._data["services"] = {}
if not isinstance(self._data["services"], dict):
self._data["services"] = {}
def create_service(
self,
name: str,
*,
config: ServiceConfig | dict[str, Any] | None = None,
image: str = "",
ports: list[str] | None = None,
environment: dict[str, str] | None = None,
volumes: list[str] | None = None,
networks: list[str] | None = None,
command: str | list[str] | None = None,
entrypoint: str | list[str] | None = None,
build: dict[str, Any] | str | None = None,
healthcheck: dict[str, Any] | None = None,
restart: str | None = None,
labels: dict[str, str] | list[str] | None = None,
depends_on: list[str] | dict[str, dict[str, str]] | None = None,
configs: list[dict[str, Any]] | None = None,
secrets: list[dict[str, Any]] | None = None,
deploy: dict[str, Any] | None = None,
resources: dict[str, Any] | None = None,
**kwargs: object,
) -> DockerComposeManager:
"""Create a new service in the compose file.
Returns:
DockerComposeManager: self (for chaining)
Raises:
ValueError: If the service config is invalid.
"""
services = self._data["services"]
if config is not None:
if isinstance(config, dict):
config = ServiceConfig(**config)
service = config.model_dump(exclude_none=True)
service.update(kwargs)
else:
try:
service = ServiceConfig(
image=image,
ports=ports,
environment=environment,
volumes=volumes,
networks=networks,
command=command,
entrypoint=entrypoint,
build=build,
healthcheck=healthcheck,
restart=restart,
labels=labels,
depends_on=depends_on,
configs=configs,
secrets=secrets,
deploy=deploy,
resources=resources,
**kwargs,
).model_dump(exclude_none=True)
except ValidationError as e:
msg = f"Invalid service config: {e}"
raise ValueError(msg) from e
services[name] = service
self._dirty = True
return self
def modify_service(self, name: str, **kwargs: object) -> DockerComposeManager:
"""Modify an existing service. Raises KeyError if not found.
Args:
name (str): Name of the service to modify.
**kwargs: Key-value pairs to update in the service configuration.
Raises:
KeyError: If the service with the given name does not exist.
Returns:
DockerComposeManager: self (for chaining)
"""
services: dict[str, dict[str, Any]] = self._data["services"]
if name not in services:
msg: str = f"Service '{name}' not found."
raise KeyError(msg)
services[name].update(kwargs)
self._dirty = True
return self
def remove_service(self, name: str) -> DockerComposeManager:
"""Remove a service from the compose file.
Returns:
DockerComposeManager: self (for chaining)
Raises:
KeyError: If the service does not exist.
"""
services: dict[str, dict[str, Any]] = self._data["services"]
if name not in services:
msg: str = f"Service '{name}' not found."
raise KeyError(msg)
del services[name]
self._dirty = True
return self
def save(self) -> None:
"""Save the current state to the YAML file."""
with Path(self.path).open("w", encoding="utf-8") as f:
yaml.dump(self._data, f, sort_keys=False, indent=2, default_flow_style=False)
self._dirty = False
def __enter__(self) -> DockerComposeManager:
"""Enter the context manager and return self.
Returns:
DockerComposeManager: The instance itself for context management.
"""
return self
def __exit__(
self,
exc_type: type[BaseException] | None,
exc_val: BaseException | None,
exc_tb: TracebackType | None,
) -> None:
"""Exit the context manager and save the file if changes were made.
Args:
exc_type: The exception type if an exception was raised, None otherwise.
exc_val: The exception value if an exception was raised, None otherwise.
exc_tb: The traceback if an exception was raised, None otherwise.
"""
if self._dirty:
self.save()

0
src/compose/py.typed Normal file

1
tests/__init__.py Normal file

@@ -0,0 +1 @@
"""Test package for the compose library."""


@@ -0,0 +1,150 @@
from __future__ import annotations
from pathlib import Path
from typing import TYPE_CHECKING
import pytest
import yaml
from compose import DockerComposeManager
if TYPE_CHECKING:
from pathlib import Path
def test_create_and_save_service(tmp_path: Path) -> None:
compose_file: Path = tmp_path / "docker-compose.yml"
manager = DockerComposeManager(str(compose_file))
manager.create_service(
name="web",
image="nginx:latest",
ports=["80:80"],
environment={"ENV_VAR": "value"},
).save()
# Check file exists and content
assert compose_file.exists()
with compose_file.open() as f:
data = yaml.safe_load(f)
assert "web" in data["services"]
assert data["services"]["web"]["image"] == "nginx:latest"
assert data["services"]["web"]["ports"] == ["80:80"]
assert data["services"]["web"]["environment"] == {"ENV_VAR": "value"}
def test_modify_service(tmp_path: Path) -> None:
compose_file: Path = tmp_path / "docker-compose.yml"
manager = DockerComposeManager(str(compose_file))
manager.create_service(name="web", image="nginx:latest").save()
manager.modify_service(name="web", image="nginx:1.19", ports=["8080:80"]).save()
with compose_file.open() as f:
data = yaml.safe_load(f)
assert data["services"]["web"]["image"] == "nginx:1.19"
assert data["services"]["web"]["ports"] == ["8080:80"]
def test_remove_service(tmp_path: Path) -> None:
compose_file: Path = tmp_path / "docker-compose.yml"
manager = DockerComposeManager(str(compose_file))
manager.create_service(name="web", image="nginx:latest").save()
manager.remove_service("web").save()
with compose_file.open() as f:
data = yaml.safe_load(f)
assert "web" not in data["services"]
def test_context_manager(tmp_path: Path) -> None:
compose_file: Path = tmp_path / "docker-compose.yml"
with DockerComposeManager(str(compose_file)) as manager:
manager.create_service(name="web", image="nginx:latest")
with compose_file.open() as f:
data = yaml.safe_load(f)
assert "web" in data["services"]
def test_modify_nonexistent_service(tmp_path: Path) -> None:
compose_file: Path = tmp_path / "docker-compose.yml"
manager = DockerComposeManager(str(compose_file))
with pytest.raises(KeyError):
manager.modify_service("notfound", image="nginx:latest")
def test_remove_nonexistent_service(tmp_path: Path) -> None:
compose_file: Path = tmp_path / "docker-compose.yml"
manager = DockerComposeManager(str(compose_file))
with pytest.raises(KeyError):
manager.remove_service("notfound")
def test_create_service_with_extra_kwargs(tmp_path: Path) -> None:
compose_file: Path = tmp_path / "docker-compose.yml"
manager = DockerComposeManager(str(compose_file))
manager.create_service(
name="db",
image="postgres:latest",
environment={"POSTGRES_PASSWORD": "example"},
volumes=["db_data:/var/lib/postgresql/data"],
depends_on=["web"],
).save()
with compose_file.open() as f:
data = yaml.safe_load(f)
assert "db" in data["services"]
assert data["services"]["db"]["volumes"] == ["db_data:/var/lib/postgresql/data"]
assert data["services"]["db"]["depends_on"] == ["web"]
def test_create_service_minimal(tmp_path: Path) -> None:
compose_file: Path = tmp_path / "docker-compose.yml"
manager = DockerComposeManager(str(compose_file))
manager.create_service(name="worker", image="busybox").save()
with compose_file.open() as f:
data = yaml.safe_load(f)
assert "worker" in data["services"]
assert data["services"]["worker"]["image"] == "busybox"
assert "ports" not in data["services"]["worker"]
assert "environment" not in data["services"]["worker"]
def test_create_service_all_fields(tmp_path: Path) -> None:
compose_file: Path = tmp_path / "docker-compose.yml"
manager = DockerComposeManager(str(compose_file))
manager.create_service(
name="full",
image="alpine:latest",
ports=["1234:1234"],
environment={"FOO": "bar"},
volumes=["data:/data"],
networks=["default", "custom"],
command=["echo", "hello"],
entrypoint=["/bin/sh", "-c"],
build={"context": ".", "dockerfile": "Dockerfile"},
healthcheck={"test": ["CMD", "true"], "interval": "1m"},
restart="always",
labels={"com.example": "label"},
depends_on=["db"],
configs=[{"source": "my_config", "target": "/etc/config"}],
secrets=[{"source": "my_secret", "target": "/run/secret"}],
deploy={"replicas": 2},
resources={"limits": {"cpus": "0.5", "memory": "50M"}},
extra_field="extra_value",
).save()
with compose_file.open() as f:
data = yaml.safe_load(f)
svc = data["services"]["full"]
assert svc["image"] == "alpine:latest"
assert svc["ports"] == ["1234:1234"]
assert svc["environment"] == {"FOO": "bar"}
assert svc["volumes"] == ["data:/data"]
assert svc["networks"] == ["default", "custom"]
assert svc["command"] == ["echo", "hello"]
assert svc["entrypoint"] == ["/bin/sh", "-c"]
assert svc["build"] == {"context": ".", "dockerfile": "Dockerfile"}
assert svc["healthcheck"] == {"test": ["CMD", "true"], "interval": "1m"}
assert svc["restart"] == "always"
assert svc["labels"] == {"com.example": "label"}
assert svc["depends_on"] == ["db"]
assert svc["configs"] == [{"source": "my_config", "target": "/etc/config"}]
assert svc["secrets"] == [{"source": "my_secret", "target": "/run/secret"}]
assert svc["deploy"] == {"replicas": 2}
assert svc["resources"] == {"limits": {"cpus": "0.5", "memory": "50M"}}
assert svc["extra_field"] == "extra_value"