diff --git a/.gitignore b/.gitignore new file mode 100644 index 0000000..f2f6bb9 --- /dev/null +++ b/.gitignore @@ -0,0 +1,65 @@ +# ------------------------- +# Python / App artifacts +# ------------------------- + +__pycache__/ +*.pyc +*.pyo +*.pyd +*.swp +*.log + +# virtual environments +.venv/ +env/ +venv/ + +# pip build folders +build/ +dist/ +*.egg-info/ + +# pytest cache +.pytest_cache/ +.coverage +htmlcov/ + +# fastapi auto-generated openapi cache +.openapi_schema_cache/ + +# ------------------------- +# Docker / Compose +# ------------------------- + +# local DB volume (if stored on host) +postgres-data/ +pgdata/ +data/ + +# Docker developer junk +*.pid +*.sock + +# ------------------------- +# IDE / OS junk +# ------------------------- + +# VS Code +.vscode/ +.settings/ + +# PyCharm / JetBrains +.idea/ + +# macOS +.DS_Store +.Trashes +.AppleDouble +._* + +# Windows +Thumbs.db +Desktop.ini + +# Linux +*~ diff --git a/Dockerfile b/Dockerfile new file mode 100644 index 0000000..782fc97 --- /dev/null +++ b/Dockerfile @@ -0,0 +1,26 @@ +# Python base image +FROM python:3.11-slim + +# Environment settings: +# - no .pyc files +# - unbuffered output (logs show up immediately in docker logs) +ENV PYTHONDONTWRITEBYTECODE=1 \ + PYTHONUNBUFFERED=1 \ + PIP_NO_CACHE_DIR=1 + +# All paths in the container are relative to /app +WORKDIR /app + +# Install Python dependencies first to leverage Docker layer caching +COPY requirements.txt ./ +RUN pip install --upgrade pip && \ + pip install -r requirements.txt + +# Now copy the rest of the source code +COPY . . 
+ +# Document the port uvicorn will listen on +EXPOSE 8000 + +# Default command - used by api service (unless overridden) and when running image directly +CMD ["uvicorn", "app.api:app", "--host", "0.0.0.0", "--port", "8000"] diff --git a/README.md b/README.md index 3145d38..e826a6c 100644 --- a/README.md +++ b/README.md @@ -1,31 +1,276 @@ -# Instructions +# 🖥️ Server Inventory Challenge -You are developing an inventory management software solution for a cloud services company that provisions servers in multiple data centers. You must build a CRUD app for tracking the state of all the servers. +[![Made with FastAPI](https://img.shields.io/badge/Made%20with-FastAPI-009688?logo=fastapi&logoColor=white)]() +[![Postgres](https://img.shields.io/badge/Database-PostgreSQL-336791?logo=postgresql&logoColor=white)]() +[![Docker Compose](https://img.shields.io/badge/Run%20With-Docker%20Compose-2496ED?logo=docker&logoColor=white)]() +[![Tests](https://img.shields.io/badge/Tested%20with-Pytest-0A9EDC?logo=pytest&logoColor=white)]() -Deliverables: -- PR to https://github.com/Mathpix/hiring-challenge-devops-python that includes: -- API code -- CLI code -- pytest test suite -- Working Docker Compose stack +A simple CRUD system for tracking cloud server inventory — implemented with FastAPI, PostgreSQL, raw SQL access, and tested with pytest. +The **entire solution runs inside Docker**; nothing needs to be installed locally beyond Docker. 
-Short API.md on how to run everything, also a short API and CLI spec +--- -Required endpoints: -- POST /servers → create a server -- GET /servers → list all servers -- GET /servers/{id} → get one server -- PUT /servers/{id} → update server -- DELETE /servers/{id} → delete server +## Features -Requirements: -- Use FastAPI or Flask -- Store data in PostgreSQL -- Use raw SQL +* FastAPI REST API +* CLI client tool +* PostgreSQL backing store +* Raw SQL queries (no ORM) +* Input validation +* Uniqueness of server name and IP address (extended requirement) +* Automated tests via pytest +* Containerized execution (everything runs inside Docker) -Validate that: -- hostname is unique -- IP address looks like an IP +--- -State is one of: active, offline, retired +## Getting Started +### Build and launch stack + +```bash +docker compose up --build +``` + +This: + +- builds the application image +- starts PostgreSQL +- starts the API container +- puts everything on an internal docker network + +API becomes available at: + +* http://localhost:8000 +* Swagger UI: http://localhost:8000/docs + +--- + +## Running the CLI (inside Docker) + +Each command runs in a one-off CLI container. 
+ +#### List servers + +```bash +docker compose run --rm cli python3 -m app.cli list +``` + +#### Create a server + +```bash +docker compose run --rm cli python3 -m app.cli create server-1 1.1.1.1 active +docker compose run --rm cli python3 -m app.cli create server-2 2.2.2.2 offline +``` + +#### Fetch a server by ID + +```bash +docker compose run --rm cli python3 -m app.cli get 1 +``` + +#### Update a server + +```bash +docker compose run --rm cli python3 -m app.cli update 1 server-1 1.1.1.1 retired +``` + +#### Delete a server + +```bash +docker compose run --rm cli python3 -m app.cli delete 1 +``` + +#### Notes +- CLI runs **inside container** +- No host Python setup required + +--- + +## Running Tests + +All tests run inside the API container: + +```bash +docker compose run --rm api pytest -vv +``` + +--- + +## Shutdown + +```bash +docker compose down +``` + +To remove stored database files: + +```bash +docker compose down -v +``` +--- + +## API Overview + +| Method | Path | Description | +|--------|-----------------|------------------------| +| POST | `/servers` | Create server | +| GET | `/servers` | List all servers | +| GET | `/servers/{id}` | Get one server | +| PUT | `/servers/{id}` | Update existing server | +| DELETE | `/servers/{id}` | Remove server | + +--- + +# Design Overview + +This project implements a minimal CRUD system for tracking servers in multiple data centers. 
+It includes: + +- **FastAPI** backend (`app/api.py`) +- **PostgreSQL** database with schema initialization (`init.sql`) +- **Python CLI** tool (`app/cli.py`) +- **Full pytest suite** (`tests/`) +- **Docker Compose stack** for running API, DB, CLI, and tests + +--- + +## Data Model + +Each server entry contains: + +- `id` – primary key +- `hostname` – **unique**, required +- `ip_address` – **unique**, validated IPv4/IPv6 +- `state` – one of: `active`, `offline`, `retired` +- `created_at` – timestamp +- `updated_at` – timestamp + +Database schema: + +```sql +CREATE TABLE IF NOT EXISTS servers ( + id SERIAL PRIMARY KEY, + hostname TEXT NOT NULL UNIQUE, + ip_address TEXT NOT NULL UNIQUE, + state TEXT NOT NULL CHECK (state IN ('active', 'offline', 'retired')), + created_at TIMESTAMPTZ NOT NULL DEFAULT NOW(), + updated_at TIMESTAMPTZ NOT NULL DEFAULT NOW() +); +``` + +--- + +## Pydantic Models + +`ServerIn` is used for input; `ServerOut` is used for API responses. + +```python +class ServerIn(BaseModel): + hostname: str + ip_address: str + state: Literal["active", "offline", "retired"] +``` + +`ServerOut` extends this with: + +```python +class ServerOut(ServerIn): + id: int + created_at: datetime + updated_at: datetime +``` + +### IP Address Validation + +```python +@field_validator("ip_address") +def validate_ip(cls, v): + ipaddress.ip_address(v) # raises ValueError on invalid IP +``` + +Invalid IPs result in: + +- HTTP 422 on API +- Clean “Invalid IP address format” message on CLI + +--- + +## Uniqueness Handling + +Uniqueness of `hostname` and `ip_address` is enforced **at the database layer**. 
+ +Integrity errors are mapped into friendly FastAPI HTTP responses: + +| Constraint | HTTP Status | Message | +|-----------|-------------|---------| +| Duplicate hostname | 409 | “Hostname must be unique” | +| Duplicate IP | 409 | “IP address must be unique” | + +--- + +## API Flow Example (POST /servers) + +``` +CLI → FastAPI → ServerIn validation → SQL INSERT → ServerOut → CLI output +``` + +1. CLI sends JSON `{hostname, ip_address, state}` +2. FastAPI constructs `ServerIn` +3. Pydantic validates: + - hostname present + - state allowed + - ip_address is syntactically valid +4. SQL insert executed +5. PostgreSQL ensures hostname/IP uniqueness +6. Result mapped into `ServerOut` +7. JSON returned to CLI + +--- + +## Error Flows + +### Invalid IP format + +- Rejected at model layer +- Returns HTTP 422 +- CLI prints clean error message + +### Duplicate hostname / IP + +- Caught as database `IntegrityError` +- Mapped to HTTP 409 with specific message +- CLI prints: + +``` +Error: HTTP 409 - IP address must be unique +``` + +### Not found (GET/PUT/DELETE) + +``` +HTTP 404 - Server not found +``` + +## Repository Structure + +``` +app/ + __init__.py + api.py + cli.py + db.py + models.py +tests/ + __init__.py + test_servers_api.py +init.sql +docker-compose.yml +Dockerfile +README.md +.gitignore +requirements.txt +``` + +## License + +MIT — do anything, just give attribution. 
diff --git a/app/__init__.py b/app/__init__.py new file mode 100644 index 0000000..b9d56a4 --- /dev/null +++ b/app/__init__.py @@ -0,0 +1 @@ +# app package diff --git a/app/api.py b/app/api.py new file mode 100644 index 0000000..b61d949 --- /dev/null +++ b/app/api.py @@ -0,0 +1,118 @@ +from typing import List + +from fastapi import FastAPI, HTTPException +from psycopg2 import IntegrityError, errors as pg_errors + +from .db import get_connection, dict_cursor +from .models import ServerIn, ServerOut + +app = FastAPI(title="Server Inventory API") + + +def map_row_to_server(row) -> ServerOut: + """Convert a DB row (RealDictRow) into a ServerOut model.""" + created = row.get("created_at") + updated = row.get("updated_at") + return ServerOut( + id=row["id"], + hostname=row["hostname"], + ip_address=row["ip_address"], + state=row["state"], + created_at=created.isoformat() if hasattr(created, "isoformat") else created, + updated_at=updated.isoformat() if hasattr(updated, "isoformat") else updated, + ) + + +def handle_integrity_error(e: IntegrityError) -> HTTPException: + """Map a psycopg2 IntegrityError to a clean HTTPException. 
+ +    This handles: +    - hostname uniqueness +    - ip_address uniqueness +    - generic integrity constraint issues +    """ +    constraint = getattr(getattr(e, "diag", None), "constraint_name", "") or "" +    # Default message +    detail = "Invalid data: integrity constraint violated" + +    # These names are generated by Postgres for UNIQUE(hostname) and UNIQUE(ip_address) +    if constraint == "servers_hostname_key": +        detail = "Hostname must be unique" +    elif constraint == "servers_ip_address_key": +        detail = "IP address must be unique" +    elif isinstance(e, pg_errors.UniqueViolation): +        # Fallback for any other unique-constraint issue +        detail = "Hostname and IP address must be unique" + +    return HTTPException(status_code=409, detail=detail) + +@app.get("/servers", response_model=List[ServerOut]) +def list_servers(): +    with get_connection() as conn, dict_cursor(conn) as cur: +        cur.execute("SELECT * FROM servers ORDER BY id;") +        rows = cur.fetchall() +        return [map_row_to_server(r) for r in rows] + + +@app.get("/servers/{server_id}", response_model=ServerOut) +def get_server(server_id: int): +    with get_connection() as conn, dict_cursor(conn) as cur: +        cur.execute("SELECT * FROM servers WHERE id = %s;", (server_id,)) +        row = cur.fetchone() +        if not row: +            raise HTTPException(status_code=404, detail="Server not found") +        return map_row_to_server(row) + + +@app.post("/servers", response_model=ServerOut, status_code=201) +def create_server(server: ServerIn): +    try: +        with get_connection() as conn, dict_cursor(conn) as cur: +            cur.execute( +                """INSERT INTO servers (hostname, ip_address, state) +                   VALUES (%s, %s, %s) +                   RETURNING *;""", +                (server.hostname, server.ip_address, server.state), +            ) +            row = cur.fetchone() +            conn.commit() +            return map_row_to_server(row) +    except IntegrityError as e: +        raise handle_integrity_error(e) from e + + +@app.put("/servers/{server_id}", response_model=ServerOut) +def update_server(server_id: int, server: ServerIn): +    with get_connection() as
conn, dict_cursor(conn) as cur: + # Ensure the row exists first + cur.execute("SELECT id FROM servers WHERE id = %s;", (server_id,)) + if not cur.fetchone(): + raise HTTPException(status_code=404, detail="Server not found") + + try: + cur.execute( + """UPDATE servers + SET hostname = %s, + ip_address = %s, + state = %s, + updated_at = NOW() + WHERE id = %s + RETURNING *;""", + (server.hostname, server.ip_address, server.state, server_id), + ) + row = cur.fetchone() + conn.commit() + return map_row_to_server(row) + except IntegrityError as e: + raise handle_integrity_error(e) from e + + +@app.delete("/servers/{server_id}", status_code=204) +def delete_server(server_id: int): + with get_connection() as conn, dict_cursor(conn) as cur: + cur.execute("DELETE FROM servers WHERE id = %s;", (server_id,)) + if cur.rowcount == 0: + raise HTTPException(status_code=404, detail="Server not found") + conn.commit() + # 204 No Content -> just return None + return diff --git a/app/cli.py b/app/cli.py new file mode 100644 index 0000000..59c8b5a --- /dev/null +++ b/app/cli.py @@ -0,0 +1,170 @@ +import argparse +import os +import sys +import requests + +API_BASE_URL = os.getenv("API_BASE_URL", "http://localhost:8000") + +def handle_response(resp): + """ + Centralized response handling: + - Parse JSON if available + - Print friendly errors instead of raising exceptions + - Return data for successful calls + """ + try: + data = resp.json() + except Exception: + data = None + + if resp.status_code >= 400: + detail = None + if isinstance(data, dict): + detail = data.get("detail") + msg = f"Error: HTTP {resp.status_code}" + if detail: + msg += f" - {detail}" + print(msg, file=sys.stderr) + return None + + return data + + +def cmd_list(args): + url = f"{API_BASE_URL}/servers" + try: + r = requests.get(url, timeout=5) + except requests.RequestException as e: + print(f"Error: failed to contact API: {e}", file=sys.stderr) + return + + data = handle_response(r) + if data is None: + return + + 
if not data: + print("No servers found.") + return + + for s in data: + print(f"{s['id']}: {s['hostname']} {s['ip_address']} [{s['state']}]") + + + +def cmd_get(args): + url = f"{API_BASE_URL}/servers/{args.id}" + try: + r = requests.get(url, timeout=5) + except requests.RequestException as e: + print(f"Error: failed to contact API: {e}", file=sys.stderr) + return + + data = handle_response(r) + if data is None: + return + + print(data) + + + +def cmd_create(args): + url = f"{API_BASE_URL}/servers" + payload = { + "hostname": args.hostname, + "ip_address": args.ip_address, + "state": args.state, + } + + try: + r = requests.post(url, json=payload, timeout=5) + except requests.RequestException as e: + print(f"Error: failed to contact API: {e}", file=sys.stderr) + return + + data = handle_response(r) + if data is not None: + print("Created:", data) + + + +def cmd_update(args): + url = f"{API_BASE_URL}/servers/{args.id}" + payload = { + "hostname": args.hostname, + "ip_address": args.ip_address, + "state": args.state, + } + + try: + r = requests.put(url, json=payload, timeout=5) + except requests.RequestException as e: + print(f"Error: failed to contact API: {e}", file=sys.stderr) + return + + data = handle_response(r) + if data is not None: + print("Updated:", data) + + + +def cmd_delete(args): + url = f"{API_BASE_URL}/servers/{args.id}" + try: + r = requests.delete(url, timeout=5) + except requests.RequestException as e: + print(f"Error: failed to contact API: {e}", file=sys.stderr) + return + + if r.status_code == 204: + print(f"Deleted server {args.id}") + else: + handle_response(r) + + + +def build_parser(): + parser = argparse.ArgumentParser(description="Server inventory CLI") + sub = parser.add_subparsers(dest="command", required=True) + + # list + p_list = sub.add_parser("list", help="List servers") + p_list.set_defaults(func=cmd_list) + + # get + p_get = sub.add_parser("get", help="Get a server") + p_get.add_argument("id", type=int) + 
p_get.set_defaults(func=cmd_get) + + # create + p_create = sub.add_parser("create", help="Create a server") + p_create.add_argument("hostname") + p_create.add_argument("ip_address") + p_create.add_argument("state") + p_create.set_defaults(func=cmd_create) + + # update + p_update = sub.add_parser("update", help="Update a server") + p_update.add_argument("id", type=int) + p_update.add_argument("hostname") + p_update.add_argument("ip_address") + p_update.add_argument("state") + p_update.set_defaults(func=cmd_update) + + # delete + p_delete = sub.add_parser("delete", help="Delete a server") + p_delete.add_argument("id", type=int) + p_delete.set_defaults(func=cmd_delete) + + return parser + + + +def main(argv=None): + parser = build_parser() + args = parser.parse_args(argv) + args.func(args) + + + +if __name__ == "__main__": + main() diff --git a/app/db.py b/app/db.py new file mode 100644 index 0000000..a82730d --- /dev/null +++ b/app/db.py @@ -0,0 +1,23 @@ +import os +from contextlib import contextmanager + +import psycopg2 +import psycopg2.extras + +DATABASE_URL = os.getenv( + "DATABASE_URL", + "postgresql://postgres:postgres@db:5432/postgres", +) + + +@contextmanager +def get_connection(): + conn = psycopg2.connect(DATABASE_URL) + try: + yield conn + finally: + conn.close() + + +def dict_cursor(conn): + return conn.cursor(cursor_factory=psycopg2.extras.RealDictCursor) diff --git a/app/models.py b/app/models.py new file mode 100644 index 0000000..da59ab3 --- /dev/null +++ b/app/models.py @@ -0,0 +1,37 @@ +from datetime import datetime +from typing import Literal + +from pydantic import BaseModel, field_validator +import ipaddress + + +class ServerIn(BaseModel): + """Input model for creating/updating a server.""" + + hostname: str + ip_address: str + state: Literal["active", "offline", "retired"] + + @field_validator("ip_address") + @classmethod + def validate_ip(cls, v: str) -> str: + """Validate that ip_address looks like a real IPv4/IPv6 address. 
+ + On failure: + - print an error message (visible in logs) + - raise ValueError so FastAPI returns HTTP 422 + """ + try: + ipaddress.ip_address(v) + except ValueError: + print(f"[ERROR] Invalid IP address provided: {v}") + raise ValueError("Invalid IP address format") + return v + + +class ServerOut(ServerIn): + """Output model including DB-managed fields.""" + + id: int + created_at: datetime + updated_at: datetime diff --git a/docker-compose.yml b/docker-compose.yml new file mode 100644 index 0000000..af03106 --- /dev/null +++ b/docker-compose.yml @@ -0,0 +1,51 @@ +services: + # PostgreSQL database + db: + image: postgres:16 + container_name: server_inventory_db + environment: + POSTGRES_USER: postgres + POSTGRES_PASSWORD: postgres + POSTGRES_DB: postgres + ports: + - "5432:5432" + volumes: + - ./init.sql:/docker-entrypoint-initdb.d/init.sql:ro + healthcheck: + test: ["CMD-SHELL", "pg_isready -U postgres"] + interval: 3s + timeout: 3s + retries: 5 + + + # FastAPI application (starts with `docker compose up`) + api: + build: . + container_name: server_inventory_api + depends_on: + db: + condition: service_healthy + environment: + # Note: hostname is "db" inside the Docker network + DATABASE_URL: postgres://postgres:postgres@db:5432/postgres + ports: + - "8000:8000" + command: > + uvicorn app.api:app + --host 0.0.0.0 + --port 8000 + + # CLI helper (runs when `docker compose run cli ...` is called) + cli: + build: . 
+ container_name: server_inventory_cli + # NOTE: CLI does not start with `docker compose up` + # It can be run on demand by calling: + # docker compose run --rm cli python3 -m app.cli list + depends_on: + api: + condition: service_started + environment: + API_BASE_URL: http://api:8000 + DATABASE_URL: postgres://postgres:postgres@db:5432/postgres + # `command` is not specified here so it can be passed at runtime diff --git a/init.sql b/init.sql new file mode 100644 index 0000000..5dda112 --- /dev/null +++ b/init.sql @@ -0,0 +1,8 @@ +CREATE TABLE IF NOT EXISTS servers ( + id SERIAL PRIMARY KEY, + hostname TEXT NOT NULL UNIQUE, + ip_address TEXT NOT NULL UNIQUE, + state TEXT NOT NULL CHECK (state IN ('active', 'offline', 'retired')), + created_at TIMESTAMPTZ NOT NULL DEFAULT NOW(), + updated_at TIMESTAMPTZ NOT NULL DEFAULT NOW() +); diff --git a/requirements.txt b/requirements.txt new file mode 100644 index 0000000..bff0763 --- /dev/null +++ b/requirements.txt @@ -0,0 +1,7 @@ +fastapi +uvicorn[standard] +psycopg2-binary +pydantic +requests +pytest +httpx \ No newline at end of file diff --git a/tests/__init__.py b/tests/__init__.py new file mode 100644 index 0000000..e69de29 diff --git a/tests/test_servers_api.py b/tests/test_servers_api.py new file mode 100644 index 0000000..cbf59c9 --- /dev/null +++ b/tests/test_servers_api.py @@ -0,0 +1,161 @@ +import psycopg2 +import pytest +from fastapi.testclient import TestClient + +from app.api import app +from app.db import DATABASE_URL + +client = TestClient(app) + + +@pytest.fixture(autouse=True) +def clean_db(): + conn = psycopg2.connect(DATABASE_URL) + cur = conn.cursor() + cur.execute("TRUNCATE TABLE servers RESTART IDENTITY;") + conn.commit() + cur.close() + conn.close() + yield + + +def test_create_and_get_server(): + payload = { + "hostname": "api-1", + "ip_address": "10.0.0.1", + "state": "active", + } + r = client.post("/servers", json=payload) + assert r.status_code == 201 + data = r.json() + assert data["id"] == 1 
+ assert data["hostname"] == "api-1" + assert data["ip_address"] == "10.0.0.1" + assert data["state"] == "active" + + r2 = client.get("/servers/1") + assert r2.status_code == 200 + data2 = r2.json() + assert data2["hostname"] == "api-1" + + +def test_hostname_must_be_unique(): + payload = { + "hostname": "db-1", + "ip_address": "10.0.0.2", + "state": "offline", + } + r1 = client.post("/servers", json=payload) + assert r1.status_code == 201 + + r2 = client.post("/servers", json=payload) + assert r2.status_code == 409 + assert "hostname" in r2.json()["detail"].lower() + + +def test_ip_must_be_unique(): + """Creating two servers with the same IP should fail on the second request. + + The first POST /servers with a given IP should succeed (201). + The second POST /servers with the same IP but a different hostname + should return HTTP 409 and a clear error message. + """ + payload1 = { + "hostname": "ip-unique-host-1", + "ip_address": "192.0.2.10", # TEST-NET-1 address, valid IPv4 + "state": "active", + } + payload2 = { + "hostname": "ip-unique-host-2", + "ip_address": "192.0.2.10", # same IP as above + "state": "offline", + } + + # First server with this IP should be created successfully + r1 = client.post("/servers", json=payload1) + assert r1.status_code == 201, r1.text + + # Second server with the same IP should hit the UNIQUE(ip_address) constraint + r2 = client.post("/servers", json=payload2) + assert r2.status_code == 409, r2.text + + body = r2.json() + assert "detail" in body + # The API code maps the specific unique constraint to this message + assert "IP address must be unique" in body["detail"] + + +def test_invalid_ip_rejected(): + payload = { + "hostname": "bad-ip", + "ip_address": "not-an-ip", + "state": "active", + } + r = client.post("/servers", json=payload) + assert r.status_code == 422 + + +def test_invalid_state_rejected(): + payload = { + "hostname": "weird-state", + "ip_address": "192.168.0.10", + "state": "broken", + } + r = 
client.post("/servers", json=payload) + assert r.status_code == 422 + + +def test_list_servers(): + for i in range(3): + payload = { + "hostname": f"host-{i}", + "ip_address": f"10.0.0.{i}", + "state": "active", + } + assert client.post("/servers", json=payload).status_code == 201 + + r = client.get("/servers") + assert r.status_code == 200 + data = r.json() + assert len(data) == 3 + assert [s["hostname"] for s in data] == ["host-0", "host-1", "host-2"] + + +def test_update_server(): + payload = { + "hostname": "host-1", + "ip_address": "10.0.0.5", + "state": "offline", + } + r = client.post("/servers", json=payload) + assert r.status_code == 201 + server = r.json() + + update_payload = { + "hostname": "host-1-updated", + "ip_address": "10.0.0.6", + "state": "active", + } + r2 = client.put(f"/servers/{server['id']}", json=update_payload) + assert r2.status_code == 200 + updated = r2.json() + assert updated["hostname"] == "host-1-updated" + assert updated["ip_address"] == "10.0.0.6" + assert updated["state"] == "active" + + +def test_delete_server(): + payload = { + "hostname": "to-delete", + "ip_address": "10.0.0.9", + "state": "retired", + } + r = client.post("/servers", json=payload) + assert r.status_code == 201 + server_id = r.json()["id"] + + r2 = client.delete(f"/servers/{server_id}") + assert r2.status_code == 204 + + r3 = client.get(f"/servers/{server_id}") + assert r3.status_code == 404