6 changes: 5 additions & 1 deletion .dockerignore
@@ -1,5 +1,9 @@
*.pyc
__pycache__
.venv
.deta
.env

_test.py
celerybeat-schedule.db
celerybeat-schedule-*
celerybeat-schedule
45 changes: 13 additions & 32 deletions .github/workflows/deploy.yml
@@ -8,7 +8,7 @@ on:
jobs:
check-requirements:
name: Check Requirements
runs-on: ubuntu-22.04
runs-on: ubuntu-24.04
steps:
- name: Set Version Tag
run: echo "API_TAG=$(echo $GITHUB_REF | awk -F '/' '{print $NF}')" >> $GITHUB_ENV
@@ -17,25 +17,31 @@ jobs:

build-image:
name: Build Image
runs-on: ubuntu-22.04
runs-on: ubuntu-24.04
needs: check-requirements
steps:
- uses: actions/checkout@v4
- name: Parse API Version
run: echo "API_VERSION=$(echo $GITHUB_REF | awk -F '/' '{print $NF}' | cut -c 2-)" >> $GITHUB_ENV
- name: Docker Login
run: echo ${{ secrets.DOCKER_ACCESS_TOKEN }} | docker login -u ${{ secrets.DOCKER_USERNAME }} --password-stdin
run: |
echo ${{ secrets.DOCKER_ACCESS_TOKEN }} | docker login -u ${{ secrets.DOCKER_USERNAME }} --password-stdin
echo ${{ secrets.GHCRIO_ACCESS_TOKEN }} | docker login ghcr.io -u ${{ secrets.GHCRIO_USERNAME }} --password-stdin
- name: Build Image
run: docker build -t steamcmd/api:latest .
run: docker build -t steamcmd/api:latest -t ghcr.io/steamcmd/api:latest .
# deploy
- name: Tag Image
run: docker tag steamcmd/api:latest steamcmd/api:$API_VERSION
run: |
docker tag steamcmd/api:latest steamcmd/api:$API_VERSION
docker tag ghcr.io/steamcmd/api:latest ghcr.io/steamcmd/api:$API_VERSION
- name: Push Image
run: docker push steamcmd/api --all-tags
run: |
docker push steamcmd/api --all-tags
docker push ghcr.io/steamcmd/api --all-tags

update-readme:
name: Update Readme
runs-on: ubuntu-22.04
runs-on: ubuntu-24.04
steps:
- uses: actions/checkout@v4
- name: Update Docker Hub Description
@@ -44,28 +50,3 @@ jobs:
DOCKERHUB_USERNAME: ${{ secrets.DOCKER_USERNAME }}
DOCKERHUB_PASSWORD: ${{ secrets.DOCKER_PASSWORD }}
DOCKERHUB_REPOSITORY: steamcmd/api

deploy-fly:
name: Deploy Fly.io
runs-on: ubuntu-22.04
needs: [check-requirements, build-image]
steps:
- uses: actions/checkout@v4
- uses: superfly/flyctl-actions/setup-flyctl@master
- name: Parse API Version
run: echo "API_VERSION=$(echo $GITHUB_REF | awk -F '/' '{print $NF}' | cut -c 2-)" >> $GITHUB_ENV
- name: Deploy API on Fly.io
run: flyctl deploy --app steamcmd --image steamcmd/api:${{ env.API_VERSION }} -e VERSION=${{ env.API_VERSION }}
env:
FLY_API_TOKEN: ${{ secrets.FLY_ACCESS_TOKEN }}

deploy-render:
name: Deploy Render.com
runs-on: ubuntu-22.04
needs: [check-requirements, build-image]
steps:
- uses: actions/checkout@v4
- name: Parse API Version
run: echo "API_VERSION=$(echo $GITHUB_REF | awk -F '/' '{print $NF}' | cut -c 2-)" >> $GITHUB_ENV
- name: Deploy API on Render.com
run: curl https://api.render.com/deploy/${{ secrets.RENDER_SERVICE_ID }}?key=${{ secrets.RENDER_API_KEY }}&imgURL=docker.io%2Fsteamcmd%2Fapi%40${{ env.API_VERSION }}
4 changes: 2 additions & 2 deletions .github/workflows/test.yml
@@ -11,15 +11,15 @@ on:
jobs:
test-image:
name: Test Image
runs-on: ubuntu-22.04
runs-on: ubuntu-24.04
steps:
- uses: actions/checkout@v4
- name: Build Image
run: docker build -t steamcmd/api:latest .

python-lint:
name: Python Lint
runs-on: ubuntu-22.04
runs-on: ubuntu-24.04
steps:
- uses: actions/checkout@v4
- uses: jpetrucciani/ruff-check@main
9 changes: 8 additions & 1 deletion .gitignore
@@ -1,5 +1,12 @@
.DS_Store
.ruff_cache

*.pyc
__pycache__
.venv
.deta
.env

_test.py
celerybeat-schedule.db
celerybeat-schedule-*
celerybeat-schedule
2 changes: 1 addition & 1 deletion Dockerfile
@@ -29,4 +29,4 @@ COPY --chown=$USER:$USER src/ $HOME/
##################### INSTALLATION END #####################

# Set default container command
CMD exec gunicorn main:app --max-requests 3000 --max-requests-jitter 150 --workers $WORKERS --worker-class uvicorn.workers.UvicornWorker --bind 0.0.0.0:$PORT
CMD exec gunicorn web:app --max-requests 3000 --max-requests-jitter 150 --workers $WORKERS --worker-class uvicorn.workers.UvicornWorker --bind 0.0.0.0:$PORT
96 changes: 30 additions & 66 deletions README.md
@@ -4,64 +4,20 @@
[![Discord Online](https://img.shields.io/discord/928592378711912488.svg)](https://discord.steamcmd.net)
[![Mastodon Follow](https://img.shields.io/mastodon/follow/109302774947550572?domain=https%3A%2F%2Ffosstodon.org&style=flat)](https://fosstodon.org/@steamcmd)
[![Image Size](https://img.shields.io/docker/image-size/steamcmd/api/latest.svg)](https://hub.docker.com/r/steamcmd/api)
[![Better Uptime](https://betteruptime.com/status-badges/v1/monitor/ln3p.svg)](https://status.steamcmd.net)
[![Uptime Robot Uptime](https://img.shields.io/uptimerobot/ratio/m782827237-5067fd1d69e3b1b2e4e40fff)](https://status.steamcmd.net)
[![GitHub Release](https://img.shields.io/github/v/release/steamcmd/api?label=version)](https://github.com/steamcmd/api/releases)
[![GitHub Sponsors](https://img.shields.io/github/sponsors/steamcmd)](https://github.com/sponsors/steamcmd)
[![MIT License](https://img.shields.io/badge/license-MIT-blue.svg)](LICENSE)

# SteamCMD API

Read-only API interface for steamcmd app_info. Updates of this code are
automatically deployed via [Github Actions](https://github.com/steamcmd/api/actions)
when a new version has been created on Github.
Read-only API interface for steamcmd app_info. The official API is reachable at
[api.steamcmd.net](https://api.steamcmd.net) and its documentation can be found
on [www.steamcmd.net](https://www.steamcmd.net).
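
For example, app information can be retrieved with a plain HTTP GET. The
`/v1/info/<app id>` path below is based on the public documentation; check
[www.steamcmd.net](https://www.steamcmd.net) for the current reference:
```bash
# Query the public API for the app info of a Steam app id (for example 730).
curl https://api.steamcmd.net/v1/info/730
```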

## Self-hosting

The easiest way to host the API yourself is using the free cloud platform
[Fly.io](https://fly.io). Install the CLI according to the documentation:
[https://fly.io/docs/hands-on/install-flyctl/](https://fly.io/docs/hands-on/install-flyctl/).

After installing, authenticate locally with the `flyctl` cli:
```bash
fly auth login
```
Create the app and redis instances (choose your own names):
```bash
fly apps create <app-name>
fly redis create <redis-name>
```
Retrieve the Redis connection URL (you will need this later):
```bash
fly redis status <redis-name>

Redis
ID = xxxxxxxxxxxxxxxxxx
Name = api
Plan = Free
Primary Region = ams
Read Regions = None
Eviction = Enabled
Private URL = redis://default:xxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxx@fly-api.upstash.io <== Write the password down
```
Set the required configuration environment variables:
```bash
fly secrets set --app <app-name> \
CACHE=True \
CACHE_TYPE=redis \
CACHE_EXPIRATION=120 \
REDIS_HOST="fly-api.upstash.io" \
REDIS_PORT=6379 \
REDIS_PASSWORD="xxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxx"
```
Finally deploy the API Docker image with the latest code:
```bash
fly deploy --app <app-name> --image steamcmd/api:latest -e VERSION=1.0.0
```
The version is optional and currently only required for the `/v1/version` endpoint.

## Container

The API can easily be run via a Docker image which contains the API code and the
The API can easily be run via a container image which contains the API code and the
`uvicorn` tool to serve web requests. With every new version of
the API, the Docker image is automatically rebuilt and pushed to Docker Hub:
```bash
@@ -73,8 +29,16 @@ docker pull steamcmd/api:1.10.0
```bash
docker run -p 8000:8000 -d steamcmd/api:latest
```
However during development, using Docker Compose is preferred. See the
[Development](#development) section for information.
The API consists of two services, the **Web** service and the **Job** service, plus
a Redis cache. Both the **Job** service and the Redis cache are optional, but the
Redis cache is required if you want to run the **Job** service.
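
As a minimal sketch (the container names, network name, and worker/port values below
are illustrative, not the official deployment), the **Web** service can be pointed at a
Redis cache using the variables from the [Configuration](#configuration) section:
```bash
# Illustrative only: run a Redis container and the Web service against it.
docker network create steamcmd-api
docker run -d --network steamcmd-api --name redis redis:7
docker run -d --network steamcmd-api -p 8000:8000 \
  -e PORT=8000 -e WORKERS=2 -e VERSION=1.0.0 \
  -e CACHE=True -e CACHE_TYPE=redis -e CACHE_EXPIRATION=120 \
  -e REDIS_HOST=redis -e REDIS_PORT=6379 \
  steamcmd/api:latest
```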

Details on how the official API is hosted can be found in the
[platform](https://github.com/steamcmd/platform) repository. This repository contains
all the infrastructure as code that is used to deploy the API on a Kubernetes cluster.

See the [Development](#development) section for more information on running
the API and Job services directly via Python.

## Configuration

@@ -89,7 +53,7 @@ that you will need to set the corresponding cache settings for that type as well
when using the **redis** type).

All the available options in an `.env` file:
```
```shell
# general
VERSION=1.0.0

@@ -109,34 +73,34 @@ REDIS_URL="redis://YourUsername:YourRedisP@ssword!@your.redis.host.example.com:6

# logging
LOG_LEVEL=info

# deta
DETA_BASE_NAME="steamcmd"
DETA_PROJECT_KEY="YourDet@ProjectKey!"
```
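
If you keep these settings in an `.env` file, one way (a sketch, not the only option) to
pass them to a container is Docker's `--env-file` flag:
```bash
# Pass the .env settings straight into the container; PORT and WORKERS from the
# Dockerfile CMD still need to be provided if they are not in the .env file.
docker run --env-file .env -e PORT=8000 -e WORKERS=2 -p 8000:8000 -d steamcmd/api:latest
```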

## Development

Run the api locally by installing a web server like uvicorn and running it:
To develop locally, start by creating a Python virtual environment and installing the prerequisites:
```bash
python3 -m venv .venv
source .venv/bin/activate
pip install -r requirements.txt
pip install uvicorn
cd src/
uvicorn main:app --reload
```

The easiest way to spin up a complete development environment is using Docker
compose. This will build the image locally, mount the correct directory (`src`)
and set the required environment variables. If you are on windows you should
store the repository in the WSL filesystem or it will fail. Execute compose up
in the root:
Run the Web Service (FastAPI) locally by running the FastAPI development server:
```bash
docker compose up
source .venv/bin/activate
cd src/
fastapi dev web.py
```
Now you can reach the SteamCMD API locally on [http://localhost:8000](http://localhost:8000).
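
For a quick smoke test you can, for example, call the version endpoint (assuming the
default development port):
```bash
# Check the version endpoint of the local development server.
curl http://localhost:8000/v1/version
```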

Run the Job Service (Celery) locally by running celery directly:
```bash
python3 -m venv .venv
source .venv/bin/activate
pip install -r requirements.txt
cd src/
celery -A job worker --loglevel=info --concurrency=2 --beat
```
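
Since `flower` is listed in the requirements, the Celery workers can optionally be
monitored from a browser as well (a sketch, assuming the same `job` app module):
```bash
# Optional: start the Flower monitoring UI for the Celery workers,
# then open http://localhost:5555 in a browser.
celery -A job flower --port=5555
```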

### Black

To keep things simple, [Black](https://github.com/python/black) is used for code
4 changes: 2 additions & 2 deletions docker-compose.yml
@@ -1,7 +1,7 @@
services:
web:
build: .
command: "gunicorn main:app --worker-class uvicorn.workers.UvicornWorker --bind 0.0.0.0:8000 --reload"
command: "gunicorn web:app --worker-class uvicorn.workers.UvicornWorker --bind 0.0.0.0:8000 --reload"
ports:
- "8000:8000"
volumes:
@@ -10,7 +10,7 @@ services:
PORT: 8000
WORKERS: 4
VERSION: 9.9.9
CACHE: True
CACHE: "True"
CACHE_TYPE: redis
CACHE_EXPIRATION: 120
REDIS_HOST: redis
17 changes: 12 additions & 5 deletions requirements.txt
@@ -1,10 +1,17 @@
fastapi
redis
deta

## general
semver
python-dotenv
logfmter

## web
fastapi[standard]
redis
minio

## steam
steam[client]
gevent

## job
celery
celery-singleton
flower
69 changes: 69 additions & 0 deletions src/config.py
@@ -0,0 +1,69 @@
import utils.general
import utils.helper
import logging
from dotenv import load_dotenv
from logfmter import Logfmter

# fmt: off

# Load values from .env file
load_dotenv()

# Set variables based on environment
cache = utils.helper.read_env("CACHE", "False", choices=[ "True", "False" ])
cache_type = utils.helper.read_env("CACHE_TYPE", "redis", choices=[ "redis" ])
cache_expiration = utils.helper.read_env("CACHE_EXPIRATION", "120")

redis_url = utils.helper.read_env("REDIS_URL")
redis_host = utils.helper.read_env("REDIS_HOST", "localhost")
redis_port = utils.helper.read_env("REDIS_PORT", "6379")
redis_password = utils.helper.read_env("REDIS_PASSWORD")
redis_database = utils.helper.read_env("REDIS_DATABASE", "0")

storage_type = utils.helper.read_env("STORAGE_TYPE", "local", choices=[ "local", "object" ])
storage_directory = utils.helper.read_env("STORAGE_DIRECTORY", "data/", dependency={ "STORAGE_TYPE": "local" })
storage_object_endpoint = utils.helper.read_env("STORAGE_OBJECT_ENDPOINT", dependency={ "STORAGE_TYPE": "object" })
storage_object_access_key = utils.helper.read_env("STORAGE_OBJECT_ACCESS_KEY", dependency={ "STORAGE_TYPE": "object" })
storage_object_secret_key = utils.helper.read_env("STORAGE_OBJECT_SECRET_KEY", dependency={ "STORAGE_TYPE": "object" })
storage_object_bucket = utils.helper.read_env("STORAGE_OBJECT_BUCKET", dependency={ "STORAGE_TYPE": "object" })
storage_object_secure = utils.helper.read_env("STORAGE_OBJECT_SECURE", True)
storage_object_region = utils.helper.read_env("STORAGE_OBJECT_REGION", False)

log_level = utils.helper.read_env("LOG_LEVEL", "info", choices=[ "debug", "info", "warning", "error", "critical" ])
version = utils.helper.read_env("VERSION", "9.9.9")

# Set general settings
chunk_size = 10

# Logging configuration
formatter = Logfmter(keys=["level"], mapping={"level": "levelname"})
handler = logging.StreamHandler()
handler.setFormatter(formatter)
logging.basicConfig(handlers=[handler], level=utils.general.log_level(log_level))

# Set Celery configuration
timezone = "UTC"
broker_url = redis_url
broker_connection_retry_on_startup = True
beat_schedule = {
"check-changelist-every-5-seconds": {
"task": "check_changelist",
"schedule": 5.0
},
#"check-missing-apps-every-30-minutes": {
# "task": "check_missing_apps",
# "schedule": 1800.0,
#},
"check-incorrect-apps-every-30-minutes": {
"task": "check_incorrect_apps",
"schedule": 1800.0,
},
"check-deadlocks-every-1-hour": {
"task": "check_deadlocks",
"schedule": 3600.0,
},
}
worker_concurrency = 4

# Dynamically import all tasks files
imports = utils.helper.list_tasks()
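
Based on the `read_env` calls above, an object-storage backend would presumably be
configured with environment variables along these lines (all values are placeholders):
```shell
# Placeholder values; the variable names follow the read_env calls in src/config.py.
STORAGE_TYPE=object
STORAGE_OBJECT_ENDPOINT="minio.example.com:9000"
STORAGE_OBJECT_ACCESS_KEY="YourAccessKey"
STORAGE_OBJECT_SECRET_KEY="YourSecretKey"
STORAGE_OBJECT_BUCKET="steamcmd"
STORAGE_OBJECT_SECURE=True
STORAGE_OBJECT_REGION="us-east-1"
```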