Overview
This app is a redirect service: for any incoming Reddit-style path it issues an HTTP redirect to a healthy, low-latency, privacy-focused alternative Reddit front-end (a Teddit or Redlib instance).
Project Structure
proxolotl/
├── .github/
│ └── workflows/
│ └── docker-publish.yml
├── app/
│ ├── __init__.py
│ ├── instance_manager.py
│ ├── main.py
│ └── templates/
│ └── config.html
├── data/
│ └── config.json
├── Dockerfile
├── docker-compose.yml
├── requirements.txt
└── README.md
Code
app/instance_manager.py
import asyncio
import time
import logging
import httpx
from typing import List, Dict, Tuple, Optional, Set
from apscheduler.schedulers.asyncio import AsyncIOScheduler
logging.basicConfig(level=logging.INFO)
logger = logging.getLogger(__name__)
TEDDIT_INSTANCES_URL = "https://codeberg.org/teddit/teddit/raw/branch/main/instances.json"
REDLIB_INSTANCES_URL = "https://raw.githubusercontent.com/redlib-org/redlib-instances/refs/heads/main/instances.json"
class Instance:
    """A single front-end instance together with its measured health data."""

    def __init__(self, url: str, is_cloudflare: bool = False, is_anubis: bool = False, latency: float = float('inf')):
        # latency defaults to +inf so an untested instance sorts last.
        self.url = url
        self.is_cloudflare = is_cloudflare
        self.is_anubis = is_anubis
        self.latency = latency

    def __repr__(self):
        fields = (
            f"url='{self.url}'",
            f"latency={self.latency:.2f}ms",
            f"cloudflare={self.is_cloudflare}",
            f"anubis={self.is_anubis}",
        )
        return f"Instance({', '.join(fields)})"
class InstanceManager:
    """Maintains a latency-ranked pool of healthy Reddit front-end instances.

    Responsibilities:
      - fetch the public Teddit and Redlib instance lists,
      - health-check every instance concurrently,
      - filter by the Cloudflare/Anubis policies,
      - expose the fastest instance and allow temporary blacklisting,
      - refresh everything on a schedule.
    """

    def __init__(self):
        # Every instance that passed the last health check, keyed by URL.
        # Kept unfiltered so a policy change can re-filter without re-testing.
        self._all_instances: Dict[str, Instance] = {}
        # Policy-filtered instances, sorted fastest-first.
        self._sorted_instances: List[Instance] = []
        # URLs reported down at redirect time; cleared on each full refresh.
        self._temp_down_instances: Set[str] = set()
        self.scheduler = AsyncIOScheduler()
        self.allow_cloudflare = True
        self.avoid_anubis = False

    async def fetch_and_normalize_instances(self) -> List[Dict]:
        """Download and merge the Teddit and Redlib instance lists.

        Returns a de-duplicated list of ``{"url": str, "cloudflare": bool | None}``
        dicts. ``cloudflare`` is ``None`` when the source list does not report
        it (Teddit); it is then detected during the health check. A failed
        fetch of either list is logged and skipped rather than raised.
        """
        raw_instances = []
        async with httpx.AsyncClient(timeout=10) as client:
            try:
                teddit_resp = await client.get(TEDDIT_INSTANCES_URL)
                teddit_resp.raise_for_status()
                for item in teddit_resp.json():
                    if item.get("url"):
                        raw_instances.append({"url": item["url"], "cloudflare": None})
            except Exception as e:
                logger.error(f"Failed to fetch Teddit instances: {e}")
            try:
                redlib_resp = await client.get(REDLIB_INSTANCES_URL)
                redlib_resp.raise_for_status()
                for item in redlib_resp.json().get("instances", []):
                    if item.get("url"):
                        raw_instances.append({"url": item["url"], "cloudflare": item.get("cloudflare", False)})
            except Exception as e:
                logger.error(f"Failed to fetch Redlib instances: {e}")
        # De-duplicate on the URL with any trailing slash stripped (last wins).
        unique_instances = {item['url'].rstrip('/'): item for item in raw_instances}.values()
        return list(unique_instances)

    async def test_instance(self, instance_data: Dict, client: httpx.AsyncClient) -> Optional[Instance]:
        """Probe one instance and measure its latency.

        Returns a populated ``Instance`` on success or ``None`` on any
        request/HTTP failure. Sniffs the first body chunk for an Anubis
        proof-of-work challenge page and, when the source list did not say,
        detects Cloudflare from the ``Server`` response header.
        """
        url = instance_data["url"]
        is_cloudflare = instance_data["cloudflare"]
        is_anubis = False
        try:
            start_time = time.monotonic()
            async with client.stream("GET", url, follow_redirects=True, timeout=5) as response:
                # Latency = time to response headers, not full body download.
                latency = (time.monotonic() - start_time) * 1000
                response.raise_for_status()
                # BUG FIX: httpx.Response.aread() takes no size argument, so the
                # previous ``aread(1024)`` raised TypeError on every probe (and
                # TypeError is not caught below). Stream only the first chunk.
                body_chunk = b""
                async for chunk in response.aiter_bytes(1024):
                    body_chunk = chunk
                    break
                body_text = body_chunk.decode('utf-8', errors='ignore').lower()
                if 'anubis' in body_text or 'proof-of-work' in body_text:
                    is_anubis = True
                if is_cloudflare is None:
                    is_cloudflare = "cloudflare" in response.headers.get("server", "").lower()
            return Instance(url=url, is_cloudflare=is_cloudflare, is_anubis=is_anubis, latency=latency)
        except (httpx.RequestError, httpx.HTTPStatusError) as e:
            logger.warning(f"Instance {url} failed test: {e}")
            return None

    async def update_instances(self):
        """Re-fetch, re-test, re-filter and re-sort the whole instance pool."""
        logger.info("Starting scheduled instance update...")
        raw_instances = await self.fetch_and_normalize_instances()
        async with httpx.AsyncClient() as client:
            tasks = [self.test_instance(inst, client) for inst in raw_instances]
            # return_exceptions=True so one unexpected error in a single
            # probe cannot abort the entire refresh.
            results = await asyncio.gather(*tasks, return_exceptions=True)
        live_instances = [inst for inst in results if isinstance(inst, Instance)]
        filtered_instances = live_instances
        if not self.allow_cloudflare:
            filtered_instances = [inst for inst in filtered_instances if not inst.is_cloudflare]
        if self.avoid_anubis:
            filtered_instances = [inst for inst in filtered_instances if not inst.is_anubis]
        self._all_instances = {inst.url: inst for inst in live_instances}
        self._sorted_instances = sorted(filtered_instances, key=lambda x: x.latency)
        # A fresh health check supersedes any runtime "down" reports.
        self._temp_down_instances.clear()
        if self._sorted_instances:
            logger.info(f"Instance update complete. Found {len(self._sorted_instances)} healthy, filtered instances.")
            logger.info(f"Fastest instance: {self._sorted_instances[0]}")
        else:
            logger.warning("Instance update complete. No healthy instances matching criteria found.")

    def get_fastest_instance(self) -> Optional[Instance]:
        """Return the lowest-latency instance not currently marked down, or None."""
        for instance in self._sorted_instances:
            if instance.url not in self._temp_down_instances:
                return instance
        return None

    def mark_instance_down(self, url: str):
        """Temporarily exclude *url* until the next scheduled refresh."""
        logger.warning(f"Marking {url} as temporarily down.")
        self._temp_down_instances.add(url)

    def _refilter_and_sort(self):
        """Re-apply the current policies to the already-tested instance set."""
        filtered_list = list(self._all_instances.values())
        if not self.allow_cloudflare:
            filtered_list = [inst for inst in filtered_list if not inst.is_cloudflare]
        if self.avoid_anubis:
            filtered_list = [inst for inst in filtered_list if not inst.is_anubis]
        self._sorted_instances = sorted(filtered_list, key=lambda x: x.latency)
        logger.info(f"Instance list re-filtered. {len(self._sorted_instances)} instances active.")

    def set_cloudflare_policy(self, allow: bool):
        """Allow/deny Cloudflare-fronted instances; re-filters only on change."""
        if self.allow_cloudflare != allow:
            self.allow_cloudflare = allow
            self._refilter_and_sort()

    def set_anubis_policy(self, avoid: bool):
        """Avoid/accept Anubis-protected instances; re-filters only on change."""
        if self.avoid_anubis != avoid:
            self.avoid_anubis = avoid
            self._refilter_and_sort()

    def start_scheduler(self, interval_minutes: int = 60):
        """Register the periodic refresh job and start the APScheduler loop."""
        self.scheduler.add_job(self.update_instances, 'interval', minutes=interval_minutes)
        self.scheduler.start()
        logger.info(f"Scheduler started. Instances will be updated every {interval_minutes} minutes.")
app/main.py
import json
import logging
from pathlib import Path
from fastapi import FastAPI, Request, Form
from fastapi.responses import RedirectResponse, HTMLResponse, Response
from fastapi.templating import Jinja2Templates
from .instance_manager import InstanceManager
# Application singletons: the FastAPI app, the shared instance manager, and
# the Jinja2 template loader (path is relative to the working directory).
app = FastAPI(title="Proxolotl")
instance_manager = InstanceManager()
templates = Jinja2Templates(directory="app/templates")
# Settings are persisted under data/ so a volume mount survives restarts.
CONFIG_FILE = Path("data/config.json")
logger = logging.getLogger(__name__)
def load_config():
    """Load persisted settings and apply them to the instance manager.

    Falls back to (and persists) the defaults when the config file is
    missing, unreadable, or corrupt, so a bad config never prevents startup.
    """
    defaults = {"allow_cloudflare": True, "avoid_anubis": False}
    config = None
    if CONFIG_FILE.exists():
        try:
            with open(CONFIG_FILE, 'r') as f:
                config = json.load(f)
        except (OSError, json.JSONDecodeError) as e:
            # ROBUSTNESS: previously a corrupt config.json crashed startup.
            logger.warning(f"Could not read {CONFIG_FILE} ({e}); using defaults.")
    if config is None:
        config = defaults
        save_config(config)
    instance_manager.set_cloudflare_policy(config.get("allow_cloudflare", True))
    instance_manager.set_anubis_policy(config.get("avoid_anubis", False))
def save_config(config: dict):
    """Persist *config* as pretty-printed JSON under the data directory.

    Creates the directory (including any missing parents) on first use.
    """
    # parents=True so persistence also works when data/ has no parent yet
    # (e.g. a differently mounted working directory).
    CONFIG_FILE.parent.mkdir(parents=True, exist_ok=True)
    with open(CONFIG_FILE, 'w') as f:
        json.dump(config, f, indent=2)
@app.on_event("startup")
async def startup_event():
    """Initialize the service when the ASGI app starts.

    Order matters: the saved policies must be applied (load_config) before
    the first update_instances() run so the initial filtering honors them;
    the recurring scheduler is started only after that first full refresh.
    """
    load_config()
    await instance_manager.update_instances()
    instance_manager.start_scheduler(interval_minutes=60)
@app.get("/--/config", response_class=HTMLResponse)
async def get_config_page(request: Request):
    """Render the settings page with the manager's current policies."""
    context = {
        "request": request,
        "allow_cloudflare": instance_manager.allow_cloudflare,
        "avoid_anubis": instance_manager.avoid_anubis,
    }
    return templates.TemplateResponse("config.html", context)
@app.post("/--/config")
async def update_config(request: Request, allow_cloudflare: bool = Form(False), avoid_anubis: bool = Form(False)):
    """Persist the submitted settings, apply them, and re-render the page.

    Absent checkbox fields fall back to the Form(False) defaults.
    """
    save_config({"allow_cloudflare": allow_cloudflare, "avoid_anubis": avoid_anubis})
    instance_manager.set_cloudflare_policy(allow_cloudflare)
    instance_manager.set_anubis_policy(avoid_anubis)
    context = {
        "request": request,
        "allow_cloudflare": instance_manager.allow_cloudflare,
        "avoid_anubis": instance_manager.avoid_anubis,
        "message": "Settings updated successfully!",
    }
    return templates.TemplateResponse("config.html", context)
@app.get("/{path:path}")
async def proxy_redirect(request: Request, path: str):
    """Redirect any path to the same path on the fastest healthy instance."""
    instance = instance_manager.get_fastest_instance()
    if instance is None:
        return Response(
            content="503 Service Unavailable: No healthy Reddit front-end instances found matching your criteria.",
            status_code=503,
        )
    query = request.url.query
    target = f"{instance.url}/{path}" + (f"?{query}" if query else "")
    logger.info(f"Redirecting /{path} to {target}")
    # 307 keeps the request method and is not cached permanently.
    return RedirectResponse(url=target, status_code=307)
app/templates/config.html
<!DOCTYPE html>
<html lang="en">
<head>
    <meta charset="UTF-8">
    <meta name="viewport" content="width=device-width, initial-scale=1.0">
    <title>Proxolotl Configuration</title>
    <style>
        body { font-family: sans-serif; background-color: #f4f4f9; color: #333; max-width: 600px; margin: 2em auto; padding: 2em; border: 1px solid #ddd; border-radius: 8px; }
        h1 { color: #ff4500; }
        .setting { margin-top: 1em; padding: 1em; background: #fff; border-radius: 4px; }
        label { display: flex; align-items: center; cursor: pointer; font-size: 1.1em; }
        input[type="checkbox"] { width: 20px; height: 20px; margin-right: 10px; }
        button { background-color: #007bff; color: white; padding: 10px 15px; border: none; border-radius: 4px; font-size: 1em; cursor: pointer; }
        button:hover { background-color: #0056b3; }
        .message { margin-top: 1em; color: green; font-weight: bold; }
    </style>
</head>
<body>
    <h1>Proxolotl Configuration</h1>
    {% if message %}
    <p class="message">{{ message }}</p>
    {% endif %}
    <form action="/--/config" method="post">
        <div class="setting">
            <label>
                <input type="checkbox" name="allow_cloudflare" value="true" {% if allow_cloudflare %}checked{% endif %}>
                Allow instances protected by Cloudflare
            </label>
            <p style="font-size: 0.9em; color: #666;">Instances behind Cloudflare can be faster, but disabling this may improve privacy.</p>
        </div>
        <div class="setting">
            <label>
                <input type="checkbox" name="avoid_anubis" value="true" {% if avoid_anubis %}checked{% endif %}>
                Avoid instances protected by Anubis (Proof-of-Work)
            </label>
            <p style="font-size: 0.9em; color: #666;">Anubis-protected sites require your browser to solve a brief computational challenge. Check this to avoid them entirely.</p>
        </div>
        <!-- BUG FIX: the hidden value="false" fallback inputs were removed.
             They were submitted AFTER the checkboxes, and the server reads
             the last duplicate form value, so checked boxes were always seen
             as "false". Unchecked boxes are simply absent and the backend
             defaults them to False. -->
        <button type="submit">Save Settings</button>
    </form>
</body>
</html>
requirements.txt
fastapi
uvicorn[standard]
httpx
jinja2
apschedulerDockerfile
FROM python:3.11-slim

# key=value form; the space-separated ENV form is legacy and deprecated.
ENV PYTHONDONTWRITEBYTECODE=1 \
    PYTHONUNBUFFERED=1

WORKDIR /app

COPY requirements.txt .
RUN pip install --no-cache-dir -r requirements.txt

# BUG FIX: copy the package to /app/app (not /app) so "uvicorn app.main:app"
# can import the "app" package and the relative "app/templates" path
# resolves from the /app working directory.
COPY ./app ./app

EXPOSE 8000

CMD ["uvicorn", "app.main:app", "--host", "0.0.0.0", "--port", "8000"]
docker-compose.yml
# NOTE: the top-level "version" key is obsolete in the Compose spec and is
# ignored by modern Docker Compose; kept here for older tooling.
version: '3.8'
services:
  proxolotl:
    # Use the pre-built image from GHCR
    # Replace 'your-github-username' with your actual username
    image: ghcr.io/your-github-username/proxolotl:latest
    container_name: proxolotl
    restart: unless-stopped
    ports:
      # Expose on port 8080 on the host, maps to 8000 in the container
      - "8080:8000"
    volumes:
      # Mount a local directory to persist the configuration
      # (the app writes data/config.json relative to its working directory)
      - ./data:/app/data
    environment:
      # Optional: Set timezone for logs
      - TZ=Etc/UTC
.github/workflows/docker-publish.yml
# Builds the Docker image on every push to main and publishes it to GHCR.
name: Docker Image CI

on:
  push:
    branches: [ "main" ]

jobs:
  build_and_push:
    runs-on: ubuntu-latest
    # Least-privilege token: read the repo, write packages (GHCR push).
    permissions:
      contents: read
      packages: write
    steps:
      - name: Checkout repository
        # Upgraded to Node 20 action releases; the Node 16 runtime used by
        # checkout@v3 / login-action@v2 etc. is deprecated on GitHub runners.
        uses: actions/checkout@v4
      - name: Log in to the GitHub Container Registry
        uses: docker/login-action@v3
        with:
          registry: ghcr.io
          username: ${{ github.actor }}
          password: ${{ secrets.GITHUB_TOKEN }}
      - name: Extract metadata (tags, labels) for Docker
        id: meta
        uses: docker/metadata-action@v5
        with:
          images: ghcr.io/${{ github.repository }}
      - name: Build and push Docker image
        uses: docker/build-push-action@v5
        with:
          context: .
          push: true
          tags: ${{ steps.meta.outputs.tags }}
          labels: ${{ steps.meta.outputs.labels }}
README.md
# Proxolotl

Proxolotl is a smart, self-healing, and privacy-focused proxy for Reddit. It automatically redirects any Reddit URL to a working, low-latency instance of alternative front-ends like [Teddit](https://codeberg.org/teddit/teddit) and [Redlib](https://github.com/redlib-org/redlib).
## Features
- **Automatic Redirects**: Simply point your browser to your Proxolotl instance with a Reddit path (e.g., `http://localhost:8080/r/selfhosted`).
- **Latency Prioritization**: Periodically tests a list of public front-end instances and prioritizes them based on the fastest response time from your server.
- **Self-Healing**: If the current fastest instance goes down, Proxolotl automatically fails over to the next-fastest one.
- **Always Up-to-Date**: Fetches the latest list of available instances from their official sources on a regular schedule.
- **Configurable**: Includes a simple web UI to configure settings, such as allowing Cloudflare or avoiding Anubis-protected instances.
- **Easy to Deploy**: Runs as a single, lightweight Docker container.
## Quick Start with Docker Compose
This is the recommended method for running Proxolotl.
1. Create a `docker-compose.yml` file with the following content.
```yaml
# docker-compose.yml
version: '3.8'
services:
proxolotl:
# To use a pre-built image from GHCR (replace with the actual repo)
image: ghcr.io/your-github-username/proxolotl:latest
# Or, to build from source in the current directory:
# build: .
container_name: proxolotl
restart: unless-stopped
ports:
- "8080:8000"
volumes:
# This persists your configuration settings
- ./data:/app/data
environment:
- TZ=Etc/UTC
```
2. Create an empty `data` directory next to your `docker-compose.yml` file. This is where your `config.json` will be stored.
```bash
mkdir data
```
3. Run the container:
```bash
docker-compose up -d
```
## Usage
Once running, you can use the service immediately.
- **To browse Reddit**: Take any Reddit URL like `https://www.reddit.com/r/docker/` and replace the domain with your Proxolotl address: `http://localhost:8080/r/docker/`.
- **To configure**: Open your browser to `http://localhost:8080/--/config`.
## Configuration
The service has a simple web interface for configuration, accessible at the `/--/config` path of your instance.
- **Allow Cloudflare Instances**: Check this box to include instances that are behind Cloudflare in the pool of available redirects. These can often be fast and reliable, but some users may prefer to avoid them for privacy reasons.
- **Avoid Anubis Instances**: Check this to filter out instances that use Anubis (a proof-of-work challenge). This prevents you from being redirected to a site that will ask your browser to solve a computational puzzle before showing content.
Settings are saved to the `data/config.json` file in the volume you mounted, so they will persist across container restarts.
## Building From Source
If you prefer to build the image yourself:
1. Clone this repository.
2. Navigate to the repository directory.
3. Build the Docker image:
```bash
docker build -t proxolotl:local .
```
4. Run the container, replacing the `image` in your `docker-compose.yml` with `proxolotl:local`.