When a CAPTCHA solve fails after all retries, the task data is lost unless you capture it. A dead-letter queue (DLQ) stores failed tasks for later retry, analysis, or alerting — so no work is silently dropped.
When tasks fail
Common reasons a CAPTCHA task ends up in the DLQ:
- ERROR_CAPTCHA_UNSOLVABLE — The solver couldn't complete the challenge
- ERROR_NO_SLOT_AVAILABLE — All workers busy, retries exhausted
- Timeout — Solver didn't return a result within the deadline
- Network errors — Connection dropped during polling
Without a DLQ, these failures produce a log line and are forgotten.
Python: In-memory DLQ with retry
import time
import json
import requests
from collections import deque
from dataclasses import dataclass, asdict
from typing import Optional
# CaptchaAI credentials and its 2captcha-compatible endpoints.
API_KEY = "YOUR_API_KEY"
SUBMIT_URL = "https://ocr.captchaai.com/in.php"   # task submission
RESULT_URL = "https://ocr.captchaai.com/res.php"  # result polling
@dataclass
class FailedTask:
    """A CAPTCHA task that exhausted its retries, preserved for later replay."""

    sitekey: str    # reCAPTCHA site key that was being solved
    page_url: str   # page the CAPTCHA appeared on
    error: str      # last error message before giving up
    attempts: int   # total solve attempts made before landing here
    timestamp: float  # epoch seconds when the task entered the DLQ
    task_id: Optional[str] = None  # solver task id, if submission succeeded
class DeadLetterQueue:
    """Bounded FIFO store for CAPTCHA tasks that exhausted their retries.

    Backed by a ``deque`` with ``maxlen``, so once ``max_size`` entries are
    held the oldest task is evicted automatically on the next push.
    """

    def __init__(self, max_size=1000, max_retries=3):
        # maxlen makes the queue self-trimming: no unbounded growth.
        self._queue = deque(maxlen=max_size)
        self.max_retries = max_retries

    def push(self, task: FailedTask):
        """Append a failed task and log the addition."""
        self._queue.append(task)
        print(f"[dlq] Added: {task.error} (attempts: {task.attempts})")

    def pop(self) -> Optional[FailedTask]:
        """Remove and return the oldest task, or ``None`` when empty."""
        if not self._queue:
            return None
        return self._queue.popleft()

    def size(self) -> int:
        """Number of tasks currently queued."""
        return len(self._queue)

    def peek_all(self) -> list:
        """Snapshot every queued task as a plain dict; queue is unchanged."""
        return list(map(asdict, self._queue))

    def export_json(self, path: str):
        """Dump the current queue contents to *path* as pretty-printed JSON."""
        with open(path, "w") as f:
            json.dump(self.peek_all(), f, indent=2)
        print(f"[dlq] Exported {self.size()} tasks to {path}")
# Shared module-level DLQ that solve_captcha() records failures into.
dlq = DeadLetterQueue(max_retries=3)
def solve_captcha(sitekey, page_url, max_retries=3):
    """Submit a reCAPTCHA task to CaptchaAI and poll for the token.

    Retries the whole submit/poll cycle up to ``max_retries`` extra times
    with exponential backoff.  When the final attempt fails, the task is
    captured in the module-level ``dlq`` and ``None`` is returned.
    """
    for attempt in range(max_retries + 1):
        try:
            # Submit the task (2captcha-compatible in.php endpoint).
            resp = requests.post(SUBMIT_URL, data={
                "key": API_KEY,
                "method": "userrecaptcha",
                "googlekey": sitekey,
                "pageurl": page_url,
                "json": "1",
            }, timeout=15)
            data = resp.json()
            if data["status"] != 1:
                # API-level rejection (e.g. ERROR_NO_SLOT_AVAILABLE).
                raise Exception(data["request"])
            task_id = data["request"]
            # Poll for up to 24 * 5s = 120s before declaring a timeout.
            for _ in range(24):
                time.sleep(5)
                poll = requests.get(RESULT_URL, params={
                    "key": API_KEY, "action": "get",
                    "id": task_id, "json": "1",
                }, timeout=15).json()
                if poll["status"] == 1:
                    return poll["request"]  # solved token
                if poll["request"] != "CAPCHA_NOT_READY":
                    # Hard solver error (e.g. ERROR_CAPTCHA_UNSOLVABLE).
                    raise Exception(poll["request"])
            raise TimeoutError(f"Task {task_id} timed out")
        except Exception as e:
            if attempt == max_retries:
                # Out of retries: capture the task instead of dropping it.
                dlq.push(FailedTask(
                    sitekey=sitekey,
                    page_url=page_url,
                    error=str(e),
                    attempts=attempt + 1,
                    timestamp=time.time(),
                ))
                return None
            time.sleep(2 ** attempt)  # exponential backoff: 1s, 2s, 4s...
    return None
# Process a batch of pages; any unrecoverable failures land in the DLQ
# instead of being silently dropped.
urls = [f"https://example.com/page/{i}" for i in range(5)]
for url in urls:
    token = solve_captcha("6Le-SITEKEY", url)
    if token:
        print(f"Solved: {token[:40]}...")
print(f"\nDLQ size: {dlq.size()}")
Expected output:
Solved: 03AGdBq26ZfPxL...
Solved: 03AGdBq27AbCdE...
[dlq] Added: ERROR_CAPTCHA_UNSOLVABLE (attempts: 4)
Solved: 03AGdBq28FgHiJ...
[dlq] Added: Task 71823460 timed out (attempts: 4)
DLQ size: 2
Retrying from the DLQ
def retry_dlq(dlq: DeadLetterQueue, max_retries=2):
    """Re-attempt every task currently sitting in *dlq*.

    Tasks whose cumulative attempt count already exceeds the combined
    budget (original retries + DLQ retries) are discarded permanently.
    Prints a summary of how many tasks were retried and recovered.
    """
    retried = 0
    recovered = 0
    # Snapshot the size up front: solve_captcha() pushes fresh failures back
    # into the same module-level DLQ (with a reset attempt count), so a
    # `while dlq.size() > 0` loop would spin forever when tasks keep failing.
    for _ in range(dlq.size()):
        task = dlq.pop()
        if task is None:
            break  # defensive: queue drained out from under us
        if task.attempts >= dlq.max_retries + max_retries:
            # Exhausted the total attempt budget — give up for good.
            print(f"[dlq] Permanently failed: {task.sitekey} — {task.error}")
            continue
        retried += 1
        token = solve_captcha(
            task.sitekey, task.page_url, max_retries=max_retries
        )
        if token:
            recovered += 1
            print(f"[dlq-retry] Recovered: {token[:40]}...")
    print(f"[dlq] Retried: {retried}, Recovered: {recovered}")
# Drain the DLQ once after the main batch has finished.
retry_dlq(dlq)
JavaScript: DLQ with file persistence
// Node built-in fs for persistence; axios for the CaptchaAI HTTP API.
const fs = require('fs');
const axios = require('axios');
const API_KEY = 'YOUR_API_KEY';
const DLQ_FILE = './captcha-dlq.json'; // on-disk queue survives restarts
class DeadLetterQueue {
  /**
   * File-backed FIFO of failed CAPTCHA tasks. The queue is reloaded from
   * DLQ_FILE on construction, so tasks survive a process restart.
   */
  constructor(maxRetries = 3) {
    this.maxRetries = maxRetries;
    this.queue = this._load();
  }

  /** Append a failed task (stamped with the current time) and persist. */
  push(task) {
    const entry = Object.assign({}, task, { timestamp: Date.now() });
    this.queue.push(entry);
    this._save();
    console.log(`[dlq] Added: ${task.error} (attempts: ${task.attempts})`);
  }

  /** Remove and return the oldest task, or null when the queue is empty. */
  pop() {
    const head = this.queue.shift();
    if (head) this._save();
    return head || null;
  }

  /** Number of tasks currently queued. */
  size() {
    return this.queue.length;
  }

  /** Read the persisted queue; a missing or corrupt file yields []. */
  _load() {
    try {
      return JSON.parse(fs.readFileSync(DLQ_FILE, 'utf8'));
    } catch {
      return [];
    }
  }

  /** Persist the full queue to disk as pretty-printed JSON. */
  _save() {
    fs.writeFileSync(DLQ_FILE, JSON.stringify(this.queue, null, 2));
  }
}
// Shared DLQ instance used by solveCaptcha() below.
const dlq = new DeadLetterQueue(3);
// Submit a reCAPTCHA task and poll for the token, retrying the whole
// submit/poll cycle with exponential backoff. On final failure the task
// is pushed to the module-level DLQ and null is returned.
async function solveCaptcha(sitekey, pageurl, maxRetries = 3) {
  for (let attempt = 0; attempt <= maxRetries; attempt++) {
    try {
      const submit = await axios.post('https://ocr.captchaai.com/in.php', null, {
        params: { key: API_KEY, method: 'userrecaptcha', googlekey: sitekey, pageurl, json: 1 }
      });
      // status !== 1 means an API-level rejection (e.g. ERROR_NO_SLOT_AVAILABLE).
      if (submit.data.status !== 1) throw new Error(submit.data.request);
      const taskId = submit.data.request;
      // Poll for up to 24 * 5s = 120s before declaring a timeout.
      for (let i = 0; i < 24; i++) {
        await new Promise(r => setTimeout(r, 5000));
        const poll = await axios.get('https://ocr.captchaai.com/res.php', {
          params: { key: API_KEY, action: 'get', id: taskId, json: 1 }
        });
        if (poll.data.status === 1) return poll.data.request; // solved token
        if (poll.data.request !== 'CAPCHA_NOT_READY') throw new Error(poll.data.request);
      }
      throw new Error(`Task ${taskId} timed out`);
    } catch (err) {
      if (attempt === maxRetries) {
        // Out of retries: capture the task instead of dropping it.
        dlq.push({ sitekey, pageurl, error: err.message, attempts: attempt + 1 });
        return null;
      }
      await new Promise(r => setTimeout(r, 2 ** attempt * 1000)); // backoff: 1s, 2s, 4s...
    }
  }
}
// Process tasks
// Process tasks; failures are captured in the file-backed DLQ.
(async () => {
  for (let i = 0; i < 5; i++) {
    const token = await solveCaptcha('6Le-SITEKEY', `https://example.com/page/${i}`);
    if (token) console.log(`Solved: ${token.substring(0, 40)}...`);
  }
  console.log(`DLQ size: ${dlq.size()}`);
})();
DLQ analysis
Export and analyze failed tasks to find patterns:
# Export the DLQ contents to a JSON file for offline analysis.
dlq.export_json("failed-tasks.json")
# Tally how often each distinct error message occurred.
from collections import Counter
errors = Counter(t["error"] for t in dlq.peek_all())
for error, count in errors.most_common():
    print(f" {error}: {count}")
Use this data to:
- Identify sitekeys that consistently fail → check if parameters are correct
- Spot timeouts during specific hours → correlate with API load
- Find network errors → check proxy health
Troubleshooting
| Problem | Cause | Fix |
|---|---|---|
| DLQ grows indefinitely | Not processing retries | Schedule periodic DLQ drain with retry_dlq() |
| Same task retried forever | No max-attempt cap | Check task.attempts before re-queuing |
| DLQ file corrupted | Concurrent writes | Use file locking or switch to Redis/database |
| Lost tasks on crash | In-memory DLQ only | Use file-based or Redis-backed DLQ |
FAQ
Should I use an in-memory or persistent DLQ?
Use in-memory for short-running scripts. Use file-based or Redis-backed for long-running services where a process restart would lose queued tasks.
When should I permanently discard a task?
After 2-3 DLQ retries (on top of the original retries). If a task fails 6+ times total, the parameters are likely wrong — log it and move on.
Can I combine this with the circuit breaker pattern?
Yes. The circuit breaker prevents sending requests during an outage, and the DLQ captures any tasks that fail before the circuit trips. See Circuit Breaker Pattern.
Never lose a failed CAPTCHA task again with CaptchaAI
Get your API key at captchaai.com.
Discussions (0)
Join the conversation
Sign in to share your opinion.
Sign In — No comments yet.