Go's goroutines and channels make it one of the best languages for CAPTCHA solving at scale. Thousands of concurrent solves use minimal memory, and the standard library handles HTTP without external dependencies.
Basic solver
package main
import (
"encoding/json"
"fmt"
"io"
"net/http"
"net/url"
"time"
)
// Service endpoints and account credentials shared by every request.
const (
	submitURL = "https://ocr.captchaai.com/in.php"  // task submission endpoint
	resultURL = "https://ocr.captchaai.com/res.php" // result polling endpoint
	apiKey    = "YOUR_API_KEY"                      // replace with your account's API key
)
// APIResponse is the JSON envelope returned by both endpoints.
// Status is 1 on success; Request then carries the task ID (submit)
// or the solved token (poll). On failure Request holds an error code.
type APIResponse struct {
	Status  int    `json:"status"`
	Request string `json:"request"`
}
// solveCaptcha submits a CAPTCHA task to the API and polls until a
// solution is ready or the attempt budget (24 polls, 5s apart — about
// two minutes) is exhausted.
//
// method selects the API task type (e.g. "userrecaptcha"); params holds
// the task-specific form fields and may override the defaults set here.
// It returns the solved token, or an error describing the first
// unrecoverable failure.
func solveCaptcha(method string, params map[string]string) (string, error) {
	// Submit the task. "json":"1" asks the API for JSON responses.
	data := url.Values{
		"key":    {apiKey},
		"method": {method},
		"json":   {"1"},
	}
	for k, v := range params {
		data.Set(k, v)
	}
	resp, err := http.PostForm(submitURL, data)
	if err != nil {
		return "", fmt.Errorf("submit request failed: %w", err)
	}
	defer resp.Body.Close()
	// A non-200 body is unlikely to be the JSON envelope; fail early
	// instead of reporting a confusing decode error.
	if resp.StatusCode != http.StatusOK {
		return "", fmt.Errorf("submit: unexpected HTTP status %s", resp.Status)
	}
	var submitResult APIResponse
	if err := json.NewDecoder(resp.Body).Decode(&submitResult); err != nil {
		return "", fmt.Errorf("submit decode failed: %w", err)
	}
	if submitResult.Status != 1 {
		return "", fmt.Errorf("submit error: %s", submitResult.Request)
	}
	taskID := submitResult.Request

	// Poll for the result. Transient failures (network error, non-200,
	// truncated or malformed body) are retried on the next tick.
	for i := 0; i < 24; i++ {
		time.Sleep(5 * time.Second)
		pollURL := fmt.Sprintf("%s?key=%s&action=get&id=%s&json=1",
			resultURL, apiKey, url.QueryEscape(taskID))
		pollResp, err := http.Get(pollURL)
		if err != nil {
			continue
		}
		body, err := io.ReadAll(pollResp.Body)
		pollResp.Body.Close()
		if err != nil || pollResp.StatusCode != http.StatusOK {
			continue
		}
		var pollResult APIResponse
		if err := json.Unmarshal(body, &pollResult); err != nil {
			continue
		}
		if pollResult.Status == 1 {
			return pollResult.Request, nil
		}
		// The API really does spell it "CAPCHA_NOT_READY"; anything
		// else with Status != 1 is a fatal task error.
		if pollResult.Request != "CAPCHA_NOT_READY" {
			return "", fmt.Errorf("poll error: %s", pollResult.Request)
		}
	}
	return "", fmt.Errorf("timeout: task %s", taskID)
}
// main solves a single reCAPTCHA and prints either the error or a
// truncated view of the resulting token.
func main() {
	params := map[string]string{
		"googlekey": "6Le-SITEKEY",
		"pageurl":   "https://example.com",
	}
	token, err := solveCaptcha("userrecaptcha", params)
	if err != nil {
		fmt.Printf("Error: %v\n", err)
		return
	}
	// %.50s keeps the log line short; real tokens are much longer.
	fmt.Printf("Token: %.50s...\n", token)
}
Expected output:
Token: 03AGdBq26ZfPxL...
Concurrent solving with WaitGroup
package main
import (
"encoding/json"
"fmt"
"io"
"net/http"
"net/url"
"sync"
"time"
)
// Service endpoints, credentials, and the concurrency cap applied to
// all in-flight solves.
const (
	submitURL = "https://ocr.captchaai.com/in.php"  // task submission endpoint
	resultURL = "https://ocr.captchaai.com/res.php" // result polling endpoint
	apiKey    = "YOUR_API_KEY"                      // replace with your account's API key
	maxConcurrent = 10                              // semaphore size: max simultaneous solves
)
// APIResponse is the JSON envelope returned by both endpoints.
// Status is 1 on success; Request carries the task ID (submit) or the
// solved token (poll), and an error code on failure.
type APIResponse struct {
	Status  int    `json:"status"`
	Request string `json:"request"`
}
// SolveResult pairs one solve attempt's outcome with the index of the
// task that produced it, so results can be matched up after fan-in.
type SolveResult struct {
	Index int    // position of the task in the submitted batch
	Token string // solved token; empty when Error is non-nil
	Error error  // non-nil if the solve failed
}
// solveCaptcha submits a CAPTCHA task and polls the API until the token
// is ready, a fatal API error is reported, or ~2 minutes (24 polls, 5s
// apart) elapse. method selects the task type; params carries the
// task-specific form fields.
func solveCaptcha(method string, params map[string]string) (string, error) {
	data := url.Values{"key": {apiKey}, "method": {method}, "json": {"1"}}
	for k, v := range params {
		data.Set(k, v)
	}
	resp, err := http.PostForm(submitURL, data)
	if err != nil {
		return "", err
	}
	defer resp.Body.Close()
	var result APIResponse
	// The decode error was previously dropped, so a malformed reply
	// surfaced as a submit failure with an empty message.
	if err := json.NewDecoder(resp.Body).Decode(&result); err != nil {
		return "", fmt.Errorf("submit decode: %w", err)
	}
	if result.Status != 1 {
		// "%s" keeps server-supplied text from being interpreted as a
		// format string (fmt.Errorf(result.Request) breaks on '%').
		return "", fmt.Errorf("%s", result.Request)
	}
	taskID := result.Request
	for i := 0; i < 24; i++ {
		time.Sleep(5 * time.Second)
		pollResp, err := http.Get(fmt.Sprintf("%s?key=%s&action=get&id=%s&json=1",
			resultURL, apiKey, taskID))
		if err != nil {
			continue // transient network error; retry on the next tick
		}
		body, err := io.ReadAll(pollResp.Body)
		pollResp.Body.Close()
		if err != nil {
			continue // truncated body; retry
		}
		var pr APIResponse
		if err := json.Unmarshal(body, &pr); err != nil {
			continue // malformed body; retry rather than abort
		}
		if pr.Status == 1 {
			return pr.Request, nil
		}
		// "CAPCHA_NOT_READY" (API's spelling) means keep polling.
		if pr.Request != "CAPCHA_NOT_READY" {
			return "", fmt.Errorf("%s", pr.Request)
		}
	}
	return "", fmt.Errorf("timeout")
}
// main launches 20 concurrent solves while a semaphore keeps at most
// maxConcurrent in flight, then prints every result and a summary.
func main() {
	var wg sync.WaitGroup
	slots := make(chan struct{}, maxConcurrent) // counting semaphore
	out := make(chan SolveResult, 20)

	for task := 0; task < 20; task++ {
		wg.Add(1)
		go func(idx int) {
			defer wg.Done()
			slots <- struct{}{}          // acquire a slot
			defer func() { <-slots }()   // release it on exit
			params := map[string]string{
				"googlekey": fmt.Sprintf("6Le-SITEKEY-%d", idx),
				"pageurl":   fmt.Sprintf("https://example.com/page/%d", idx),
			}
			token, err := solveCaptcha("userrecaptcha", params)
			out <- SolveResult{Index: idx, Token: token, Error: err}
		}(task)
	}

	// Close out only after every goroutine has sent its result, so the
	// range loop below terminates cleanly.
	go func() {
		wg.Wait()
		close(out)
	}()

	var solved, failed int
	for res := range out {
		if res.Error != nil {
			fmt.Printf("[task-%d] Failed: %v\n", res.Index, res.Error)
			failed++
			continue
		}
		fmt.Printf("[task-%d] Solved: %.40s...\n", res.Index, res.Token)
		solved++
	}
	fmt.Printf("\nResults: %d solved, %d failed\n", solved, failed)
}
Expected output:
[task-0] Solved: 03AGdBq26ZfPxL...
[task-3] Solved: 03AGdBq27AbCdE...
...
Results: 18 solved, 2 failed
Channel-based worker pool
For production, use a worker pool with input/output channels:
// CaptchaTask describes one unit of work for the worker pool.
type CaptchaTask struct {
	Index  int               // caller-assigned position, echoed back in SolveResult
	Method string            // API method, e.g. "userrecaptcha"
	Params map[string]string // method-specific form fields
}
// worker drains the tasks channel, solving each CAPTCHA in turn and
// reporting every outcome on results. It returns once tasks is closed
// and empty. id identifies the worker (useful for logging).
func worker(id int, tasks <-chan CaptchaTask, results chan<- SolveResult) {
	for t := range tasks {
		token, err := solveCaptcha(t.Method, t.Params)
		results <- SolveResult{Index: t.Index, Token: token, Error: err}
	}
}
// runWorkerPool fans taskList out to workerCount workers and blocks
// until every task has produced a SolveResult, which are returned in
// completion order (use SolveResult.Index to re-associate).
func runWorkerPool(taskList []CaptchaTask, workerCount int) []SolveResult {
	// Buffers sized to the batch so neither sending tasks nor workers
	// reporting results can ever block the pool.
	taskCh := make(chan CaptchaTask, len(taskList))
	resultCh := make(chan SolveResult, len(taskList))
	for w := 0; w < workerCount; w++ {
		go worker(w, taskCh, resultCh)
	}
	for _, t := range taskList {
		taskCh <- t
	}
	close(taskCh) // lets each worker's range loop terminate
	var collected []SolveResult
	for range taskList {
		collected = append(collected, <-resultCh)
	}
	return collected
}
HTTP client tuning
Use a shared client with connection pooling:
// client is a shared HTTP client with connection pooling and a hard
// per-request timeout, reused by all goroutines instead of the
// zero-timeout http.DefaultClient.
var client = &http.Client{
	Transport: &http.Transport{
		MaxIdleConns:        100,              // total idle conns kept across all hosts
		MaxIdleConnsPerHost: 50,               // idle conns kept per API host
		IdleConnTimeout:     30 * time.Second, // recycle idle conns after this
	},
	Timeout: 15 * time.Second, // end-to-end cap per request, incl. body read
}
Replace http.PostForm and http.Get with client.PostForm and client.Get.
Troubleshooting
| Problem | Cause | Fix |
|---|---|---|
| Connection reset | Too many concurrent connections | Lower maxConcurrent, tune MaxIdleConnsPerHost |
| Goroutine leak | No timeout on HTTP calls | Set http.Client.Timeout |
| All tasks timeout | API under load | Increase poll loop from 24 to 36 iterations |
| Panic on closed channel | Writing to closed results channel | Ensure close() is called after all writes |
FAQ
How many goroutines can I run?
Go supports thousands of goroutines. The limiting factor is CaptchaAI's concurrency limit, not Go. Use the semaphore pattern to match your account's capacity.
Should I use goroutines or the worker pool pattern?
Use goroutines with semaphore for simple batches. Use the worker pool for long-running services where tasks arrive continuously.
How does this compare to Python asyncio?
Go goroutines are lighter than Python coroutines and handle more concurrent I/O with less overhead. For pure HTTP-based CAPTCHA solving, Go is typically 2-3x faster at high concurrency.
Start high-throughput CAPTCHA solving with CaptchaAI
Get your API key at captchaai.com.
Discussions (0)
Join the conversation
Sign in to share your opinion.
Sign In. No comments yet.