HTTP Proxy
HTTP proxy is the most commonly used proxy protocol for AdminCloud, supporting HTTP and HTTPS traffic, suitable for most web scraping and API calling scenarios.
Basic Configuration
Proxy Address
Server: gtx.paopaous.net
Port: 38082
Authentication Format (k:v-k:v)
Username: myuser:password-ip:acpsres-c:US
Password: (leave empty or any value)
Usage Examples
Curl Command
curl -x gtx.paopaous.net:38082 \
--proxy-user "myuser:password-ip:acpsres-c:US" \
-L https://httpbin.org/ip
Python Code
import requests
from bs4 import BeautifulSoup
import urllib3
# Disable SSL warnings (use with caution in production)
urllib3.disable_warnings(urllib3.exceptions.InsecureRequestWarning)
# Proxy configuration
proxy_host = "gtx.paopaous.net"
proxy_port = 38082
proxy_user = "myuser:password-ip:acpsres-c:US"
# Set proxy
proxies = {
"http": f"http://{proxy_user}@{proxy_host}:{proxy_port}",
"https": f"http://{proxy_user}@{proxy_host}:{proxy_port}"
}
# Basic IP test
try:
response = requests.get("https://httpbin.org/ip", proxies=proxies)
print(f"Status code: {response.status_code}")
print(f"IP address: {response.json()}")
except Exception as e:
print(f"IP test failed: {e}")
# Web scraping example
try:
response = requests.get("https://example.com", proxies=proxies, verify=False)
print(f"Status code: {response.status_code}")
if response.status_code == 200:
soup = BeautifulSoup(response.content, 'html.parser')
title = soup.find('title')
if title:
print(f"Page title: {title.get_text()}")
# Extract all links
links = soup.find_all('a')
print(f"Found {len(links)} links")
for link in links[:5]: # Show first 5 links
href = link.get('href')
text = link.get_text().strip()
if href and text:
print(f"- {text}: {href}")
except Exception as e:
print(f"Web scraping failed: {e}")
# API call example
try:
headers = {
"User-Agent": "Mozilla/5.0 (Windows NT 10.0; Win64; x64) AppleWebKit/537.36"
}
response = requests.get("https://httpbin.org/headers",
proxies=proxies,
headers=headers,
verify=False)
print(f"API status code: {response.status_code}")
print(f"Response headers: {response.json()}")
except Exception as e:
print(f"API call failed: {e}")
Node.js Code
const axios = require('axios');
const { HttpsProxyAgent } = require('https-proxy-agent');
// Proxy configuration
const proxyUrl = 'http://myuser:password-ip:acpsres-c:US@gtx.paopaous.net:38082';
const httpsAgent = new HttpsProxyAgent(proxyUrl);
// Basic IP test
// Fetch the current egress IP through the proxy and print status + payload.
async function testIP() {
    try {
        const res = await axios.get('https://httpbin.org/ip', { httpsAgent });
        console.log(`Status code: ${res.status}`);
        console.log(`IP address: ${JSON.stringify(res.data)}`);
    } catch (err) {
        console.error(`IP test failed: ${err.message}`);
    }
}
// Web content fetching
// Retrieve example.com via the proxy; report status, <title> text, and body size.
async function fetchWebPage() {
    const options = {
        httpsAgent,
        headers: {
            'User-Agent': 'Mozilla/5.0 (Windows NT 10.0; Win64; x64) AppleWebKit/537.36'
        }
    };
    try {
        const res = await axios.get('https://example.com', options);
        console.log(`Status code: ${res.status}`);
        // Crude title extraction — fine for a demo, not a general HTML parser.
        const title = res.data.match(/<title>(.*?)<\/title>/)?.[1] || 'Not found';
        console.log(`Page title: ${title}`);
        console.log(`Content length: ${res.data.length} characters`);
    } catch (err) {
        console.error(`Web page fetch failed: ${err.message}`);
    }
}
// API call test
// Call httpbin's header-echo endpoint through the proxy to inspect sent headers.
async function testAPI() {
    const requestConfig = {
        httpsAgent,
        headers: {
            'Content-Type': 'application/json',
            'User-Agent': 'AdminCloud-Proxy/1.0'
        }
    };
    try {
        const res = await axios.get('https://httpbin.org/headers', requestConfig);
        console.log(`API status code: ${res.status}`);
        console.log(`Request headers: ${JSON.stringify(res.data.headers, null, 2)}`);
    } catch (err) {
        console.error(`API call failed: ${err.message}`);
    }
}
// Run all tests
// Run every proxy check in order, separating consecutive checks with a divider.
async function runTests() {
    console.log('=== Proxy Tests Started ===');
    const checks = [testIP, fetchWebPage, testAPI];
    for (let i = 0; i < checks.length; i++) {
        if (i > 0) console.log('\n---');
        await checks[i]();
    }
    console.log('\n=== Tests Completed ===');
}
runTests();
Parameter Details
Username Parameters (k:v-k:v format)
myuser:password-ip:acpsres-c:US
| Key-Value Pair | Description | Example | Required |
|---|---|---|---|
| myuser:password | Username and password | john_doe:abc123 | ✅ |
| ip:acpsres | Product type identifier | ip:acpsres, ip:acpsstatic, ip:acpsdc | ✅ |
| c:US | Country code | c:US, c:UK, c:JP | ✅ |
| id:12345 | Session ID | id:any_string | ❌ |
Supported Product Types
| Product Identifier | Description | Use Case |
|---|---|---|
| acpsres | ACPS Dynamic Residential IP | Web scraping, data collection |
| acpsstatic | ACPS Static Residential IP | Scenarios requiring fixed IP |
| acpsdc | ACPS Datacenter IP | High performance, low cost |
Supported Country Codes
| Region | Country Codes | Region | Country Codes |
|---|---|---|---|
| North America | US, CA | Europe | GB, DE, FR, IT, ES, NL |
| Asia | JP, KR, SG, HK, TW | Others | AU, BR, IN |
Advanced Configuration
Session Persistence
By default, each request may use a different IP. To maintain a session (use the same IP), add a session ID:
curl -x gtx.paopaous.net:38082 \
--proxy-user "myuser:password-ip:acpsres-c:US-id:12345" \
-L https://httpbin.org/ip
Rotation Strategies
- Default: Rotate IP for each request
- Session persistence: Same IP for same session ID
- Time-based rotation: Rotate at time intervals (contact customer service)
Common Application Scenarios
Web Scraping
import requests
from bs4 import BeautifulSoup
proxies = {
"http": "http://myuser:password-ip:acpsres-c:US@gtx.paopaous.net:38082",
"https": "http://myuser:password-ip:acpsres-c:US@gtx.paopaous.net:38082"
}
response = requests.get("https://example.com", proxies=proxies)
soup = BeautifulSoup(response.content, 'html.parser')
API Calls
import requests
proxies = {
"http": "http://myuser:password-ip:acpsres-c:US@gtx.paopaous.net:38082",
"https": "http://myuser:password-ip:acpsres-c:US@gtx.paopaous.net:38082"
}
headers = {
"User-Agent": "Mozilla/5.0 (Windows NT 10.0; Win64; x64) AppleWebKit/537.36"
}
response = requests.get("https://api.example.com/data",
proxies=proxies,
headers=headers)
Error Handling
407 Proxy Authentication Required
Cause: Authentication information error or missing Solution: Check username and password format
502 Bad Gateway
Cause: Proxy server upstream connection failed Solution: Try again later or contact customer service
Connection Timeout
Cause: Network or proxy server latency Solution: Increase timeout value
import requests
proxies = {
"http": "http://myuser:password-ip:acpsres-c:US@gtx.paopaous.net:38082",
"https": "http://myuser:password-ip:acpsres-c:US@gtx.paopaous.net:38082"
}
try:
response = requests.get("https://httpbin.org/ip",
proxies=proxies,
timeout=30)
print(response.json())
except requests.exceptions.ProxyError as e:
print(f"Proxy error: {e}")
except requests.exceptions.Timeout as e:
print(f"Request timeout: {e}")
Performance Optimization
Connection Pool Configuration
import requests
from requests.adapters import HTTPAdapter
session = requests.Session()
session.mount('http://', HTTPAdapter(max_retries=3, pool_connections=10))
session.mount('https://', HTTPAdapter(max_retries=3, pool_connections=10))
proxies = {
"http": "http://myuser:password-ip:acpsres-c:US@gtx.paopaous.net:38082",
"https": "http://myuser:password-ip:acpsres-c:US@gtx.paopaous.net:38082"
}
response = session.get("https://httpbin.org/ip", proxies=proxies)
Concurrent Requests
import concurrent.futures
import requests
def fetch_url(url):
    """Fetch *url* through the AdminCloud HTTP proxy and return the response."""
    endpoint = "http://myuser:password-ip:acpsres-c:US@gtx.paopaous.net:38082"
    return requests.get(url, proxies={"http": endpoint, "https": endpoint})
urls = ["https://httpbin.org/ip"] * 10
with concurrent.futures.ThreadPoolExecutor(max_workers=5) as executor:
futures = [executor.submit(fetch_url, url) for url in urls]
for future in concurrent.futures.as_completed(futures):
print(future.result().json())
const axios = require('axios');
// Proxy configuration
const proxyConfig = {
  protocol: 'http',
  host: 'gtx.paopaous.net',
  port: 38082,
  auth: { username: 'myuser:ip:res-c:US', password: 'your_password_here' }
};
// Send request
axios.get('https://httpbin.org/ip', { proxy: proxyConfig })
  .then(response => console.log(response.data))
  .catch(error => console.error(error));
## Parameter Details
### Username Parameters
myuser:ip:res-c:US
| Parameter | Description | Example |
|-----------|-------------|---------|
| myuser | Your username | john_doe |
| ip | Fixed identifier | ip |
| res-c | Dynamic residential IP type | res-c |
| US | Country code | US, UK, JP, DE |
### Supported Country Codes
| Code | Country | Code | Country |
|------|---------|------|---------|
| US | United States | JP | Japan |
| UK | United Kingdom | DE | Germany |
| CA | Canada | FR | France |
| AU | Australia | SG | Singapore |
| NL | Netherlands | IT | Italy |
| ES | Spain | SE | Sweden |
## Advanced Configuration
### Session Persistence
By default, each request might use different IPs. To maintain a session (use same IP), add session ID to username:
```bash
curl -x gtx.paopaous.net:38082 \
--proxy-user "myuser:ip:res-c:US:sid-12345:your_password_here" \
-L https://httpbin.org/ip
```
Rotation Strategies
- Default: Rotate IP for each request
- Session Persistence: Same session ID uses same IP
- Time-based Rotation: Rotate by time interval (contact customer service to configure)
Common Application Scenarios
Web Scraping
import requests
from bs4 import BeautifulSoup
proxies = {
"http": "http://myuser:ip:res-c:US:your_password_here@gtx.paopaous.net:38082",
"https": "http://myuser:ip:res-c:US:your_password_here@gtx.paopaous.net:38082"
}
response = requests.get("https://example.com", proxies=proxies)
soup = BeautifulSoup(response.content, 'html.parser')
API Calls
import requests
proxies = {
"http": "http://myuser:ip:res-c:US:your_password_here@gtx.paopaous.net:38082",
"https": "http://myuser:ip:res-c:US:your_password_here@gtx.paopaous.net:38082"
}
headers = {
"User-Agent": "Mozilla/5.0 (Windows NT 10.0; Win64; x64) AppleWebKit/537.36"
}
response = requests.get("https://api.example.com/data",
proxies=proxies,
headers=headers)
Error Handling
Common Errors and Solutions
407 Proxy Authentication Required
Cause: Authentication information error Solution: Check username and password format
502 Bad Gateway
Cause: Proxy server issue Solution: Retry later or contact customer service
Connection Timeout
Cause: Network or proxy server delay Solution: Increase timeout
import requests
proxies = {
"http": "http://myuser:ip:res-c:US:your_password_here@gtx.paopaous.net:38082",
"https": "http://myuser:ip:res-c:US:your_password_here@gtx.paopaous.net:38082"
}
try:
response = requests.get("https://httpbin.org/ip",
proxies=proxies,
timeout=30)
print(response.json())
except requests.exceptions.ProxyError as e:
print(f"Proxy error: {e}")
except requests.exceptions.Timeout as e:
print(f"Request timeout: {e}")
Performance Optimization
Connection Pool Configuration
import requests
from requests.adapters import HTTPAdapter
session = requests.Session()
session.mount('http://', HTTPAdapter(max_retries=3, pool_connections=10))
session.mount('https://', HTTPAdapter(max_retries=3, pool_connections=10))
proxies = {
"http": "http://myuser:ip:res-c:US:your_password_here@gtx.paopaous.net:38082",
"https": "http://myuser:ip:res-c:US:your_password_here@gtx.paopaous.net:38082"
}
response = session.get("https://httpbin.org/ip", proxies=proxies)
Concurrent Requests
import concurrent.futures
import requests
def fetch_url(url):
    """Issue a GET for *url* via the proxy; returns the requests Response."""
    proxy_uri = "http://myuser:ip:res-c:US:your_password_here@gtx.paopaous.net:38082"
    return requests.get(url, proxies={"http": proxy_uri, "https": proxy_uri})
urls = ["https://httpbin.org/ip"] * 10
with concurrent.futures.ThreadPoolExecutor(max_workers=5) as executor:
futures = [executor.submit(fetch_url, url) for url in urls]
for future in concurrent.futures.as_completed(futures):
print(future.result().json())
Next: SOCKS5 Proxy or Authentication