logo

Super (Residential & Mobile)

Using residential & mobile proxies

Every proxy is actually an IP address. Each IP address has ASN (Autonomous System Number) information. Anti-bot solutions can block you based on your IP quality using this ASN information. Scrape.do offers two different types of proxies:

Datacenter: Inexpensive, but more likely to be blocked by advanced anti-bot solutions. We have over 90,000 rotating datacenter proxy addresses.

Residential & Mobile: More expensive, but far less detectable by anti-bot services. More than 95,000,000 proxies are hosted in our Residential & Mobile rotating proxy pool. To use them, simply pass the super=true parameter. With the Geo Targeting feature, you can also target the country that the target website serves, increasing your success rate even further.

  • If you do not use this feature, the system will use Datacenter Proxy by default.
  • With Residential & Mobile Proxy, each successful request consumes 10 API credits. If Headless Browser is used, each successful request will consume 25 API credits.
  • If you do not use Geo Targeting when you use Residential & Mobile proxy, our system will send requests via United States (geoCode=us) by default.
  • Our proxy pools are regularly checked, rotated, and performance monitored. They can be customized specifically for target websites or customer needs. We work hard to create the best quality and fastest proxies in the world.

Important: The Super proxy infrastructure requires a Business Plan or above!

You can check credit usage for each request type in the Request Costs section.

Example Usage

# Route the request through the Residential & Mobile proxy pool by adding super=true.
curl --location --request GET 'https://api.scrape.do/?token=YOUR_TOKEN&url=https://httpbin.co/anything&super=true'
import requests
import urllib.parse

# Scrape.do API token and the page to fetch through the proxy pool.
token = "YOUR_TOKEN"
# URL-encode the target so it is safe to embed as a query-string parameter.
target_url = urllib.parse.quote("https://httpbin.co/anything")
# super=true routes the request through the Residential & Mobile proxy pool.
super_param = "true"
# Use https (the other examples do too); plain http would leak the token in transit.
url = "https://api.scrape.do/?token={}&url={}&super={}".format(token, target_url, super_param)
response = requests.get(url)
print(response.text)
// Fetch the target through Scrape.do's Residential & Mobile proxy pool.
const axios = require('axios');

const token = "YOUR_TOKEN";
// Encode the target so it survives being a query-string parameter.
const targetUrl = encodeURIComponent("https://httpbin.co/anything");
// super=true selects the Residential & Mobile proxy pool.
const superParam = "true";
const requestUrl = `https://api.scrape.do/?token=${token}&url=${targetUrl}&super=${superParam}`;

axios({ method: 'GET', url: requestUrl, headers: {} })
    .then((response) => {
        console.log(response.data);
    })
    .catch((error) => {
        console.log(error);
    });
package main

import (
	"fmt"
	"io"
	"net/http"
	"net/url"
)

// main sends a GET request through Scrape.do's Residential & Mobile
// (super=true) proxy pool and prints the response body.
func main() {
	token := "YOUR_TOKEN"
	// URL-encode the target so it is safe inside a query string.
	encodedURL := url.QueryEscape("https://httpbin.co/anything")
	// Named apiURL to avoid shadowing the imported net/url package.
	apiURL := fmt.Sprintf("https://api.scrape.do/?token=%s&url=%s&super=true", token, encodedURL)

	req, err := http.NewRequest(http.MethodGet, apiURL, nil)
	if err != nil {
		fmt.Println(err)
		return
	}

	client := &http.Client{}
	res, err := client.Do(req)
	if err != nil {
		fmt.Println(err)
		return
	}
	defer res.Body.Close()

	// io.ReadAll replaces the deprecated ioutil.ReadAll (Go 1.16+).
	body, err := io.ReadAll(res.Body)
	if err != nil {
		fmt.Println(err)
		return
	}
	fmt.Println(string(body))
}
require "uri"
require "net/http"
require "cgi"

# URL-encode the target so it is safe inside a query string.
encoded_target = CGI.escape("https://httpbin.co/anything")
# super=true routes the request through the Residential & Mobile proxy pool.
endpoint = URI("https://api.scrape.do/?url=" + encoded_target + "&token=YOUR_TOKEN&super=true")

http = Net::HTTP.new(endpoint.host, endpoint.port)
http.use_ssl = true

get_request = Net::HTTP::Get.new(endpoint)
puts http.request(get_request).read_body
// Fetch the target through Scrape.do's Residential & Mobile proxy pool.
OkHttpClient client = new OkHttpClient().newBuilder()
  .build();
// URL-encode the target so it is safe inside a query string.
String encoded_url = URLEncoder.encode("https://httpbin.co/anything", "UTF-8");
// Use .get(): OkHttp throws IllegalArgumentException ("method GET must not
// have a request body") when .method("GET", body) is given a non-null body,
// so the previous empty RequestBody made this example crash at runtime.
Request request = new Request.Builder()
  .url("https://api.scrape.do/?token=YOUR_TOKEN&url=" + encoded_url + "&super=true")
  .get()
  .build();
Response response = client.newCall(request).execute();
// Fetch the target through Scrape.do's Residential & Mobile proxy pool.
string token = "YOUR_TOKEN";
// URL-encode the target so it is safe inside a query string.
string url = WebUtility.UrlEncode("https://httpbin.co/anything");

var client = new HttpClient();
// super=true selects the Residential & Mobile proxy pool.
var requestUrl = $"https://api.scrape.do/?token={token}&url={url}&super=true";

var message = new HttpRequestMessage(HttpMethod.Get, requestUrl);
var result = client.SendAsync(message).Result;
Console.WriteLine(result.Content.ReadAsStringAsync().Result);

On this page