Scrape.do Documentation

You can find all the information you need to get started with Scrape.do in our documentation.

Quick Start

Scrape.do is a service that allows you to access raw data before the target website realizes it is receiving bot traffic, bypassing the blocking problems you experience while scraping your target website, thanks to its rotating proxy, headless browser and captcha handling mechanisms. Every request you send is upgraded so that it matches the requests of a real user. The firewalls of the target websites have a hard time distinguishing these requests, and your success rate will thus exceed 99%.

With Scrape.do, you can use a datacenter, residential or mobile proxy from the region or country you want, manage the entire process, including headless browsers, according to the needs of the target website, and save you from spending your time on crawling processes.

  • Our service will return you raw data from the target web page. Therefore, if the target url returns you JSON, our service will return JSON, if it returns HTML, our service will return HTML.
  • Your API credits will only be used for successful requests. For more details, please visit the Status Codes and Request Costs area.
  • If you want to learn more about how Scrape.do works, you can take a look at our explanations in the How it works? section and understand what it does technically.

Example Code Snippet

curl --location --request GET 'https://api.scrape.do?token=YOUR_TOKEN&url=https://httpbin.co/anything'
curl -k -x "http://YOUR_TOKEN:@proxy.scrape.do:8080" 'https://httpbin.co/anything' -v
import requests
import urllib.parse
token = "YOUR_TOKEN"
targetUrl = urllib.parse.quote("https://httpbin.co/anything")
url = "http://api.scrape.do?token={}&url={}".format(token, targetUrl)
response = requests.request("GET", url)
print(response.text)
import requests
import urllib3
urllib3.disable_warnings(urllib3.exceptions.InsecureRequestWarning) 
url = "https://httpbin.co/anything"
token = "YOUR_TOKEN"
proxyModeUrl = "http://{}:@proxy.scrape.do:8080".format(token)
proxies = {
    "http": proxyModeUrl,
    "https": proxyModeUrl,
}
response = requests.request("GET", url, proxies=proxies, verify=False)
print(response.text)
var axios = require('axios');
var token = "YOUR_TOKEN";
var targetUrl = encodeURIComponent("https://httpbin.co/anything"); 
var config = {
    'method': 'GET',
    'url': `https://api.scrape.do?token=${token}&url=${targetUrl}`,
    'headers': {}
};
axios(config)
    .then(function (response) {
        console.log(response.data);
    })
    .catch(function (error) {
        console.log(error);
    });
const axios = require('axios');
const token = "YOUR_TOKEN";
const targetUrl = "https://httpbin.co/anything";

axios({
    method:"GET",
    url:targetUrl,
    proxy: {
        protocol:'http',
        host: 'proxy.scrape.do',
        port: 8080,
        auth: {
            username: token,
            password: ''
        }
    }
})
    .then(response => {
        console.log(response.data);
    })
    .catch(error => {
        console.error(error.message);
    });
string token = "YOUR_TOKEN";
string url = WebUtility.UrlEncode("https://httpbin.co/anything");
var client = new HttpClient();
var requestURL = $"https://api.scrape.do?token={token}&url={url}";        
var request = new HttpRequestMessage(HttpMethod.Get, requestURL);
var response = client.SendAsync(request).Result;
var content = response.Content.ReadAsStringAsync().Result;
Console.WriteLine(content);
// Proxy mode: the API token is passed as the proxy username (empty password).
string token = "YOUR_TOKEN";
string url = "https://httpbin.co/anything";
var proxy = new WebProxy
{
    Address = new Uri("http://proxy.scrape.do:8080"),
    Credentials = new NetworkCredential(token, "")
};
var request = new HttpRequestMessage(HttpMethod.Get, url);
var handler = new HttpClientHandler
{
    Proxy = proxy,
    UseProxy = true
};
// The proxy re-encrypts TLS traffic, so server certificate validation is disabled.
handler.ServerCertificateCustomValidationCallback =
    (sender, cert, chain, sslPolicyErrors) => { return true; };
var client = new HttpClient(handler);
var response = client.SendAsync(request).Result;   // fixed typo: was "respponse"
var content = response.Content.ReadAsStringAsync().Result;
Console.WriteLine(content);
<?php
$curl = curl_init();
curl_setopt($curl, CURLOPT_RETURNTRANSFER, true);
curl_setopt($curl, CURLOPT_HEADER, false);
$data = [
   "url" => "https://httpbin.co/anything",
   "token" => "YOUR_TOKEN",
];
curl_setopt($curl, CURLOPT_CUSTOMREQUEST, 'GET');
curl_setopt($curl, CURLOPT_URL, "https://api.scrape.do?".http_build_query($data));
curl_setopt($curl, CURLOPT_HTTPHEADER, array(
    "Accept: */*",
));
$response = curl_exec($curl);
curl_close($curl);
echo $response;
?>
<?php
$curl = curl_init();
curl_setopt($curl, CURLOPT_RETURNTRANSFER, true);
curl_setopt($curl, CURLOPT_HEADER, false);
curl_setopt($curl, CURLOPT_SSL_VERIFYHOST, 0);
curl_setopt($curl, CURLOPT_SSL_VERIFYPEER, 0);
$url = "https://httpbin.co/anything";
$token = "YOUR_TOKEN";
$proxy = sprintf("http://%s:@proxy.scrape.do:8080", $token);
curl_setopt($curl, CURLOPT_URL, $url);
curl_setopt($curl, CURLOPT_CUSTOMREQUEST, 'GET');
curl_setopt($curl, CURLOPT_PROXY, $proxy);
curl_setopt($curl, CURLOPT_HTTPHEADER, array(
    "Accept: */*",
));
$response = curl_exec($curl);
curl_close($curl);
echo $response;
?>
// API mode with OkHttp. Note: OkHttp throws IllegalArgumentException for a GET
// request that carries a request body, so use .get() instead of .method("GET", body).
OkHttpClient client = new OkHttpClient().newBuilder()
  .build();
// The target url must be URL-encoded before being appended as a query parameter.
String encoded_url = URLEncoder.encode("https://httpbin.co/anything", "UTF-8");
Request request = new Request.Builder()
  .url("https://api.scrape.do?token=YOUR_TOKEN&url=" + encoded_url)
  .get()
  .build();
Response response = client.newCall(request).execute();
import java.net.URI;
import java.util.Base64;
import org.apache.hc.client5.http.fluent.Request;
import org.apache.hc.core5.http.HttpHost;
// Proxy mode with Apache HttpClient 5: the token travels as the proxy user-info,
// and is also sent explicitly as a Proxy-Authorization header.
public class TestRequest {
    public static void main(final String... args) throws Exception {
        String url = "https://httpbin.co/anything";
        // Token as proxy username, empty password (the ":" before "@").
        URI proxyURI = new URI("http://YOUR_TOKEN:@proxy.scrape.do:8080");
        // Base64-encode "user:pass" for HTTP Basic auth.
        // NOTE(review): getBytes() uses the platform default charset — presumably
        // fine for ASCII tokens, but StandardCharsets.UTF_8 would be safer; confirm.
        String basicAuth = new String(
            Base64.getEncoder()
            .encode(
                proxyURI.getUserInfo().getBytes()
            ));
        String response = Request.get(url)
                .addHeader("Proxy-Authorization", "Basic " + basicAuth)
                .viaProxy(HttpHost.create(proxyURI))
                .execute().returnContent().asString();
        System.out.println(response);
    }
}
package main

import (
	"fmt"
	"io/ioutil"
	"net/http"
	"net/url"
)

// API mode: the target url is escaped and passed as a query parameter.
func main() {
	token := "YOUR_TOKEN"
	// Escape the target url so it does not overflow the query string.
	encodedURL := url.QueryEscape("https://httpbin.co/anything")
	// Named reqURL (not "url") so the imported net/url package is not shadowed.
	reqURL := fmt.Sprintf("https://api.scrape.do?token=%s&url=%s", token, encodedURL)
	client := &http.Client{}
	req, err := http.NewRequest("GET", reqURL, nil)
	if err != nil {
		fmt.Println(err)
		return
	}
	res, err := client.Do(req)
	if err != nil {
		fmt.Println(err)
		return
	}
	defer res.Body.Close()
	body, err := ioutil.ReadAll(res.Body)
	if err != nil {
		fmt.Println(err)
		return
	}
	fmt.Println(string(body))
}
package main

import (
	"crypto/tls"
	"fmt"
	"io/ioutil"
	"net/http"
	"net/url"
)

// Proxy mode: the API token is supplied as the proxy username.
func main() {
	proxyStr := "http://YOUR_TOKEN:@proxy.scrape.do:8080"
	proxyURL, err := url.Parse(proxyStr)
	if err != nil {
		fmt.Println(err)
		return
	}
	transport := &http.Transport{
		Proxy: http.ProxyURL(proxyURL),
		// The proxy re-encrypts TLS traffic, so verification must be skipped.
		TLSClientConfig: &tls.Config{InsecureSkipVerify: true},
	}
	client := &http.Client{
		Transport: transport,
	}
	req, err := http.NewRequest("GET", "https://httpbin.co/anything", nil)
	if err != nil {
		fmt.Println(err)
		return
	}
	// Removed req.ParseForm(): it is for server-side request parsing and is
	// meaningless on an outgoing GET with no body.
	resp, err := client.Do(req)
	if err != nil {
		// Must return here: resp is nil on error and dereferencing it panics.
		fmt.Println("Failure : ", err)
		return
	}
	defer resp.Body.Close()
	respBody, err := ioutil.ReadAll(resp.Body)
	if err != nil {
		fmt.Println(err)
		return
	}
	fmt.Println("response Status : ", resp.Status)
	fmt.Println("response Headers : ", resp.Header)
	fmt.Println("response Body : ", string(respBody))
}
require "uri"
require "net/http"
require 'cgi'
str =  CGI.escape "https://httpbin.co/anything"
url = URI("https://api.scrape.do?url=" + str + "&token=YOUR_TOKEN")
https = Net::HTTP.new(url.host, url.port)
https.use_ssl = true
request = Net::HTTP::Get.new(url)
response = https.request(request)
puts response.read_body
require 'httparty'
# TLS verification is disabled because the proxy re-encrypts the traffic.
HTTParty::Basement.default_options.update(verify: false)
# Classic (GET )
def send_request
    res = HTTParty.get('https://httpbin.co/anything?json', {
      http_proxyaddr: "proxy.scrape.do",
      http_proxyport: "8080",
      http_proxyuser: "YOUR_TOKEN",
      http_proxypass: ""
    })
    puts "Response HTTP Status Code: #{ res.code }"
    puts "Response HTTP Response Body: #{ res.body }"
    # Fixed label and accessor: this line prints the response headers.
    puts "Response HTTP Response Headers: #{ res.headers }"
rescue StandardError => e
    puts "HTTP Request failed (#{ e.message })"
end
send_request()

How it works?

Scrape.do is an advanced API service that bypasses anti-bot protection systems by performing advanced fingerprint manipulations through residential, mobile and datacenter rotating proxies and allows scraping the data you need on the target web page.

It simulates the web scraping request you send to the requests made by a real user, including the header information, starting from the TLS handshake process so that the target website is not detected by the firewall systems. You don’t have to deal with low-level bypass solutions and anti-bot systems.

Scrape.do is not just a proxy service. It handles the challenges target websites pose — such as blocking, captcha solving and headless-browser requirements — on your behalf.

It undertakes all the difficulties experienced by companies or people who need data while extracting data and allows you to do your main job. It is a system that we have designed to have a teammate who solves all the crawling stages on your behalf, like an employee from the team, for all problems in your scraping infrastructure.


Overview

You can view all the parameters of Scrape.do from the list below and have an overview of all of them quickly.

Parameter Type Default Description Details
token * string The token to use for authentication. More
url * string Target web page url. More
super bool false Use Residential & Mobile Proxy Networks More
geoCode string Choose right country for your target web page More
regionalGeoCode string Choose continent for your target web page More
sessionId int Use the same IP address continuously with using a session More
customHeaders bool false Handle all request headers for the target web page. More
extraHeaders bool false Use it if you want to change header values or if you want to add a new header over the ones we have added More
forwardHeaders bool false Allows you to forward your own headers to the target website More
setCookies string Set cookies for the target web page. More
disableRedirection bool false Disable request redirection for your use-case More
callback string Get results via a webhook without waiting for your requests More
timeout int 55000 Set maximum timeout for your requests More
retryTimeout int 15000 Set maximum timeout for our retry mechanism More
disableRetry bool false Disable retry mechanism for your use-case More
device string desktop It is the parameter that allows you to specify the device type. More
render bool false It is the parameter that allows you to render the target web page in a headless browser. More
waitUntil string domcontentloaded With this parameter, you can change this property and make the page load correctly according to your needs. More
customWait int 0 Sets the browser is waiting time on the target web page after content loaded More
waitSelector string CSS selector to wait for in the target web page More
width int 1920 Width, parameter in pixels of the browser More
height int 1080 Height, parameter in pixels of the browser More
blockResources bool true Block CSS and Image sources on your target web page More
screenShot bool false Return a screenshot from your target web page More
fullScreenShot bool false Return a full screenshot from your target web page More
particularScreenShot string Return a screenshot of a particular area from your target web page More
playWithBrowser string You can simulate browser actions like click, scroll, execute js etc. More
transparentResponse bool false Disable Scrape.do status code, body, error handling mechanism and return pure response from target web page for all requests More

Token

* required
token = string

All your requests are authorized with the token information of your account. Therefore, you need to own a token. If you do not have an account, you can immediately become a member here and obtain token information.

Example Code Snippet

curl --location --request GET 'https://api.scrape.do?token=YOUR_TOKEN&url=https://httpbin.co/anything'
curl -k -x "http://YOUR_TOKEN:@proxy.scrape.do:8080" 'https://httpbin.co/anything' -v
import requests
import urllib.parse
token = "YOUR_TOKEN"
targetUrl = urllib.parse.quote("https://httpbin.co/anything")
url = "http://api.scrape.do?token={}&url={}".format(token, targetUrl)
response = requests.request("GET", url)
print(response.text)
import requests
import urllib3
urllib3.disable_warnings(urllib3.exceptions.InsecureRequestWarning) 
url = "https://httpbin.co/anything"
token = "YOUR_TOKEN"
proxyModeUrl = "http://{}:@proxy.scrape.do:8080".format(token)
proxies = {
    "http": proxyModeUrl,
    "https": proxyModeUrl,
}
response = requests.request("GET", url, proxies=proxies, verify=False)
print(response.text)
var axios = require('axios');
var token = "YOUR_TOKEN";
var targetUrl = encodeURIComponent("https://httpbin.co/anything"); 
var config = {
    'method': 'GET',
    'url': `https://api.scrape.do?token=${token}&url=${targetUrl}`,
    'headers': {}
};
axios(config)
    .then(function (response) {
        console.log(response.data);
    })
    .catch(function (error) {
        console.log(error);
    });
const axios = require('axios');
const token = "YOUR_TOKEN";
const targetUrl = "https://httpbin.co/anything";

axios({
    method:"GET",
    url:targetUrl,
    proxy: {
        protocol:'http',
        host: 'proxy.scrape.do',
        port: 8080,
        auth: {
            username: token,
            password: ''
        }
    }
})
    .then(response => {
        console.log(response.data);
    })
    .catch(error => {
        console.error(error.message);
    });
string token = "YOUR_TOKEN";
string url = WebUtility.UrlEncode("https://httpbin.co/anything");
var client = new HttpClient();
var requestURL = $"https://api.scrape.do?token={token}&url={url}";        
var request = new HttpRequestMessage(HttpMethod.Get, requestURL);
var response = client.SendAsync(request).Result;
var content = response.Content.ReadAsStringAsync().Result;
Console.WriteLine(content);
// Proxy mode: the API token is passed as the proxy username (empty password).
string token = "YOUR_TOKEN";
string url = "https://httpbin.co/anything";
var proxy = new WebProxy
{
    Address = new Uri("http://proxy.scrape.do:8080"),
    Credentials = new NetworkCredential(token, "")
};
var request = new HttpRequestMessage(HttpMethod.Get, url);
var handler = new HttpClientHandler
{
    Proxy = proxy,
    UseProxy = true
};
// The proxy re-encrypts TLS traffic, so server certificate validation is disabled.
handler.ServerCertificateCustomValidationCallback =
    (sender, cert, chain, sslPolicyErrors) => { return true; };
var client = new HttpClient(handler);
var response = client.SendAsync(request).Result;   // fixed typo: was "respponse"
var content = response.Content.ReadAsStringAsync().Result;
Console.WriteLine(content);
<?php
$curl = curl_init();
curl_setopt($curl, CURLOPT_RETURNTRANSFER, true);
curl_setopt($curl, CURLOPT_HEADER, false);
$data = [
   "url" => "https://httpbin.co/anything",
   "token" => "YOUR_TOKEN",
];
curl_setopt($curl, CURLOPT_CUSTOMREQUEST, 'GET');
curl_setopt($curl, CURLOPT_URL, "https://api.scrape.do?".http_build_query($data));
curl_setopt($curl, CURLOPT_HTTPHEADER, array(
    "Accept: */*",
));
$response = curl_exec($curl);
curl_close($curl);
echo $response;
?>
<?php
$curl = curl_init();
curl_setopt($curl, CURLOPT_RETURNTRANSFER, true);
curl_setopt($curl, CURLOPT_HEADER, false);
curl_setopt($curl, CURLOPT_SSL_VERIFYHOST, 0);
curl_setopt($curl, CURLOPT_SSL_VERIFYPEER, 0);
$url = "https://httpbin.co/anything";
$token = "YOUR_TOKEN";
$proxy = sprintf("http://%s:@proxy.scrape.do:8080", $token);
curl_setopt($curl, CURLOPT_URL, $url);
curl_setopt($curl, CURLOPT_CUSTOMREQUEST, 'GET');
curl_setopt($curl, CURLOPT_PROXY, $proxy);
curl_setopt($curl, CURLOPT_HTTPHEADER, array(
    "Accept: */*",
));
$response = curl_exec($curl);
curl_close($curl);
echo $response;
?>
// API mode with OkHttp. Note: OkHttp throws IllegalArgumentException for a GET
// request that carries a request body, so use .get() instead of .method("GET", body).
OkHttpClient client = new OkHttpClient().newBuilder()
  .build();
// The target url must be URL-encoded before being appended as a query parameter.
String encoded_url = URLEncoder.encode("https://httpbin.co/anything", "UTF-8");
Request request = new Request.Builder()
  .url("https://api.scrape.do?token=YOUR_TOKEN&url=" + encoded_url)
  .get()
  .build();
Response response = client.newCall(request).execute();
import java.net.URI;
import java.util.Base64;
import org.apache.hc.client5.http.fluent.Request;
import org.apache.hc.core5.http.HttpHost;
public class TestRequest {
    public static void main(final String... args) throws Exception {
        String url = "https://httpbin.co/anything";
        URI proxyURI = new URI("http://YOUR_TOKEN:@proxy.scrape.do:8080");
        String basicAuth = new String(
            Base64.getEncoder()
            .encode(
                proxyURI.getUserInfo().getBytes()
            ));
        String response = Request.get(url)
                .addHeader("Proxy-Authorization", "Basic " + basicAuth)
                .viaProxy(HttpHost.create(proxyURI))
                .execute().returnContent().asString();
        System.out.println(response);
    }
}
package main

import (
	"fmt"
	"io/ioutil"
	"net/http"
	"net/url"
)

// API mode: the target url is escaped and passed as a query parameter.
func main() {
	token := "YOUR_TOKEN"
	// Escape the target url so it does not overflow the query string.
	encodedURL := url.QueryEscape("https://httpbin.co/anything")
	// Named reqURL (not "url") so the imported net/url package is not shadowed.
	reqURL := fmt.Sprintf("https://api.scrape.do?token=%s&url=%s", token, encodedURL)
	client := &http.Client{}
	req, err := http.NewRequest("GET", reqURL, nil)
	if err != nil {
		fmt.Println(err)
		return
	}
	res, err := client.Do(req)
	if err != nil {
		fmt.Println(err)
		return
	}
	defer res.Body.Close()
	body, err := ioutil.ReadAll(res.Body)
	if err != nil {
		fmt.Println(err)
		return
	}
	fmt.Println(string(body))
}
package main

import (
	"crypto/tls"
	"fmt"
	"io/ioutil"
	"net/http"
	"net/url"
)

// Proxy mode: the API token is supplied as the proxy username.
func main() {
	proxyStr := "http://YOUR_TOKEN:@proxy.scrape.do:8080"
	proxyURL, err := url.Parse(proxyStr)
	if err != nil {
		fmt.Println(err)
		return
	}
	transport := &http.Transport{
		Proxy: http.ProxyURL(proxyURL),
		// The proxy re-encrypts TLS traffic, so verification must be skipped.
		TLSClientConfig: &tls.Config{InsecureSkipVerify: true},
	}
	client := &http.Client{
		Transport: transport,
	}
	req, err := http.NewRequest("GET", "https://httpbin.co/anything", nil)
	if err != nil {
		fmt.Println(err)
		return
	}
	// Removed req.ParseForm(): it is for server-side request parsing and is
	// meaningless on an outgoing GET with no body.
	resp, err := client.Do(req)
	if err != nil {
		// Must return here: resp is nil on error and dereferencing it panics.
		fmt.Println("Failure : ", err)
		return
	}
	defer resp.Body.Close()
	respBody, err := ioutil.ReadAll(resp.Body)
	if err != nil {
		fmt.Println(err)
		return
	}
	fmt.Println("response Status : ", resp.Status)
	fmt.Println("response Headers : ", resp.Header)
	fmt.Println("response Body : ", string(respBody))
}
require "uri"
require "net/http"
require 'cgi'
str =  CGI.escape "https://httpbin.co/anything"
url = URI("https://api.scrape.do?url=" + str + "&token=YOUR_TOKEN")
https = Net::HTTP.new(url.host, url.port)
https.use_ssl = true
request = Net::HTTP::Get.new(url)
response = https.request(request)
puts response.read_body
require 'httparty'
# TLS verification is disabled because the proxy re-encrypts the traffic.
HTTParty::Basement.default_options.update(verify: false)
# Classic (GET )
def send_request
    res = HTTParty.get('https://httpbin.co/anything?json', {
      http_proxyaddr: "proxy.scrape.do",
      http_proxyport: "8080",
      http_proxyuser: "YOUR_TOKEN",
      http_proxypass: ""
    })
    puts "Response HTTP Status Code: #{ res.code }"
    puts "Response HTTP Response Body: #{ res.body }"
    # Fixed label and accessor: this line prints the response headers.
    puts "Response HTTP Response Headers: #{ res.headers }"
rescue StandardError => e
    puts "HTTP Request failed (#{ e.message })"
end
send_request()

URL

* required
& url = encoded target url

We are waiting for you to send the url information of the target website you want to scrape. If you are using API mode, you definitely need to encode the url parameter using the url encoder.

  • No need to encode url if you use Proxy Mode.
  • Supported protocols are HTTP and HTTPS

Why is URL encoding needed?

When you write a url in the query parameter and do not use the url encode, it overflows the query param and therefore causes us to perceive it as a different parameter. In this scenario, we cannot correctly read the url of the target site. In order not to allow this, we check all parameters on your behalf and in such a case we return you 400 status code.

Example Code Snippet

sudo apt-get install gridsite-clients
urlencode "YOUR_URL"
import urllib.parse
encoded_url = urllib.parse.quote("YOUR_URL")
let encoded_url = encodeURIComponent("YOUR_URL")
var encoded_url = System.Web.HttpUtility.UrlEncode("YOUR_URL")
<?php
$url_encoded = urlencode("YOUR_URL");
?>
String encoded_url = URLEncoder.encode("YOUR_URL", "UTF-8");
package main

import (
	"fmt"
	"net/url"
)

func main() {
	encoded_url := url.QueryEscape("YOUR_URL")
	fmt.Println(string(encoded_url))
}
require 'cgi'
encoded_url =  CGI.escape str

Proxy Settings

Super (Residential & Mobile)

& super = true
default = false

Every proxy is actually an IP address. Each IP address has an ASN (Autonomous System Number) information. Anti bot solutions can block you based on your IP quality because they have this ASN information. It offers two different types of proxies on Scrape.do.

Datacenter: Inexpensive but likely to be blocked by advanced bot solutions. We have a total of over 90.000+ datacenter rotating proxy addresses.

Residential & Mobile: More expensive but less detectable by anti-bot services. More than 95.000.000+ proxies are hosted in our Residential & Mobile rotating proxy pool. To use it, it is sufficient to pass super=True parameter. By using the Geo Targeting feature, you can target the country the target website serves and increase your success rates even more.

  • If you do not use this feature, the system will use Datacenter Proxy by default.
  • With Residential & Mobile Proxy, each successful request consumes 10 Successful API credits. If Headless Browsers is used, each successful request will consume 25 Successful API credits.
  • If you do not use Geo Targeting when you use Residential & Mobile proxy, our system will send request via United States geoCode=us by default.
  • Our proxy pools are regularly checked, rotated and performance monitored. It can be customized specifically for the target website or for customer needs. We work hard to create the best quality and fastest proxies in the world.

Important: Super proxy infrastructure requires a minimum of Business Plan and above!

You can check credit usage for each request type in the Request Costs section.

Example Code Snippet

curl --location --request GET 'https://api.scrape.do?token=YOUR_TOKEN&url=https://httpbin.co/anything&super=true'
curl -k -x "http://YOUR_TOKEN:[email protected]:8080" 'https://httpbin.co/anything' -v
import requests
import urllib.parse
token = "YOUR_TOKEN"
targetUrl = urllib.parse.quote("https://httpbin.co/anything")
superParam = "true"
url = "http://api.scrape.do?token={}&url={}&super={}".format(token, targetUrl,superParam)
response = requests.request("GET", url)
print(response.text)
import requests
import urllib3
urllib3.disable_warnings(urllib3.exceptions.InsecureRequestWarning) 
url = "https://httpbin.co/anything"
token = "YOUR_TOKEN"
proxyModeUrl = "http://{}:[email protected]:8080".format(token)
proxies = {
    "http": proxyModeUrl,
    "https": proxyModeUrl,
}
response = requests.request("GET", url, proxies=proxies, verify=False)
print(response.text)
var axios = require('axios');
var token = "YOUR_TOKEN";
var targetUrl = encodeURIComponent("https://httpbin.co/anything");
var superParam = "true";
var config = {
    'method': 'GET',
    'url': `https://api.scrape.do?token=${token}&url=${targetUrl}&super=${superParam}`,
    'headers': {}
};
axios(config)
    .then(function (response) {
        console.log(response.data);
    })
    .catch(function (error) {
        console.log(error);
    });
const axios = require('axios');
const token = "YOUR_TOKEN";
const targetUrl = "https://httpbin.co/anything";

axios({
    method:"GET",
    url:targetUrl,
    proxy: {
        protocol:'http',
        host: 'proxy.scrape.do',
        port: 8080,
        auth: {
            username: token,
            password: 'super=true'
        }
    }
})
    .then(response => {
        console.log(response.data);
    })
    .catch(error => {
        console.error(error.message);
    });
string token = "YOUR_TOKEN";
string url = WebUtility.UrlEncode("https://httpbin.co/anything");
var client = new HttpClient();
var requestURL = $"https://api.scrape.do?token={token}&url={url}&super=true";
var request = new HttpRequestMessage(HttpMethod.Get, requestURL);
var response = client.SendAsync(request).Result;
var content = response.Content.ReadAsStringAsync().Result;
Console.WriteLine(content);
string token = "YOUR_TOKEN";
string url = "https://httpbin.co/anything";

var proxy = new WebProxy
{
    Address = new Uri("http://proxy.scrape.do:8080"),
    Credentials = new NetworkCredential(token, "super=true")
};
var request = new HttpRequestMessage(HttpMethod.Get, url);
var handler = new HttpClientHandler
{
    Proxy = proxy,
    UseProxy = true
};
handler.ServerCertificateCustomValidationCallback =
    (sender, cert, chain, sslPolicyErrors) => { return true; };
var client = new HttpClient(handler);
var response = client.SendAsync(request).Result;
var content = response.Content.ReadAsStringAsync().Result;
Console.WriteLine(content);
<?php
$curl = curl_init();
curl_setopt($curl, CURLOPT_RETURNTRANSFER, true);
curl_setopt($curl, CURLOPT_HEADER, false);
$data = [
   "url" => "https://httpbin.co/anything",
   "token" => "YOUR_TOKEN",
   "super" => "true"
];
curl_setopt($curl, CURLOPT_CUSTOMREQUEST, 'GET');
curl_setopt($curl, CURLOPT_URL, "https://api.scrape.do?".http_build_query($data));
curl_setopt($curl, CURLOPT_HTTPHEADER, array(
    "Accept: */*",
));
$response = curl_exec($curl);
curl_close($curl);
echo $response;
?>
<?php
$curl = curl_init();
curl_setopt($curl, CURLOPT_RETURNTRANSFER, true);
curl_setopt($curl, CURLOPT_HEADER, false);
curl_setopt($curl, CURLOPT_SSL_VERIFYHOST, 0);
curl_setopt($curl, CURLOPT_SSL_VERIFYPEER, 0);
$url = "https://httpbin.co/anything";
$token = "YOUR_TOKEN";
$proxy = sprintf("http://%s:[email protected]:8080", $token);
curl_setopt($curl, CURLOPT_URL, $url);
curl_setopt($curl, CURLOPT_CUSTOMREQUEST, 'GET');
curl_setopt($curl, CURLOPT_PROXY, $proxy);
curl_setopt($curl, CURLOPT_HTTPHEADER, array(
    "Accept: */*",
));
$response = curl_exec($curl);
curl_close($curl);
echo $response;
?>
// API mode with OkHttp, Residential & Mobile proxies enabled via super=true.
// Note: OkHttp throws IllegalArgumentException for a GET request that carries a
// request body, so use .get() instead of .method("GET", body).
OkHttpClient client = new OkHttpClient().newBuilder()
  .build();
// The target url must be URL-encoded before being appended as a query parameter.
String encoded_url = URLEncoder.encode("https://httpbin.co/anything", "UTF-8");
Request request = new Request.Builder()
  .url("https://api.scrape.do?token=YOUR_TOKEN&url=" + encoded_url + "&super=true")
  .get()
  .build();
Response response = client.newCall(request).execute();
import java.net.URI;
import java.util.Base64;
import org.apache.hc.client5.http.fluent.Request;
import org.apache.hc.core5.http.HttpHost;
public class TestRequest {
    public static void main(final String... args) throws Exception {
        String url = "https://httpbin.co/anything";
        URI proxyURI = new URI("http://YOUR_TOKEN:[email protected]:8080");
        String basicAuth = new String(
            Base64.getEncoder()
            .encode(
                proxyURI.getUserInfo().getBytes()
            ));
        String response = Request.get(url)
                .addHeader("Proxy-Authorization", "Basic " + basicAuth)
                .viaProxy(HttpHost.create(proxyURI))
                .execute().returnContent().asString();
        System.out.println(response);
    }
}
package main

import (
	"fmt"
	"io/ioutil"
	"net/http"
	"net/url"
)

// API mode with Residential & Mobile proxies enabled via super=true.
func main() {
	token := "YOUR_TOKEN"
	// Escape the target url so it does not overflow the query string.
	encodedURL := url.QueryEscape("https://httpbin.co/anything")
	// Named reqURL (not "url") so the imported net/url package is not shadowed.
	reqURL := fmt.Sprintf("https://api.scrape.do?token=%s&url=%s&super=true", token, encodedURL)
	client := &http.Client{}
	req, err := http.NewRequest("GET", reqURL, nil)
	if err != nil {
		fmt.Println(err)
		return
	}
	res, err := client.Do(req)
	if err != nil {
		fmt.Println(err)
		return
	}
	defer res.Body.Close()
	body, err := ioutil.ReadAll(res.Body)
	if err != nil {
		fmt.Println(err)
		return
	}
	fmt.Println(string(body))
}
package main

import (
	"crypto/tls"
	"fmt"
	"io/ioutil"
	"net/http"
	"net/url"
)

// Proxy mode with super=true passed as the proxy password.
func main() {
	proxyStr := "http://YOUR_TOKEN:[email protected]:8080"
	proxyURL, err := url.Parse(proxyStr)
	if err != nil {
		fmt.Println(err)
		return
	}
	transport := &http.Transport{
		Proxy: http.ProxyURL(proxyURL),
		// The proxy re-encrypts TLS traffic, so verification must be skipped.
		TLSClientConfig: &tls.Config{InsecureSkipVerify: true},
	}
	client := &http.Client{
		Transport: transport,
	}
	req, err := http.NewRequest("GET", "https://httpbin.co/anything", nil)
	if err != nil {
		fmt.Println(err)
		return
	}
	// Removed req.ParseForm(): it is for server-side request parsing and is
	// meaningless on an outgoing GET with no body.
	resp, err := client.Do(req)
	if err != nil {
		// Must return here: resp is nil on error and dereferencing it panics.
		fmt.Println("Failure : ", err)
		return
	}
	defer resp.Body.Close()
	respBody, err := ioutil.ReadAll(resp.Body)
	if err != nil {
		fmt.Println(err)
		return
	}
	fmt.Println("response Status : ", resp.Status)
	fmt.Println("response Headers : ", resp.Header)
	fmt.Println("response Body : ", string(respBody))
}
require "uri"
require "net/http"
require 'cgi'
str =  CGI.escape "https://httpbin.co/anything"
url = URI("https://api.scrape.do?url=" + str + "&token=YOUR_TOKEN&super=true")
https = Net::HTTP.new(url.host, url.port)
https.use_ssl = true
request = Net::HTTP::Get.new(url)
response = https.request(request)
puts response.read_body
require 'httparty'
# TLS verification is disabled because the proxy re-encrypts the traffic.
HTTParty::Basement.default_options.update(verify: false)
# Classic (GET )
def send_request
    res = HTTParty.get('https://httpbin.co/anything?json', {
      http_proxyaddr: "proxy.scrape.do",
      http_proxyport: "8080",
      http_proxyuser: "YOUR_TOKEN",
      http_proxypass: "super=true"
    })
    puts "Response HTTP Status Code: #{ res.code }"
    puts "Response HTTP Response Body: #{ res.body }"
    # Fixed label and accessor: this line prints the response headers.
    puts "Response HTTP Response Headers: #{ res.headers }"
rescue StandardError => e
    puts "HTTP Request failed (#{ e.message })"
end
send_request()

Geo Targeting

& geoCode = us

It may be considered unusual for some local websites to receive worldwide traffic. Therefore, they can ban traffic from other countries. By using geotargeting, you can counter potential blocking.

Allows you to send requests through supported countries using geotargeting. In this way, you can more easily scrape the targeted website by focusing on a specific country.

All you have to do is send a request by selecting a country like geoCode=us

To use this feature with DataCenter proxy, you must have a minimum Pro Plan subscription!

Example Code Snippet

curl --location --request GET 'https://api.scrape.do?url=https://httpbin.co/anything&token=YOUR_TOKEN&geoCode=us'
curl -k -x "http://YOUR_TOKEN:[email protected]:8080" 'https://httpbin.co/anything' -v
import requests
import urllib.parse
token = "YOUR_TOKEN"
targetUrl = urllib.parse.quote("https://httpbin.co/anything")
geoCode = "us"
url = "http://api.scrape.do?token={}&url={}&geoCode={}".format(token, targetUrl,geoCode)
response = requests.request("GET", url)
print(response.text)
import requests
import urllib3
urllib3.disable_warnings(urllib3.exceptions.InsecureRequestWarning) 
url = "https://httpbin.co/anything"
token = "YOUR_TOKEN"
proxyModeUrl = "http://{}:[email protected]:8080".format(token)
proxies = {
    "http": proxyModeUrl,
    "https": proxyModeUrl,
}
response = requests.request("GET", url, proxies=proxies, verify=False)
print(response.text)
// Geo targeting through the Scrape.do API endpoint (geoCode=us).
var axios = require('axios');
var token = "YOUR_TOKEN";
// The target URL must be percent-encoded before embedding it in the query string.
var targetUrl = encodeURIComponent("https://httpbin.co/anything");
var geoCode = "us";
var config = {
    'method': 'GET',
    'url': `https://api.scrape.do?token=${token}&url=${targetUrl}&geoCode=${geoCode}`,
    'headers': {}
};
axios(config)
    .then(function (response) {
        console.log(response.data);
    })
    .catch(function (error) {
        console.log(error);
    });
// Geo targeting through Proxy Mode: the token is the proxy username and the
// geoCode parameter travels in the proxy password field.
const axios = require('axios');
const token = "YOUR_TOKEN";
const targetUrl = "https://httpbin.co/anything";

axios({
    method:"GET",
    url:targetUrl,
    proxy: {
        protocol:'http',
        host: 'proxy.scrape.do',
        port: 8080,
        auth: {
            username: token,
            // Scrape.do parameters are passed as the proxy password.
            password: 'geoCode=us'
        }
    }
})
    .then(response => {
        console.log(response.data);
    })
    .catch(error => {
        console.error(error.message);
    });
// Geo targeting through the Scrape.do API endpoint (geoCode=us).
string token = "YOUR_TOKEN";
// The target URL must be percent-encoded before embedding it in the query string.
string url = WebUtility.UrlEncode("https://httpbin.co/anything");
var client = new HttpClient();
var requestURL = $"https://api.scrape.do?token={token}&url={url}&geoCode=us";
var request = new HttpRequestMessage(HttpMethod.Get, requestURL);
var response = client.SendAsync(request).Result;
var content = response.Content.ReadAsStringAsync().Result;
Console.WriteLine(content);
// Geo targeting through Proxy Mode: the token is the proxy username and the
// geoCode parameter is passed as the proxy password.
string token = "YOUR_TOKEN";
string url = "https://httpbin.co/anything";
var proxy = new WebProxy
{
    Address = new Uri("http://proxy.scrape.do:8080"),
    Credentials = new NetworkCredential(token, "geoCode=us")
};
var request = new HttpRequestMessage(HttpMethod.Get, url);
var handler = new HttpClientHandler
{
    Proxy = proxy,
    UseProxy = true
};
// Proxy Mode requires TLS certificate validation to be disabled.
handler.ServerCertificateCustomValidationCallback =
    (sender, cert, chain, sslPolicyErrors) => { return true; };
var client = new HttpClient(handler);
var response = client.SendAsync(request).Result;
var content = response.Content.ReadAsStringAsync().Result;
Console.WriteLine(content);
<?php
// Geo targeting through the Scrape.do API endpoint (geoCode=us).
$curl = curl_init();
curl_setopt($curl, CURLOPT_RETURNTRANSFER, true);
curl_setopt($curl, CURLOPT_HEADER, false);
// http_build_query() percent-encodes the target URL for us.
$data = [
   "url" => "https://httpbin.co/anything",
   "token" => "YOUR_TOKEN",
   "geoCode" => "us"
];
curl_setopt($curl, CURLOPT_CUSTOMREQUEST, 'GET');
curl_setopt($curl, CURLOPT_URL, "https://api.scrape.do?".http_build_query($data));
curl_setopt($curl, CURLOPT_HTTPHEADER, array(
    "Accept: */*",
));
$response = curl_exec($curl);
curl_close($curl);
echo $response;
?>
<?php
// Geo targeting through Proxy Mode: the geoCode parameter is sent as the
// proxy password. Proxy Mode requires SSL verification to be disabled.
$curl = curl_init();
curl_setopt($curl, CURLOPT_RETURNTRANSFER, true);
curl_setopt($curl, CURLOPT_HEADER, false);
curl_setopt($curl, CURLOPT_SSL_VERIFYHOST, 0);
curl_setopt($curl, CURLOPT_SSL_VERIFYPEER, 0);
$url = "https://httpbin.co/anything";
$token = "YOUR_TOKEN";
// Credentials format: http://TOKEN:PARAMETERS@proxy.scrape.do:8080
$proxy = sprintf("http://%s:geoCode=us@proxy.scrape.do:8080", $token);
curl_setopt($curl, CURLOPT_URL, $url);
curl_setopt($curl, CURLOPT_CUSTOMREQUEST, 'GET');
curl_setopt($curl, CURLOPT_PROXY, $proxy);
curl_setopt($curl, CURLOPT_HTTPHEADER, array(
    "Accept: */*",
));
$response = curl_exec($curl);
curl_close($curl);
echo $response;
?>
// Geo targeting through the Scrape.do API endpoint using OkHttp.
OkHttpClient client = new OkHttpClient().newBuilder()
  .build();
// The target URL must be percent-encoded before embedding it in the query string.
String encoded_url = URLEncoder.encode("https://httpbin.co/anything", "UTF-8");
// OkHttp throws IllegalArgumentException for a GET with a non-null request body,
// so use .get() instead of .method("GET", body).
Request request = new Request.Builder()
  .url("https://api.scrape.do?url=" + encoded_url + "&token=YOUR_TOKEN&geoCode=us")
  .get()
  .build();
Response response = client.newCall(request).execute();
import java.net.URI;
import java.util.Base64;
import org.apache.hc.client5.http.fluent.Request;
import org.apache.hc.core5.http.HttpHost;

// GET through Scrape.do Proxy Mode with geo targeting: the geoCode parameter
// is carried in the proxy password field.
public class TestRequest {
    public static void main(final String... args) throws Exception {
        String url = "https://httpbin.co/anything";
        // Credentials format: http://TOKEN:PARAMETERS@proxy.scrape.do:8080
        URI proxyURI = new URI("http://YOUR_TOKEN:geoCode=us@proxy.scrape.do:8080");
        String basicAuth = new String(
            Base64.getEncoder()
            .encode(
                proxyURI.getUserInfo().getBytes()
            ));
        String response = Request.get(url)
                .addHeader("Proxy-Authorization", "Basic " + basicAuth)
                .viaProxy(HttpHost.create(proxyURI))
                .execute().returnContent().asString();
        System.out.println(response);
    }
}
package main

import (
	"fmt"
	"io/ioutil"
	"net/http"
	"net/url"
)

// Geo targeting through the Scrape.do API endpoint (geoCode=us).
func main() {
	token := "YOUR_TOKEN"
	// The target URL must be percent-encoded before embedding it in the query string.
	encoded_url := url.QueryEscape("https://httpbin.co/anything")
	url := fmt.Sprintf("https://api.scrape.do?token=%s&url=%s&geoCode=us", token, encoded_url)
	method := "GET"
	client := &http.Client{}
	req, err := http.NewRequest(method, url, nil)
	if err != nil {
		fmt.Println(err)
		return
	}
	res, err := client.Do(req)
	if err != nil {
		fmt.Println(err)
		return
	}
	defer res.Body.Close()
	body, err := ioutil.ReadAll(res.Body)
	if err != nil {
		fmt.Println(err)
		return
	}
	fmt.Println(string(body))
}
package main

import (
	"crypto/tls"
	"fmt"
	"io/ioutil"
	"net/http"
	"net/url"
)

// GET through Scrape.do Proxy Mode with geo targeting: the geoCode parameter
// is carried in the proxy password field.
func main() {
	// Credentials format: http://TOKEN:PARAMETERS@proxy.scrape.do:8080
	proxyStr := "http://YOUR_TOKEN:geoCode=us@proxy.scrape.do:8080"
	proxyURL, err := url.Parse(proxyStr)
	if err != nil {
		fmt.Println(err)
		return
	}
	// Proxy Mode requires TLS verification to be disabled.
	transport := &http.Transport{
		Proxy:           http.ProxyURL(proxyURL),
		TLSClientConfig: &tls.Config{InsecureSkipVerify: true},
	}
	client := &http.Client{
		Transport: transport,
	}
	req, err := http.NewRequest("GET", "https://httpbin.co/anything", nil)
	if err != nil {
		fmt.Println(err)
		return
	}
	resp, err := client.Do(req)
	if err != nil {
		// Return early: dereferencing resp after an error would panic.
		fmt.Println("Failure : ", err)
		return
	}
	defer resp.Body.Close()
	respBody, _ := ioutil.ReadAll(resp.Body)
	fmt.Println("response Status : ", resp.Status)
	fmt.Println("response Headers : ", resp.Header)
	fmt.Println("response Body : ", string(respBody))
}
# Geo targeting through the Scrape.do API endpoint (geoCode=us).
require "uri"
require "net/http"
require 'cgi'
# The target URL must be percent-encoded before embedding it in the query string.
str =  CGI.escape "https://httpbin.co/anything"
url = URI("https://api.scrape.do?url=" + str + "&token=YOUR_TOKEN&geoCode=us")
https = Net::HTTP.new(url.host, url.port)
https.use_ssl = true
request = Net::HTTP::Get.new(url)
response = https.request(request)
puts response.read_body
require 'httparty'
# Geo targeting through Proxy Mode: the geoCode parameter is the proxy password.
# Proxy Mode requires SSL verification to be disabled.
HTTParty::Basement.default_options.update(verify: false)
# Classic (GET )
def send_request
    res = HTTParty.get('https://httpbin.co/anything?json', {
      http_proxyaddr: "proxy.scrape.do",
      http_proxyport: "8080",
      http_proxyuser: "YOUR_TOKEN",
      http_proxypass: "geoCode=us"
    })
    puts "Response HTTP Status Code: #{ res.code }"
    puts "Response HTTP Response Body: #{ res.body }"
    # Label fixed: this line prints the response headers, not the body.
    puts "Response HTTP Response Headers: #{ res.header }"
rescue StandardError => e
    puts "HTTP Request failed (#{ e.message })"
end
send_request()

Supported Country Codes

If you are looking for a country code that is not listed above, you can contact us via [email protected]. We can add the country you need to our system.


Regional Geo Targeting

& regionalGeoCode = europe

It may be more efficient for you to scrape some websites by region. For example, scraping an e-commerce site serving in the European Union using any one of more than one European country can always help to keep your success rate higher.

This feature can only be used with Super Proxy!

You can access regionally with the regionalGeoCode=europe parameter.

Available Regions Region Code
Europe europe
Asia asia
Africa africa
Oceania oceania
North America northamerica
South America southamerica   

Example Code Snippet

# Regional geo targeting via the API endpoint (requires super=true).
curl --location --request GET 'https://api.scrape.do?url=https://httpbin.co/anything&token=YOUR_TOKEN&super=true&regionalGeoCode=europe'
# Regional geo targeting via Proxy Mode: both parameters travel in the proxy password.
curl -k -x "http://YOUR_TOKEN:super=true&regionalGeoCode=europe@proxy.scrape.do:8080" 'https://httpbin.co/anything' -v
# Regional geo targeting through the API endpoint (requires super=true).
import requests
import urllib.parse
token = "YOUR_TOKEN"
# The target URL must be percent-encoded before embedding it as a query parameter.
targetUrl = urllib.parse.quote("https://httpbin.co/anything")
superParam = "true"
regionalGeoCode = "europe"
url = "http://api.scrape.do?token={}&url={}&super={}&regionalGeoCode={}".format(token, targetUrl, superParam, regionalGeoCode)
response = requests.request("GET", url)
print(response.text)
# Regional geo targeting through Proxy Mode: super=true and regionalGeoCode
# travel in the proxy password. SSL verification must be disabled.
import requests
import urllib3
urllib3.disable_warnings(urllib3.exceptions.InsecureRequestWarning)
url = "https://httpbin.co/anything"
token = "YOUR_TOKEN"
# Credentials format: http://TOKEN:PARAMETERS@proxy.scrape.do:8080
proxyModeUrl = "http://{}:super=true&regionalGeoCode=europe@proxy.scrape.do:8080".format(token)
proxies = {
    "http": proxyModeUrl,
    "https": proxyModeUrl,
}
response = requests.request("GET", url, proxies=proxies, verify=False)
print(response.text)
// Regional geo targeting through the API endpoint (requires super=true).
var axios = require('axios');
var token = "YOUR_TOKEN";
// The target URL must be percent-encoded before embedding it in the query string.
var targetUrl = encodeURIComponent("https://httpbin.co/anything");
var regionalGeoCode = "europe";
var config = {
    'method': 'GET',
    'url': `https://api.scrape.do?token=${token}&url=${targetUrl}&super=true&regionalGeoCode=${regionalGeoCode}`,
    'headers': {}
};
axios(config)
    .then(function (response) {
        console.log(response.data);
    })
    .catch(function (error) {
        console.log(error);
    });
// Regional geo targeting through Proxy Mode: the token is the proxy username
// and both parameters travel in the proxy password field.
const axios = require('axios');
const token = "YOUR_TOKEN";
const targetUrl = "https://httpbin.co/anything";

axios({
    method:"GET",
    url:targetUrl,
    proxy: {
        protocol:'http',
        host: 'proxy.scrape.do',
        port: 8080,
        auth: {
            username: token,
            // Scrape.do parameters are passed as the proxy password.
            password: 'super=true&regionalGeoCode=europe'
        }
    }
})
    .then(response => {
        console.log(response.data);
    })
    .catch(error => {
        console.error(error.message);
    });
// Regional geo targeting through the API endpoint (requires super=true).
string token = "YOUR_TOKEN";
// The target URL must be percent-encoded before embedding it in the query string.
string url = WebUtility.UrlEncode("https://httpbin.co/anything");
var client = new HttpClient();
var requestURL = $"https://api.scrape.do?token={token}&url={url}&super=true&regionalGeoCode=europe";
var request = new HttpRequestMessage(HttpMethod.Get, requestURL);
var response = client.SendAsync(request).Result;
var content = response.Content.ReadAsStringAsync().Result;
Console.WriteLine(content);
// Regional geo targeting through Proxy Mode: the token is the proxy username
// and both parameters are passed as the proxy password.
string token = "YOUR_TOKEN";
string url = "https://httpbin.co/anything";
var proxy = new WebProxy
{
    Address = new Uri("http://proxy.scrape.do:8080"),
    Credentials = new NetworkCredential(token, "super=true&regionalGeoCode=europe")
};
var request = new HttpRequestMessage(HttpMethod.Get, url);
var handler = new HttpClientHandler
{
    Proxy = proxy,
    UseProxy = true
};
// Proxy Mode requires TLS certificate validation to be disabled.
handler.ServerCertificateCustomValidationCallback =
    (sender, cert, chain, sslPolicyErrors) => { return true; };
var client = new HttpClient(handler);
var response = client.SendAsync(request).Result;
var content = response.Content.ReadAsStringAsync().Result;
Console.WriteLine(content);
<?php
// Regional geo targeting through the API endpoint (requires super=true).
$curl = curl_init();
curl_setopt($curl, CURLOPT_RETURNTRANSFER, true);
curl_setopt($curl, CURLOPT_HEADER, false);
// http_build_query() percent-encodes the target URL for us.
$data = [
   "url" => "https://httpbin.co/anything",
   "token" => "YOUR_TOKEN",
   "super" => "true",
   "regionalGeoCode" => "europe"
];
curl_setopt($curl, CURLOPT_CUSTOMREQUEST, 'GET');
curl_setopt($curl, CURLOPT_URL, "https://api.scrape.do?".http_build_query($data));
curl_setopt($curl, CURLOPT_HTTPHEADER, array(
    "Accept: */*",
));
$response = curl_exec($curl);
curl_close($curl);
echo $response;
?>
<?php
// Regional geo targeting through Proxy Mode: super=true and regionalGeoCode
// travel in the proxy password. SSL verification must be disabled.
$curl = curl_init();
curl_setopt($curl, CURLOPT_RETURNTRANSFER, true);
curl_setopt($curl, CURLOPT_HEADER, false);
curl_setopt($curl, CURLOPT_SSL_VERIFYHOST, 0);
curl_setopt($curl, CURLOPT_SSL_VERIFYPEER, 0);
$url = "https://httpbin.co/anything";
$token = "YOUR_TOKEN";
// Credentials format: http://TOKEN:PARAMETERS@proxy.scrape.do:8080
$proxy = sprintf("http://%s:super=true&regionalGeoCode=europe@proxy.scrape.do:8080", $token);
curl_setopt($curl, CURLOPT_URL, $url);
curl_setopt($curl, CURLOPT_CUSTOMREQUEST, 'GET');
curl_setopt($curl, CURLOPT_PROXY, $proxy);
curl_setopt($curl, CURLOPT_HTTPHEADER, array(
    "Accept: */*",
));
$response = curl_exec($curl);
curl_close($curl);
echo $response;
?>
// Regional geo targeting through the API endpoint using OkHttp (requires super=true).
OkHttpClient client = new OkHttpClient().newBuilder()
  .build();
// The target URL must be percent-encoded before embedding it in the query string.
String encoded_url = URLEncoder.encode("https://httpbin.co/anything", "UTF-8");
// OkHttp throws IllegalArgumentException for a GET with a non-null request body,
// so use .get() instead of .method("GET", body).
Request request = new Request.Builder()
  .url("https://api.scrape.do?url=" + encoded_url + "&token=YOUR_TOKEN&super=true&regionalGeoCode=europe")
  .get()
  .build();
Response response = client.newCall(request).execute();
import java.net.URI;
import java.util.Base64;
import org.apache.hc.client5.http.fluent.Request;
import org.apache.hc.core5.http.HttpHost;

// GET through Scrape.do Proxy Mode with regional geo targeting: super=true and
// regionalGeoCode travel in the proxy password field.
public class TestRequest {
    public static void main(final String... args) throws Exception {
        String url = "https://httpbin.co/anything";
        // Credentials format: http://TOKEN:PARAMETERS@proxy.scrape.do:8080
        URI proxyURI = new URI("http://YOUR_TOKEN:super=true&regionalGeoCode=europe@proxy.scrape.do:8080");
        String basicAuth = new String(
            Base64.getEncoder()
            .encode(
                proxyURI.getUserInfo().getBytes()
            ));
        String response = Request.get(url)
                .addHeader("Proxy-Authorization", "Basic " + basicAuth)
                .viaProxy(HttpHost.create(proxyURI))
                .execute().returnContent().asString();
        System.out.println(response);
    }
}
package main

import (
	"fmt"
	"io/ioutil"
	"net/http"
	"net/url"
)

// Regional geo targeting through the API endpoint (requires super=true).
func main() {
	// Placeholder normalized to YOUR_TOKEN for consistency with every other snippet.
	token := "YOUR_TOKEN"
	// The target URL must be percent-encoded before embedding it in the query string.
	encoded_url := url.QueryEscape("https://httpbin.co/anything")
	url := fmt.Sprintf("https://api.scrape.do?token=%s&url=%s&super=true&regionalGeoCode=europe", token, encoded_url)
	method := "GET"
	client := &http.Client{}
	req, err := http.NewRequest(method, url, nil)
	if err != nil {
		fmt.Println(err)
		return
	}
	res, err := client.Do(req)
	if err != nil {
		fmt.Println(err)
		return
	}
	defer res.Body.Close()
	body, err := ioutil.ReadAll(res.Body)
	if err != nil {
		fmt.Println(err)
		return
	}
	fmt.Println(string(body))
}
package main

import (
	"crypto/tls"
	"fmt"
	"io/ioutil"
	"net/http"
	"net/url"
)

// GET through Scrape.do Proxy Mode with regional geo targeting: super=true and
// regionalGeoCode travel in the proxy password field.
func main() {
	// Credentials format: http://TOKEN:PARAMETERS@proxy.scrape.do:8080
	proxyStr := "http://YOUR_TOKEN:super=true&regionalGeoCode=europe@proxy.scrape.do:8080"
	proxyURL, err := url.Parse(proxyStr)
	if err != nil {
		fmt.Println(err)
		return
	}
	// Proxy Mode requires TLS verification to be disabled.
	transport := &http.Transport{
		Proxy:           http.ProxyURL(proxyURL),
		TLSClientConfig: &tls.Config{InsecureSkipVerify: true},
	}
	client := &http.Client{
		Transport: transport,
	}
	req, err := http.NewRequest("GET", "https://httpbin.co/anything", nil)
	if err != nil {
		fmt.Println(err)
		return
	}
	resp, err := client.Do(req)
	if err != nil {
		// Return early: dereferencing resp after an error would panic.
		fmt.Println("Failure : ", err)
		return
	}
	defer resp.Body.Close()
	respBody, _ := ioutil.ReadAll(resp.Body)
	fmt.Println("response Status : ", resp.Status)
	fmt.Println("response Headers : ", resp.Header)
	fmt.Println("response Body : ", string(respBody))
}
# Regional geo targeting through the API endpoint (requires super=true).
require "uri"
require "net/http"
require 'cgi'
# The target URL must be percent-encoded before embedding it in the query string.
str =  CGI.escape "https://httpbin.co/anything"
url = URI("https://api.scrape.do?url=" + str + "&token=YOUR_TOKEN&super=true&regionalGeoCode=europe")
https = Net::HTTP.new(url.host, url.port)
https.use_ssl = true
request = Net::HTTP::Get.new(url)
response = https.request(request)
puts response.read_body
require 'httparty'
# Regional geo targeting through Proxy Mode: both parameters form the proxy
# password. Proxy Mode requires SSL verification to be disabled.
HTTParty::Basement.default_options.update(verify: false)
# Classic (GET )
def send_request
    res = HTTParty.get('https://httpbin.co/anything?json', {
      http_proxyaddr: "proxy.scrape.do",
      http_proxyport: "8080",
      http_proxyuser: "YOUR_TOKEN",
      http_proxypass: "super=true&regionalGeoCode=europe"
    })
    puts "Response HTTP Status Code: #{ res.code }"
    puts "Response HTTP Response Body: #{ res.body }"
    # Label fixed: this line prints the response headers, not the body.
    puts "Response HTTP Response Headers: #{ res.header }"
rescue StandardError => e
    puts "HTTP Request failed (#{ e.message })"
end
send_request()

Sticky Sessions

& sessionId = 1234

When you want to use the same proxy address for a certain period of time, you can pass the sessionId=1234 parameter. It can be any integer value. Each unique integer value will be the ID of a session.

  • SessionId value must be between 0 and 1000000.
  • If no request is sent within 5 minutes, the session you created will be closed automatically.
  • If the request sent with SessionId fails, a new proxy address will be provided and the session will be changed.
  • If you need a new proxy address for every request, you don’t need to use sessions at all!
  • If used with Geo Targeting or Regional Geo Targeting, the session will be created only for the relevant country or region.
  • Sessions are created only for successful requests!

Example Code Snippet

# Sticky session via the API endpoint: reuse the same proxy address with sessionId.
curl --location --request GET 'https://api.scrape.do?token=YOUR_TOKEN&url=https://httpbin.co/anything&sessionId=1234'
# Sticky session via Proxy Mode: sessionId travels in the proxy password field.
curl -k -x "http://YOUR_TOKEN:sessionId=1234@proxy.scrape.do:8080" 'https://httpbin.co/anything' -v
# Sticky session through the API endpoint (sessionId keeps the same proxy address).
import requests
import urllib.parse
token = "YOUR_TOKEN"
# The target URL must be percent-encoded before embedding it as a query parameter.
targetUrl = urllib.parse.quote( "https://httpbin.co/anything")
sessionId = "1234"
url = "http://api.scrape.do?token={}&url={}&sessionId={}".format(token, targetUrl,sessionId)
response = requests.request("GET", url)
print(response.text)
# Sticky session through Proxy Mode: the sessionId parameter is sent as the
# proxy password. SSL verification must be disabled for Proxy Mode.
import requests
import urllib3
urllib3.disable_warnings(urllib3.exceptions.InsecureRequestWarning)
url = "https://httpbin.co/anything"
token = "YOUR_TOKEN"
# Credentials format: http://TOKEN:PARAMETERS@proxy.scrape.do:8080
proxyModeUrl = "http://{}:sessionId=1234@proxy.scrape.do:8080".format(token)
proxies = {
    "http": proxyModeUrl,
    "https": proxyModeUrl,
}
response = requests.request("GET", url, proxies=proxies, verify=False)
print(response.text)
// Sticky session through the API endpoint (sessionId keeps the same proxy address).
var axios = require('axios');
var token = "YOUR_TOKEN";
// The target URL must be percent-encoded before embedding it in the query string.
var targetUrl = encodeURIComponent("https://httpbin.co/anything");
var sessionId = "1234"
var config = {
    'method': 'GET',
    'url': `https://api.scrape.do?token=${token}&url=${targetUrl}&sessionId=${sessionId}`,
    'headers': {}
};
axios(config)
    .then(function (response) {
        console.log(response.data);
    })
    .catch(function (error) {
        console.log(error);
    });
// Sticky session through Proxy Mode: the token is the proxy username and the
// sessionId parameter travels in the proxy password field.
const axios = require('axios');
const token = "YOUR_TOKEN";
const targetUrl = "https://httpbin.co/anything";

axios({
    method:"GET",
    url:targetUrl,
    proxy: {
        protocol:'http',
        host: 'proxy.scrape.do',
        port: 8080,
        auth: {
            username: token,
            // Scrape.do parameters are passed as the proxy password.
            password: 'sessionId=1234'
        }
    }
})
    .then(response => {
        console.log(response.data);
    })
    .catch(error => {
        console.error(error.message);
    });
// Sticky session through the API endpoint (sessionId keeps the same proxy address).
string token = "YOUR_TOKEN";
// The target URL must be percent-encoded before embedding it in the query string.
string url = WebUtility.UrlEncode("https://httpbin.co/anything");
var client = new HttpClient();
var requestURL = $"https://api.scrape.do?token={token}&url={url}&sessionId=1234";
var request = new HttpRequestMessage(HttpMethod.Get, requestURL);
var response = client.SendAsync(request).Result;
var content = response.Content.ReadAsStringAsync().Result;
Console.WriteLine(content);
// Sticky session through Proxy Mode: the token is the proxy username and the
// sessionId parameter is passed as the proxy password.
string token = "YOUR_TOKEN";
string url = "https://httpbin.co/anything";
var proxy = new WebProxy
{
    Address = new Uri("http://proxy.scrape.do:8080"),
    Credentials = new NetworkCredential(token, "sessionId=1234")
};
var request = new HttpRequestMessage(HttpMethod.Get, url);
var handler = new HttpClientHandler
{
    Proxy = proxy,
    UseProxy = true
};
// Proxy Mode requires TLS certificate validation to be disabled.
handler.ServerCertificateCustomValidationCallback =
    (sender, cert, chain, sslPolicyErrors) => { return true; };
var client = new HttpClient(handler);
var response = client.SendAsync(request).Result;
var content = response.Content.ReadAsStringAsync().Result;
Console.WriteLine(content);
<?php
// Sticky session through the API endpoint (sessionId keeps the same proxy address).
$curl = curl_init();
curl_setopt($curl, CURLOPT_RETURNTRANSFER, true);
curl_setopt($curl, CURLOPT_HEADER, false);
// http_build_query() percent-encodes the target URL for us.
$data = [
   "url" => "https://httpbin.co/anything",
   "token" => "YOUR_TOKEN",
   "sessionId" => "1234"
];
curl_setopt($curl, CURLOPT_CUSTOMREQUEST, 'GET');
curl_setopt($curl, CURLOPT_URL, "https://api.scrape.do?".http_build_query($data));
curl_setopt($curl, CURLOPT_HTTPHEADER, array(
    "Accept: */*",
));
$response = curl_exec($curl);
curl_close($curl);
echo $response;
?>
<?php
// Sticky session through Proxy Mode: the sessionId parameter is sent as the
// proxy password. SSL verification must be disabled for Proxy Mode.
$curl = curl_init();
curl_setopt($curl, CURLOPT_RETURNTRANSFER, true);
curl_setopt($curl, CURLOPT_HEADER, false);
curl_setopt($curl, CURLOPT_SSL_VERIFYHOST, 0);
curl_setopt($curl, CURLOPT_SSL_VERIFYPEER, 0);
$url = "https://httpbin.co/anything";
$token = "YOUR_TOKEN";
// Credentials format: http://TOKEN:PARAMETERS@proxy.scrape.do:8080
$proxy = sprintf("http://%s:sessionId=1234@proxy.scrape.do:8080", $token);
curl_setopt($curl, CURLOPT_URL, $url);
curl_setopt($curl, CURLOPT_CUSTOMREQUEST, 'GET');
curl_setopt($curl, CURLOPT_PROXY, $proxy);
curl_setopt($curl, CURLOPT_HTTPHEADER, array(
    "Accept: */*",
));
$response = curl_exec($curl);
curl_close($curl);
echo $response;
?>
// Sticky session through the API endpoint using OkHttp.
OkHttpClient client = new OkHttpClient().newBuilder()
  .build();
// The target URL must be percent-encoded before embedding it in the query string.
String encoded_url = URLEncoder.encode("https://httpbin.co/anything", "UTF-8");
// OkHttp throws IllegalArgumentException for a GET with a non-null request body,
// so use .get() instead of .method("GET", body).
Request request = new Request.Builder()
  .url("https://api.scrape.do?token=YOUR_TOKEN&url=" + encoded_url + "&sessionId=1234")
  .get()
  .build();
Response response = client.newCall(request).execute();
import java.net.URI;
import java.util.Base64;
import org.apache.hc.client5.http.fluent.Request;
import org.apache.hc.core5.http.HttpHost;

// GET through Scrape.do Proxy Mode with a sticky session: the sessionId
// parameter is carried in the proxy password field.
public class TestRequest {
    public static void main(final String... args) throws Exception {
        String url = "https://httpbin.co/anything";
        // Credentials format: http://TOKEN:PARAMETERS@proxy.scrape.do:8080
        URI proxyURI = new URI("http://YOUR_TOKEN:sessionId=1234@proxy.scrape.do:8080");
        String basicAuth = new String(
            Base64.getEncoder()
            .encode(
                proxyURI.getUserInfo().getBytes()
            ));
        String response = Request.get(url)
                .addHeader("Proxy-Authorization", "Basic " + basicAuth)
                .viaProxy(HttpHost.create(proxyURI))
                .execute().returnContent().asString();
        System.out.println(response);
    }
}
package main

import (
	"fmt"
	"io/ioutil"
	"net/http"
	"net/url"
)

// Sticky session through the API endpoint (sessionId keeps the same proxy address).
func main() {
	token := "YOUR_TOKEN"
	// The target URL must be percent-encoded before embedding it in the query string.
	encoded_url := url.QueryEscape("https://httpbin.co/anything")
	url := fmt.Sprintf("https://api.scrape.do?token=%s&url=%s&sessionId=1234", token, encoded_url)
	method := "GET"
	client := &http.Client{}
	req, err := http.NewRequest(method, url, nil)
	if err != nil {
		fmt.Println(err)
		return
	}
	res, err := client.Do(req)
	if err != nil {
		fmt.Println(err)
		return
	}
	defer res.Body.Close()
	body, err := ioutil.ReadAll(res.Body)
	if err != nil {
		fmt.Println(err)
		return
	}
	fmt.Println(string(body))
}
package main

import (
	"crypto/tls"
	"fmt"
	"io/ioutil"
	"net/http"
	"net/url"
)

// GET through Scrape.do Proxy Mode with a sticky session: the sessionId
// parameter is carried in the proxy password field.
func main() {
	// Credentials format: http://TOKEN:PARAMETERS@proxy.scrape.do:8080
	proxyStr := "http://YOUR_TOKEN:sessionId=1234@proxy.scrape.do:8080"
	proxyURL, err := url.Parse(proxyStr)
	if err != nil {
		fmt.Println(err)
		return
	}
	// Proxy Mode requires TLS verification to be disabled.
	transport := &http.Transport{
		Proxy:           http.ProxyURL(proxyURL),
		TLSClientConfig: &tls.Config{InsecureSkipVerify: true},
	}
	client := &http.Client{
		Transport: transport,
	}
	req, err := http.NewRequest("GET", "https://httpbin.co/anything", nil)
	if err != nil {
		fmt.Println(err)
		return
	}
	resp, err := client.Do(req)
	if err != nil {
		// Return early: dereferencing resp after an error would panic.
		fmt.Println("Failure : ", err)
		return
	}
	defer resp.Body.Close()
	respBody, _ := ioutil.ReadAll(resp.Body)
	fmt.Println("response Status : ", resp.Status)
	fmt.Println("response Headers : ", resp.Header)
	fmt.Println("response Body : ", string(respBody))
}
# Sticky session through the API endpoint (sessionId keeps the same proxy address).
require "uri"
require "net/http"
require 'cgi'
# The target URL must be percent-encoded before embedding it in the query string.
str =  CGI.escape "https://httpbin.co/anything"
url = URI("https://api.scrape.do?url=" + str + "&token=YOUR_TOKEN&sessionId=1234")
https = Net::HTTP.new(url.host, url.port)
https.use_ssl = true
request = Net::HTTP::Get.new(url)
response = https.request(request)
puts response.read_body
require 'httparty'
# Sticky session through Proxy Mode: sessionId is the proxy password.
# Proxy Mode requires SSL verification to be disabled.
HTTParty::Basement.default_options.update(verify: false)
# Classic (GET )
def send_request
    res = HTTParty.get('https://httpbin.co/anything?json', {
      http_proxyaddr: "proxy.scrape.do",
      http_proxyport: "8080",
      http_proxyuser: "YOUR_TOKEN",
      http_proxypass: "sessionId=1234"
    })
    puts "Response HTTP Status Code: #{ res.code }"
    puts "Response HTTP Response Body: #{ res.body }"
    # Label fixed: this line prints the response headers, not the body.
    puts "Response HTTP Response Headers: #{ res.header }"
rescue StandardError => e
    puts "HTTP Request failed (#{ e.message })"
end
send_request()

Proxy Mode

What is Proxy Mode?

It may take time for companies or people currently working with a proxy provider to use the API system in our system. Therefore, you can easily switch from other proxy providers via Proxy Mode.

There is no difference between proxy mode and API mode other than the access method.

SSL verification with Proxy Mode must be disabled. Because we upgrade and forward your requests without sending them to the target site. This is how we prevent certificate issues from occurring when ssl verification is performed on the HTTPS protocol.

Proxy Information

protocol: http or https
host: proxy.scrape.do
port: 8080
username: YOUR_API_TOKEN
password: PARAMETER
Example format: http://token:PARAMETER@proxy.scrape.do:8080

Replace PARAMETER with the parameters you want to use. If you don’t know which parameters to use, you can begin by using the render=false parameter. You can pass any of the parameters in this documentation (here) as the proxy password.

Example format for using multiple parameters:

http://token:render=false&super=true&geoCode=us@proxy.scrape.do:8080

Important Notes

  • Proxy Mode uses customHeaders=True by default. That’s why we recommend you to read the Custom Headers documents. If you want to disable it, just pass CustomHeaders=false.
  • You can use it by entering the same proxy information in places that accept HTTPS proxy.
  • If you are using your own browser automation via Proxy Mode, we do not recommend using Headless Browser features! (it would be more accurate to use render=false)
  • Proxy Mode and API Mode use the same subscription package, there is no extra charge.

Example cURL

curl -k -x "http://YOUR_TOKEN:@proxy.scrape.do:8080" 'https://httpbin.co/anything' -v

Request

Scrape.do upgrades the requests you send so that they are not blocked by anti-bot solutions. In the meantime, there may be header, body or other information that you need to transmit to the target site. Or, if you know how the target website works, you can increase your success rates with the features here.

POST/PUT Request

You can change the method type for all the requests to send with Scrape.do. We support GET, POST, PUT, PATCH, HEAD and DELETE methods.

Example Code Snippet

# POST through the API endpoint; the placeholder is the API token (was mistyped as YOUR_URL).
curl --location --request POST 'https://api.scrape.do?token=YOUR_TOKEN&url=https://httpbin.co/anything' \
--header 'Content-Type: application/json' \
--data-raw '{"test-key":"test-value"}'
# POST through Proxy Mode (empty proxy password = no extra Scrape.do parameters).
curl -k -x "http://YOUR_TOKEN:@proxy.scrape.do:8080" 'https://httpbin.co/anything' -v  \
--header 'Content-Type: application/json' \
--data-raw '{"test-key":"test-value"}'
# JSON POST through the Scrape.do API endpoint; the method and body are
# forwarded to the target URL.
import requests
import json
import urllib.parse

token = "YOUR_TOKEN"
# The target URL must be percent-encoded before embedding it as a query parameter.
targetUrl = urllib.parse.quote("https://httpbin.co/anything")
url = "http://api.scrape.do?token={}&url={}".format(token, targetUrl)
payload = json.dumps({
  "test-key": "test-value"
})
headers = {
  'Content-Type': 'application/json'
}
response = requests.request("POST", url, headers=headers, data=payload)
print(response.text)
# JSON POST through Proxy Mode; an empty proxy password means no extra
# Scrape.do parameters. SSL verification must be disabled for Proxy Mode.
import json
import requests
import urllib3
urllib3.disable_warnings(urllib3.exceptions.InsecureRequestWarning)
url = "https://httpbin.co/anything"
token = "YOUR_TOKEN"
proxyModeUrl = "http://{}:@proxy.scrape.do:8080".format(token)
proxies = {
    "http": proxyModeUrl,
    "https": proxyModeUrl,
}
payload = json.dumps({
  "test-key": "test-value"
})
headers = {
  'Content-Type': 'application/json'
}
response = requests.request("POST", url, proxies=proxies, verify=False, headers=headers, data=payload)
print(response.text)
// JSON POST through the Scrape.do API endpoint using axios.
var axios = require('axios');
var token = "YOUR_TOKEN";
// The target URL must be percent-encoded before embedding it in the query string.
var targetUrl = encodeURIComponent("https://httpbin.co/anything");
var config = {
    'method': 'POST',
    'url': `https://api.scrape.do?token=${token}&url=${targetUrl}`,
    'headers': {
        'Content-Type': 'application/json'
    },
    // axios sends the request payload via `data`; a `body` key is silently ignored.
    data: JSON.stringify({
        "test-key": "test-value"
    })
};
axios(config)
    .then(function (response) {
        console.log(response.data);
    })
    .catch(function (error) {
        console.log(error);
    });
// JSON POST through Proxy Mode using axios: the token is the proxy username,
// an empty password means no extra Scrape.do parameters.
const axios = require('axios');
const token = "YOUR_TOKEN";
const targetUrl = "https://httpbin.co/anything";

axios({
    method:"POST",
    url:targetUrl,
    proxy: {
        protocol:'http',
        host: 'proxy.scrape.do',
        port: 8080,
        auth: {
            username: token,
            password: ''
        }
    },
    headers: {
        'Content-Type': 'application/json'
    },
    // axios sends the request payload via `data`; a `body` key is silently ignored.
    data: JSON.stringify({
        "test-key": "test-value"
    })
})
    .then(response => {
        console.log(response.data);
    })
    .catch(error => {
        console.error(error.message);
    });
// JSON POST through the Scrape.do API endpoint with HttpClient.
string token = "YOUR_TOKEN";
// The target URL must be percent-encoded before embedding it in the query string.
string url = WebUtility.UrlEncode("https://httpbin.co/anything") ;
var client = new HttpClient();
var requestURL = $"https://api.scrape.do?token={token}&url={url}";
var request = new HttpRequestMessage(HttpMethod.Post, requestURL);
var body = @"{""test-key"":""test-value""}";
// HttpRequestMessage carries its payload via Content; AddParameter is a
// RestSharp API and does not exist on HttpRequestMessage.
request.Content = new StringContent(body, Encoding.UTF8, "application/json");
var response = client.SendAsync(request).Result;
var content = response.Content.ReadAsStringAsync().Result;
Console.WriteLine(content);
// JSON POST through Proxy Mode with HttpClient; an empty proxy password means
// no extra Scrape.do parameters.
string token = "YOUR_TOKEN";
string url = "https://httpbin.co/anything";
var proxy = new WebProxy
{
    Address = new Uri("http://proxy.scrape.do:8080"),
    Credentials = new NetworkCredential(token, "")
};
var request = new HttpRequestMessage(HttpMethod.Post, url);
var body = @"{""test-key"":""test-value""}";
// HttpRequestMessage carries its payload via Content; AddParameter is a
// RestSharp API and does not exist on HttpRequestMessage.
request.Content = new StringContent(body, Encoding.UTF8, "application/json");
var handler = new HttpClientHandler
{
    Proxy = proxy,
    UseProxy = true
};
// Proxy Mode requires TLS certificate validation to be disabled.
handler.ServerCertificateCustomValidationCallback =
    (sender, cert, chain, sslPolicyErrors) => { return true; };
var client = new HttpClient(handler);
var response = client.SendAsync(request).Result;
var content = response.Content.ReadAsStringAsync().Result;
Console.WriteLine(content);
<?php
// JSON POST through the Scrape.do API endpoint.
$curl = curl_init();
// Encode the payload as JSON so it matches the Content-Type header
// (http_build_query would produce form-encoded data instead).
$body = json_encode([
    'test-key' => 'test-value',
  ]);
curl_setopt($curl, CURLOPT_RETURNTRANSFER, true);
curl_setopt($curl, CURLOPT_HEADER, false);
curl_setopt($curl, CURLOPT_POSTFIELDS, $body);
$data = [
   "url" => "https://httpbin.co/anything",
   "token" => "YOUR_TOKEN",
];
curl_setopt($curl, CURLOPT_CUSTOMREQUEST, 'POST');
curl_setopt($curl, CURLOPT_URL, "https://api.scrape.do?".http_build_query($data));
// Set all headers in a single call: a second CURLOPT_HTTPHEADER call would
// replace the first and drop the Content-Type header.
curl_setopt($curl, CURLOPT_HTTPHEADER, array(
    "Accept: */*",
    "Content-Type: application/json",
));
$response = curl_exec($curl);
curl_close($curl);
echo $response;
?>
<?php
// JSON POST through Proxy Mode; an empty proxy password means no extra
// Scrape.do parameters. SSL verification must be disabled for Proxy Mode.
$curl = curl_init();
curl_setopt($curl, CURLOPT_RETURNTRANSFER, true);
curl_setopt($curl, CURLOPT_HEADER, false);
curl_setopt($curl, CURLOPT_SSL_VERIFYHOST, 0);
curl_setopt($curl, CURLOPT_SSL_VERIFYPEER, 0);
curl_setopt($curl, CURLOPT_POSTFIELDS, '{"test-key":"test-value"}');
$url = "https://httpbin.co/anything";
$token = "YOUR_TOKEN";
$proxy = sprintf("http://%s:@proxy.scrape.do:8080", $token);
curl_setopt($curl, CURLOPT_URL, $url);
curl_setopt($curl, CURLOPT_CUSTOMREQUEST, 'POST');
curl_setopt($curl, CURLOPT_PROXY, $proxy);
// Set all headers in a single call: a second CURLOPT_HTTPHEADER call would
// replace the first and drop the Content-Type header.
curl_setopt($curl, CURLOPT_HTTPHEADER, array(
    "Accept: */*",
    "Content-Type: application/json",
));
$response = curl_exec($curl);
curl_close($curl);
echo $response;
?>
// JSON POST through the Scrape.do API endpoint using OkHttp.
OkHttpClient client = new OkHttpClient().newBuilder()
  .build();
MediaType mediaType = MediaType.parse("application/json");
RequestBody body = RequestBody.create(mediaType, "{\"test-key\":\"test-value\"}");
// The target URL must be percent-encoded before embedding it in the query string.
String encoded_url = URLEncoder.encode("https://httpbin.co/anything", "UTF-8");
// Placeholder fixed: the query parameter is the API token, not a URL.
Request request = new Request.Builder()
  .url("https://api.scrape.do?token=YOUR_TOKEN&url=" + encoded_url + "")
  .method("POST", body)
  .addHeader("Content-Type", "application/json")
  .build();
Response response = client.newCall(request).execute();
import java.net.URI;
import java.util.Base64;
import org.apache.hc.client5.http.fluent.Request;
import org.apache.hc.core5.http.ContentType;
import org.apache.hc.core5.http.HttpHost;

// JSON POST through Scrape.do Proxy Mode using the Apache HttpClient 5 fluent API.
public class TestRequest {
    public static void main(final String... args) throws Exception {
        String url = "https://httpbin.co/anything";
        // Empty proxy password: no extra Scrape.do parameters for this request.
        URI proxyURI = new URI("http://YOUR_TOKEN:@proxy.scrape.do:8080");
        String basicAuth = new String(
            Base64.getEncoder()
            .encode(
                proxyURI.getUserInfo().getBytes()
            ));
        // Request.post(...).bodyString(...) is the fluent-API way to attach a JSON
        // body (the original mixed in OkHttp's RequestBody and a nonexistent
        // Request.POST method).
        String response = Request.post(url)
                .bodyString("{\"test-key\":\"test-value\"}", ContentType.APPLICATION_JSON)
                .addHeader("Proxy-Authorization", "Basic " + basicAuth)
                .viaProxy(HttpHost.create(proxyURI))
                .execute().returnContent().asString();
        System.out.println(response);
    }
}
package main

import (
	"fmt"
	"io/ioutil"
	"net/http"
	"net/url"
	"strings"
)

// API-mode POST example: the target URL is query-escaped and passed to the
// Scrape.do API endpoint with the token; the JSON body is forwarded as-is.
func main() {
	token := "YOUR_TOKEN"
	// The target URL must be escaped before it is embedded as a query parameter.
	encoded_url := url.QueryEscape("https://httpbin.co/anything")
	url := fmt.Sprintf("https://api.scrape.do?token=%s&url=%s", token, encoded_url)
	method := "POST"
	payload := strings.NewReader(`{"test-key":"test-value"}`)
	client := &http.Client{}
	req, err := http.NewRequest(method, url, payload)
	if err != nil {
		fmt.Println(err)
		return
	}
	req.Header.Add("Content-Type", "application/json")
	res, err := client.Do(req)
	if err != nil {
		fmt.Println(err)
		return
	}
	defer res.Body.Close()
	body, err := ioutil.ReadAll(res.Body)
	if err != nil {
		fmt.Println(err)
		return
	}
	fmt.Println(string(body))
}
package main

import (
	"crypto/tls"
	"fmt"
	"io/ioutil"
	"net/http"
	"net/url"
	"strings"
)

// Proxy-mode POST example: the API token is the proxy username.
func main() {
	proxyStr := "http://YOUR_TOKEN:@proxy.scrape.do:8080"
	proxyURL, err := url.Parse(proxyStr)
	if err != nil {
		fmt.Println(err)
		return
	}
	// TLS verification is skipped because the proxy re-signs the connection.
	transport := &http.Transport{
		Proxy:           http.ProxyURL(proxyURL),
		TLSClientConfig: &tls.Config{InsecureSkipVerify: true},
	}
	client := &http.Client{
		Transport: transport,
	}
	payload := strings.NewReader(`{"test-key":"test-value"}`)
	// Fixed: check the NewRequest error before using req, and drop the bogus
	// req.ParseForm() call (ParseForm parses *incoming* server-side requests;
	// it has no place in client code building an outgoing request).
	req, err := http.NewRequest("POST", "https://httpbin.co/anything", payload)
	if err != nil {
		fmt.Println(err)
		return
	}
	req.Header.Add("Content-Type", "application/json")
	resp, err := client.Do(req)
	if err != nil {
		// Fixed: return on failure instead of dereferencing a nil response.
		fmt.Println("Failure : ", err)
		return
	}
	defer resp.Body.Close()
	respBody, _ := ioutil.ReadAll(resp.Body)
	fmt.Println("response Status : ", resp.Status)
	fmt.Println("response Headers : ", resp.Header)
	fmt.Println("response Body : ", string(respBody))
}
require "uri"
require "net/http"
require 'cgi'
require 'json'
# API-mode POST: the target URL is escaped and passed as a query parameter.
str = CGI.escape "https://httpbin.co/anything"
url = URI("https://api.scrape.do?url=" + str + "&token=YOUR_TOKEN")
https = Net::HTTP.new(url.host, url.port)
https.use_ssl = true
request = Net::HTTP::Post.new(url)
# Fixed: declare the JSON content type; without it Net::HTTP defaults the
# body to application/x-www-form-urlencoded.
request["Content-Type"] = "application/json"
request.body = JSON.dump({"test-key": "test-value"})
response = https.request(request)
puts response.read_body
require 'httparty'
require 'json'
HTTParty::Basement.default_options.update(verify: false)
# Proxy-mode POST: the token is the proxy user. Headers and body must be
# passed as request options -- the original snippet assigned them to the
# *response* object after the request had already been sent (and the trailing
# comma on the header line was a syntax trap), which had no effect.
def send_request
    res = HTTParty.post('https://httpbin.co/anything', {
      http_proxyaddr: "proxy.scrape.do",
      http_proxyport: "8080",
      http_proxyuser: "YOUR_TOKEN",
      http_proxypass: "",
      headers: { "Content-Type" => "application/json" },
      body: JSON.dump({ "test-key": "test-value" })
    })
    puts "Response HTTP Status Code: #{ res.code }"
    puts "Response HTTP Response Body: #{ res.body }"
    puts "Response HTTP Response Headers: #{ res.headers }"
rescue StandardError => e
    puts "HTTP Request failed (#{ e.message })"
end
send_request()

Custom Headers

& customHeaders = true
default = false

You can interfere with all headers sent to the target website via our service. Scrape.do sends User-Agent, Accept, Cookies, etc. from you. It takes all header parameters and forwards them. You must set customHeaders=True to use the feature.

Example Code Snippet

curl --location --request GET 'https://api.scrape.do?token=YOUR_TOKEN&url=https://httpbin.co/headers&customHeaders=True' \
--header 'Test-Header-Key: TestValue'
curl -k -x "http://YOUR_TOKEN:[email protected]:8080" 'https://httpbin.co/headers' -v \
--header 'Test-Header-Key: TestValue'
import requests
import urllib.parse

# API-mode GET with customHeaders=true: every header in `headers` is
# forwarded verbatim to the target website.
token = "YOUR_TOKEN"
targetUrl = urllib.parse.quote("https://httpbin.co/headers")
customHeaders = "true"
payload = {}
url = "http://api.scrape.do?token={}&url={}&customHeaders={}".format(token, targetUrl, customHeaders)
headers = {
  'Test-Header-Key': 'TestValue'
}
response = requests.request("GET", url, headers=headers, data=payload)
print(response.text)
import requests
import urllib3
# Proxy mode re-signs TLS, so the insecure-request warning is silenced.
urllib3.disable_warnings(urllib3.exceptions.InsecureRequestWarning)
url = "https://httpbin.co/headers"
token = "YOUR_TOKEN"
# customHeaders=true is passed through the proxy password field.
proxyModeUrl = "http://{}:[email protected]:8080".format(token)
headers = {
  'Test-Header-Key': 'TestValue'
}
proxies = {
    "http": proxyModeUrl,
    "https": proxyModeUrl,
}
response = requests.request("GET", url, proxies=proxies, verify=False,headers = headers)
print(response.text)
// API-mode GET (axios) with customHeaders=true: request headers are
// forwarded to the target website.
var axios = require('axios');
var token = "YOUR_TOKEN";
var targetUrl = encodeURIComponent("https://httpbin.co/headers");
var config = {
    'method': 'GET',
    'url': `https://api.scrape.do?token=${token}&url=${targetUrl}&customHeaders=true`,
    'headers': {
        'Test-Header-Key': 'TestValue',
    }
};
axios(config)
    .then(function (response) {
        console.log(response.data);
    })
    .catch(function (error) {
        console.log(error);
    });
// Proxy-mode GET (axios): customHeaders=true rides in the proxy password.
const axios = require('axios');
const token = "YOUR_TOKEN";
const targetUrl = "https://httpbin.co/headers";

axios({
    method:"GET",
    url:targetUrl,
    proxy: {
        protocol:'http',
        host: 'proxy.scrape.do',
        port: 8080,
        auth: {
            username: token,
            password: 'customHeaders=true'
        }
    },
    // Fixed: the original enabled customHeaders but never sent a custom
    // header, so the feature was not actually demonstrated.
    headers: {
        'Test-Header-Key': 'TestValue',
    }
})
    .then(response => {
        console.log(response.data);
    })
    .catch(error => {
        console.error(error.message);
    });
// API-mode GET with customHeaders=true (HttpClient); the custom header is
// forwarded verbatim to the target website.
string token = "YOUR_TOKEN";
string url = WebUtility.UrlEncode("https://httpbin.co/headers");
var client = new HttpClient();
var requestURL = $"https://api.scrape.do?token={token}&url={url}&customHeaders=true";        
var request = new HttpRequestMessage(HttpMethod.Get, requestURL);
request.Headers.Add("Test-Header-Key", "TestValue");
var response = client.SendAsync(request).Result;
var content = response.Content.ReadAsStringAsync().Result;
Console.WriteLine(content);
// Proxy-mode GET: the token is the proxy username, customHeaders=true the password.
string token = "YOUR_TOKEN";
string url = "https://httpbin.co/headers";
var proxy = new WebProxy
{
    Address = new Uri("http://proxy.scrape.do:8080"),
    Credentials = new NetworkCredential(token, "customHeaders=true")
};
var request = new HttpRequestMessage(HttpMethod.Get, url);
request.Headers.Add("Test-Header-Key", "TestValue");
var handler = new HttpClientHandler
{
    Proxy = proxy,
    UseProxy = true
};
// Proxy mode re-signs TLS, so certificate validation is bypassed here.
handler.ServerCertificateCustomValidationCallback =
    (sender, cert, chain, sslPolicyErrors) => { return true; };
var client = new HttpClient(handler);
var response = client.SendAsync(request).Result;
var content = response.Content.ReadAsStringAsync().Result;
Console.WriteLine(content);
<?php
// API-mode GET with customHeaders=true: headers set below are forwarded
// verbatim to the target website.
$curl = curl_init();
curl_setopt($curl, CURLOPT_RETURNTRANSFER, true);
curl_setopt($curl, CURLOPT_HEADER, false);
$data = [
   "url" => "https://httpbin.co/headers",
   "token" => "YOUR_TOKEN",
   "customHeaders" => "true"
];
curl_setopt($curl, CURLOPT_CUSTOMREQUEST, 'GET');
curl_setopt($curl, CURLOPT_URL, "https://api.scrape.do?".http_build_query($data));
curl_setopt($curl, CURLOPT_HTTPHEADER, array(
    "Accept: */*",
    'Test-Header-Key: TestValue'
));
$response = curl_exec($curl);
curl_close($curl);
echo $response;
?>
<?php
// Proxy-mode GET: customHeaders=true rides in the proxy password field.
$curl = curl_init();
curl_setopt($curl, CURLOPT_RETURNTRANSFER, true);
curl_setopt($curl, CURLOPT_HEADER, false);
// Proxy mode re-signs TLS, so certificate verification is disabled here.
curl_setopt($curl, CURLOPT_SSL_VERIFYHOST, 0);
curl_setopt($curl, CURLOPT_SSL_VERIFYPEER, 0);
$url = "https://httpbin.co/headers";
$token = "YOUR_TOKEN";
$proxy = sprintf("http://%s:[email protected]:8080", $token);
curl_setopt($curl, CURLOPT_URL, $url);
curl_setopt($curl, CURLOPT_CUSTOMREQUEST, 'GET');
curl_setopt($curl, CURLOPT_PROXY, $proxy);
curl_setopt($curl, CURLOPT_HTTPHEADER, array(
    "Accept: */*",
    'Test-Header-Key: TestValue'
));
$response = curl_exec($curl);
curl_close($curl);
echo $response;
?>
OkHttpClient client = new OkHttpClient().newBuilder()
  .build();
MediaType mediaType = MediaType.parse("text/plain");
RequestBody body = RequestBody.create(mediaType, "");
String encoded_url = URLEncoder.encode("https://httpbin.co/headers", "UTF-8");
Request request = new Request.Builder()
  .url("https://api.scrape.do?token=YOUR_TOKEN&url="encoded_url"&customHeaders=True")
  .method("GET", body)
  .addHeader("Test-Header-Key", "TestValue")
  .build();
Response response = client.newCall(request).execute();
import java.net.URI;
import java.util.Base64;
import org.apache.hc.client5.http.fluent.Request;
import org.apache.hc.core5.http.HttpHost;
// Proxy-mode GET (Apache HttpClient 5 fluent): customHeaders=true is the
// proxy password; the custom header is forwarded to the target website.
public class TestRequest {
    public static void main(final String... args) throws Exception {
        String url = "https://httpbin.co/anything";
        URI proxyURI = new URI("http://YOUR_TOKEN:[email protected]:8080");
        // Base64 of "user:password" from the proxy URI for Proxy-Authorization.
        String basicAuth = new String(
            Base64.getEncoder()
            .encode(
                proxyURI.getUserInfo().getBytes()
            ));
        String response = Request.get(url)
                .addHeader("Proxy-Authorization", "Basic " + basicAuth).addHeader("Test-Header-Key", "TestValue")
                .viaProxy(HttpHost.create(proxyURI))
                .execute().returnContent().asString();
        System.out.println(response);
    }
}
package main

import (
	"fmt"
	"io/ioutil"
	"net/http"
	"net/url"
)

// API-mode GET with customHeaders=true: the Test-Header-Key header is
// forwarded verbatim to the target website.
func main() {
	token := "YOUR_TOKEN"
	// The target URL must be escaped before it is embedded as a query parameter.
	encoded_url := url.QueryEscape("https://httpbin.co/headers")
	url := fmt.Sprintf("https://api.scrape.do?token=%s&url=%s&customHeaders=true", token, encoded_url)
	method := "GET"
	client := &http.Client{}
	req, err := http.NewRequest(method, url, nil)
	if err != nil {
		fmt.Println(err)
		return
	}
	req.Header.Add("Test-Header-Key", "TestValue")
	res, err := client.Do(req)
	if err != nil {
		fmt.Println(err)
		return
	}
	defer res.Body.Close()
	body, err := ioutil.ReadAll(res.Body)
	if err != nil {
		fmt.Println(err)
		return
	}
	fmt.Println(string(body))
}
package main

import (
	"crypto/tls"
	"fmt"
	"io/ioutil"
	"net/http"
	"net/url"
)

// Proxy-mode GET with customHeaders=true passed via the proxy password field.
func main() {
	proxyStr := "http://YOUR_TOKEN:[email protected]:8080"
	proxyURL, err := url.Parse(proxyStr)
	if err != nil {
		fmt.Println(err)
		return
	}
	// TLS verification is skipped because the proxy re-signs the connection.
	transport := &http.Transport{
		Proxy:           http.ProxyURL(proxyURL),
		TLSClientConfig: &tls.Config{InsecureSkipVerify: true},
	}
	client := &http.Client{
		Transport: transport,
	}
	// Fixed: check the NewRequest error and drop the server-side-only
	// req.ParseForm() call, which has no place in client code.
	req, err := http.NewRequest("GET", "https://httpbin.co/headers", nil)
	if err != nil {
		fmt.Println(err)
		return
	}
	req.Header.Add("Test-Header-Key", "TestValue")
	resp, err := client.Do(req)
	if err != nil {
		// Fixed: bail out instead of dereferencing a nil response below.
		fmt.Println("Failure : ", err)
		return
	}
	defer resp.Body.Close()
	respBody, _ := ioutil.ReadAll(resp.Body)
	fmt.Println("response Status : ", resp.Status)
	fmt.Println("response Headers : ", resp.Header)
	fmt.Println("response Body : ", string(respBody))
}
require "uri"
require "net/http"
require 'cgi'

# API-mode GET with customHeaders=true; the custom header is forwarded
# verbatim to the target website.
str = CGI.escape  "https://httpbin.co/headers"
url = URI("https://api.scrape.do?url=" + str  + "&token=YOUR_TOKEN&customHeaders=true")
https = Net::HTTP.new(url.host, url.port)
https.use_ssl = true
request = Net::HTTP::Get.new(url)
request['Test-Header-Key'] = 'TestValue'
response = https.request(request)
puts response.read_body
require 'httparty'
HTTParty::Basement.default_options.update(verify: false)
# Proxy-mode GET with customHeaders=true passed via the proxy password.
def send_request
    # Fixed: request headers must be supplied with the request itself; the
    # original assigned the header to the response object after the call,
    # so it never reached the target website.
    res = HTTParty.get('https://httpbin.co/headers', {
      http_proxyaddr: "proxy.scrape.do",
      http_proxyport: "8080",
      http_proxyuser: "YOUR_TOKEN",
      http_proxypass: "customHeaders=true",
      headers: { "Test-Header-Key" => "TestValue" }
    })

    puts "Response HTTP Status Code: #{ res.code }"
    puts "Response HTTP Response Body: #{ res.body }"
    puts "Response HTTP Response Headers: #{ res.headers }"
rescue StandardError => e
    puts "HTTP Request failed (#{ e.message })"
end
send_request()

If you leave the User-Agent information blank in the header field of the requests sent via programming languages, it can be filled with a default value like ‘python/httpclient’. Therefore, we strongly recommend that you use a real User-Agent with this feature.

Result

{
    "headers": {
        "Accept": "*/*",
        "Accept-Encoding": "gzip, deflate, br",
        "Accept-Language": "en-US,en;q=0.9",
        "Authority": "httpbin.co",
        "Cache-Control": "no-cache,no-cache",
        "Host": "httpbin.co",
        "Referer": "https://httpbin.co",
        "Sec-Ch-Ua": "\"Google Chrome\";v=\"107\", \"Chromium\";v=\"107\", \"Not=A?Brand\";v=\"24\"",
        "Sec-Ch-Ua-Mobile": "?0",
        "Sec-Ch-Ua-Platform": "\"macOS\"",
        "Sec-Fetch-Dest": "document",
        "Sec-Fetch-Mode": "navigate",
        "Sec-Fetch-Site": "none",
        "Sec-Fetch-User": "?1",
        "Test-Header-Key": "TestValue",
        "Upgrade-Insecure-Requests": "1",
        "User-Agent": "PostmanRuntime/7.29.2",
        "X-Amzn-Trace-Id": "Root=1-63748965-50104cf629bc898d03188f57"
    }
}

Extra Headers

& extraHeaders = true
default = false

It is a feature that you can use only when there is one or more header parameters that you want to use for the needs of the target website, without breaking the requests we have created for the target website.

After setting the query parameter as extraHeaders=true, the header values ​​you will send with the Sd-** prefix will be sent to the target website.

Example Code Snippet

curl --location --request GET 'https://api.scrape.do?token=YOUR_TOKEN&url=https://httpbin.co/headers&extraHeaders=true' \
--header 'sd-Test: TestValue'
curl -k -x "http://YOUR_TOKEN:[email protected]:8080" 'https://httpbin.co/headers' -v \
--header 'sd-Test: TestValue'
import requests
import urllib.parse
# API-mode GET with extraHeaders=true: headers prefixed with "sd-" are added
# on top of the auto-generated request headers.
token = "YOUR_TOKEN"
targetUrl = urllib.parse.quote("https://httpbin.co/headers")
extraHeaders = "true"
payload = {}
url = "http://api.scrape.do?token={}&url={}&extraHeaders={}".format(token, targetUrl, extraHeaders)
headers = {
  'sd-Test': 'TestValue'
}
response = requests.request("GET", url, headers=headers, data=payload)
print(response.text)
import requests
import urllib3
urllib3.disable_warnings(urllib3.exceptions.InsecureRequestWarning)
# Proxy-mode GET with extraHeaders=true passed via the proxy password field.
url = "https://httpbin.co/headers"
token = "YOUR_TOKEN"
proxyModeUrl = "http://{}:[email protected]:8080".format(token)
headers = {
  'sd-Test': 'TestValue'
}
proxies = {
    "http": proxyModeUrl,
    "https": proxyModeUrl,
}
# Fixed: the headers dict was defined but never passed to the request, so
# the sd-Test header never reached the target website.
response = requests.request("GET", url, proxies=proxies, verify=False, headers=headers)
print(response.text)
// API-mode GET (axios) with extraHeaders=true: sd-* headers are appended to
// the auto-generated request headers.
var axios = require('axios');
var token = "YOUR_TOKEN";
var targetUrl = encodeURIComponent("https://httpbin.co/headers");
var extraHeaders = "true"
var config = {
    'method': 'GET',
    'url': `https://api.scrape.do?token=${token}&url=${targetUrl}&extraHeaders=${extraHeaders}`,
    'headers': {
        'sd-Test': 'TestValue',
    }
};
axios(config)
    .then(function (response) {
        console.log(response.data);
    })
    .catch(function (error) {
        console.log(error);
    });
// Proxy-mode GET (axios): extraHeaders=true rides in the proxy password and
// the sd-Test header is forwarded to the target website.
const axios = require('axios');
const token = "YOUR_TOKEN";
const targetUrl = "https://httpbin.co/anything";

axios({
    method:"GET",
    url:targetUrl,
    proxy: {
        protocol:'http',
        host: 'proxy.scrape.do',
        port: 8080,
        auth: {
            username: token,
            password: 'extraHeaders=true'
        }
    },
    headers: {
        'sd-Test': 'TestValue',
    }
})
    .then(response => {
        console.log(response.data);
    })
    .catch(error => {
        console.error(error.message);
    });
// API-mode GET with extraHeaders=true (HttpClient); the sd-* header is
// appended to the auto-generated request headers.
string token = "YOUR_TOKEN";
string url = WebUtility.UrlEncode("https://httpbin.co/headers");
var client = new HttpClient();
var requestURL = $"https://api.scrape.do?token={token}&url={url}&extraHeaders=true";        
var request = new HttpRequestMessage(HttpMethod.Get, requestURL);
request.Headers.Add("sd-Test", "TestValue");
var response = client.SendAsync(request).Result;
var content = response.Content.ReadAsStringAsync().Result;
Console.WriteLine(content);
// Proxy-mode GET: the token is the proxy username, extraHeaders=true the password.
string token = "YOUR_TOKEN";
string url = "https://httpbin.co/headers";
var proxy = new WebProxy
{
    Address = new Uri("http://proxy.scrape.do:8080"),
    Credentials = new NetworkCredential(token, "extraHeaders=true")
};
var request = new HttpRequestMessage(HttpMethod.Get, url);
request.Headers.Add("sd-Test", "TestValue");
var handler = new HttpClientHandler
{
    Proxy = proxy,
    UseProxy = true
};
// Proxy mode re-signs TLS, so certificate validation is bypassed here.
handler.ServerCertificateCustomValidationCallback =
    (sender, cert, chain, sslPolicyErrors) => { return true; };
var client = new HttpClient(handler);
var response = client.SendAsync(request).Result;
var content = response.Content.ReadAsStringAsync().Result;
Console.WriteLine(content);
<?php
// API-mode GET with extraHeaders=true: the sd-* header below is appended to
// the auto-generated request headers.
$curl = curl_init();
curl_setopt($curl, CURLOPT_RETURNTRANSFER, true);
curl_setopt($curl, CURLOPT_HEADER, false);
$data = [
   "url" => "https://httpbin.co/headers",
   "token" => "YOUR_TOKEN",
   "extraHeaders" => "true"
];
curl_setopt($curl, CURLOPT_CUSTOMREQUEST, 'GET');
curl_setopt($curl, CURLOPT_URL, "https://api.scrape.do?".http_build_query($data));
curl_setopt($curl, CURLOPT_HTTPHEADER, array(
    "Accept: */*",
    'sd-Test: TestValue'
));
$response = curl_exec($curl);
curl_close($curl);
echo $response;
?>
<?php
// Proxy-mode GET: extraHeaders=true rides in the proxy password field.
$curl = curl_init();
curl_setopt($curl, CURLOPT_RETURNTRANSFER, true);
curl_setopt($curl, CURLOPT_HEADER, false);
// Proxy mode re-signs TLS, so certificate verification is disabled here.
curl_setopt($curl, CURLOPT_SSL_VERIFYHOST, 0);
curl_setopt($curl, CURLOPT_SSL_VERIFYPEER, 0);
$url = "https://httpbin.co/headers";
$token = "YOUR_TOKEN";
$proxy = sprintf("http://%s:[email protected]:8080", $token);
curl_setopt($curl, CURLOPT_URL, $url);
curl_setopt($curl, CURLOPT_CUSTOMREQUEST, 'GET');
curl_setopt($curl, CURLOPT_PROXY, $proxy);
curl_setopt($curl, CURLOPT_HTTPHEADER, array(
    "Accept: */*",
    'sd-Test: TestValue'
));
$response = curl_exec($curl);
curl_close($curl);
echo $response;
?>
// API-mode GET (OkHttp) with extraHeaders=true.
OkHttpClient client = new OkHttpClient().newBuilder()
  .build();
String encoded_url = URLEncoder.encode("https://httpbin.co/headers", "UTF-8");
Request request = new Request.Builder()
  .url("https://api.scrape.do?token=YOUR_TOKEN&url=" + encoded_url + "&extraHeaders=true")
  // Fixed: OkHttp throws "method GET must not have a request body" when a
  // non-null body is passed with GET, so .get() replaces .method("GET", body).
  .get()
  .addHeader("sd-Test", "TestValue")
  .build();
Response response = client.newCall(request).execute();
import java.net.URI;
import java.util.Base64;
import org.apache.hc.client5.http.fluent.Request;
import org.apache.hc.core5.http.HttpHost;
// Proxy-mode GET (Apache HttpClient 5 fluent) with extraHeaders=true as the
// proxy password.
public class TestRequest {
    public static void main(final String... args) throws Exception {
        String url = "https://httpbin.co/anything";
        URI proxyURI = new URI("http://YOUR_TOKEN:[email protected]:8080");
        // Base64 of "user:password" from the proxy URI for Proxy-Authorization.
        String basicAuth = new String(
            Base64.getEncoder()
            .encode(
                proxyURI.getUserInfo().getBytes()
            ));
        // Fixed: the snippet enabled extraHeaders but never sent an sd-*
        // header, so the feature was not actually demonstrated.
        String response = Request.get(url)
                .addHeader("Proxy-Authorization", "Basic " + basicAuth)
                .addHeader("sd-Test", "TestValue")
                .viaProxy(HttpHost.create(proxyURI))
                .execute().returnContent().asString();
        System.out.println(response);
    }
}
package main

import (
	"fmt"
	"io/ioutil"
	"net/http"
	"net/url"
)

// API-mode GET with extraHeaders=true: the sd-Test header is appended to
// the auto-generated request headers.
func main() {
	token := "YOUR_TOKEN"
	// The target URL must be escaped before it is embedded as a query parameter.
	encoded_url := url.QueryEscape("https://httpbin.co/headers")
	url := fmt.Sprintf("https://api.scrape.do?token=%s&url=%s&extraHeaders=true", token, encoded_url)
	method := "GET"
	client := &http.Client{}
	req, err := http.NewRequest(method, url, nil)
	if err != nil {
		fmt.Println(err)
		return
	}
	req.Header.Add("sd-Test", "TestValue")
	res, err := client.Do(req)
	if err != nil {
		fmt.Println(err)
		return
	}
	defer res.Body.Close()
	body, err := ioutil.ReadAll(res.Body)
	if err != nil {
		fmt.Println(err)
		return
	}
	fmt.Println(string(body))
}
package main

import (
	"crypto/tls"
	"fmt"
	"io/ioutil"
	"net/http"
	"net/url"
)

// Proxy-mode GET with extraHeaders=true passed via the proxy password field.
func main() {
	proxyStr := "http://YOUR_TOKEN:[email protected]:8080"
	proxyURL, err := url.Parse(proxyStr)
	if err != nil {
		fmt.Println(err)
		return
	}
	// TLS verification is skipped because the proxy re-signs the connection.
	transport := &http.Transport{
		Proxy:           http.ProxyURL(proxyURL),
		TLSClientConfig: &tls.Config{InsecureSkipVerify: true},
	}
	client := &http.Client{
		Transport: transport,
	}
	// Fixed: check the NewRequest error and drop the server-side-only
	// req.ParseForm() call, which has no place in client code.
	req, err := http.NewRequest("GET", "https://httpbin.co/headers", nil)
	if err != nil {
		fmt.Println(err)
		return
	}
	req.Header.Add("sd-Test", "TestValue")
	resp, err := client.Do(req)
	if err != nil {
		// Fixed: bail out instead of dereferencing a nil response below.
		fmt.Println("Failure : ", err)
		return
	}
	defer resp.Body.Close()
	respBody, _ := ioutil.ReadAll(resp.Body)
	fmt.Println("response Status : ", resp.Status)
	fmt.Println("response Headers : ", resp.Header)
	fmt.Println("response Body : ", string(respBody))
}
require "uri"
require "net/http"
require 'cgi'

# API-mode GET with extraHeaders=true; the sd-* header is appended to the
# auto-generated request headers.
str =  CGI.escape "https://httpbin.co/headers"
url = URI("https://api.scrape.do?url=" + str + "&token=YOUR_TOKEN&extraHeaders=true")
https = Net::HTTP.new(url.host, url.port)
https.use_ssl = true
request = Net::HTTP::Get.new(url)
request["sd-Test"] = "TestValue"
response = https.request(request)
puts response.read_body
require 'httparty'
HTTParty::Basement.default_options.update(verify: false)
# Proxy-mode GET with extraHeaders=true passed via the proxy password.
def send_request
    # Fixed: request headers must be supplied with the request itself; the
    # original assigned the sd-Test header to the response object after the
    # call, so it never reached the target website.
    res = HTTParty.get('https://httpbin.co/headers', {
      http_proxyaddr: "proxy.scrape.do",
      http_proxyport: "8080",
      http_proxyuser: "YOUR_TOKEN",
      http_proxypass: "extraHeaders=true",
      headers: { "sd-Test" => "TestValue" }
    })

    puts "Response HTTP Status Code: #{ res.code }"
    puts "Response HTTP Response Body: #{ res.body }"
    puts "Response HTTP Response Headers: #{ res.headers }"
rescue StandardError => e
    puts "HTTP Request failed (#{ e.message })"
end
send_request()

Example Result

{
    "headers": {
        "Accept": "text/html,application/xhtml+xml,application/xml;q=0.9,image/avif,image/webp,image/apng,*/*;q=0.8,application/signed-exchange;v=b3;q=0.9",
        "Accept-Encoding": "gzip, deflate, br",
        "Accept-Language": "en-US,en;q=0.9",
        "Authority": "httpbin.co",
        "Cache-Control": "no-cache",
        "Host": "httpbin.co",
        "Referer": "https://httpbin.co",
        "Sec-Ch-Ua": "\"Google Chrome\";v=\"107\", \"Chromium\";v=\"107\", \"Not=A?Brand\";v=\"24\"",
        "Sec-Ch-Ua-Mobile": "?0",
        "Sec-Ch-Ua-Platform": "\"macOS\"",
        "Sec-Fetch-Dest": "document",
        "Sec-Fetch-Mode": "navigate",
        "Sec-Fetch-Site": "none",
        "Sec-Fetch-User": "?1",
        "Test": "TestValue",
        "Upgrade-Insecure-Requests": "1",
        "User-Agent": "Mozilla/5.0 (Macintosh; Intel Mac OS X 10_15_7) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/107.0.0.0 Safari/537.36",
        "X-Amzn-Trace-Id": "Root=1-63748c46-2e20cdb45d2d89455b4e4fd0"
    }
}

Forward Headers

& forwardHeaders = boolean
default = false

You may need to forward only your own headers to the target web page you want to scrape, without the auto-generated headers. In this case, you can use the forwardHeaders=True query parameter.

Example Code Snippet

curl --location --request GET 'https://api.scrape.do?token=YOUR_TOKEN&url=https://httpbin.co/headers&forwardHeaders=True' \
--header 'Test-Header-Key: TestValue'
curl -k -x "http://YOUR_TOKEN:[email protected]:8080" 'https://httpbin.co/headers' -v \
--header 'Test-Header-Key: TestValue'
import requests
import urllib.parse
# API-mode GET with forwardHeaders=true: only the headers you send are
# forwarded to the target website (no auto-generated headers).
token = "YOUR_TOKEN"
targetUrl = urllib.parse.quote("https://httpbin.co/headers")
forwardHeaders = "true"
# Fixed: this snippet demonstrated render=true, which belongs to a different
# feature; the Forward Headers section must pass forwardHeaders and an
# actual header to forward.
url = "http://api.scrape.do?token={}&url={}&forwardHeaders={}".format(token, targetUrl, forwardHeaders)
headers = {
  'Test-Header-Key': 'TestValue'
}
response = requests.request("GET", url, headers=headers)
print(response.text)
import requests
import urllib3
# Proxy mode re-signs TLS, so the insecure-request warning is silenced.
urllib3.disable_warnings(urllib3.exceptions.InsecureRequestWarning)

url = "https://httpbin.co/headers"
token = "YOUR_TOKEN"
# forwardHeaders=true is passed through the proxy password field.
proxyModeUrl = "http://{}:[email protected]:8080".format(token)
headers = {
  'Test-Header-Key': 'TestValue'
}
proxies = {
    "http": proxyModeUrl,
    "https": proxyModeUrl,
}
response = requests.request("GET", url, proxies=proxies, verify=False,headers = headers)
print(response.text)
// API-mode GET (axios) with forwardHeaders=true: only the supplied headers
// are forwarded to the target website.
var axios = require('axios');
var token = "YOUR_TOKEN";
var targetUrl = encodeURIComponent("https://httpbin.co/headers");
var config = {
    'method': 'GET',
    'url': `https://api.scrape.do?token=${token}&url=${targetUrl}&forwardHeaders=true`,
    'headers': {
        'Test-Header-Key': 'TestValue',
    }
};
axios(config)
    .then(function (response) {
        console.log(response.data);
    })
    .catch(function (error) {
        console.log(error);
    });
// Proxy-mode GET (axios): forwardHeaders=true rides in the proxy password.
const axios = require('axios');
const token = "YOUR_TOKEN";
const targetUrl = "https://httpbin.co/anything";

axios({
    method:"GET",
    url:targetUrl,
    proxy: {
        protocol:'http',
        host: 'proxy.scrape.do',
        port: 8080,
        auth: {
            username: token,
            password: 'forwardHeaders=true'
        }
    },
    headers: {
        'Test-Header-Key': 'TestValue',
    }
})
    .then(response => {
        console.log(response.data);
    })
    .catch(error => {
        console.error(error.message);
    });
// API-mode GET with forwardHeaders=true (HttpClient); only the supplied
// headers are forwarded to the target website.
string token = "YOUR_TOKEN";
string url = WebUtility.UrlEncode("https://httpbin.co/headers");
var client = new HttpClient();
var requestURL = $"https://api.scrape.do?token={token}&url={url}&forwardHeaders=true";        
var request = new HttpRequestMessage(HttpMethod.Get, requestURL);
request.Headers.Add("Test-Header-Key", "TestValue");
var response = client.SendAsync(request).Result;
var content = response.Content.ReadAsStringAsync().Result;
Console.WriteLine(content);
// Proxy-mode GET with forwardHeaders=true as the proxy password.
// Fixed: the original embedded a real-looking API token; documentation must
// use the YOUR_TOKEN placeholder, never live credentials.
string token = "YOUR_TOKEN";
string url = "https://httpbin.co/headers";
var proxy = new WebProxy
{
    Address = new Uri("http://proxy.scrape.do:8080"),
    Credentials = new NetworkCredential(token, "forwardHeaders=true")
};
var request = new HttpRequestMessage(HttpMethod.Get, url);
request.Headers.Add("Test-Header-Key", "TestValue");
var handler = new HttpClientHandler
{
    Proxy = proxy,
    UseProxy = true
};
// Proxy mode re-signs TLS, so certificate validation is bypassed here.
handler.ServerCertificateCustomValidationCallback =
    (sender, cert, chain, sslPolicyErrors) => { return true; };
var client = new HttpClient(handler);
var response = client.SendAsync(request).Result;
var content = response.Content.ReadAsStringAsync().Result;
Console.WriteLine(content);
<?php
// API-mode GET with forwardHeaders=true: only the headers set below are
// forwarded to the target website.
$curl = curl_init();
curl_setopt($curl, CURLOPT_RETURNTRANSFER, true);
curl_setopt($curl, CURLOPT_HEADER, false);
$data = [
   "url" => "https://httpbin.co/headers",
   "token" => "YOUR_TOKEN",
   "forwardHeaders" => "true"
];
curl_setopt($curl, CURLOPT_CUSTOMREQUEST, 'GET');
curl_setopt($curl, CURLOPT_URL, "https://api.scrape.do?".http_build_query($data));
curl_setopt($curl, CURLOPT_HTTPHEADER, array(
    "Accept: */*",
    'Test-Header-Key: TestValue'
));
$response = curl_exec($curl);
curl_close($curl);
echo $response;
?>
<?php
// Proxy-mode GET: forwardHeaders=true rides in the proxy password field.
$curl = curl_init();
curl_setopt($curl, CURLOPT_RETURNTRANSFER, true);
curl_setopt($curl, CURLOPT_HEADER, false);
// Proxy mode re-signs TLS, so certificate verification is disabled here.
curl_setopt($curl, CURLOPT_SSL_VERIFYHOST, 0);
curl_setopt($curl, CURLOPT_SSL_VERIFYPEER, 0);
$url = "https://httpbin.co/headers";
$token = "YOUR_TOKEN";
$proxy = sprintf("http://%s:[email protected]:8080", $token);
curl_setopt($curl, CURLOPT_URL, $url);
curl_setopt($curl, CURLOPT_CUSTOMREQUEST, 'GET');
curl_setopt($curl, CURLOPT_PROXY, $proxy);
curl_setopt($curl, CURLOPT_HTTPHEADER, array(
    "Accept: */*",
    'Test-Header-Key: TestValue'
));
$response = curl_exec($curl);
curl_close($curl);
echo $response;
?>
OkHttpClient client = new OkHttpClient().newBuilder()
  .build();
MediaType mediaType = MediaType.parse("text/plain");
RequestBody body = RequestBody.create(mediaType, "");
String encoded_url = URLEncoder.encode("https://httpbin.co/headers", "UTF-8");
Request request = new Request.Builder()
  .url("https://api.scrape.do?token=YOUR_TOKEN&url="encoded_url"&forwardHeaders=True")
  .method("GET", body)
  .addHeader("Test-Header-Key", "TestValue")
  .build();
Response response = client.newCall(request).execute();
import java.net.URI;
import java.util.Base64;

import org.apache.hc.client5.http.fluent.Request;
import org.apache.hc.core5.http.HttpHost;

// Proxy-mode GET (Apache HttpClient 5 fluent): forwardHeaders=true is the
// proxy password; only the supplied header is forwarded to the target.
public class TestRequest {
    public static void main(final String... args) throws Exception {
        String url = "https://httpbin.co/anything";
        URI proxyURI = new URI("http://YOUR_TOKEN:[email protected]:8080");
        // Base64 of "user:password" from the proxy URI for Proxy-Authorization.
        String basicAuth = new String(
            Base64.getEncoder()
            .encode(
                proxyURI.getUserInfo().getBytes()
            ));
        String response = Request.get(url)
                .addHeader("Proxy-Authorization", "Basic " + basicAuth).addHeader("Test-Header-Key", "TestValue")
                .viaProxy(HttpHost.create(proxyURI))
                .execute().returnContent().asString();

        System.out.println(response);
    }
}
package main

import (
	"fmt"
	"io/ioutil"
	"net/http"
	"net/url"
)

// API-mode GET with forwardHeaders=true: only the Test-Header-Key header is
// forwarded to the target website.
func main() {
	token := "YOUR_TOKEN"
	// The target URL must be escaped before it is embedded as a query parameter.
	encoded_url := url.QueryEscape("https://httpbin.co/headers")
	url := fmt.Sprintf("https://api.scrape.do?token=%s&url=%s&forwardHeaders=true", token, encoded_url)
	method := "GET"
	client := &http.Client{}
	req, err := http.NewRequest(method, url, nil)
	if err != nil {
		fmt.Println(err)
		return
	}
	req.Header.Add("Test-Header-Key", "TestValue")
	res, err := client.Do(req)
	if err != nil {
		fmt.Println(err)
		return
	}
	defer res.Body.Close()
	body, err := ioutil.ReadAll(res.Body)
	if err != nil {
		fmt.Println(err)
		return
	}
	fmt.Println(string(body))
}
package main

import (
	"crypto/tls"
	"fmt"
	"io/ioutil"
	"net/http"
	"net/url"
)

// Proxy-mode GET with forwardHeaders=true passed via the proxy password field.
func main() {
	proxyStr := "http://YOUR_TOKEN:[email protected]:8080"
	proxyURL, err := url.Parse(proxyStr)
	if err != nil {
		fmt.Println(err)
		return
	}
	// TLS verification is skipped because the proxy re-signs the connection.
	transport := &http.Transport{
		Proxy:           http.ProxyURL(proxyURL),
		TLSClientConfig: &tls.Config{InsecureSkipVerify: true},
	}
	client := &http.Client{
		Transport: transport,
	}
	// Fixed: check the NewRequest error and drop the server-side-only
	// req.ParseForm() call, which has no place in client code.
	req, err := http.NewRequest("GET", "https://httpbin.co/headers", nil)
	if err != nil {
		fmt.Println(err)
		return
	}
	req.Header.Add("Test-Header-Key", "TestValue")
	resp, err := client.Do(req)
	if err != nil {
		// Fixed: bail out instead of dereferencing a nil response below.
		fmt.Println("Failure : ", err)
		return
	}
	defer resp.Body.Close()
	respBody, _ := ioutil.ReadAll(resp.Body)
	fmt.Println("response Status : ", resp.Status)
	fmt.Println("response Headers : ", resp.Header)
	fmt.Println("response Body : ", string(respBody))
}
require 'httparty'
HTTParty::Basement.default_options.update(verify: false)
# Proxy-mode GET with forwardHeaders=true passed via the proxy password.
def send_request
    # Fixed: the custom header must be sent with the request; assigning it to
    # the response object after the call has no effect.
    res = HTTParty.get('https://httpbin.co/headers', {
      http_proxyaddr: "proxy.scrape.do",
      http_proxyport: "8080",
      http_proxyuser: "YOUR_TOKEN",
      http_proxypass: "forwardHeaders=true",
      headers: { "Test-Header-Key" => "TestValue" }
    })

    puts "Response HTTP Status Code: #{ res.code }"
    puts "Response HTTP Response Body: #{ res.body }"
    puts "Response HTTP Response Headers: #{ res.headers }"
rescue StandardError => e
    puts "HTTP Request failed (#{ e.message })"
end
send_request()
# NOTE(review): this block is an exact duplicate of the HTTParty example
# directly above; the Forward Headers section appears to be missing its
# Net::HTTP variant here -- confirm against the published docs.
require 'httparty'
HTTParty::Basement.default_options.update(verify: false)
# Classic (GET )
def send_request
    res = HTTParty.get('https://httpbin.co/headers', {
      http_proxyaddr: "proxy.scrape.do",
      http_proxyport: "8080",
      http_proxyuser: "YOUR_TOKEN",
      http_proxypass: "forwardHeaders=true"
    })
    # NOTE(review): assigning a header on the response after the request has
    # no effect; headers belong in the request options.
    res["Test-Header-Key"] = "TestValue"

    puts "Response HTTP Status Code: #{ res.code }"
    puts "Response HTTP Response Body: #{ res.body }"
    puts "Response HTTP Response Body: #{ res.header }"
rescue StandardError => e
    puts "HTTP Request failed (#{ e.message })"
end
send_request()

Example Result

{
  "headers": {
    "Accept": "*/*",
    "Accept-Encoding": "gzip, deflate, br",
    "Host": "httpbin.co",
    "Test-Header-Key": "TestValue",
    "User-Agent": "curl/7.85.0",
    "X-Amzn-Trace-Id": "Root=1-63f5d502-4fa0a57019562b1b55fed4f6"
  }
}

Set Cookies

& setCookies = url encoded cookies
default = false

You can pass cookies to the target website by encoding setCookies=cookie1=value1;cookie2=value2; and our system will send the cookies you have forwarded to the target website.

You don’t need to use extraHeaders or customHeaders parameters to send cookies to the target website. You can use the setCookies parameter to send cookies to the target website.

You should use url encode for your cookie values. For example; setCookies=cookie1%3Dvalue1%3B. You can see more details here.

Example Code Snippet

curl --location --request GET 'https://api.scrape.do?url=https://httpbin.co/anything&token=YOUR_TOKEN&setCookies=cookie1%3Dvalue1%3Bcookie2%3Dvalue2%3Bcookie3%3Dvalue3%3B'
curl -k -x "http://YOUR_TOKEN:setCookies=cookie1%3Dvalue1%3Bcookie2%3Dvalue2%3Bcookie3%3Dvalue3%3B;@proxy.scrape.do:8080" 'https://httpbin.co/anything' -v
import requests
import urllib.parse
token = "YOUR_TOKEN"
# "cookie2" (not "cookie") — names must match the documented example result.
encodedCookies = urllib.parse.quote("cookie1=value1;cookie2=value2;cookie3=value3;")
targetUrl = urllib.parse.quote("https://httpbin.co/headers")
url = "http://api.scrape.do?token={}&url={}&setCookies={}".format(token, targetUrl, encodedCookies)
response = requests.request("GET", url)
print(response.text)
import requests
import urllib3
import urllib.parse
urllib3.disable_warnings(urllib3.exceptions.InsecureRequestWarning)
url = "https://httpbin.co/anything"
token = "YOUR_TOKEN"
# "cookie2" (not "cookie") — names must match the documented example result.
encodedCookies = urllib.parse.quote("cookie1=value1;cookie2=value2;cookie3=value3;")
proxyModeUrl = "http://{}:setCookies={}@proxy.scrape.do:8080".format(token, encodedCookies)
proxies = {
    "http": proxyModeUrl,
    "https": proxyModeUrl,
}
response = requests.request("GET", url, proxies=proxies, verify=False)
print(response.text)
var axios = require('axios');
var token = "YOUR_TOKEN";
var targetUrl = encodeURIComponent("https://httpbin.co/anything");
// "cookie2" (not "cookie") — names must match the documented example result.
var cookies = "cookie1=value1;cookie2=value2;cookie3=value3;";
var encodedCookies = encodeURIComponent(cookies);
var config = {
    'method': 'GET',
    'url': `https://api.scrape.do?token=${token}&url=${targetUrl}&setCookies=${encodedCookies}`,
    'headers': {}
};
axios(config)
    .then(function (response) {
        console.log(response.data);
    })
    .catch(function (error) {
        console.log(error);
    });
const axios = require('axios');
const token = "YOUR_TOKEN";
const targetUrl = "https://httpbin.co/anything";
// "cookie2" (not "cookie") — names must match the documented example result.
var cookies = "cookie1=value1;cookie2=value2;cookie3=value3;";
var encodedCookies = encodeURIComponent(cookies);

axios({
    method:"GET",
    url:targetUrl,
    proxy: {
        protocol:'http',
        host: 'proxy.scrape.do',
        port: 8080,
        auth: {
            username: token,
            password: 'setCookies=' + encodedCookies
        }
    }
})
    .then(response => {
        console.log(response.data);
    })
    .catch(error => {
        console.error(error.message);
    });
string token = "YOUR_TOKEN";
string url = WebUtility.UrlEncode("https://httpbin.co/anything");
// "cookie2" (not "cookie") — names must match the documented example result.
string cookies = "cookie1=value1;cookie2=value2;cookie3=value3;";
var client = new HttpClient();
var requestURL = $"https://api.scrape.do?token={token}&url={url}&setCookies={WebUtility.UrlEncode(cookies)}";
var request = new HttpRequestMessage(HttpMethod.Get, requestURL);
var response = client.SendAsync(request).Result;
var content = response.Content.ReadAsStringAsync().Result;
Console.WriteLine(content);
string token = "YOUR_TOKEN";
string url = "https://httpbin.co/anything";
// "cookie2" (not "cookie") — names must match the documented example result.
string encodedCookies = WebUtility.UrlEncode("cookie1=value1;cookie2=value2;cookie3=value3;");
var proxy = new WebProxy
{
    Address = new Uri("http://proxy.scrape.do:8080"),
    Credentials = new NetworkCredential(token, $"setCookies={encodedCookies}")
};
var request = new HttpRequestMessage(HttpMethod.Get, url);
var handler = new HttpClientHandler
{
    Proxy = proxy,
    UseProxy = true
};
handler.ServerCertificateCustomValidationCallback =
    (sender, cert, chain, sslPolicyErrors) => { return true; };
var client = new HttpClient(handler);
var response = client.SendAsync(request).Result;
var content = response.Content.ReadAsStringAsync().Result;
Console.WriteLine(content);
<?php
$curl = curl_init();
$token = "YOUR_TOKEN";
$encodedUrl = urlencode("https://httpbin.co/anything");
// "cookie2" (not "cookie") — names must match the documented example result.
$encodedCookies = urlencode('cookie1=value1;cookie2=value2;cookie3=value3;');
$url = "https://api.scrape.do?token={$token}&url={$encodedUrl}&setCookies={$encodedCookies}";
curl_setopt($curl, CURLOPT_RETURNTRANSFER, true);
curl_setopt($curl, CURLOPT_HEADER, false);
curl_setopt($curl, CURLOPT_CUSTOMREQUEST, 'GET');
curl_setopt($curl, CURLOPT_URL, $url);
$response = curl_exec($curl);
curl_close($curl);
echo $response;
?>
<?php
$curl = curl_init();
curl_setopt($curl, CURLOPT_RETURNTRANSFER, true);
curl_setopt($curl, CURLOPT_HEADER, false);
curl_setopt($curl, CURLOPT_SSL_VERIFYHOST, 0);
curl_setopt($curl, CURLOPT_SSL_VERIFYPEER, 0);
$url = "https://httpbin.co/anything";
$token = "YOUR_TOKEN";
// "cookie2" (not "cookie") — names must match the documented example result.
$encodedCookies = "setCookies=cookie1%3Dvalue1%3Bcookie2%3Dvalue2%3Bcookie3%3Dvalue3%3B";
// Proxy host restored (an email-obfuscation artifact had corrupted the address).
$proxy = sprintf("http://%s:%s@proxy.scrape.do:8080", $token, $encodedCookies);
curl_setopt($curl, CURLOPT_URL, $url);
curl_setopt($curl, CURLOPT_CUSTOMREQUEST, 'GET');
curl_setopt($curl, CURLOPT_PROXY, $proxy);
curl_setopt($curl, CURLOPT_HTTPHEADER, array(
    "Accept: */*",
));
$response = curl_exec($curl);
curl_close($curl);
echo $response;
?>
OkHttpClient client = new OkHttpClient().newBuilder()
  .build();
// OkHttp's Request.Builder.method() throws for a GET with a body, so use
// .get(); cookie name fixed to "cookie2" to match the documented result.
String encoded_url = URLEncoder.encode("https://httpbin.co/anything", "UTF-8");
String cookies = URLEncoder.encode("cookie1=value1;cookie2=value2;cookie3=value3;", "UTF-8");
Request request = new Request.Builder()
  .url("https://api.scrape.do?url=" + encoded_url + "&token=YOUR_TOKEN&setCookies=" + cookies)
  .get()
  .build();
Response response = client.newCall(request).execute();
import java.net.URI;
import java.util.Base64;
import org.apache.hc.client5.http.fluent.Request;
import org.apache.hc.core5.http.HttpHost;
public class TestRequest {
    public static void main(final String... args) throws Exception {
        String url = "https://httpbin.co/anything";
        // "cookie2" (not "cookie") — names must match the documented example result.
        URI proxyURI = new URI("http://YOUR_TOKEN:setCookies=cookie1=value1;cookie2=value2;cookie3=value3;@proxy.scrape.do:8080");
        String basicAuth = new String(
            Base64.getEncoder()
            .encode(
                proxyURI.getUserInfo().getBytes()
            ));
        String response = Request.get(url)
                .addHeader("Proxy-Authorization", "Basic " + basicAuth)
                .viaProxy(HttpHost.create(proxyURI))
                .execute().returnContent().asString();
        System.out.println(response);
    }
}
package main

import (
	"fmt"
	"io/ioutil"
	"net/http"
	"net/url"
)

func main() {
	token := "YOUR_TOKEN"
	encoded_url := url.QueryEscape("https://httpbin.co/anything")
	// "cookie2" (not "cookie") — names must match the documented example result.
	cookies := url.QueryEscape("cookie1=value1;cookie2=value2;cookie3=value3;")
	url := fmt.Sprintf("https://api.scrape.do?token=%s&url=%s&setCookies=%s", token, encoded_url, cookies)
	method := "GET"
	client := &http.Client{}
	req, err := http.NewRequest(method, url, nil)
	if err != nil {
		fmt.Println(err)
		return
	}
	res, err := client.Do(req)
	if err != nil {
		fmt.Println(err)
		return
	}
	defer res.Body.Close()

	body, err := ioutil.ReadAll(res.Body)
	if err != nil {
		fmt.Println(err)
		return
	}
	fmt.Println(string(body))
}
package main

import (
	"crypto/tls"
	"fmt"
	"io/ioutil"
	"net/http"
	"net/url"
)

func main() {
	token := "YOUR_TOKEN"
	// "cookie2" (not "cookie") — names must match the documented example result.
	cookies := url.QueryEscape("cookie1=value1;cookie2=value2;cookie3=value3;")
	// Proxy host restored (an email-obfuscation artifact had corrupted the address).
	proxyStr := fmt.Sprintf("http://%s:setCookies=%s@proxy.scrape.do:8080", token, cookies)
	proxyURL, err := url.Parse(proxyStr)
	if err != nil {
		fmt.Println(err)
		return
	}
	transport := &http.Transport{
		Proxy:           http.ProxyURL(proxyURL),
		TLSClientConfig: &tls.Config{InsecureSkipVerify: true},
	}
	client := &http.Client{
		Transport: transport,
	}
	req, err := http.NewRequest("GET", "https://httpbin.co/anything", nil)
	if err != nil {
		fmt.Println(err)
		return
	}
	resp, err := client.Do(req)
	if err != nil {
		// Returning here prevents a nil-pointer panic on resp below.
		fmt.Println("Failure : ", err)
		return
	}
	defer resp.Body.Close()
	respBody, _ := ioutil.ReadAll(resp.Body)
	fmt.Println("response Status : ", resp.Status)
	fmt.Println("response Headers : ", resp.Header)
	fmt.Println("response Body : ", string(respBody))
}
require "uri"
require "net/http"
require 'cgi'
str = CGI.escape "https://httpbin.co/anything"
# "cookie2" (not "cookie") — names must match the documented example result.
cookies = CGI.escape "cookie1=value1;cookie2=value2;cookie3=value3;"
url = URI("https://api.scrape.do?url=" + str + "&token=YOUR_TOKEN&setCookies=" + cookies)
https = Net::HTTP.new(url.host, url.port)
https.use_ssl = true
request = Net::HTTP::Get.new(url)
response = https.request(request)
puts response.read_body
require 'httparty'
require 'cgi'
HTTParty::Basement.default_options.update(verify: false)
# Classic (GET )
def send_request
  # "cookie2" (not "cookie") — names must match the documented example result.
  cookies = CGI.escape "cookie1=value1;cookie2=value2;cookie3=value3;"
  res = HTTParty.get('https://httpbin.co/anything', {
    http_proxyaddr: "proxy.scrape.do",
    http_proxyport: "8080",
    http_proxyuser: "YOUR_TOKEN",
    http_proxypass: "setCookies=" + cookies
  })
  puts "Response HTTP Status Code: #{ res.code }"
  puts "Response HTTP Response Body: #{ res.body }"
  puts "Response HTTP Response Headers: #{ res.headers }"
rescue StandardError => e
  puts "HTTP Request failed (#{ e.message })"
end
send_request()

Example Result

{
    "cookies": {
        "cookie1": "value1",
        "cookie2": "value2",
        "cookie3": "value3"
    }
}

Disable Redirection

& disableRedirection = boolean
default = false

Scrape.do follows redirects on the target web page by default and returns the result from the final location. It follows a maximum of 10 redirects; if that limit is reached, the system stops redirecting and returns a response with the last status.

In some cases, you may not want to use the redirection mechanism. By passing the disableRedirection=true parameter, you can prevent default redirection. 

When you use this feature, you can find the url information that the target website wants to redirect you to in the Scrape.do-Target-Redirected-Location header.

Example Code Snippet

curl --location --request GET 'https://api.scrape.do?token=YOUR_TOKEN&url=https://httpbin.co/anything&disableRedirection=true'
curl -k -x "http://YOUR_TOKEN:disableRedirection=true@proxy.scrape.do:8080" 'https://httpbin.co/anything' -v
# API mode: disableRedirection=true tells Scrape.do not to follow redirects;
# the redirect target is reported in the Scrape.do-Target-Redirected-Location header.
import requests
import urllib.parse
token = "YOUR_TOKEN"
targetUrl = urllib.parse.quote("https://httpbin.co/anything")
disabledRedirection = "true"
url = "http://api.scrape.do?token={}&url={}&disableRedirection={}".format(token, targetUrl, disabledRedirection)
response = requests.request("GET", url)
print(response.text)
import requests
import urllib3
urllib3.disable_warnings(urllib3.exceptions.InsecureRequestWarning)
url = "https://httpbin.co/anything"
token = "YOUR_TOKEN"
# Proxy host restored (an email-obfuscation artifact had corrupted the address).
proxyModeUrl = "http://{}:disableRedirection=true@proxy.scrape.do:8080".format(token)
proxies = {
    "http": proxyModeUrl,
    "https": proxyModeUrl,
}
response = requests.request("GET", url, proxies=proxies, verify=False)
print(response.text)
// API mode (axios): pass disableRedirection=true as a query parameter;
// the target URL must be URL-encoded.
var axios = require('axios');
var token = "YOUR_TOKEN";
var targetUrl = encodeURIComponent("https://httpbin.co/anything");
var disableRedirection = "true";
var config = {
    'method': 'GET',
    'url': `https://api.scrape.do?token=${token}&url=${targetUrl}&disableRedirection=${disableRedirection}`,
    'headers': {}
};
axios(config)
    .then(function (response) {
        console.log(response.data);
    })
    .catch(function (error) {
        console.log(error);
    });
const axios = require('axios');
const token = "YOUR_TOKEN";
const targetUrl = "https://httpbin.co/anything";

axios({
    method:"GET",
    url:targetUrl,
    proxy: {
        protocol:'http',
        host: 'proxy.scrape.do',
        port: 8080,
        auth: {
            username: token,
            password: 'disableRedirection=true'
        }
    },
})
    .then(response => {
        console.log(response.data);
    })
    .catch(error => {
        console.error(error.message);
    });
// API mode (HttpClient): pass disableRedirection=true as a query parameter;
// the target URL must be URL-encoded.
string token = "YOUR_TOKEN";
string url = WebUtility.UrlEncode("https://httpbin.co/anything");
var client = new HttpClient();
var requestURL = $"https://api.scrape.do?token={token}&url={url}&disableRedirection=true";
var request = new HttpRequestMessage(HttpMethod.Get, requestURL);
var response = client.SendAsync(request).Result;
var content = response.Content.ReadAsStringAsync().Result;
Console.WriteLine(content);
string token = "YOUR_TOKEN";
string url = "https://httpbin.co/anything";
var proxy = new WebProxy
{
    Address = new Uri("http://proxy.scrape.do:8080"),
    Credentials = new NetworkCredential(token, "disableRedirection=true")
};
var request = new HttpRequestMessage(HttpMethod.Get, url);
var handler = new HttpClientHandler
{
    Proxy = proxy,
    UseProxy = true
};
handler.ServerCertificateCustomValidationCallback =
    (sender, cert, chain, sslPolicyErrors) => { return true; };
var client = new HttpClient(handler);
var response = client.SendAsync(request).Result;
var content = response.Content.ReadAsStringAsync().Result;
Console.WriteLine(content);
<?php
$curl = curl_init();
curl_setopt($curl, CURLOPT_RETURNTRANSFER, true);
curl_setopt($curl, CURLOPT_HEADER, false);
$data = [
   "url" => "https://httpbin.co/anything",
   "token" => "YOUR_TOKEN",
   "disableRedirection" => "true"
];
curl_setopt($curl, CURLOPT_CUSTOMREQUEST, 'GET');
curl_setopt($curl, CURLOPT_URL, "https://api.scrape.do?".http_build_query($data));
curl_setopt($curl, CURLOPT_HTTPHEADER, array(
    "Accept: */*",
));
$response = curl_exec($curl);
curl_close($curl);
echo $response;
?>
<?php
$curl = curl_init();
curl_setopt($curl, CURLOPT_RETURNTRANSFER, true);
curl_setopt($curl, CURLOPT_HEADER, false);
curl_setopt($curl, CURLOPT_SSL_VERIFYHOST, 0);
curl_setopt($curl, CURLOPT_SSL_VERIFYPEER, 0);
$url = "https://httpbin.co/anything";
$token = "YOUR_TOKEN";
// Proxy host restored (an email-obfuscation artifact had corrupted the address).
$proxy = sprintf("http://%s:disableRedirection=true@proxy.scrape.do:8080", $token);
curl_setopt($curl, CURLOPT_URL, $url);
curl_setopt($curl, CURLOPT_CUSTOMREQUEST, 'GET');
curl_setopt($curl, CURLOPT_PROXY, $proxy);
curl_setopt($curl, CURLOPT_HTTPHEADER, array(
    "Accept: */*",
));
$response = curl_exec($curl);
curl_close($curl);
echo $response;
?>
OkHttpClient client = new OkHttpClient().newBuilder()
.build();
// YOUR_TOKEN (not YOUR_URL) belongs in the token parameter; OkHttp's
// Request.Builder.method() throws for a GET with a body, so use .get().
String encoded_url = URLEncoder.encode("https://httpbin.co/anything", "UTF-8");
Request request = new Request.Builder()
.url("https://api.scrape.do?token=YOUR_TOKEN&url=" + encoded_url + "&disableRedirection=true")
.get()
.build();
Response response = client.newCall(request).execute();
import java.net.URI;
import java.util.Base64;
import org.apache.hc.client5.http.fluent.Request;
import org.apache.hc.core5.http.HttpHost;
public class TestRequest {
    public static void main(final String... args) throws Exception {
        String url = "https://httpbin.co/anything";
        // Proxy host restored (an email-obfuscation artifact had corrupted the address).
        URI proxyURI = new URI("http://YOUR_TOKEN:disableRedirection=true@proxy.scrape.do:8080");
        String basicAuth = new String(
            Base64.getEncoder()
            .encode(
                proxyURI.getUserInfo().getBytes()
            ));
        String response = Request.get(url)
                .addHeader("Proxy-Authorization", "Basic " + basicAuth)
                .viaProxy(HttpHost.create(proxyURI))
                .execute().returnContent().asString();
        System.out.println(response);
    }
}
package main

import (
	"fmt"
	"io/ioutil"
	"net/http"
	"net/url"
)

func main() {
	token := "YOUR_TOKEN"
	encoded_url := url.QueryEscape("https://httpbin.co/anything")
	url := fmt.Sprintf("https://api.scrape.do?token=%s&url=%s&disableRedirection=true", token, encoded_url)
	method := "GET"
	client := &http.Client{}
	req, err := http.NewRequest(method, url, nil)
	if err != nil {
		fmt.Println(err)
		return
	}
	res, err := client.Do(req)
	if err != nil {
		fmt.Println(err)
		return
	}
	defer res.Body.Close()
	body, err := ioutil.ReadAll(res.Body)
	if err != nil {
		fmt.Println(err)
		return
	}
	fmt.Println(string(body))
}
package main

import (
	"crypto/tls"
	"fmt"
	"io/ioutil"
	"net/http"
	"net/url"
)

func main() {
	// Proxy host restored (an email-obfuscation artifact had corrupted the address).
	proxyStr := "http://YOUR_TOKEN:disableRedirection=true@proxy.scrape.do:8080"
	proxyURL, err := url.Parse(proxyStr)
	if err != nil {
		fmt.Println(err)
		return
	}
	transport := &http.Transport{
		Proxy:           http.ProxyURL(proxyURL),
		TLSClientConfig: &tls.Config{InsecureSkipVerify: true},
	}
	client := &http.Client{
		Transport: transport,
	}
	req, err := http.NewRequest("GET", "https://httpbin.co/anything", nil)
	if err != nil {
		fmt.Println(err)
		return
	}
	resp, err := client.Do(req)
	if err != nil {
		// Returning here prevents a nil-pointer panic on resp below.
		fmt.Println("Failure : ", err)
		return
	}
	defer resp.Body.Close()
	respBody, _ := ioutil.ReadAll(resp.Body)
	fmt.Println("response Status : ", resp.Status)
	fmt.Println("response Headers : ", resp.Header)
	fmt.Println("response Body : ", string(respBody))
}
# API mode (net/http): pass disableRedirection=true as a query parameter;
# the target URL must be URL-encoded (CGI.escape).
require "uri"
require "net/http"
require 'cgi'

str = CGI.escape "https://httpbin.co/anything"
url = URI("https://api.scrape.do?url=" + str + "&token=YOUR_TOKEN&disableRedirection=true")
https = Net::HTTP.new(url.host, url.port)
https.use_ssl = true
request = Net::HTTP::Get.new(url)
response = https.request(request)
puts response.read_body
require 'httparty'
HTTParty::Basement.default_options.update(verify: false)
# Classic (GET )
def send_request
    res = HTTParty.get('https://httpbin.co/anything?json', {
      http_proxyaddr: "proxy.scrape.do",
      http_proxyport: "8080",
      http_proxyuser: "YOUR_TOKEN",
      http_proxypass: "disableRedirection=true"
    })
    puts "Response HTTP Status Code: #{ res.code }"
    puts "Response HTTP Response Body: #{ res.body }"
    puts "Response HTTP Response Body: #{ res.header }"
rescue StandardError => e
    puts "HTTP Request failed (#{ e.message })"
end
send_request()

Callback & Webhook

& callback = url encoded webhook address

With this feature, you don’t have to wait for the request result. Send us a webhook address by encoding callback=http://yourdomain.com/webhook, and our system will run the request for you. When the result is ready, we deliver it to the webhook address you provided.

Important Notes

  • You must return 200 or 201 status code to us via webhook.
  • If we can’t send you the result by webhook correctly, we will send you a request again, up to a maximum of 5 times every 2 minutes.
  • Our system will send you the result as POST.
  • The url you transmit with callback should be encoded. For more details, you can go to the url field and see how to encode it.
  • You can make callback requests up to your maximum concurrency limit at the same time. For example, if you have a limit of 40 simultaneous requests, you cannot expect 41 callback calls. Our system will return you a maximum of 40 results at the same time.

Example Code Snippet

curl --location --request GET 'https://api.scrape.do?url=https://httpbin.co/anything&token=YOUR_TOKEN&callback=https://webhook.site/bc3ba9a9-7df9-421e-b991-6fd38065bb5c'
curl -k -x "http://YOUR_TOKEN:callback=https://mywebsite.com/webhook@proxy.scrape.do:8080" 'https://httpbin.co/anything' -v
import requests
import urllib.parse

token = "YOUR_TOKEN"
targetUrl = urllib.parse.quote("https://httpbin.co/anything")
callbackUrl = urllib.parse.quote("https://mywebsite.com/webhook")
url = "http://api.scrape.do?token={}&url={}&callback={}".format(token, targetUrl, callbackUrl)
# GET matches the sibling examples; the callback webhook itself receives a POST.
response = requests.request("GET", url)
print(response.text)
import requests
import urllib3
urllib3.disable_warnings(urllib3.exceptions.InsecureRequestWarning)
url = "https://httpbin.co/anything"
token = "YOUR_TOKEN"
# Proxy host restored (an email-obfuscation artifact had corrupted the address).
proxyModeUrl = "http://{}:callback=https://mywebsite.com/webhook@proxy.scrape.do:8080".format(token)
proxies = {
    "http": proxyModeUrl,
    "https": proxyModeUrl,
}
# GET matches the sibling examples; the callback webhook itself receives a POST.
response = requests.request("GET", url, proxies=proxies, verify=False)
print(response.text)
// API mode (axios): the callback webhook address must be URL-encoded,
// like the target URL itself.
var axios = require('axios');
var token = "YOUR_TOKEN";
var targetUrl = encodeURIComponent("https://httpbin.co/anything");
var callbackUrl = encodeURIComponent("https://mywebsite.com/webhook");
var config = {
    'method': 'GET',
    'url': `https://api.scrape.do?token=${token}&url=${targetUrl}&callback=${callbackUrl}`,
    'headers': {}
};
axios(config)
    .then(function (response) {
        console.log(response.data);
    })
    .catch(function (error) {
        console.log(error);
    });
const axios = require('axios');
const token = "YOUR_TOKEN";
const targetUrl = "https://httpbin.co/anything";
var callback = 'https://mywebsite.com/webhook';
var encodedcallback = encodeURIComponent(callback);

axios({
    method:"GET",
    url:targetUrl,
    proxy: {
        protocol:'http',
        host: 'proxy.scrape.do',
        port: 8080,
        auth: {
            username: token,
            password: 'callback=' + encodedcallback
        }
    }
})
    .then(response => {
        console.log(response.data);
    })
    .catch(error => {
        console.error(error.message);
    });
// API mode (HttpClient): both the target URL and the callback webhook
// address must be URL-encoded.
string token = "YOUR_TOKEN";
string url =WebUtility.UrlEncode("https://httpbin.co/anything") ;
string callbackWebhookURL = WebUtility.UrlEncode("https://mywebsite.com/webhook");
var client = new HttpClient();
var requestURL = $"https://api.scrape.do?token={token}&url={url}&callback={callbackWebhookURL}";
var request = new HttpRequestMessage(HttpMethod.Get, requestURL);
var response = client.SendAsync(request).Result;
var content = response.Content.ReadAsStringAsync().Result;
Console.WriteLine(content);
string token = "YOUR_TOKEN";
string url = "https://httpbin.co/anything";
var proxy = new WebProxy
{
    Address = new Uri("http://proxy.scrape.do:8080"),
    Credentials = new NetworkCredential(token, "callback=https://mywebsite.com/webhook")
};
var request = new HttpRequestMessage(HttpMethod.Get, url);
var handler = new HttpClientHandler
{
    Proxy = proxy,
    UseProxy = true
};
handler.ServerCertificateCustomValidationCallback =
    (sender, cert, chain, sslPolicyErrors) => { return true; };
var client = new HttpClient(handler);
var response = client.SendAsync(request).Result;
var content = response.Content.ReadAsStringAsync().Result;
Console.WriteLine(content);
<?php
$curl = curl_init();
curl_setopt($curl, CURLOPT_RETURNTRANSFER, true);
curl_setopt($curl, CURLOPT_HEADER, false);
$data = [
   "url" => "https://httpbin.co/anything",
   "token" => "YOUR_TOKEN",
   "callback" => "https://mywebsite.com/webhook"

];
curl_setopt($curl, CURLOPT_CUSTOMREQUEST, 'GET');
curl_setopt($curl, CURLOPT_URL, "https://api.scrape.do?".http_build_query($data));
curl_setopt($curl, CURLOPT_HTTPHEADER, array(
    "Accept: */*",
));
$response = curl_exec($curl);
curl_close($curl);
echo $response;
?>
<?php
$curl = curl_init();
curl_setopt($curl, CURLOPT_RETURNTRANSFER, true);
curl_setopt($curl, CURLOPT_HEADER, false);
curl_setopt($curl, CURLOPT_SSL_VERIFYHOST, 0);
curl_setopt($curl, CURLOPT_SSL_VERIFYPEER, 0);
$url = "https://httpbin.co/anything";
$token = "YOUR_TOKEN";
// Proxy host restored (an email-obfuscation artifact had corrupted the address).
$proxy = sprintf("http://%s:callback=https://mywebsite.com/webhook@proxy.scrape.do:8080", $token);
curl_setopt($curl, CURLOPT_URL, $url);
curl_setopt($curl, CURLOPT_CUSTOMREQUEST, 'GET');
curl_setopt($curl, CURLOPT_PROXY, $proxy);
curl_setopt($curl, CURLOPT_HTTPHEADER, array(
    "Accept: */*",
));
$response = curl_exec($curl);
curl_close($curl);
echo $response;
?>
OkHttpClient client = new OkHttpClient().newBuilder()
  .build();
// The required token parameter was missing from the API URL; OkHttp's
// Request.Builder.method() also throws for a GET with a body, so use .get().
String encoded_url = URLEncoder.encode("https://httpbin.co/anything", "UTF-8");
String encoded_callbackUrl = URLEncoder.encode("https://mywebsite.com/webhook", "UTF-8");
Request request = new Request.Builder()
  .url("https://api.scrape.do?token=YOUR_TOKEN&url=" + encoded_url + "&callback=" + encoded_callbackUrl)
  .get()
  .build();
Response response = client.newCall(request).execute();
import java.net.URI;
import java.util.Base64;
import org.apache.hc.client5.http.fluent.Request;
import org.apache.hc.core5.http.HttpHost;

public class TestRequest {
    public static void main(final String... args) throws Exception {
        String url = "https://httpbin.co/anything";
        // Proxy host restored (an email-obfuscation artifact had corrupted the address).
        URI proxyURI = new URI("http://YOUR_TOKEN:callback=https://mywebsite.com/webhook@proxy.scrape.do:8080");
        String basicAuth = new String(
            Base64.getEncoder()
            .encode(
                proxyURI.getUserInfo().getBytes()
            ));
        String response = Request.get(url)
                .addHeader("Proxy-Authorization", "Basic " + basicAuth)
                .viaProxy(HttpHost.create(proxyURI))
                .execute().returnContent().asString();
        System.out.println(response);
    }
}
package main

import (
	"fmt"
	"io/ioutil"
	"net/http"
	"net/url"
)

func main() {
	token := "YOUR_TOKEN"
	encoded_url := url.QueryEscape("https://httpbin.co/anything")
	encoded_callbackUrl := url.QueryEscape("https://mywebsite.com/webhook")
	url := fmt.Sprintf("https://api.scrape.do?token=%s&url=%s&callback=%s", token, encoded_url, encoded_callbackUrl)
	method := "GET"
	client := &http.Client{}
	req, err := http.NewRequest(method, url, nil)
	if err != nil {
		fmt.Println(err)
		return
	}
	res, err := client.Do(req)
	if err != nil {
		fmt.Println(err)
		return
	}
	defer res.Body.Close()
	body, err := ioutil.ReadAll(res.Body)
	if err != nil {
		fmt.Println(err)
		return
	}
	fmt.Println(string(body))
}
package main

import (
	"crypto/tls"
	"fmt"
	"io/ioutil"
	"net/http"
	"net/url"
)

func main() {
	// Proxy host restored (an email-obfuscation artifact had corrupted the address).
	proxyStr := "http://YOUR_TOKEN:callback=https://mywebsite.com/webhook@proxy.scrape.do:8080"
	proxyURL, err := url.Parse(proxyStr)
	if err != nil {
		fmt.Println(err)
		return
	}
	transport := &http.Transport{
		Proxy:           http.ProxyURL(proxyURL),
		TLSClientConfig: &tls.Config{InsecureSkipVerify: true},
	}
	client := &http.Client{
		Transport: transport,
	}
	req, err := http.NewRequest("GET", "https://httpbin.co/anything", nil)
	if err != nil {
		fmt.Println(err)
		return
	}
	resp, err := client.Do(req)
	if err != nil {
		// Returning here prevents a nil-pointer panic on resp below.
		fmt.Println("Failure : ", err)
		return
	}
	defer resp.Body.Close()
	respBody, _ := ioutil.ReadAll(resp.Body)
	fmt.Println("response Status : ", resp.Status)
	fmt.Println("response Headers : ", resp.Header)
	fmt.Println("response Body : ", string(respBody))
}
# API mode (net/http): both the target URL and the callback webhook
# address must be URL-encoded (CGI.escape).
require "uri"
require "net/http"
require 'cgi'

str =  CGI.escape "https://httpbin.co/anything"
strWebhook = CGI.escape "https://mywebsite.com/webhook"
url = URI("https://api.scrape.do?token=YOUR_TOKEN&url="+ str +"&callback="+strWebhook+"")
https = Net::HTTP.new(url.host, url.port)
https.use_ssl = true
request = Net::HTTP::Get.new(url)
response = https.request(request)
puts response.read_body
require 'httparty'
HTTParty::Basement.default_options.update(verify: false)
# Classic (GET )
def send_request
    res = HTTParty.get('https://httpbin.co/anything?json', {
      http_proxyaddr: "proxy.scrape.do",
      http_proxyport: "8080",
      http_proxyuser: "YOUR_TOKEN",
      http_proxypass: "callback=https://mywebsite.com/webhook"
    })
    puts "Response HTTP Status Code: #{ res.code }"
    puts "Response HTTP Response Body: #{ res.body }"
    puts "Response HTTP Response Body: #{ res.header }"
rescue StandardError => e
    puts "HTTP Request failed (#{ e.message })"
end
send_request()

Timeout

& timeout = integer
default = 55000

You can set the timeout value for the request you will send to the target website.

The value must be between 5000 ms and 120000 ms. If the timeout is exceeded, the system will return a response indicating that the request failed.

Example Code Snippet

curl --location --request GET 'https://api.scrape.do?token=YOUR_TOKEN&url=https://httpbin.co/anything&timeout=5000'
curl -k -x "http://YOUR_TOKEN:timeout=5000@proxy.scrape.do:8080" 'https://httpbin.co/anything' -v
# API mode: timeout (ms) bounds the request to the target website;
# allowed range is 5000-120000 ms per the documentation above.
import requests
import urllib.parse
token = "YOUR_TOKEN"
targetUrl = urllib.parse.quote("https://httpbin.co/anything")
timeout = "5000"
url = "http://api.scrape.do?token={}&url={}&timeout={}".format(token, targetUrl,timeout)
response = requests.request("GET", url)
print(response.text)
import requests
import urllib3
urllib3.disable_warnings(urllib3.exceptions.InsecureRequestWarning)
url = "https://httpbin.co/anything"
token = "YOUR_TOKEN"
# Proxy host restored (an email-obfuscation artifact had corrupted the address).
proxyModeUrl = "http://{}:timeout=5000@proxy.scrape.do:8080".format(token)
proxies = {
    "http": proxyModeUrl,
    "https": proxyModeUrl,
}
response = requests.request("GET", url, proxies=proxies, verify=False)
print(response.text)
var axios = require('axios');
var token = "YOUR_TOKEN";
var targetUrl = encodeURIComponent("https://httpbin.co/anything");
var timeout = "5000";
var config = {
    'method': 'GET',
    'url': `https://api.scrape.do?token=${token}&url=${targetUrl}&timeout=${timeout}`,
    'headers': {}
};
axios(config)
    .then(function (response) {
        console.log(response.data);
    })
    .catch(function (error) {
        console.log(error);
    });
const axios = require('axios');
const token = "YOUR_TOKEN";
const targetUrl = "https://httpbin.co/anything";

axios({
    method:"GET",
    url:targetUrl,
    proxy: {
        protocol:'http',
        host: 'proxy.scrape.do',
        port: 8080,
        auth: {
            username: token,
            password: 'timeout=5000'
        }
    }
})
    .then(response => {
        console.log(response.data);
    })
    .catch(error => {
        console.error(error.message);
    });
string token = "YOUR_TOKEN";
string url = WebUtility.UrlEncode("https://httpbin.co/anything");
var client = new HttpClient();
var requestURL = $"https://api.scrape.do?token={token}&url={url}&timeout=5000";
var request = new HttpRequestMessage(HttpMethod.Get, requestURL);
// "respponse" typo fixed.
var response = client.SendAsync(request).Result;
var content = response.Content.ReadAsStringAsync().Result;
Console.WriteLine(content);
string token = "YOUR_TOKEN";
string url = "https://httpbin.co/anything";
var proxy = new WebProxy
{
    Address = new Uri("http://proxy.scrape.do:8080"),
    Credentials = new NetworkCredential(token, "timeout=5000")
};
var request = new HttpRequestMessage(HttpMethod.Get, url);
var handler = new HttpClientHandler
{
    Proxy = proxy,
    UseProxy = true
};
handler.ServerCertificateCustomValidationCallback =
    (sender, cert, chain, sslPolicyErrors) => { return true; };
var client = new HttpClient(handler);
var response = client.SendAsync(request).Result;
var content = response.Content.ReadAsStringAsync().Result;
Console.WriteLine(content);
<?php
$curl = curl_init();
curl_setopt($curl, CURLOPT_RETURNTRANSFER, true);
curl_setopt($curl, CURLOPT_HEADER, false);
$data = [
   "url" => "https://httpbin.co/anything",
   "token" => "YOUR_TOKEN",
   "timeout" => "5000"
];
curl_setopt($curl, CURLOPT_CUSTOMREQUEST, 'GET');
curl_setopt($curl, CURLOPT_URL, "https://api.scrape.do?".http_build_query($data));
curl_setopt($curl, CURLOPT_HTTPHEADER, array(
    "Accept: */*",
));
$response = curl_exec($curl);
curl_close($curl);
echo $response;
?>
<?php
$curl = curl_init();
curl_setopt($curl, CURLOPT_RETURNTRANSFER, true);
curl_setopt($curl, CURLOPT_HEADER, false);
curl_setopt($curl, CURLOPT_SSL_VERIFYHOST, 0);
curl_setopt($curl, CURLOPT_SSL_VERIFYPEER, 0);
$url = "https://httpbin.co/anything";
$token = "YOUR_TOKEN";
// Proxy host restored (an email-obfuscation artifact had corrupted the address).
$proxy = sprintf("http://%s:timeout=5000@proxy.scrape.do:8080", $token);
curl_setopt($curl, CURLOPT_URL, $url);
curl_setopt($curl, CURLOPT_CUSTOMREQUEST, 'GET');
curl_setopt($curl, CURLOPT_PROXY, $proxy);
curl_setopt($curl, CURLOPT_HTTPHEADER, array(
    "Accept: */*",
));
$response = curl_exec($curl);
curl_close($curl);
echo $response;
?>
OkHttpClient client = new OkHttpClient().newBuilder()
  .build();
MediaType mediaType = MediaType.parse("text/plain");
RequestBody body = RequestBody.create(mediaType, "");
String encoded_url = URLEncoder.encode("https://httpbin.co/anything", "UTF-8");
Request request = new Request.Builder()
  .url("https://api.scrape.do?token=YOUR_TOKEN&url="encoded_url"&timeout=5000")
  .method("GET", body)
  .build();
Response response = client.newCall(request).execute();
import java.net.URI;
import java.util.Base64;
import org.apache.hc.client5.http.fluent.Request;
import org.apache.hc.core5.http.HttpHost;
public class TestRequest {
    public static void main(final String... args) throws Exception {
        String url = "https://httpbin.co/anything";
        // Proxy host restored (an email-obfuscation artifact had corrupted the address).
        URI proxyURI = new URI("http://YOUR_TOKEN:timeout=5000@proxy.scrape.do:8080");
        String basicAuth = new String(
            Base64.getEncoder()
            .encode(
                proxyURI.getUserInfo().getBytes()
            ));
        String response = Request.get(url)
                .addHeader("Proxy-Authorization", "Basic " + basicAuth)
                .viaProxy(HttpHost.create(proxyURI))
                .execute().returnContent().asString();
        System.out.println(response);
    }
}
package main

import (
	"fmt"
	"io"
	"net/http"
	"net/url"
)

// API-mode example: call the Scrape.do REST endpoint with the target URL
// passed (URL-encoded) as the `url` query parameter.
func main() {
	token := "YOUR_TOKEN"
	encodedURL := url.QueryEscape("https://httpbin.co/anything")
	// Named apiURL so the local does not shadow the net/url package.
	apiURL := fmt.Sprintf("https://api.scrape.do?token=%s&url=%s&timeout=5000", token, encodedURL)
	req, err := http.NewRequest(http.MethodGet, apiURL, nil)
	if err != nil {
		fmt.Println(err)
		return
	}
	client := &http.Client{}
	res, err := client.Do(req)
	if err != nil {
		fmt.Println(err)
		return
	}
	defer res.Body.Close()
	// io.ReadAll replaces the deprecated ioutil.ReadAll.
	body, err := io.ReadAll(res.Body)
	if err != nil {
		fmt.Println(err)
		return
	}
	fmt.Println(string(body))
}
package main

import (
	"crypto/tls"
	"fmt"
	"io"
	"net/http"
	"net/url"
)

// Proxy-mode example: the API token is the proxy user and request parameters
// travel in the proxy password.
func main() {
	// FIX: this section documents the timeout parameter; the original passed
	// the unrelated render=false in the proxy password.
	proxyStr := "http://YOUR_TOKEN:[email protected]:8080"
	proxyURL, err := url.Parse(proxyStr)
	if err != nil {
		fmt.Println(err)
		return // FIX: original fell through with a nil proxy URL
	}
	transport := &http.Transport{
		Proxy: http.ProxyURL(proxyURL),
		// Scrape.do terminates TLS itself, so verification is skipped
		// (same as `curl -k`).
		TLSClientConfig: &tls.Config{InsecureSkipVerify: true},
	}
	client := &http.Client{
		Transport: transport,
	}
	req, err := http.NewRequest("GET", "https://httpbin.co/anything", nil)
	if err != nil {
		fmt.Println(err)
		return // FIX: original ignored this error and used req anyway
	}
	// NOTE: the original called req.ParseForm() here; it is a no-op for a
	// bodiless GET request and has been dropped.
	resp, err := client.Do(req)
	if err != nil {
		fmt.Println("Failure : ", err)
		return // FIX: original continued and panicked on a nil response
	}
	defer resp.Body.Close() // FIX: original never closed the response body
	respBody, err := io.ReadAll(resp.Body)
	if err != nil {
		fmt.Println(err)
		return
	}
	fmt.Println("response Status : ", resp.Status)
	fmt.Println("response Headers : ", resp.Header)
	fmt.Println("response Body : ", string(respBody))
}
require "uri"
require "net/http"
require 'cgi'

# API-mode example: CGI-escape the target URL and pass it, together with the
# token and timeout, as query parameters.
target = CGI.escape("https://httpbin.co/anything")
endpoint = URI("https://api.scrape.do?url=" + target + "&token=YOUR_TOKEN&timeout=5000")
http = Net::HTTP.new(endpoint.host, endpoint.port)
http.use_ssl = true
get = Net::HTTP::Get.new(endpoint)
puts http.request(get).read_body
require 'httparty'
# Scrape.do terminates TLS on its side, so certificate checks are disabled.
HTTParty::Basement.default_options.update(verify: false)
# Classic (GET ) — proxy mode: the token is the proxy user, request
# parameters travel in the proxy password.
def send_request
    res = HTTParty.get('https://httpbin.co/anything?json', {
      http_proxyaddr: "proxy.scrape.do",
      http_proxyport: "8080",
      http_proxyuser: "YOUR_TOKEN",
      http_proxypass: "timeout=5000"
    })
    puts "Response HTTP Status Code: #{ res.code }"
    puts "Response HTTP Response Body: #{ res.body }"
    # FIX: this line prints the headers but was labelled "Response Body".
    puts "Response HTTP Response Header: #{ res.header }"
rescue StandardError => e
    puts "HTTP Request failed (#{ e.message })"
end
send_request()

Retry Timeout

& retryTimeout = integer
default = 15000

This parameter sets the maximum time our system will wait for a result from the target website. When that time is exceeded, it sends a new request and repeats the check cyclically.

You can change this parameter, which is 15000 ms by default. It accepts values between 5000 ms and 55000 ms.

Important: Using this parameter can affect your success rates.

Example Code Snippet

curl --location --request GET 'https://api.scrape.do?token=YOUR_TOKEN&url=https://httpbin.co/anything&retryTimeout=5000'
curl -k -x "http://YOUR_TOKEN:[email protected]:8080" 'https://httpbin.co/anything' -v
import requests
import urllib.parse

# API-mode example: percent-encode the target URL and send it, together with
# the retryTimeout setting, as query parameters.
token = "YOUR_TOKEN"
targetUrl = urllib.parse.quote("https://httpbin.co/anything")
timeout = "5000"
url = f"http://api.scrape.do?token={token}&url={targetUrl}&retryTimeout={timeout}"
response = requests.get(url)
print(response.text)
import requests
import urllib3

# Proxy-mode example: the token is the proxy user and the retryTimeout
# setting travels in the proxy password. TLS warnings are silenced because
# Scrape.do terminates TLS itself.
urllib3.disable_warnings(urllib3.exceptions.InsecureRequestWarning)

url = "https://httpbin.co/anything"
token = "YOUR_TOKEN"
proxyModeUrl = f"http://{token}:[email protected]:8080"
proxies = {scheme: proxyModeUrl for scheme in ("http", "https")}
response = requests.get(url, proxies=proxies, verify=False)
print(response.text)
const axios = require('axios');

// API-mode example: encode the target URL and pass it, together with the
// retryTimeout setting, as query parameters.
const token = "YOUR_TOKEN";
const targetUrl = encodeURIComponent("https://httpbin.co/anything");
const timeout = "5000";

axios({
    method: 'GET',
    url: `https://api.scrape.do?token=${token}&url=${targetUrl}&retryTimeout=${timeout}`,
    headers: {}
})
    .then((response) => {
        console.log(response.data);
    })
    .catch((error) => {
        console.log(error);
    });
const axios = require('axios');

// Proxy-mode example: the token is the proxy username and the retryTimeout
// setting travels in the proxy password.
const token = "YOUR_TOKEN";
const targetUrl = "https://httpbin.co/anything";

const proxyConfig = {
    protocol: 'http',
    host: 'proxy.scrape.do',
    port: 8080,
    auth: {
        username: token,
        password: 'retryTimeout=5000'
    }
};

axios({ method: "GET", url: targetUrl, proxy: proxyConfig })
    .then((response) => console.log(response.data))
    .catch((error) => console.error(error.message));
// API-mode example: URL-encode the target and append it, together with the
// retryTimeout setting, as query parameters.
string token = "YOUR_TOKEN";
string url = WebUtility.UrlEncode("https://httpbin.co/anything");
var requestURL = $"https://api.scrape.do?token={token}&url={url}&retryTimeout=5000";
var request = new HttpRequestMessage(HttpMethod.Get, requestURL);
var client = new HttpClient();
var response = client.SendAsync(request).Result;
Console.WriteLine(response.Content.ReadAsStringAsync().Result);
// Proxy-mode example: the token is the proxy user and the retryTimeout
// setting travels in the proxy password.
string token = "YOUR_TOKEN";
string url = "https://httpbin.co/anything";
var proxy = new WebProxy
{
    Address = new Uri("http://proxy.scrape.do:8080"),
    Credentials = new NetworkCredential(token, "retryTimeout=5000")
};
var handler = new HttpClientHandler
{
    Proxy = proxy,
    UseProxy = true,
    // Scrape.do terminates TLS itself, so certificate validation is skipped.
    ServerCertificateCustomValidationCallback =
        (sender, cert, chain, sslPolicyErrors) => true
};
var client = new HttpClient(handler);
var request = new HttpRequestMessage(HttpMethod.Get, url);
var content = client.SendAsync(request).Result.Content.ReadAsStringAsync().Result;
Console.WriteLine(content);
<?php
// API-mode example: build the query string from an array of parameters.
$curl = curl_init();
$params = [
   "url" => "https://httpbin.co/anything",
   "token" => "YOUR_TOKEN",
   "retryTimeout" => "5000"
];
curl_setopt($curl, CURLOPT_RETURNTRANSFER, true);
curl_setopt($curl, CURLOPT_HEADER, false);
curl_setopt($curl, CURLOPT_CUSTOMREQUEST, 'GET');
curl_setopt($curl, CURLOPT_URL, "https://api.scrape.do?".http_build_query($params));
curl_setopt($curl, CURLOPT_HTTPHEADER, ["Accept: */*"]);
$response = curl_exec($curl);
curl_close($curl);
echo $response;
?>
<?php
// Proxy-mode example: the token is the proxy user and the retryTimeout
// setting travels in the proxy password.
$curl = curl_init();
curl_setopt($curl, CURLOPT_RETURNTRANSFER, true);
curl_setopt($curl, CURLOPT_HEADER, false);
// Scrape.do terminates TLS on its side, so certificate checks are disabled.
curl_setopt($curl, CURLOPT_SSL_VERIFYHOST, 0);
curl_setopt($curl, CURLOPT_SSL_VERIFYPEER, 0);
$url = "https://httpbin.co/anything";
$token = "YOUR_TOKEN";
// FIX: this section documents retryTimeout, but the original passed the
// unrelated render=false (the Python/axios/C#/Ruby examples in this section
// all pass retryTimeout=5000).
$proxy = sprintf("http://%s:[email protected]:8080", $token);
curl_setopt($curl, CURLOPT_URL, $url);
curl_setopt($curl, CURLOPT_CUSTOMREQUEST, 'GET');
curl_setopt($curl, CURLOPT_PROXY, $proxy);
curl_setopt($curl, CURLOPT_HTTPHEADER, array(
    "Accept: */*",
));
$response = curl_exec($curl);
curl_close($curl);
echo $response;
?>
// API-mode example: encode the target URL and append it, together with the
// retryTimeout setting, as query parameters.
String encoded_url = URLEncoder.encode("https://httpbin.co/anything", "UTF-8");
String apiUrl = "https://api.scrape.do?token=YOUR_TOKEN&url=" + encoded_url + "&retryTimeout=5000";
OkHttpClient client = new OkHttpClient().newBuilder()
  .build();
MediaType mediaType = MediaType.parse("text/plain");
RequestBody body = RequestBody.create(mediaType, "");
Request request = new Request.Builder()
  .url(apiUrl)
  .method("GET", body)
  .build();
Response response = client.newCall(request).execute();
import java.net.URI;
import java.util.Base64;
import org.apache.hc.client5.http.fluent.Request;
import org.apache.hc.core5.http.HttpHost;

/**
 * Proxy-mode example: the token is the proxy user and the retryTimeout
 * setting travels in the proxy password.
 */
public class TestRequest {
    public static void main(final String... args) throws Exception {
        String url = "https://httpbin.co/anything";
        // FIX: this section documents retryTimeout, but the original passed
        // the unrelated render=false (the Python/axios/C#/Ruby examples in
        // this section all pass retryTimeout=5000).
        URI proxyURI = new URI("http://YOUR_TOKEN:[email protected]:8080");
        // Build Basic credentials from the user-info part of the proxy URI.
        String basicAuth = new String(
            Base64.getEncoder()
            .encode(
                proxyURI.getUserInfo().getBytes()
            ));
        String response = Request.get(url)
                .addHeader("Proxy-Authorization", "Basic " + basicAuth)
                .viaProxy(HttpHost.create(proxyURI))
                .execute().returnContent().asString();
        System.out.println(response);
    }
}
package main

import (
	"fmt"
	"io"
	"net/http"
	"net/url"
)

// API-mode example: call the Scrape.do REST endpoint with the target URL
// passed (URL-encoded) together with the retryTimeout setting.
func main() {
	token := "YOUR_TOKEN"
	encodedUrl := url.QueryEscape("https://httpbin.co/anything")
	// Named apiUrl so the local does not shadow the net/url package.
	apiUrl := fmt.Sprintf("https://api.scrape.do?token=%s&url=%s&retryTimeout=5000", token, encodedUrl)
	req, err := http.NewRequest(http.MethodGet, apiUrl, nil)
	if err != nil {
		fmt.Println(err)
		return
	}
	client := &http.Client{}
	res, err := client.Do(req)
	if err != nil {
		fmt.Println(err)
		return
	}
	defer res.Body.Close()
	// io.ReadAll replaces the deprecated ioutil.ReadAll.
	body, err := io.ReadAll(res.Body)
	if err != nil {
		fmt.Println(err)
		return
	}
	fmt.Println(string(body))
}
package main

import (
	"crypto/tls"
	"fmt"
	"io"
	"net/http"
	"net/url"
)

// Proxy-mode example: the token is the proxy user and the retryTimeout
// setting travels in the proxy password.
func main() {
	// FIX: this section documents retryTimeout, but the original passed the
	// unrelated render=false in the proxy password.
	proxyStr := "http://YOUR_TOKEN:[email protected]:8080"
	proxyURL, err := url.Parse(proxyStr)
	if err != nil {
		fmt.Println(err)
		return // FIX: original fell through with a nil proxy URL
	}
	transport := &http.Transport{
		Proxy: http.ProxyURL(proxyURL),
		// Scrape.do terminates TLS itself, so verification is skipped.
		TLSClientConfig: &tls.Config{InsecureSkipVerify: true},
	}
	client := &http.Client{
		Transport: transport,
	}
	req, err := http.NewRequest("GET", "https://httpbin.co/anything", nil)
	if err != nil {
		fmt.Println(err)
		return // FIX: original ignored this error and used req anyway
	}
	// NOTE: the original called req.ParseForm() here; it is a no-op for a
	// bodiless GET request and has been dropped.
	resp, err := client.Do(req)
	if err != nil {
		fmt.Println("Failure : ", err)
		return // FIX: original continued and panicked on a nil response
	}
	defer resp.Body.Close() // FIX: original never closed the response body
	respBody, err := io.ReadAll(resp.Body)
	if err != nil {
		fmt.Println(err)
		return
	}
	fmt.Println("response Status : ", resp.Status)
	fmt.Println("response Headers : ", resp.Header)
	fmt.Println("response Body : ", string(respBody))
}
require "uri"
require "net/http"
require 'cgi'

# API-mode example: CGI-escape the target URL and pass it, together with the
# token and retryTimeout setting, as query parameters.
target = CGI.escape("https://httpbin.co/anything")
endpoint = URI("https://api.scrape.do?url=" + target + "&token=YOUR_TOKEN&retryTimeout=5000")
http = Net::HTTP.new(endpoint.host, endpoint.port)
http.use_ssl = true
get = Net::HTTP::Get.new(endpoint)
puts http.request(get).read_body
require 'httparty'
# Scrape.do terminates TLS on its side, so certificate checks are disabled.
HTTParty::Basement.default_options.update(verify: false)
# Classic (GET ) — proxy mode: the token is the proxy user and the
# retryTimeout setting travels in the proxy password.
def send_request
    res = HTTParty.get('https://httpbin.co/anything?json', {
      http_proxyaddr: "proxy.scrape.do",
      http_proxyport: "8080",
      http_proxyuser: "YOUR_TOKEN",
      http_proxypass: "retryTimeout=5000"
    })
    puts "Response HTTP Status Code: #{ res.code }"
    puts "Response HTTP Response Body: #{ res.body }"
    # FIX: this line prints the headers but was labelled "Response Body".
    puts "Response HTTP Response Header: #{ res.header }"
rescue StandardError => e
    puts "HTTP Request failed (#{ e.message })"
end
send_request()

Disable Retry

& disableRetry = boolean
default = false

When you use this parameter, our system will not retry the request if it fails.

Whatever the status of the initial response, that result — successful or unsuccessful — is returned to you directly, without any retry.

Example Code Snippet

curl --location --request GET 'https://api.scrape.do?token=YOUR_TOKEN&url=https://httpbin.co/anything&disableRetry=true'
curl -k -x "http://YOUR_TOKEN:[email protected]:8080" 'https://httpbin.co/anything' -v
import requests
import urllib.parse

# API-mode example: percent-encode the target URL and send it, together with
# the disableRetry flag, as query parameters.
token = "YOUR_TOKEN"
targetUrl = urllib.parse.quote("https://httpbin.co/anything")
disableRetry = "true"
url = f"http://api.scrape.do?token={token}&url={targetUrl}&disableRetry={disableRetry}"
response = requests.get(url)
print(response.text)
import requests
import urllib3

# Proxy-mode example: the token is the proxy user and the disableRetry flag
# travels in the proxy password. TLS warnings are silenced because Scrape.do
# terminates TLS itself.
urllib3.disable_warnings(urllib3.exceptions.InsecureRequestWarning)

url = "https://httpbin.co/anything"
token = "YOUR_TOKEN"
# FIX: this section documents disableRetry, but the original passed the
# unrelated render=false (the axios and C# examples in this section pass
# disableRetry=true).
proxyModeUrl = "http://{}:[email protected]:8080".format(token)
proxies = {
    "http": proxyModeUrl,
    "https": proxyModeUrl,
}
response = requests.request("GET", url, proxies=proxies, verify=False)
print(response.text)
const axios = require('axios');

// API-mode example: encode the target URL and pass it, together with the
// disableRetry flag, as query parameters.
const token = "YOUR_TOKEN";
const targetUrl = encodeURIComponent("https://httpbin.co/anything");
const disableRetry = "true";

axios({
    method: 'GET',
    url: `https://api.scrape.do?token=${token}&url=${targetUrl}&disableRetry=${disableRetry}`,
    headers: {}
})
    .then((response) => {
        console.log(response.data);
    })
    .catch((error) => {
        console.log(error);
    });
const axios = require('axios');

// Proxy-mode example: the token is the proxy username and the disableRetry
// flag travels in the proxy password.
const token = "YOUR_TOKEN";
const targetUrl = "https://httpbin.co/anything";

const proxyConfig = {
    protocol: 'http',
    host: 'proxy.scrape.do',
    port: 8080,
    auth: {
        username: token,
        password: 'disableRetry=true'
    }
};

axios({ method: "GET", url: targetUrl, proxy: proxyConfig })
    .then((response) => console.log(response.data))
    .catch((error) => console.error(error.message));
// API-mode example: URL-encode the target and append it, together with the
// disableRetry flag, as query parameters.
string token = "YOUR_TOKEN";
string url = WebUtility.UrlEncode("https://httpbin.co/anything");
var requestURL = $"https://api.scrape.do?token={token}&url={url}&disableRetry=true";
var request = new HttpRequestMessage(HttpMethod.Get, requestURL);
var client = new HttpClient();
var response = client.SendAsync(request).Result;
Console.WriteLine(response.Content.ReadAsStringAsync().Result);
// Proxy-mode example: the token is the proxy user and the disableRetry flag
// travels in the proxy password.
string token = "YOUR_TOKEN";
string url = "https://httpbin.co/anything";
var proxy = new WebProxy
{
    Address = new Uri("http://proxy.scrape.do:8080"),
    Credentials = new NetworkCredential(token, "disableRetry=true")
};
var handler = new HttpClientHandler
{
    Proxy = proxy,
    UseProxy = true,
    // Scrape.do terminates TLS itself, so certificate validation is skipped.
    ServerCertificateCustomValidationCallback =
        (sender, cert, chain, sslPolicyErrors) => true
};
var client = new HttpClient(handler);
var request = new HttpRequestMessage(HttpMethod.Get, url);
var content = client.SendAsync(request).Result.Content.ReadAsStringAsync().Result;
Console.WriteLine(content);
<?php
// API-mode example: build the query string from an array of parameters.
$curl = curl_init();
$params = [
   "url" => "https://httpbin.co/anything",
   "token" => "YOUR_TOKEN",
   "disableRetry" => "true"
];
curl_setopt($curl, CURLOPT_RETURNTRANSFER, true);
curl_setopt($curl, CURLOPT_HEADER, false);
curl_setopt($curl, CURLOPT_CUSTOMREQUEST, 'GET');
curl_setopt($curl, CURLOPT_URL, "https://api.scrape.do?".http_build_query($params));
curl_setopt($curl, CURLOPT_HTTPHEADER, ["Accept: */*"]);
$response = curl_exec($curl);
curl_close($curl);
echo $response;
?>
<?php
// Proxy-mode example: the token is the proxy user and the disableRetry flag
// travels in the proxy password.
$curl = curl_init();
curl_setopt($curl, CURLOPT_RETURNTRANSFER, true);
curl_setopt($curl, CURLOPT_HEADER, false);
// Scrape.do terminates TLS on its side, so certificate checks are disabled.
curl_setopt($curl, CURLOPT_SSL_VERIFYHOST, 0);
curl_setopt($curl, CURLOPT_SSL_VERIFYPEER, 0);
$url = "https://httpbin.co/anything";
$token = "YOUR_TOKEN";
// FIX: this section documents disableRetry, but the original passed the
// unrelated render=false (the axios and C# examples in this section pass
// disableRetry=true).
$proxy = sprintf("http://%s:[email protected]:8080", $token);
curl_setopt($curl, CURLOPT_URL, $url);
curl_setopt($curl, CURLOPT_CUSTOMREQUEST, 'GET');
curl_setopt($curl, CURLOPT_PROXY, $proxy);
curl_setopt($curl, CURLOPT_HTTPHEADER, array(
    "Accept: */*",
));
$response = curl_exec($curl);
curl_close($curl);
echo $response;
?>
// API-mode example: encode the target URL and append it, together with the
// disableRetry flag, as query parameters.
String encoded_url = URLEncoder.encode("https://httpbin.co/anything", "UTF-8");
String apiUrl = "https://api.scrape.do?token=YOUR_TOKEN&url=" + encoded_url + "&disableRetry=true";
OkHttpClient client = new OkHttpClient().newBuilder()
  .build();
MediaType mediaType = MediaType.parse("text/plain");
RequestBody body = RequestBody.create(mediaType, "");
Request request = new Request.Builder()
  .url(apiUrl)
  .method("GET", body)
  .build();
Response response = client.newCall(request).execute();
import java.net.URI;
import java.util.Base64;
import org.apache.hc.client5.http.fluent.Request;
import org.apache.hc.core5.http.HttpHost;

/**
 * Proxy-mode example: the token is the proxy user and the disableRetry flag
 * travels in the proxy password.
 */
public class TestRequest {
    public static void main(final String... args) throws Exception {
        String url = "https://httpbin.co/anything";
        // FIX: this section documents disableRetry, but the original passed
        // the unrelated render=false (the axios and C# examples in this
        // section pass disableRetry=true).
        URI proxyURI = new URI("http://YOUR_TOKEN:[email protected]:8080");
        // Build Basic credentials from the user-info part of the proxy URI.
        String basicAuth = new String(
            Base64.getEncoder()
            .encode(
                proxyURI.getUserInfo().getBytes()
            ));
        String response = Request.get(url)
                .addHeader("Proxy-Authorization", "Basic " + basicAuth)
                .viaProxy(HttpHost.create(proxyURI))
                .execute().returnContent().asString();
        System.out.println(response);
    }
}
package main

import (
	"fmt"
	"io/ioutil"