Sticky Sessions
Session-based scraping
When you want to use the same proxy address for a certain period of time, you can pass the sessionId=1234 parameter. It can be any integer value. Each unique integer value will be the ID of a session.
- SessionId value must be between 0 and 1000000.
- If no request is sent within 5 minutes, the session you created will be closed automatically.
- If the request sent with SessionId fails, a new proxy address will be provided and the session will be changed.
- If you need a new proxy for every request, you don't need to use a session!
- If used with Geo Targeting or Regional Geo Targeting, the session will be created only for the relevant country or region.
- Sessions are created only for successful requests!
Example:
curl --location --request GET 'https://api.scrape.do/?token=YOUR_TOKEN&url=https://httpbin.co/anything&sessionId=1234'import requests
import urllib.parse
token = "YOUR_TOKEN"
targetUrl = urllib.parse.quote( "https://httpbin.co/anything")
sessionId = "1234"
url = "http://api.scrape.do/?token={}&url={}&sessionId={}".format(token, targetUrl,sessionId)
response = requests.request("GET", url)
print(response.text)const axios = require('axios');
const token = "YOUR_TOKEN";
const targetUrl = encodeURIComponent("https://httpbin.co/anything");
const sessionId = "1234"
const config = {
'method': 'GET',
'url': `https://api.scrape.do/?token=${token}&url=${targetUrl}&sessionId=${sessionId}`,
'headers': {}
};
axios(config)
.then(function (response) {
console.log(response.data);
})
.catch(function (error) {
console.log(error);
});package main
import (
"fmt"
"io/ioutil"
"net/http"
"net/url"
)
func main() {
token := "YOUR_TOKEN"
encoded_url := url.QueryEscape("https://httpbin.co/anything")
url := fmt.Sprintf("https://api.scrape.do/?token=%s&url=%s&sessionId=1234", token, encoded_url)
method := "GET"
client := &http.Client{}
req, err := http.NewRequest(method, url, nil)
if err != nil {
fmt.Println(err)
return
}
res, err := client.Do(req)
if err != nil {
fmt.Println(err)
return
}
defer res.Body.Close()
body, err := ioutil.ReadAll(res.Body)
if err != nil {
fmt.Println(err)
return
}
fmt.Println(string(body))
}require "uri"
require "net/http"
require "cgi"

# Sticky-session example: sessionId=1234 keeps the same proxy between calls.
target = CGI.escape("https://httpbin.co/anything")
uri = URI("https://api.scrape.do/?url=" + target + "&token=YOUR_TOKEN&sessionId=1234")

http = Net::HTTP.new(uri.host, uri.port)
http.use_ssl = true

response = http.request(Net::HTTP::Get.new(uri))
puts response.read_body

OkHttpClient client = new OkHttpClient().newBuilder()
.build();
// Fix: the original built a GET with a non-null RequestBody via
// .method("GET", body). OkHttp rejects that at runtime with
// "IllegalArgumentException: method GET must not have a request body",
// so use .get() and drop the unused MediaType/RequestBody entirely.
String encodedUrl = URLEncoder.encode("https://httpbin.co/anything", "UTF-8");
Request request = new Request.Builder()
        .url("https://api.scrape.do/?token=YOUR_TOKEN&url=" + encodedUrl + "&sessionId=1234")
        .get()
        .build();
Response response = client.newCall(request).execute();

string token = "YOUR_TOKEN";
string encodedUrl = WebUtility.UrlEncode("https://httpbin.co/anything");
var client = new HttpClient();

// sessionId=1234 keeps subsequent requests on the same proxy address.
var requestUrl = $"https://api.scrape.do/?token={token}&url={encodedUrl}&sessionId=1234";
var request = new HttpRequestMessage(HttpMethod.Get, requestUrl);

var response = client.SendAsync(request).Result;
Console.WriteLine(response.Content.ReadAsStringAsync().Result);