logo

Wait

Configure wait times and page load behavior

Control when the headless browser considers a page fully loaded and ready for data extraction. These parameters help you handle dynamic content, AJAX requests, and ensure all necessary elements have rendered before scraping.

Wait Until

Our headless browser system uses waitUntil=domcontentloaded during navigation by default. However, in some cases, you may need to change it. With this parameter, you can adjust this behavior and ensure that the page loads correctly according to your needs.

The waitUntil parameter accepts the following values: domcontentloaded, networkidle0, networkidle2, and load:

  • domcontentloaded waits for the DOMContentLoaded event to fire, which occurs when the initial HTML document has been completely loaded and parsed.
  • networkidle0 waits until there are no active network connections for at least 500 ms.
  • networkidle2 waits until there are no more than 2 network connections for at least 500 ms.
  • load waits for the window.load event to fire, which occurs when all page resources (including images, stylesheets, and scripts) have finished loading. This option is particularly useful for ensuring that scroll-triggered images and other dynamic content are fully loaded.
# -L follows redirects, -X GET forces the GET method (same as --location --request GET)
curl -L -X GET 'https://api.scrape.do/?token=YOUR_TOKEN&url=https://httpbin.co/anything&waitUntil=domcontentloaded&render=true'
import requests
import urllib.parse

# scrape.do example: render the page and wait for the DOMContentLoaded event.
token = "YOUR_TOKEN"
target_url = "https://httpbin.co/anything"
wait_until = "domcontentloaded"

# The target URL must be percent-encoded before nesting it in the API query string.
encoded_url = urllib.parse.quote(target_url)

# Fix: use https for the API endpoint, consistent with the curl/Node/Go examples.
url = "https://api.scrape.do/?token={}&url={}&waitUntil={}&render=true".format(
    token, encoded_url, wait_until
)

response = requests.get(url)
print(response.text)
const axios = require('axios');

// Render the target page and wait for DOMContentLoaded before returning.
const token = 'YOUR_TOKEN';
const targetUrl = 'https://httpbin.co/anything';
const waitUntil = 'domcontentloaded';

// The target URL must be percent-encoded before nesting it in the query string.
const encodedUrl = encodeURIComponent(targetUrl);
const apiUrl = `https://api.scrape.do/?token=${token}&url=${encodedUrl}&waitUntil=${waitUntil}&render=true`;

axios
    .get(apiUrl)
    .then((response) => {
        console.log(response.data);
    })
    .catch((error) => {
        console.log(error);
    });
package main

import (
	"fmt"
	"io"
	"net/http"
	"net/url"
	"time"
)

// main fetches the target page through the scrape.do API, rendering it and
// waiting for the DOMContentLoaded event before the response is returned.
func main() {
	token := "YOUR_TOKEN"
	// The target URL must be percent-encoded before nesting it in the query string.
	encodedURL := url.QueryEscape("https://httpbin.co/anything")
	// Fix: named apiURL so it no longer shadows the imported net/url package.
	apiURL := fmt.Sprintf("https://api.scrape.do/?token=%s&url=%s&waitUntil=domcontentloaded&render=true", token, encodedURL)

	// Always set a client timeout so a stalled render cannot hang the program.
	client := &http.Client{Timeout: 60 * time.Second}
	req, err := http.NewRequest(http.MethodGet, apiURL, nil)
	if err != nil {
		fmt.Println(err)
		return
	}
	res, err := client.Do(req)
	if err != nil {
		fmt.Println(err)
		return
	}
	defer res.Body.Close()

	// io.ReadAll replaces the deprecated ioutil.ReadAll (Go 1.16+).
	body, err := io.ReadAll(res.Body)
	if err != nil {
		fmt.Println(err)
		return
	}
	fmt.Println(string(body))
}
require "uri"
require "net/http"
require "cgi"

# Render the target page and wait for DOMContentLoaded before returning.
target = CGI.escape "https://httpbin.co/anything"
api = URI("https://api.scrape.do/?url=" + target + "&token=YOUR_TOKEN&waitUntil=domcontentloaded&render=true")

http = Net::HTTP.new(api.host, api.port)
http.use_ssl = true

reply = http.request(Net::HTTP::Get.new(api))
puts reply.read_body
OkHttpClient client = new OkHttpClient().newBuilder()
  .build();
MediaType mediaType = MediaType.parse("text/plain");
RequestBody body = RequestBody.create(mediaType, "");
String encoded_url = URLEncoder.encode("https://httpbin.co/anything", "UTF-8");
Request request = new Request.Builder()
  .url("https://api.scrape.do/?token=YOUR_TOKEN&url="encoded_url"&waitUntil=domcontentloaded&render=true")
  .method("GET", body)
  .build();
Response response = client.newCall(request).execute();
// HttpClient example: render the page and wait for DOMContentLoaded.
string token = "YOUR_TOKEN";
string targetUrl = "https://httpbin.co/anything";
var client = new HttpClient();
// The target URL must be percent-encoded before nesting it in the query string.
var requestUrl = $"https://api.scrape.do/?token={token}&url={WebUtility.UrlEncode(targetUrl)}&waitUntil=domcontentloaded&render=true";
var request = new HttpRequestMessage(HttpMethod.Get, requestUrl);
// Fix: the variable was misspelled "respponse" in the original.
var response = client.SendAsync(request).Result;
var content = response.Content.ReadAsStringAsync().Result;
Console.WriteLine(content);

Custom Wait

With the JS Render infrastructure, you may want to wait for a certain period of time to ensure that all content is loaded correctly on the web page you're accessing. This parameter accepts values between 0 and 35000 milliseconds.

# -L follows redirects, -X GET forces the GET method (same as --location --request GET)
curl -L -X GET 'https://api.scrape.do/?token=YOUR_TOKEN&url=https://httpbin.co/anything&customWait=1000&render=true'
import requests
import urllib.parse

# scrape.do example: customWait=1000 pauses the headless browser for 1000 ms
# after navigation so dynamic content can finish loading.
token = "YOUR_TOKEN"
# The target URL must be percent-encoded before nesting it in the query string.
target_url = urllib.parse.quote("https://httpbin.co/anything")
custom_wait = "1000"
render = "true"

# Fix: use https for the API endpoint, consistent with the curl/Node/Go examples.
url = "https://api.scrape.do/?token={}&url={}&customWait={}&render={}".format(
    token, target_url, custom_wait, render
)

response = requests.get(url)
print(response.text)
const axios = require('axios');

// customWait=1000 pauses the headless browser for 1000 ms after navigation.
const token = 'YOUR_TOKEN';
const targetUrl = encodeURIComponent('https://httpbin.co/anything');
const customWait = '1000';
const render = true;

const apiUrl = `https://api.scrape.do/?token=${token}&url=${targetUrl}&customWait=${customWait}&render=${render}`;

axios
    .get(apiUrl)
    .then((response) => {
        console.log(response.data);
    })
    .catch((error) => {
        console.log(error);
    });
package main

import (
	"fmt"
	"io"
	"net/http"
	"net/url"
	"time"
)

// main fetches the target page through the scrape.do API, pausing the
// headless browser for 1000 ms (customWait) after navigation.
func main() {
	token := "YOUR_TOKEN"
	// The target URL must be percent-encoded before nesting it in the query string.
	encodedURL := url.QueryEscape("https://httpbin.co/anything")
	// Fix: named apiURL so it no longer shadows the imported net/url package.
	apiURL := fmt.Sprintf("https://api.scrape.do/?token=%s&url=%s&customWait=1000&render=true", token, encodedURL)

	// Always set a client timeout so a stalled render cannot hang the program.
	client := &http.Client{Timeout: 60 * time.Second}
	req, err := http.NewRequest(http.MethodGet, apiURL, nil)
	if err != nil {
		fmt.Println(err)
		return
	}
	res, err := client.Do(req)
	if err != nil {
		fmt.Println(err)
		return
	}
	defer res.Body.Close()

	// io.ReadAll replaces the deprecated ioutil.ReadAll (Go 1.16+).
	body, err := io.ReadAll(res.Body)
	if err != nil {
		fmt.Println(err)
		return
	}
	fmt.Println(string(body))
}
require "uri"
require "net/http"
require "cgi"

# customWait=1000 pauses the headless browser for 1000 ms after navigation.
target = CGI.escape "https://httpbin.co/anything"
api = URI("https://api.scrape.do/?url=" + target + "&token=YOUR_TOKEN&customWait=1000&render=true")

http = Net::HTTP.new(api.host, api.port)
http.use_ssl = true

reply = http.request(Net::HTTP::Get.new(api))
puts reply.read_body
// OkHttp example: customWait=1000 gives the page an extra second to settle.
OkHttpClient client = new OkHttpClient().newBuilder()
  .build();
// The target URL must be percent-encoded before nesting it in the query string.
String encodedUrl = URLEncoder.encode("https://httpbin.co/anything", "UTF-8");
// Fix: OkHttp throws IllegalArgumentException for a GET with a non-null body,
// so build the request with .get() instead of .method("GET", body).
Request request = new Request.Builder()
  .url("https://api.scrape.do/?token=YOUR_TOKEN&url=" + encodedUrl + "&customWait=1000&render=true")
  .get()
  .build();
Response response = client.newCall(request).execute();
// HttpClient example: customWait=1000 gives the page an extra second to settle.
string token = "YOUR_TOKEN";
// The target URL must be percent-encoded before nesting it in the query string.
string encodedUrl = WebUtility.UrlEncode("https://httpbin.co/anything");
var client = new HttpClient();
var requestUrl = $"https://api.scrape.do/?token={token}&url={encodedUrl}&render=true&customWait=1000";
var request = new HttpRequestMessage(HttpMethod.Get, requestUrl);
var response = client.SendAsync(request).Result;
var content = response.Content.ReadAsStringAsync().Result;
Console.WriteLine(content);

Wait CSS Selector

With the JS Render infrastructure, the data you expect on the web page can be returned via specific elements. We can wait for these elements to load using the waitSelector parameter and return the result once the relevant element is loaded.

For example, you can wait for an element using waitSelector=.element or waitSelector=#element-id. The system waits for up to 10 seconds for the specified element to appear in the content. If the element does not appear, the response will be returned in its raw form.

Important: You need to URL-encode the waitSelector value!

Target Data in Browser

<div class="element" id="element-id">
... content
</div>
# -L follows redirects, -X GET forces the GET method (same as --location --request GET)
curl -L -X GET 'https://api.scrape.do/?url=https://httpbin.co/anything&render=True&token=YOUR_TOKEN&waitSelector=.class_name'
import requests
import urllib.parse

# scrape.do example: wait (up to 10 s) for a CSS selector to appear in the
# rendered page before the response is returned.
token = "YOUR_TOKEN"
# The target URL must be percent-encoded before nesting it in the query string.
target_url = urllib.parse.quote("https://httpbin.co/anything")
render = "true"
# Per the docs above, the waitSelector value should be URL-encoded as well.
wait_selector = urllib.parse.quote(".class_name")

# Fix: use https for the API endpoint, consistent with the curl/Node/Go examples.
url = "https://api.scrape.do/?token={}&url={}&render={}&waitSelector={}".format(
    token, target_url, render, wait_selector
)

response = requests.get(url)
print(response.text)
const axios = require('axios');

// Wait (up to 10 s) for .class_name to appear before the render returns.
const token = 'YOUR_TOKEN';
const targetUrl = encodeURIComponent('https://httpbin.co/anything');
const render = 'true';
const waitSelector = '.class_name';

const apiUrl = `https://api.scrape.do/?token=${token}&url=${targetUrl}&render=${render}&waitSelector=${waitSelector}`;

axios
    .get(apiUrl)
    .then((response) => {
        console.log(response.data);
    })
    .catch((error) => {
        console.log(error);
    });
package main

import (
	"fmt"
	"io"
	"net/http"
	"net/url"
	"time"
)

// main fetches the target page through the scrape.do API, waiting (up to
// 10 s) for the .class_name selector to appear in the rendered content.
func main() {
	token := "YOUR_TOKEN"
	// The target URL must be percent-encoded before nesting it in the query string.
	encodedURL := url.QueryEscape("https://httpbin.co/anything")
	// Fix: named apiURL so it no longer shadows the imported net/url package.
	// Note: a waitSelector containing special characters should be passed
	// through url.QueryEscape as well (".class_name" encodes to itself).
	apiURL := fmt.Sprintf("https://api.scrape.do/?token=%s&url=%s&render=true&waitSelector=.class_name", token, encodedURL)

	// Always set a client timeout so a stalled render cannot hang the program.
	client := &http.Client{Timeout: 60 * time.Second}
	req, err := http.NewRequest(http.MethodGet, apiURL, nil)
	if err != nil {
		fmt.Println(err)
		return
	}
	res, err := client.Do(req)
	if err != nil {
		fmt.Println(err)
		return
	}
	defer res.Body.Close()

	// io.ReadAll replaces the deprecated ioutil.ReadAll (Go 1.16+).
	body, err := io.ReadAll(res.Body)
	if err != nil {
		fmt.Println(err)
		return
	}
	fmt.Println(string(body))
}
require "uri"
require "net/http"
require 'cgi'

# Wait (up to 10 s) for .class_name to appear before the render returns.
str = CGI.escape "https://httpbin.co/anything"
url = URI("https://api.scrape.do/?url=" + str + "&token=YOUR_TOKEN&render=True&waitSelector=.class_name")
# Fix: these two statements were jammed onto one line in the original,
# which is a syntax error.
https = Net::HTTP.new(url.host, url.port)
https.use_ssl = true
request = Net::HTTP::Get.new(url)
response = https.request(request)
puts response.read_body
// OkHttp example: wait (up to 10 s) for .class_name before the render returns.
OkHttpClient client = new OkHttpClient().newBuilder()
  .build();
// The target URL must be percent-encoded before nesting it in the query string.
String encodedUrl = URLEncoder.encode("https://httpbin.co/anything", "UTF-8");
// Fix: OkHttp throws IllegalArgumentException for a GET with a non-null body,
// so build the request with .get() instead of .method("GET", body).
Request request = new Request.Builder()
  .url("https://api.scrape.do/?url=" + encodedUrl + "&render=true&token=YOUR_TOKEN&waitSelector=.class_name")
  .get()
  .build();
Response response = client.newCall(request).execute();
// HttpClient example: wait (up to 10 s) for .class_name before the render returns.
string token = "YOUR_TOKEN";
// The target URL must be percent-encoded before nesting it in the query string.
string encodedUrl = WebUtility.UrlEncode("https://httpbin.co/anything");
// Fix: the original was missing the ';' here and misspelled "response" below.
string waitSelector = ".class_name";
var client = new HttpClient();
// Per the docs above, the waitSelector value should be URL-encoded as well.
var requestUrl = $"https://api.scrape.do/?token={token}&url={encodedUrl}&render=true&waitSelector={WebUtility.UrlEncode(waitSelector)}";
var request = new HttpRequestMessage(HttpMethod.Get, requestUrl);
var response = client.SendAsync(request).Result;
var content = response.Content.ReadAsStringAsync().Result;
Console.WriteLine(content);