
Block Resources

Optimize performance by blocking CSS and images

By default, this feature is enabled: when you navigate to a website through the headless browser, CSS files, images, and fonts are blocked so that results are returned faster and bandwidth usage is reduced.
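For comparison, here is a minimal sketch of a default render request: it omits the blockResources parameter entirely (placeholder token, httpbin.co as an example target), so CSS, images, and fonts remain blocked.

import requests
import urllib.parse

# Default behavior: blockResources is omitted, so CSS, images, and fonts stay blocked.
token = "YOUR_TOKEN"
targetUrl = urllib.parse.quote("https://httpbin.co/anything", safe="")

url = "https://api.scrape.do/?token={}&url={}&render=true".format(token, targetUrl)
response = requests.get(url)
print(response.text)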

To turn this feature off, pass the blockResources=false parameter with your request.

Blocking resources may affect success rates on some target websites. If a page requires images or CSS to load properly, setting blockResources=false for that site may increase your success rate.
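The examples below disable blocking by sending blockResources=false together with render=true, in cURL, Python, Node.js, Go, Ruby, Java, and C#.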

cURL

curl --location --request GET 'https://api.scrape.do/?url=https%3A%2F%2Fhttpbin.co%2Fanything&render=true&token=YOUR_TOKEN&blockResources=false'

Python

import requests
import urllib.parse

token = "YOUR_TOKEN"
# Encode the target URL so it can be passed safely as a query parameter
targetUrl = urllib.parse.quote("https://httpbin.co/anything", safe="")
render = "true"
blockResources = "false"

url = "https://api.scrape.do/?token={}&url={}&render={}&blockResources={}".format(token, targetUrl, render, blockResources)
response = requests.get(url)
print(response.text)

Node.js

const axios = require('axios');

const token = "YOUR_TOKEN";
// Encode the target URL so it can be passed safely as a query parameter
const targetUrl = encodeURIComponent("https://httpbin.co/anything");
const render = "true";
const blockResources = "false";

const config = {
    method: 'GET',
    url: `https://api.scrape.do/?token=${token}&url=${targetUrl}&render=${render}&blockResources=${blockResources}`,
    headers: {}
};

axios(config)
    .then(function (response) {
        console.log(response.data);
    })
    .catch(function (error) {
        console.log(error);
    });

Go

package main

import (
	"fmt"
	"io"
	"net/http"
	"net/url"
)

func main() {
	token := "YOUR_TOKEN"
	// Encode the target URL so it can be passed safely as a query parameter
	encodedUrl := url.QueryEscape("https://httpbin.co/anything")
	reqUrl := fmt.Sprintf("https://api.scrape.do/?token=%s&url=%s&blockResources=false&render=true", token, encodedUrl)

	req, err := http.NewRequest("GET", reqUrl, nil)
	if err != nil {
		fmt.Println(err)
		return
	}

	client := &http.Client{}
	res, err := client.Do(req)
	if err != nil {
		fmt.Println(err)
		return
	}
	defer res.Body.Close()

	body, err := io.ReadAll(res.Body)
	if err != nil {
		fmt.Println(err)
		return
	}
	fmt.Println(string(body))
}

require "uri"
require "net/http"
require 'cgi'

str = CGI.escape "https://httpbin.co/anything"
url = URI("https://api.scrape.do/?url=" + str + "&render=True&token=YOUR_TOKEN&blockResources=false")
https = Net::HTTP.new(url.host, url.port)
https.use_ssl = true
request = Net::HTTP::Get.new(url)
response = https.request(request)
puts response.read_body
Java

OkHttpClient client = new OkHttpClient();

// Encode the target URL so it can be passed safely as a query parameter
String encoded_url = URLEncoder.encode("https://httpbin.co/anything", "UTF-8");

// GET requests must not carry a request body in OkHttp, so use .get()
Request request = new Request.Builder()
  .url("https://api.scrape.do/?url=" + encoded_url + "&render=true&token=YOUR_TOKEN&blockResources=false")
  .get()
  .build();

Response response = client.newCall(request).execute();

C#

string token = "YOUR_TOKEN";
// Encode the target URL so it can be passed safely as a query parameter
string url = WebUtility.UrlEncode("https://httpbin.co/anything");

var client = new HttpClient();
var requestURL = $"https://api.scrape.do/?token={token}&url={url}&render=true&blockResources=false";

var request = new HttpRequestMessage(HttpMethod.Get, requestURL);
var response = client.SendAsync(request).Result;
var content = response.Content.ReadAsStringAsync().Result;
Console.WriteLine(content);