Screenshot
Capture visual snapshots of web pages
Scrape.do can capture visual representations of target web pages, allowing you to verify page rendering, monitor visual changes, or archive page layouts. Screenshots are returned as base64-encoded strings in the API response, making them easy to save or process programmatically.
When you use any screenshot feature, our infrastructure automatically operates with blockResources=false and render=true to ensure contents are loaded completely and correctly.
Normal Screenshot
If you need to see what the target web page looks like, it's easy to do with Scrape.do. You can take a screenshot and download it simply by sending the screenShot=true parameter.
The normal screenshot captures whatever is visible within the current viewport or display resolution (default 1920x1080). This means only the "above the fold" content will be captured - similar to what a user sees when first loading the page before scrolling.
curl --location --request GET 'https://api.scrape.do/?token=YOUR_TOKEN&screenShot=true&render=true&returnJSON=true&url=http://example.com/' \import base64
import json
import requests
import urllib.parse
token = "YOUR_TOKEN"
raw_url = "http://example.com/"
url = urllib.parse.quote(raw_url)
req_url = f"https://api.scrape.do/?token={token}&url={url}&screenShot=true&render=true&returnJSON=true"
resp = requests.get(req_url)
if resp.status_code != requests.codes.ok:
resp.raise_for_status()
json_map = json.loads(resp.text)
image_b64 = json_map["screenShots"][0]["image"]
image_bytes = base64.b64decode(image_b64)
file_path = "Example.png"
with open(file_path, "wb") as file:
file.write(image_bytes)const axios = require('axios');
const fs = require('fs');
const token = 'YOUR_TOKEN';
const url = encodeURIComponent('http://example.com/');
const requestURL = `https://api.scrape.do/?token=${token}&url=${url}&screenShot=true&render=true&returnJSON=true`;
axios.get(requestURL)
.then(response => {
if (response.status === 200) {
const content = response.data;
const imageB64 = content.screenShots[0].image;
const filePath = 'Example.png';
fs.writeFile(filePath, Buffer.from(imageB64, 'base64'), err => {
if (err) {
console.error(err);
}
});
} else {
console.log(response.status);
}
})
.catch(error => {
console.error(error);
});package main
import (
"encoding/base64"
"encoding/json"
"fmt"
"io/ioutil"
"net/http"
"net/url"
"os"
)
func main() {
token := "YOUR_TOKEN"
rawURL := "http://example.com/"
url := url.QueryEscape(rawURL)
reqURL := fmt.Sprintf("https://api.scrape.do/?token=%s&url=%s&screenShot=true&render=true&returnJSON=true", token, url)
resp, err := http.Get(reqURL)
if err != nil {
panic(err)
}
defer resp.Body.Close()
if resp.StatusCode != http.StatusOK {
panic(resp.Status)
}
bytes, err := ioutil.ReadAll(resp.Body)
if err != nil {
panic(err)
}
jsonMap := make(map[string]interface{})
err = json.Unmarshal(bytes, &jsonMap)
if err != nil {
panic(err)
}
imageB64 := jsonMap["screenShots"].([]interface{})[0].(map[string]interface{})["image"].(string)
imageBytes, err := base64.StdEncoding.DecodeString(imageB64)
if err != nil {
panic(err)
}
filePath := "Example.png"
file, err := os.Create(filePath)
if err != nil {
panic(err)
}
defer file.Close()
_, err = file.Write(imageBytes)
if err != nil {
panic(err)
}
}require 'base64'
require 'json'
require 'net/http'
require 'uri'

# Scrape.do credentials and target page.
token = "YOUR_TOKEN"
target = "http://example.com/"

# The target URL must be query-encoded before being embedded in the API call.
encoded = URI.encode_www_form_component(target)
req_url = "https://api.scrape.do/?token=#{token}&url=#{encoded}&screenShot=true&render=true&returnJSON=true"

resp = Net::HTTP.get_response(URI.parse(req_url))
raise "Error: #{resp.code} - #{resp.message}" unless resp.code == '200'

# screenShots[0].image holds the base64-encoded PNG.
image_b64 = JSON.parse(resp.body)["screenShots"][0]["image"]

# Decode and persist the PNG.
File.open("Example.png", "wb") do |file|
  file.write(Base64.decode64(image_b64))
end
String token = "YOUR_TOKEN";
String rawUrl = "http://example.com/";

// The target URL must be query-encoded before being embedded in the API call.
String url = URLEncoder.encode(rawUrl, "UTF-8");
String reqUrl = String.format("https://api.scrape.do/?token=%s&url=%s&screenShot=true&render=true&returnJSON=true", token, url);

CloseableHttpClient httpClient = HttpClients.createDefault();
HttpGet httpGet = new HttpGet(reqUrl);
CloseableHttpResponse response = httpClient.execute(httpGet);
try {
    HttpEntity entity = response.getEntity();
    String responseBody = entity != null ? EntityUtils.toString(entity) : null;

    // Pull screenShots[0].image (a base64 string) out of the JSON payload.
    JSONObject jsonObject = new JSONObject(responseBody);
    String imageB64 = jsonObject.getJSONArray("screenShots").getJSONObject(0).getString("image");
    byte[] imageBytes = Base64.getDecoder().decode(imageB64);

    // try-with-resources guarantees the stream is closed even if write() throws.
    try (FileOutputStream fos = new FileOutputStream("Example.png")) {
        fos.write(imageBytes);
    }
} finally {
    response.close();
}
string token = "YOUR_TOKEN";
// The target URL must be query-encoded before being embedded in the API call.
string url = WebUtility.UrlEncode("http://example.com/");
var client = new HttpClient();
var requestURL = $"https://api.scrape.do/?token={token}&url={url}&screenShot=true&render=true&returnJSON=true";

// Blocking wait keeps this sample usable outside an async context.
var response = client.GetAsync(requestURL).Result;
if (response.StatusCode != HttpStatusCode.OK)
{
    Console.WriteLine(response);
}
else
{
    string content = response.Content.ReadAsStringAsync().Result;

    // Pull screenShots[0].image (a base64 string) out of the JSON payload.
    JToken parsed = JToken.Parse(content);
    string imageB64 = parsed.SelectToken("screenShots[0].image").ToString();
    File.WriteAllBytes("Example.png", Convert.FromBase64String(imageB64));
}
<?php
$token = "YOUR_TOKEN";
$rawURL = "http://example.com/";

// The target URL must be query-encoded before being embedded in the API call.
$url = urlencode($rawURL);
$reqURL = sprintf("https://api.scrape.do/?token=%s&url=%s&screenShot=true&render=true&returnJSON=true", $token, $url);

$response = file_get_contents($reqURL);
// Strict comparison: an empty (but successful) body is not a failure.
if ($response === false) {
    die("Failed to fetch response.");
}

$jsonMap = json_decode($response, true);
// json_decode() returns null on invalid JSON; a strict null check avoids
// misclassifying valid falsy payloads.
if ($jsonMap === null) {
    die("Failed to parse response.");
}

// screenShots[0].image holds the base64-encoded PNG.
$imageB64 = $jsonMap["screenShots"][0]["image"];
$imageBytes = base64_decode($imageB64);

$file = fopen("Example.png", "wb");
if ($file === false) {
    die("Failed to open file for writing.");
}
fwrite($file, $imageBytes);
fclose($file);
Full Page Screenshot
By default, screenshots only capture the part of the web page that is visible on the screen. If you need a screenshot of the entire web page - including all content that would normally require scrolling - just pass fullScreenShot=true. The system will take a screenshot of the entire page for you, from top to bottom.
This is particularly useful for capturing long-form content, full product listings, or complete article pages.
curl --location --request GET 'https://api.scrape.do/?token=YOUR_TOKEN&render=true&fullScreenShot=true&returnJSON=true&url=http://example.com/' \import base64
import json
import requests
import urllib.parse
token = "YOUR_TOKEN"
raw_url = "http://example.com/"
url = urllib.parse.quote(raw_url)
req_url = f"https://api.scrape.do/?token={token}&url={url}&fullScreenShot=true&render=true&returnJSON=true"
resp = requests.get(req_url)
if resp.status_code != requests.codes.ok:
resp.raise_for_status()
json_map = json.loads(resp.text)
image_b64 = json_map["screenShots"][0]["image"]
image_bytes = base64.b64decode(image_b64)
file_path = "Example.png"
with open(file_path, "wb") as file:
file.write(image_bytes)const axios = require('axios');
const fs = require('fs');
const token = 'YOUR_TOKEN';
const url = encodeURIComponent('http://example.com/');
const requestURL = `https://api.scrape.do/?token=${token}&url=${url}&fullScreenShot=true&render=true&returnJSON=true`;
axios.get(requestURL)
.then(response => {
if (response.status === 200) {
const content = response.data;
const imageB64 = content.screenShots[0].image;
const filePath = 'Example.png';
fs.writeFile(filePath, Buffer.from(imageB64, 'base64'), err => {
if (err) {
console.error(err);
}
});
} else {
console.log(response.status);
}
})
.catch(error => {
console.error(error);
});package main
import (
"encoding/base64"
"encoding/json"
"fmt"
"io/ioutil"
"net/http"
"net/url"
"os"
)
func main() {
token := "YOUR_TOKEN"
rawURL := "http://example.com/"
url := url.QueryEscape(rawURL)
reqURL := fmt.Sprintf("https://api.scrape.do/?token=%s&url=%s&fullScreenShot=true&render=true&returnJSON=true", token, url)
resp, err := http.Get(reqURL)
if err != nil {
panic(err)
}
defer resp.Body.Close()
if resp.StatusCode != http.StatusOK {
panic(resp.Status)
}
bytes, err := ioutil.ReadAll(resp.Body)
if err != nil {
panic(err)
}
jsonMap := make(map[string]interface{})
err = json.Unmarshal(bytes, &jsonMap)
if err != nil {
panic(err)
}
imageB64 := jsonMap["screenShots"].([]interface{})[0].(map[string]interface{})["image"].(string)
imageBytes, err := base64.StdEncoding.DecodeString(imageB64)
if err != nil {
panic(err)
}
filePath := "Example.png"
file, err := os.Create(filePath)
if err != nil {
panic(err)
}
defer file.Close()
_, err = file.Write(imageBytes)
if err != nil {
panic(err)
}
}require 'base64'
require 'json'
require 'net/http'
require 'uri'

# Scrape.do credentials and target page.
token = "YOUR_TOKEN"
target = "http://example.com/"

# The target URL must be query-encoded before being embedded in the API call.
encoded = URI.encode_www_form_component(target)
req_url = "https://api.scrape.do/?token=#{token}&url=#{encoded}&fullScreenShot=true&render=true&returnJSON=true"

resp = Net::HTTP.get_response(URI.parse(req_url))
raise "Error: #{resp.code} - #{resp.message}" unless resp.code == '200'

# screenShots[0].image holds the base64-encoded PNG.
image_b64 = JSON.parse(resp.body)["screenShots"][0]["image"]

# Decode and persist the PNG.
File.open("Example.png", "wb") do |file|
  file.write(Base64.decode64(image_b64))
end
String token = "YOUR_TOKEN";
String rawUrl = "http://example.com/";

// The target URL must be query-encoded before being embedded in the API call.
String url = URLEncoder.encode(rawUrl, "UTF-8");
String reqUrl = String.format("https://api.scrape.do/?token=%s&url=%s&fullScreenShot=true&render=true&returnJSON=true", token, url);

CloseableHttpClient httpClient = HttpClients.createDefault();
HttpGet httpGet = new HttpGet(reqUrl);
CloseableHttpResponse response = httpClient.execute(httpGet);
try {
    HttpEntity entity = response.getEntity();
    String responseBody = entity != null ? EntityUtils.toString(entity) : null;

    // Pull screenShots[0].image (a base64 string) out of the JSON payload.
    JSONObject jsonObject = new JSONObject(responseBody);
    String imageB64 = jsonObject.getJSONArray("screenShots").getJSONObject(0).getString("image");
    byte[] imageBytes = Base64.getDecoder().decode(imageB64);

    // try-with-resources guarantees the stream is closed even if write() throws.
    try (FileOutputStream fos = new FileOutputStream("Example.png")) {
        fos.write(imageBytes);
    }
} finally {
    response.close();
}
string token = "YOUR_TOKEN";
// The target URL must be query-encoded before being embedded in the API call.
string url = WebUtility.UrlEncode("http://example.com/");
var client = new HttpClient();
var requestURL = $"https://api.scrape.do/?token={token}&url={url}&fullScreenShot=true&render=true&returnJSON=true";

// Blocking wait keeps this sample usable outside an async context.
var response = client.GetAsync(requestURL).Result;
if (response.StatusCode != HttpStatusCode.OK)
{
    Console.WriteLine(response);
}
else
{
    string content = response.Content.ReadAsStringAsync().Result;

    // Pull screenShots[0].image (a base64 string) out of the JSON payload.
    JToken parsed = JToken.Parse(content);
    string imageB64 = parsed.SelectToken("screenShots[0].image").ToString();
    File.WriteAllBytes("Example.png", Convert.FromBase64String(imageB64));
}
<?php
$token = "YOUR_TOKEN";
$rawURL = "http://example.com/";

// The target URL must be query-encoded before being embedded in the API call.
$url = urlencode($rawURL);
$reqURL = sprintf("https://api.scrape.do/?token=%s&url=%s&fullScreenShot=true&render=true&returnJSON=true", $token, $url);

$response = file_get_contents($reqURL);
// Strict comparison: an empty (but successful) body is not a failure.
if ($response === false) {
    die("Failed to fetch response.");
}

$jsonMap = json_decode($response, true);
// json_decode() returns null on invalid JSON; a strict null check avoids
// misclassifying valid falsy payloads.
if ($jsonMap === null) {
    die("Failed to parse response.");
}

// screenShots[0].image holds the base64-encoded PNG.
$imageB64 = $jsonMap["screenShots"][0]["image"];
$imageBytes = base64_decode($imageB64);

$file = fopen("Example.png", "wb");
if ($file === false) {
    die("Failed to open file for writing.");
}
fwrite($file, $imageBytes);
fclose($file);
Partial Screenshot
Sometimes you may want to take a screenshot of only a specific element loaded on the screen. In this scenario, our system will take a screenshot for the CSS selector you have provided.
By passing the particularScreenShot=#elementSelector parameter, you can take a screenshot of the relevant element. This is useful for capturing specific components like product images, price tables, or review sections without the surrounding page content.
The particularScreenShot parameter expects a URL-encoded value.
Important Notes
- The Partial Screenshot feature cannot be used without the returnJSON=true parameter!
- You can use only one of the three screenshot features at the same time.
- The particularScreenShot parameter and the playWithBrowser parameter cannot be used together. If you need to take more than one screenshot, you can add it as an action with Browser Interactions.
curl --location --request GET 'https://api.scrape.do/?token=YOUR_TOKEN&render=true&particularScreenShot=h1&returnJSON=true&url=http://example.com/' \import base64
import json
import requests
import urllib.parse
token = "YOUR_TOKEN"
raw_url = "http://example.com/"
url = urllib.parse.quote(raw_url)
selector = urllib.parse.quote("h1")
req_url = f"https://api.scrape.do/?token={token}&url={url}&particularScreenShot={selector}&render=true&returnJSON=true"
resp = requests.get(req_url)
if resp.status_code != requests.codes.ok:
resp.raise_for_status()
json_map = json.loads(resp.text)
image_b64 = json_map["screenShots"][0]["image"]
image_bytes = base64.b64decode(image_b64)
file_path = "Example.png"
with open(file_path, "wb") as file:
file.write(image_bytes)const axios = require('axios');
const fs = require('fs');
const token = 'YOUR_TOKEN';
const url = encodeURIComponent('http://example.com/');
const selector = encodeURIComponent("h1");
const requestURL = `https://api.scrape.do/?token=${token}&url=${url}&particularScreenShot=${selector}&returnJSON=true&render=true`;
axios.get(requestURL)
.then(response => {
if (response.status === 200) {
const content = response.data;
const imageB64 = content.screenShots[0].image;
const filePath = 'Example.png';
fs.writeFile(filePath, Buffer.from(imageB64, 'base64'), err => {
if (err) {
console.error(err);
}
});
} else {
console.log(response.status);
}
})
.catch(error => {
console.error(error);
});package main
import (
"encoding/base64"
"encoding/json"
"fmt"
"io/ioutil"
"net/http"
"net/url"
"os"
)
func main() {
token := "YOUR_TOKEN"
rawURL := "http://example.com/"
encodedUrl := url.QueryEscape(rawURL)
encodedSelector := url.QueryEscape("h1")
reqURL := fmt.Sprintf("https://api.scrape.do/?token=%s&url=%s&particularScreenShot=%s&render=true&returnJSON=true", token, encodedUrl, encodedSelector)
resp, err := http.Get(reqURL)
if err != nil {
panic(err)
}
defer resp.Body.Close()
if resp.StatusCode != http.StatusOK {
panic(resp.Status)
}
bytes, err := ioutil.ReadAll(resp.Body)
if err != nil {
panic(err)
}
jsonMap := make(map[string]interface{})
err = json.Unmarshal(bytes, &jsonMap)
if err != nil {
panic(err)
}
imageB64 := jsonMap["screenShots"].([]interface{})[0].(map[string]interface{})["image"].(string)
imageBytes, err := base64.StdEncoding.DecodeString(imageB64)
if err != nil {
panic(err)
}
filePath := "Example.png"
file, err := os.Create(filePath)
if err != nil {
panic(err)
}
defer file.Close()
_, err = file.Write(imageBytes)
if err != nil {
panic(err)
}
}require 'base64'
require 'json'
require 'net/http'
require 'uri'
require 'cgi'

# Scrape.do credentials and target page.
token = "YOUR_TOKEN"
target = "http://example.com/"

# Both the target URL and the CSS selector must be query-encoded.
encoded = URI.encode_www_form_component(target)
selector = CGI.escape("h1")
req_url = "https://api.scrape.do/?token=#{token}&url=#{encoded}&particularScreenShot=#{selector}&render=true&returnJSON=true"

resp = Net::HTTP.get_response(URI.parse(req_url))
raise "Error: #{resp.code} - #{resp.message}" unless resp.code == '200'

# screenShots[0].image holds the base64-encoded PNG.
image_b64 = JSON.parse(resp.body)["screenShots"][0]["image"]

# Decode and persist the PNG.
File.open("Example.png", "wb") do |file|
  file.write(Base64.decode64(image_b64))
end
String token = "YOUR_TOKEN";
String rawUrl = "http://example.com/";

// Both the target URL and the CSS selector must be query-encoded.
String url = URLEncoder.encode(rawUrl, "UTF-8");
String encodedSelector = URLEncoder.encode("h1", "UTF-8");
// Fixed: the original format string contained a stray "&&" between parameters.
String reqUrl = String.format("https://api.scrape.do/?token=%s&url=%s&particularScreenShot=%s&render=true&returnJSON=true", token, url, encodedSelector);

CloseableHttpClient httpClient = HttpClients.createDefault();
HttpGet httpGet = new HttpGet(reqUrl);
CloseableHttpResponse response = httpClient.execute(httpGet);
try {
    HttpEntity entity = response.getEntity();
    String responseBody = entity != null ? EntityUtils.toString(entity) : null;

    // Pull screenShots[0].image (a base64 string) out of the JSON payload.
    JSONObject jsonObject = new JSONObject(responseBody);
    String imageB64 = jsonObject.getJSONArray("screenShots").getJSONObject(0).getString("image");
    byte[] imageBytes = Base64.getDecoder().decode(imageB64);

    // try-with-resources guarantees the stream is closed even if write() throws.
    try (FileOutputStream fos = new FileOutputStream("Example.png")) {
        fos.write(imageBytes);
    }
} finally {
    response.close();
}
string token = "YOUR_TOKEN";
// Both the target URL and the CSS selector must be query-encoded.
string url = WebUtility.UrlEncode("http://example.com/");
string selector = WebUtility.UrlEncode("h1");
var client = new HttpClient();
var requestURL = $"https://api.scrape.do/?token={token}&url={url}&particularScreenShot={selector}&render=true&returnJSON=true";

// Blocking wait keeps this sample usable outside an async context.
var response = client.GetAsync(requestURL).Result;
if (response.StatusCode != HttpStatusCode.OK)
{
    Console.WriteLine(response);
}
else
{
    string content = response.Content.ReadAsStringAsync().Result;

    // Pull screenShots[0].image (a base64 string) out of the JSON payload.
    JToken parsed = JToken.Parse(content);
    string imageB64 = parsed.SelectToken("screenShots[0].image").ToString();
    File.WriteAllBytes("Example.png", Convert.FromBase64String(imageB64));
}
<?php
$token = "YOUR_TOKEN";
$rawURL = "http://example.com/";

// The target URL must be query-encoded. The selector "h1" contains no
// reserved characters, so it is embedded as-is here.
$url = urlencode($rawURL);
$reqURL = sprintf("https://api.scrape.do/?token=%s&url=%s&particularScreenShot=h1&render=true&returnJSON=true", $token, $url);

$response = file_get_contents($reqURL);
// Strict comparison: an empty (but successful) body is not a failure.
if ($response === false) {
    die("Failed to fetch response.");
}

$jsonMap = json_decode($response, true);
// json_decode() returns null on invalid JSON; a strict null check avoids
// misclassifying valid falsy payloads.
if ($jsonMap === null) {
    die("Failed to parse response.");
}

// screenShots[0].image holds the base64-encoded PNG.
$imageB64 = $jsonMap["screenShots"][0]["image"];
$imageBytes = base64_decode($imageB64);

$file = fopen("Example.png", "wb");
if ($file === false) {
    die("Failed to open file for writing.");
}
fwrite($file, $imageBytes);
fclose($file);
