Asynchronous Requests
Web Scraping API Asynchronous Requests
Queue up multiple requests and receive the task_id instantly – after the task is completed, you can retrieve the results from your request using said task_id.
Queue a Single Task
Single query or URL endpoint: https://scraper-api.decodo.com/v3/task
https://scraper-api.decodo.com/v3/task
Make a POST request to this endpoint with your preferred parameters to receive the task_id for retrieval once the task is done, along with the parameters used.
curl --request 'POST' \
--url 'https://scraper-api.decodo.com/v3/task' \
--header 'Accept: application/json' \
--header 'Authorization: Basic TOKEN VALUE' \ // update with your authorization token
--header 'Content-Type: application/json' \
--data '
{
"url": "https://ip.decodo.com"
}
'const scrape = async() => {
const response = await fetch("https://scraper-api.decodo.com/v3/task", {
method: "POST",
body: JSON.stringify({
"url": "https://ip.decodo.com"
}),
headers: {
"Content-Type": "application/json",
"Authorization": "Basic TOKEN VALUE" // update with your authorization token
},
}).catch(error => console.log(error));
console.log(await response.json())
}
scrape()
import requests

# Queue a single scraping task; the JSON response includes the task_id
# needed to retrieve the result once the task completes.
response = requests.post(
    "https://scraper-api.decodo.com/v3/task",
    json={"url": "https://ip.decodo.com"},
    headers={
        "accept": "application/json",
        "content-type": "application/json",
        "authorization": "Basic TOKEN VALUE",  # update with your authorization token
    },
)
print(response.text)
Response Example
{
"url": "https://ip.decodo.com",
"page_from": 1,
"limit": 10,
"geo": null,
"device_type": "desktop",
"headless": null,
"parse": false,
"locale": null,
"domain": "com",
"output_schema": null,
"created_at": "2026-03-04 11:51:04",
"id": "7434928397127555073",
"status": "pending",
"content_encoding": "utf-8",
"updated_at": "2026-03-04 11:51:04",
"page_count": 1,
"http_method": "get",
"cookies": [],
"force_headers": false,
"force_cookies": false,
"headers": [],
"session_id": null,
"successful_status_codes": [],
"follow_redirect": null,
"payload": null,
"store_id": null,
"headers_cookies_policy": false
}
Retrieve result using task_id
Single query or URL endpoint: https://scraper-api.decodo.com/v3/task/{task_id}/results
https://scraper-api.decodo.com/v3/task/{task_id}/results
Make a GET request to this endpoint, replacing {task_id} with the ID received from the previous POST request to retrieve the result.
Results can be retrieved an unlimited number of times within 24 hours of the initial request
curl --request 'GET' \
--url 'https://scraper-api.decodo.com/v3/task/{task_id}/results' \
--header 'Accept: application/json' \
--header 'Authorization: Basic TOKEN VALUE' \ // update with your authorization tokenconst scrape = async() => {
const response = await fetch("https://scraper-api.decodo.com/v3/task/{task_id}/results", {
method: "GET",
headers: {
"Content-Type": "application/json",
"Authorization": "Basic TOKEN VALUE" // update with your authorization token
},
}).catch(error => console.log(error));
console.log(await response.json())
}
scrape()
import requests

# Retrieve the stored result of a completed task by its id.
# Results can be fetched repeatedly within 24 hours of the initial request.
task_id = "TASK_ID"
url = f"https://scraper-api.decodo.com/v3/task/{task_id}/results"
headers = {
    "accept": "application/json",
    "authorization": "Basic TOKEN VALUE"  # update with your authorization token
}
response = requests.get(url, headers=headers)
print(response.text)  # added: the sibling examples print the response body
Get task status using task_id
Asynchronous task status can be checked manually:
curl --request 'GET' \
--url 'https://scraper-api.decodo.com/v3/task/{task_id}' \
--header 'Accept: application/json' \
--header 'Authorization: Basic TOKEN VALUE' \ // update with your authorization tokenconst scrape = async() => {
const response = await fetch("https://scraper-api.decodo.com/v3/task/{task_id}", {
method: "GET",
headers: {
"Content-Type": "application/json",
"Authorization": "Basic TOKEN VALUE" // update with your authorization token
},
}).catch(error => console.log(error));
console.log(await response.json())
}
scrape()
import requests

# Check the status of an asynchronous task by its id
# ("pending", "done", or "faulted").
task_id = "TASK_ID"
url = f"https://scraper-api.decodo.com/v3/task/{task_id}"
headers = {
    "accept": "application/json",
    "authorization": "Basic TOKEN VALUE"  # update with your authorization token
}
response = requests.get(url, headers=headers)
print(response.text)  # added: the sibling examples print the response body
Response will contain request information and task status.
If task is in progress status will be set to pending:
{
"status": "pending",
"id": "7447868801825135617",
"created_at": "2026-04-09 04:51:37",
"updated_at": "2026-04-09 04:51:37",
"target": "google_ai_mode",
"query": "best health trackers under $200",
"page_from": 1,
"limit": 10,
"device_type": "desktop",
"headless": "html",
"parse": true,
"domain": "com",
"page_count": 1,
"force_headers": false,
"force_cookies": false,
"headers_cookies_policy": false
}
When task has been completed successfully status will be changed to done:
{
"status": "done",
"id": "7447866391652228097",
"created_at": "2026-04-09 04:42:02",
"updated_at": "2026-04-09 04:42:16",
"target": "google_ai_mode",
"query": "best health trackers under $200",
"page_from": 1,
"limit": 10,
"device_type": "desktop",
"headless": "html",
"parse": true,
"domain": "com",
"page_count": 1,
"force_headers": false,
"force_cookies": false,
"headers_cookies_policy": false
}
If task has failed, status will be set to faulted.
Queue multiple tasks
Batch query or URL endpoint: https://scraper-api.decodo.com/v3/task/batch
https://scraper-api.decodo.com/v3/task/batch
Make a POST request to this endpoint, providing multiple queries or URLs in JSON format.
Batch requests are limited to 1 request per second.
With a single batch you can submit either multiple queries or urls, but not both. Also, one batch must have only one
target, like google_search shown in the example below. You can submit up to 3000 URLs/queries per one batch request.
curl --request 'POST' \
--url 'https://scraper-api.decodo.com/v3/task/batch' \
--header 'Accept: application/json' \
--header 'Authorization: Basic TOKEN VALUE' \ // update with your authorization token
--header 'Content-Type: application/json' \
--data '
{
"url": [
"https://ip.decodo.com",
"https://ip.decodo.com",
"https://ip.decodo.com"
]
}
'const scrape = async() => {
const response = await fetch("https://scraper-api.decodo.com/v3/task/batch", {
method: "POST",
body: JSON.stringify({
"url": ["https://ip.decodo.com", "https://ip.decodo.com", "https://ip.decodo.com"]
}),
headers: {
"Content-Type": "application/json",
"Authorization": "Basic TOKEN VALUE" // update with your authorization token
},
}).catch(error => console.log(error));
console.log(await response.json())
}
scrape()
import requests

# Queue multiple URLs as a single batch task.
# Fixed: this example posted to /v3/task; the batch endpoint is /v3/task/batch
# (matching the cURL and JavaScript examples above).
url = "https://scraper-api.decodo.com/v3/task/batch"
payload = {
    # NOTE(review): aligned to the "url" key used by the cURL and JavaScript
    # examples (this snippet previously used "urls") — confirm against the API
    # reference.
    "url": [
        "https://ip.decodo.com",
        "https://ip.decodo.com",
        "https://ip.decodo.com"
    ]
}
headers = {
    "accept": "application/json",
    "content-type": "application/json",
    "authorization": "Basic TOKEN VALUE"  # update with your authorization token
}
response = requests.post(url, json=payload, headers=headers)
print(response.text)  # added: the sibling examples print the response body
curl --request 'POST' \
--url 'https://scraper-api.decodo.com/v3/task/batch' \
--header 'Accept: application/json' \
--header 'Authorization: Basic TOKEN VALUE' \ // update with your authorization token
--header 'Content-Type: application/json' \
--data '
{
"target": "google_search",
"query": [
"sky",
"cloud",
"computer"
]
}
'const scrape = async() => {
const response = await fetch("https://scraper-api.decodo.com/v3/task/batch", {
method: "POST",
headers: {
"Accept": "application/json",
"Content-Type": "application/json",
"Authorization": "Basic TOKEN VALUE" // update with your authorization token
},
body: JSON.stringify({
target: "google_search",
query: [
"sky",
"cloud",
"computer"
]
})
}).catch(error => console.log(error));
console.log(await response.json())
}
scrape()import requests
# Queue multiple search queries in a single batch request.
response = requests.post(
    "https://scraper-api.decodo.com/v3/task/batch",
    json={
        "target": "google_search",
        "query": ["sky", "cloud", "computer"],
    },
    headers={
        "accept": "application/json",
        "content-type": "application/json",
        "authorization": "Basic TOKEN VALUE",  # update with your authorization token
    },
)
print(response.text)
Receive task status to your callback_url when it's done
This will work with any async endpoint by entering the callback_url as one of the parameters
callback_url as one of the parameters
We will make a POST request to your provided URL with the task_id and parameters used once the task is done.
You can use a website like this one to test receiving a response.
Example using single task endpoint:
curl --request 'POST' \
--url 'https://scraper-api.decodo.com/v3/task' \
--header 'Accept: application/json' \
--header 'Authorization: Basic TOKEN VALUE' \ // update with your authorization token
--header 'Content-Type: application/json' \
--data '
{
"url": "https://ip.decodo.com",
"callback_url": "https://your.url"
}
'const scrape = async() => {
const response = await fetch("https://scraper-api.decodo.com/v3/task", {
method: "POST",
headers: {
"Accept": "application/json",
"Content-Type": "application/json",
"Authorization": "Basic TOKEN VALUE" // update with your authorization token
},
body: JSON.stringify({
url: "https://ip.decodo.com",
callback_url: "https://your.url"
})
}).catch(error => console.log(error));
console.log(await response.json())
}
scrape()import requests
# Queue a task with a callback_url; the API will POST the task status and
# parameters to that URL when the task is done.
response = requests.post(
    "https://scraper-api.decodo.com/v3/task",
    json={
        "url": "https://ip.decodo.com",
        "callback_url": "https://your.url",
    },
    headers={
        "accept": "application/json",
        "content-type": "application/json",
        "authorization": "Basic TOKEN VALUE",  # update with your authorization token
    },
)
print(response.text)
Example of a response you will receive:
{
"id": "7039164056019693569",
"status": "done",
"target": "universal",
"query": "",
"url": "https://ip.decodo.com",
"domain": "com",
"limit": 10,
"locale": null,
"geo": null,
"device_type": "desktop",
"page_from": 1,
"parse": 0,
"output_schema": null,
"headless": null,
"priority": 0,
"persist": true,
"content_encoding": "utf-8",
"created_at": "2023-03-08 09:24:52",
"updated_at": "2023-03-08 09:24:52"
}
You can then use the id to retrieve the result of your task using this endpoint:
https://scraper-api.decodo.com/v3/task/{task_id}/results
For example, to retrieve the result from the above example, you would send a GET request to:
https://scraper-api.decodo.com/v3/task/7039164056019693569/results
Authenticating callbacks
To verify that a callback request is actually from Scraper API, include a note in the passthrough parameter. The parameter passthrough along with its contents will be present in the payload sent back to your endpoint.
curl --request 'POST' \
--url 'https://scraper-api.decodo.com/v3/task' \
--header 'Accept: application/json' \
--header 'Authorization: Basic TOKEN VALUE' \ // update with your authorization token
--header 'Content-Type: application/json' \
--data '
{
"target": "amazon_pricing",
"query": "B0BS1QCFHX",
"parse": true,
"callback_url": "https://your.callback.url",
"passtrough": "your_note"
}
'const scrape = async() => {
const response = await fetch("https://scraper-api.decodo.com/v3/task", {
method: "POST",
headers: {
"Accept": "application/json",
"Content-Type": "application/json",
"Authorization": "Basic TOKEN VALUE" // update with your authorization token
},
body: JSON.stringify({
target: "amazon_pricing",
query: "B0BS1QCFHX",
parse: true,
callback_url: "https://your.callback.url",
passtrough: "your_note"
})
}).catch(error => console.log(error));
console.log(await response.json())
}
scrape()import requests
# Queue a task carrying a note in the passthrough parameter; the note is
# echoed back in the callback payload so you can verify the callback's origin.
response = requests.post(
    "https://scraper-api.decodo.com/v3/task",
    json={
        "target": "amazon_pricing",
        "query": "B0BS1QCFHX",
        "parse": True,
        "callback_url": "https://your.callback.url",
        # NOTE(review): spelled "passtrough" in these examples but "passthrough"
        # in the surrounding text — confirm the actual parameter name.
        "passtrough": "your_note",
    },
    headers={
        "accept": "application/json",
        "content-type": "application/json",
        "authorization": "Basic TOKEN VALUE",  # update with your authorization token
    },
)
print(response.text)
Support
Still can't find an answer? Want to say hi? We take pride in our 24/7 customer support. Alternatively, you can reach us via our support email at [email protected].
Updated 9 days ago