Send your first request

Send your first request using Thordata’s Web Scraper API.

Before you start, you need an API Token. You can obtain a free trial token on the [API Builder] page, then copy your credentials from the “Token” field.

Note: Your token is sensitive information. Keep it secure and do not share it. If the token is leaked and your resources are abused, you bear the consequences; the platform is not responsible.

Code example: After obtaining your API credentials, use one of the following snippets to send your first request, replacing Token-ID with your own token:

cURL

curl --request POST \
  --url "https://scraperapi.thordata.com/builder?product_id=3" \
  --header "Authorization: Bearer Token-ID" \
  --header "content-type: application/json" \
  --data "{\"spider_info\":{\"spider_parameters\":[{\"url\":\"https://www.amazon.com/HISDERN-Checkered-Handkerchief-Classic-Necktie/dp/B0BRXPR726\"}],\"spider_id\":\"3\",\"spider_errors\":true},\"spider_name\":\"amazon.com\"}"

Python

import http.client

conn = http.client.HTTPSConnection("scraperapi.thordata.com")

payload = "{\"spider_info\":{\"spider_parameters\":[{\"url\":\"https://www.amazon.com/HISDERN-Checkered-Handkerchief-Classic-Necktie/dp/B0BRXPR726\"}],\"spider_id\":\"3\",\"spider_errors\":true},\"spider_name\":\"amazon.com\"}"

headers = {
    'Authorization': "Bearer Token-ID",
    'content-type': "application/json"
    }

conn.request("POST", "/builder?product_id=3", payload, headers)

res = conn.getresponse()
data = res.read()

print(data.decode("utf-8"))

Go

package main

import (
	"fmt"
	"strings"
	"net/http"
	"io"
)

func main() {

	url := "https://scraperapi.thordata.com/builder?product_id=3"

	payload := strings.NewReader("{\"spider_info\":{\"spider_parameters\":[{\"url\":\"https://www.amazon.com/HISDERN-Checkered-Handkerchief-Classic-Necktie/dp/B0BRXPR726\"}],\"spider_id\":\"3\",\"spider_errors\":true},\"spider_name\":\"amazon.com\"}")

	req, _ := http.NewRequest("POST", url, payload)

	req.Header.Add("Authorization", "Bearer Token-ID")
	req.Header.Add("content-type", "application/json")

	res, _ := http.DefaultClient.Do(req)

	defer res.Body.Close()
	body, _ := io.ReadAll(res.Body)

	fmt.Println(res)
	fmt.Println(string(body))

}

Node.js

const http = require("https");

const options = {
  "method": "POST",
  "hostname": "scraperapi.thordata.com",
  "port": null,
  "path": "/builder?product_id=3",
  "headers": {
    "Authorization": "Bearer Token-ID",
    "content-type": "application/json"
  }
};

const req = http.request(options, function (res) {
  const chunks = [];

  res.on("data", function (chunk) {
    chunks.push(chunk);
  });

  res.on("end", function () {
    const body = Buffer.concat(chunks);
    console.log(body.toString());
  });
});

req.write(JSON.stringify({
  "spider_info": {
    "spider_parameters": [
      { "url": "https://www.amazon.com/HISDERN-Checkered-Handkerchief-Classic-Necktie/dp/B0BRXPR726" }
    ],
    "spider_id": "3",
    "spider_errors": true
  },
  "spider_name": "amazon.com"
}));
req.end();

PHP

<?php

$curl = curl_init();

curl_setopt_array($curl, [
  CURLOPT_URL => "https://scraperapi.thordata.com/builder?product_id=3",
  CURLOPT_RETURNTRANSFER => true,
  CURLOPT_ENCODING => "",
  CURLOPT_MAXREDIRS => 10,
  CURLOPT_TIMEOUT => 30,
  CURLOPT_HTTP_VERSION => CURL_HTTP_VERSION_1_1,
  CURLOPT_CUSTOMREQUEST => "POST",
  CURLOPT_POSTFIELDS => "{\"spider_info\":{\"spider_parameters\":[{\"url\":\"https://www.amazon.com/HISDERN-Checkered-Handkerchief-Classic-Necktie/dp/B0BRXPR726\"}],\"spider_id\":\"3\",\"spider_errors\":true},\"spider_name\":\"amazon.com\"}",
  CURLOPT_HTTPHEADER => [
    "Authorization: Bearer Token-ID",
    "content-type: application/json"
  ],
]);

$response = curl_exec($curl);
$err = curl_error($curl);

curl_close($curl);

if ($err) {
  echo "cURL Error #:" . $err;
} else {
  echo $response;
}

Java

import java.net.URI;
import java.net.http.HttpClient;
import java.net.http.HttpRequest;
import java.net.http.HttpResponse;

HttpRequest request = HttpRequest.newBuilder()
    .uri(URI.create("https://scraperapi.thordata.com/builder?product_id=3"))
    .header("Authorization", "Bearer Token-ID")
    .header("content-type", "application/json")
    .method("POST", HttpRequest.BodyPublishers.ofString("{\"spider_info\":{\"spider_parameters\":[{\"url\":\"https://www.amazon.com/HISDERN-Checkered-Handkerchief-Classic-Necktie/dp/B0BRXPR726\"}],\"spider_id\":\"3\",\"spider_errors\":true},\"spider_name\":\"amazon.com\"}"))
    .build();
HttpResponse<String> response = HttpClient.newHttpClient().send(request, HttpResponse.BodyHandlers.ofString());
System.out.println(response.body());

C#

using System.Net.Http.Headers;

var client = new HttpClient();
var request = new HttpRequestMessage
{
    Method = HttpMethod.Post,
    RequestUri = new Uri("https://scraperapi.thordata.com/builder?product_id=3"),
    Headers =
    {
        { "Authorization", "Bearer Token-ID" },
    },
    Content = new StringContent("{\"spider_info\":{\"spider_parameters\":[{\"url\":\"https://www.amazon.com/HISDERN-Checkered-Handkerchief-Classic-Necktie/dp/B0BRXPR726\"}],\"spider_id\":\"3\",\"spider_errors\":true},\"spider_name\":\"amazon.com\"}")
    {
        Headers =
        {
            ContentType = new MediaTypeHeaderValue("application/json")
        }
    }
};
using (var response = await client.SendAsync(request))
{
    response.EnsureSuccessStatusCode();
    var body = await response.Content.ReadAsStringAsync();
    Console.WriteLine(body);
}

Ruby

require 'uri'
require 'net/http'
require 'openssl'

url = URI("https://scraperapi.thordata.com/builder?product_id=3")

http = Net::HTTP.new(url.host, url.port)
http.use_ssl = true
http.verify_mode = OpenSSL::SSL::VERIFY_PEER # verify the server certificate rather than disabling TLS checks

request = Net::HTTP::Post.new(url)
request["Authorization"] = 'Bearer Token-ID'
request["content-type"] = 'application/json'
request.body = "{\"spider_info\":{\"spider_parameters\":[{\"url\":\"https://www.amazon.com/HISDERN-Checkered-Handkerchief-Classic-Necktie/dp/B0BRXPR726\"}],\"spider_id\":\"3\",\"spider_errors\":true},\"spider_name\":\"amazon.com\"}"

response = http.request(request)
puts response.read_body
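
If you work in Python and prefer the widely used third-party requests package (not shown in the official snippets above), the same call can be written with the payload as a plain dictionary, which makes its structure easier to read. The sketch below only assumes the endpoint, headers, and fields from the examples above; the inline notes on what each field means are inferences rather than official definitions.

Python (requests sketch)

# Sketch only: the same request as above, sent with the third-party "requests"
# package (pip install requests). Field comments are inferred from the example.
import json

import requests

API_URL = "https://scraperapi.thordata.com/builder"
TOKEN = "Token-ID"  # replace with the token copied from the API Builder page

payload = {
    "spider_info": {
        "spider_parameters": [
            # One entry per page to scrape; here a single Amazon product URL.
            {"url": "https://www.amazon.com/HISDERN-Checkered-Handkerchief-Classic-Necktie/dp/B0BRXPR726"}
        ],
        "spider_id": "3",       # scraper ID used in the official examples
        "spider_errors": True,  # presumably includes error details in the output
    },
    "spider_name": "amazon.com",
}

response = requests.post(
    API_URL,
    params={"product_id": 3},  # query parameter from the examples above
    headers={"Authorization": f"Bearer {TOKEN}", "content-type": "application/json"},
    data=json.dumps(payload),
    timeout=30,
)

print(response.status_code)
print(response.text)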

After you send the request, the system returns the output in JSON or CSV format. You can also view it in the “Tasks” list.
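
Continuing from the requests sketch above, the output can be saved instead of printed. This is only a sketch: it assumes the response’s Content-Type header indicates whether the body is JSON or CSV, which this page does not state explicitly.

# Sketch only: persist the output, assuming the Content-Type header signals
# JSON vs. CSV (an assumption; not documented on this page).
import json

content_type = response.headers.get("content-type", "")

if "json" in content_type:
    result = response.json()  # parse the JSON body
    with open("task_result.json", "w", encoding="utf-8") as f:
        json.dump(result, f, ensure_ascii=False, indent=2)
elif "csv" in content_type:
    with open("task_result.csv", "w", encoding="utf-8") as f:
        f.write(response.text)  # write the CSV body as-is
else:
    print(response.text)  # fall back to printing the raw output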

If you need further assistance, please contact us via email at support@thordata.com.