Everything you need to build your own SEO tool.
We provide data from the Google and Bing search engines.
We handle the complex infrastructure so our customers can build their own SEO tools.
Results are delivered through a REST API in JSON format, which is very easy to use.
With the Google and Bing SERP API you can get the result snippets listed below in a JSON response.
We have also recently introduced an Image API, which lets you perform Google and Bing image searches and get the results in a JSON response.
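For illustration, here is a minimal Python sketch of an image search. It assumes the same live endpoint accepts a serp_type of "image"; that value and the response shape are assumptions, not taken from the snippets below.

import requests

# Assumed: the live endpoint also serves image searches when serp_type is "image".
payload = {
    "data": {
        "domain": "google.com",
        "lang": "en",
        "q": "Coffee",
        "loc": "Texas,United States",
        "device": "desktop",
        "serp_type": "image",  # assumed value for the Image API
    }
}

response = requests.post(
    "https://api.serphouse.com/serp/live",
    json=payload,
    headers={"accept": "application/json",
             "authorization": "Bearer API_TOKEN"},
)
print(response.json())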
You can choose the device type (desktop, tablet, or mobile), because search engines return different results for different devices.
Search engines also tailor results to the user's location, so you can choose the location and language you want to target.
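As a rough sketch of how those targeting options combine, the snippet below issues the same query for each device type; the parameter names (device, loc, lang) are the ones used in the code samples further down.

import requests

HEADERS = {
    "accept": "application/json",
    "content-type": "application/json",
    "authorization": "Bearer API_TOKEN",
}

def live_search(q, device, loc, lang="en"):
    # POST one live search and return the parsed JSON response.
    payload = {"data": {"domain": "google.com", "lang": lang, "q": q,
                        "loc": loc, "device": device, "serp_type": "web"}}
    resp = requests.post("https://api.serphouse.com/serp/live",
                         json=payload, headers=HEADERS, timeout=30)
    resp.raise_for_status()
    return resp.json()

# The same keyword, targeted at one location, across all three device types.
for device in ("desktop", "tablet", "mobile"):
    result = live_search("Coffee", device, "Texas,United States")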
Once a search is completed, we ping a URL that you provide. A postback includes the full result data, while a pingback simply calls that URL with the task ID.
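A minimal sketch of a server that could receive such a ping is shown below, using only the Python standard library. The request shape is an assumption: we assume a postback POSTs the result JSON and a pingback carries only a task ID, and the field names here are placeholders.

import json
from http.server import BaseHTTPRequestHandler, HTTPServer

class SerpPingHandler(BaseHTTPRequestHandler):
    def do_POST(self):
        length = int(self.headers.get("Content-Length", 0))
        body = json.loads(self.rfile.read(length) or b"{}")
        if "results" in body:              # assumed postback shape: full result data
            print("postback received, keys:", list(body.keys()))
        else:                              # assumed pingback shape: task ID only
            print("pingback for task:", body.get("task_id"))
        self.send_response(200)
        self.end_headers()

if __name__ == "__main__":
    HTTPServer(("0.0.0.0", 8080), SerpPingHandler).serve_forever()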
Each API request runs immediately; there is no queue to wait in, and a live search typically returns its result within 5 seconds.
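Because a live request can take a few seconds, it is worth giving the HTTP client a timeout with some headroom. A small Python sketch:

import requests

try:
    resp = requests.post(
        "https://api.serphouse.com/serp/live",
        json={"data": {"domain": "google.com", "lang": "en", "q": "Coffee",
                       "loc": "Texas,United States", "device": "desktop",
                       "serp_type": "web"}},
        headers={"authorization": "Bearer API_TOKEN"},
        timeout=10,  # seconds; live searches usually answer within 5
    )
    resp.raise_for_status()
    print(resp.json())
except requests.Timeout:
    print("Live search did not answer in time; retry or fall back to a delayed search.")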
In addition, each API request runs in a full browser and we solve any CAPTCHAs, completely mimicking what a human would do. This guarantees that you get what users actually see.
Regular organic results are available, as well as Maps, Related Searches, People Also Ask, Answer Box, People Also Search For, and Knowledge Graph.
Retrieve ad results in the JSON response of the request (supported on Google only).
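As a sketch of how those snippet types might be read out of a response, the fragment below assumes top-level keys such as "results", "organic", and "ads"; those names are illustrative placeholders, so check the actual response schema.

import requests

response = requests.post(
    "https://api.serphouse.com/serp/live",
    json={"data": {"domain": "google.com", "lang": "en", "q": "Coffee",
                   "loc": "Texas,United States", "device": "desktop",
                   "serp_type": "web"}},
    headers={"authorization": "Bearer API_TOKEN"},
)
data = response.json()

results = data.get("results", {})           # assumed top-level key
for item in results.get("organic", []):     # assumed list of organic results
    print(item.get("title"), item.get("link"))
for ad in results.get("ads", []):           # assumed; ads are returned for Google only
    print("ad:", ad.get("title"))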
Scrape search results with our fast, easy, and complete API.
Use one of the code snippets below to get started.
curl -X POST \
https://api.serphouse.com/serp/live \
-H 'accept: application/json' \
-H 'authorization: Bearer API_TOKEN' \
-H 'content-type: application/json' \
-d '{
"data": {
"domain":"google.com",
"lang":"en",
"q": "Coffee",
"loc":"Texas,United States",
"device": "desktop",
"serp_type": "web"
}
}'
require 'uri'
require 'net/http'
url = URI("https://api.serphouse.com/serp/live")
http = Net::HTTP.new(url.host, url.port)
http.use_ssl = true
request = Net::HTTP::Post.new(url)
request["accept"] = 'application/json'
request["content-type"] = 'application/json'
request["authorization"] = 'Bearer API_TOKEN'
request.body = '{"data": {
"domain": "google.com",
"lang": "en",
"q": "Coffee",
"loc": "Texas,United States",
"device": "desktop",
"serp_type": "web"
}}'
response = http.request(request)
puts response.read_body
import requests

url = "https://api.serphouse.com/serp/live"

# Request body for a live desktop web search.
payload = {
    "data": {
        "domain": "google.com",
        "lang": "en",
        "q": "Coffee",
        "loc": "Texas,United States",
        "device": "desktop",
        "serp_type": "web"
    }
}

headers = {
    'accept': "application/json",
    'content-type': "application/json",
    'authorization': "Bearer API_TOKEN"
}

response = requests.post(url, json=payload, headers=headers)
print(response.text)
var http = require("https");
var options = {
"method": "POST",
"hostname": "https://api.serphouse.com",
"port": null,
"path": "/serp/live",
"headers": {
"accept": "application/json",
"content-type": "application/json",
"authorization": "Bearer API_TOKEN"
}
};
var req = http.request(options, function (res) {
var chunks = [];
res.on("data", function (chunk) {
chunks.push(chunk);
});
res.on("end", function () {
var body = Buffer.concat(chunks);
console.log(body.toString());
});
});
req.write(JSON.stringify({
data: {
domain: 'google.com',
lang: 'en',
q: 'Coffee',
loc: 'Texas,United States',
device: 'desktop',
serp_type: 'web'
}
}));
req.end();
$curl = curl_init();
curl_setopt_array($curl, array(
CURLOPT_URL => "https://api.serphouse.com/serp/live",
CURLOPT_RETURNTRANSFER => true,
CURLOPT_ENCODING => "",
CURLOPT_MAXREDIRS => 10,
CURLOPT_TIMEOUT => 30,
CURLOPT_HTTP_VERSION => CURL_HTTP_VERSION_1_1,
CURLOPT_CUSTOMREQUEST => "POST",
CURLOPT_POSTFIELDS => '{"data": {
"domain":"google.com",
"lang": "en",
"q": "Coffee",
"loc": "Texas,United States",
"device": "desktop",
"serp_type": "web"
}}',
CURLOPT_HTTPHEADER => array(
"accept: application/json",
"authorization: Bearer API_TOKEN",
"content-type: application/json"
),
));
$response = curl_exec($curl);
$err = curl_error($curl);
curl_close($curl);
if ($err) {
echo "cURL Error #:" . $err;
}
else {
echo $response;
}
OkHttpClient client = new OkHttpClient();
MediaType mediaType = MediaType.parse("application/json");
RequestBody body = RequestBody.create(mediaType,
    "{\"data\": {"
        + "\"domain\": \"google.com\","
        + "\"lang\": \"en\","
        + "\"q\": \"Coffee\","
        + "\"loc\": \"Texas,United States\","
        + "\"device\": \"desktop\","
        + "\"serp_type\": \"web\"}}");
Request request = new Request.Builder()
.url("https://api.serphouse.com/serp/live")
.post(body)
.addHeader("accept", "application/json")
.addHeader("content-type", "application/json")
.addHeader("authorization", "Bearer API_TOKEN")
.build();
Response response = client.newCall(request).execute();
package main
import (
"fmt"
"strings"
"net/http"
"io/ioutil"
)
func main() {
url := "https://api.serphouse.com/serp/live"
payload := strings.NewReader(`{"data": {
    "domain": "google.com",
    "lang": "en",
    "q": "Coffee",
    "loc": "Texas,United States",
    "device": "desktop",
    "serp_type": "web"}}`)
req, _ := http.NewRequest("POST", url, payload)
req.Header.Add("accept", "application/json")
req.Header.Add("content-type", "application/json")
req.Header.Add("authorization", "Bearer API_TOKEN")
res, _ := http.DefaultClient.Do(req)
defer res.Body.Close()
body, _ := ioutil.ReadAll(res.Body)
fmt.Println(res)
fmt.Println(string(body))
}
No credit card required • Cancel at any time
200 Delayed searches • 200 Real-time searches • Data from Google and Bing
1000 Delayed searches • 1000 Real-time searches • Data from Google and Bing
6000 Delayed searches • 6000 Real-time searches • Data from Google and Bing
X Delayed searches • X Real-time searches • Data from Google and Bing
24/7 Support
Find the perfect plan for you — 100% satisfaction guaranteed.