Scrape.do exposes a simple, customizable, and very fast proxy web scraping API through a single endpoint.
Datacenter, residential, and mobile IPs from a large IP pool are used to crawl the target site with a 99.9% success rate, using a different IP for every request.
Each request returns the raw HTML of the page along with its headers and cookies.
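To see the rotation in action, you can echo back the IP address the target sees. A minimal Python sketch, assuming the third-party echo service httpbin.org/ip (not part of Scrape.do):

import requests

API = "http://api.scrape.do"
TOKEN = "API_TOKEN"  # replace with your personal API key

# Two requests through the API; each should report a different origin IP.
for _ in range(2):
    r = requests.get(API, params={"token": TOKEN, "url": "http://httpbin.org/ip"})
    print(r.status_code, r.text)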
You are only charged for requests that return a 200 or 404 status code; for any other status code, you are not charged.
If you exceed the rate limit, you will receive a 429 error code. You can avoid this by keeping your concurrent requests within the limits of your subscription plan.
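One way to handle this is to back off and retry whenever a 429 comes back. A minimal Python sketch; the retry count and delays below are arbitrary choices, not values prescribed by Scrape.do:

import time
import requests

def fetch(target_url, token, max_retries=3):
    """GET a page through the API, backing off when rate-limited (429)."""
    for attempt in range(max_retries):
        r = requests.get(
            "http://api.scrape.do",
            params={"token": token, "url": target_url},
        )
        if r.status_code != 429:
            return r
        time.sleep(2 ** attempt)  # exponential backoff: 1s, 2s, 4s, ...
    raise RuntimeError("still rate-limited after retries")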
Please note that there is a 2 MB response size limit for each request! If a response exceeds the limit, the request is still considered successful, but only the first 2 MB of the full response is returned to you.
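If you need to know whether a body was cut off, you can check its size on your side. A sketch that assumes the cut happens at exactly 2 * 1024 * 1024 bytes; the precise byte boundary is an assumption, not stated by the docs:

MAX_BYTES = 2 * 1024 * 1024  # documented 2 MB response cap

def looks_truncated(response):
    # A body at (or above) the cap was most likely cut off.
    return len(response.content) >= MAX_BYTES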
If you exceed your monthly request limit or have an unpaid bill, you will receive a 401 error.
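Taken together, the billing rules can be wrapped in a small helper; the mapping below only restates the notes above:

def classify(status):
    """Interpret an API status code according to the billing notes above."""
    if status in (200, 404):
        return "billed"
    if status == 401:
        return "monthly limit exceeded or unpaid bill"
    if status == 429:
        return "rate limited - slow down"
    return "not billed"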
You can combine the features described on the other pages by sending more than one parameter in the same request.
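In Python, a params dict is a convenient way to combine parameters, and it also URL-encodes the target address for you. In this sketch, render stands in for whichever feature parameter you pick from the other pages; it is an illustrative placeholder, not a parameter confirmed by this section:

import requests

r = requests.get(
    "http://api.scrape.do",
    params={
        "token": "API_TOKEN",
        "url": "https://www.example.com",
        "render": "true",  # hypothetical feature parameter from another page
    },
)
print(r.status_code)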
API_TOKEN: your personal API key.
url: the URL of the target website you want to crawl.
The free plan includes 1,000 API calls per month with 5 concurrent requests, so you can test the service as you wish.
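If you script many requests, capping your own parallelism at the plan's concurrency keeps you clear of 429s. A sketch with a thread pool sized to the free plan's limit of 5; the workload list is made up for illustration:

import requests
from concurrent.futures import ThreadPoolExecutor

def scrape(target):
    return requests.get(
        "http://api.scrape.do",
        params={"token": "API_TOKEN", "url": target},
    ).status_code

targets = ["https://www.example.com"] * 10  # made-up workload

# max_workers matches the free plan's 5 concurrent requests
with ThreadPoolExecutor(max_workers=5) as pool:
    print(list(pool.map(scrape, targets)))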
$ curl "http://api.scrape.do?token=API_TOKEN&url=https://www.example.com"
import requests

url = "http://api.scrape.do?token=API_TOKEN&url=http://example.com"
payload = {}
headers = {}

response = requests.request("GET", url, headers=headers, data=payload)
print(response.text.encode('utf8'))
package main

import (
	"fmt"
	"io/ioutil"
	"net/http"
)

func main() {
	url := "http://api.scrape.do?token=API_TOKEN&url=http://example.com"
	method := "GET"

	client := &http.Client{}
	req, err := http.NewRequest(method, url, nil)
	if err != nil {
		fmt.Println(err)
		return
	}
	res, err := client.Do(req)
	if err != nil {
		fmt.Println(err)
		return
	}
	defer res.Body.Close()

	body, err := ioutil.ReadAll(res.Body)
	if err != nil {
		fmt.Println(err)
		return
	}
	fmt.Println(string(body))
}
var client = new RestClient("http://api.scrape.do?token=API_TOKEN&url=http://example.com");
client.Timeout = -1;
var request = new RestRequest(Method.GET);
IRestResponse response = client.Execute(request);
Console.WriteLine(response.Content);
OkHttpClient client = new OkHttpClient().newBuilder().build();
Request request = new Request.Builder()
    .url("http://api.scrape.do?token=API_TOKEN&url=http://example.com")
    .method("GET", null)
    .build();
Response response = client.newCall(request).execute();
var axios = require('axios');

var config = {
  method: 'get',
  url: 'http://api.scrape.do?token=API_TOKEN&url=http://example.com',
  headers: {}
};

axios(config)
  .then(function (response) {
    console.log(JSON.stringify(response.data));
  })
  .catch(function (error) {
    console.log(error);
  });
var xhr = new XMLHttpRequest();
xhr.withCredentials = true;

xhr.addEventListener("readystatechange", function () {
  if (this.readyState === 4) {
    console.log(this.responseText);
  }
});

xhr.open("GET", "http://api.scrape.do?token=API_TOKEN&url=http://example.com");
xhr.send();
require "uri"require "net/http"โurl = URI("http://api.scrape.do?token=API_TOKEN&url=http://example.com")โhttp = Net::HTTP.new(url.host, url.port);request = Net::HTTP::Get.new(url)โresponse = http.request(request)puts response.read_body
<?php

$curl = curl_init();

curl_setopt_array($curl, array(
  CURLOPT_URL => "http://api.scrape.do?token=API_TOKEN&url=http://example.com",
  CURLOPT_RETURNTRANSFER => true,
  CURLOPT_ENCODING => "",
  CURLOPT_MAXREDIRS => 10,
  CURLOPT_TIMEOUT => 0,
  CURLOPT_FOLLOWLOCATION => true,
  CURLOPT_HTTP_VERSION => CURL_HTTP_VERSION_1_1,
  CURLOPT_CUSTOMREQUEST => "GET",
));

$response = curl_exec($curl);

curl_close($curl);
echo $response;
import Foundation

var semaphore = DispatchSemaphore(value: 0)

var request = URLRequest(url: URL(string: "http://api.scrape.do?token=API_TOKEN&url=http://example.com")!, timeoutInterval: Double.infinity)
request.httpMethod = "GET"

let task = URLSession.shared.dataTask(with: request) { data, response, error in
    guard let data = data else {
        print(String(describing: error))
        semaphore.signal() // signal on failure too, so the program does not hang
        return
    }
    print(String(data: data, encoding: .utf8)!)
    semaphore.signal()
}

task.resume()
semaphore.wait()
Example response:

<!doctype html>
<html>
<head>
    <title>Example Domain</title>
    <meta charset="utf-8" />
    <meta http-equiv="Content-type" content="text/html; charset=utf-8" />
    ...
</head>
</html>
That is all!
If you have any questions, please feel free to contact us or email us at support@scrape.do.