From 90f45fd1c9bc0616eb1b59f24ba86b7d38878d91 Mon Sep 17 00:00:00 2001 From: valka465 <46899675+valka465@users.noreply.github.com> Date: Thu, 28 Mar 2024 11:49:06 +0300 Subject: [PATCH 1/3] Update README.md HasData info added. --- README.md | 3 ++- 1 file changed, 2 insertions(+), 1 deletion(-) diff --git a/README.md b/README.md index f8dddbe..e23ec5c 100644 --- a/README.md +++ b/README.md @@ -22,7 +22,7 @@ SerpBear is an Open Source Search Engine Position Tracking and Keyword Research #### How it Works -The App uses third party website scrapers like ScrapingAnt, ScrapingRobot, SearchApi, SerpApi or Your given Proxy ips to scrape google search results to see if your domain appears in the search result for the given keyword. +The App uses third party website scrapers like ScrapingAnt, ScrapingRobot, SearchApi, SerpApi, HasData or Your given Proxy ips to scrape google search results to see if your domain appears in the search result for the given keyword. The Keyword Research and keyword generation feature works by integrating your Google Ads test accounts into SerpBear. You can also view the added keyword's monthly search volume data once you [integrate Google Ads](https://docs.serpbear.com/miscellaneous/integrate-google-ads). 
@@ -52,6 +52,7 @@ If you don't want to use proxies, you can use third party Scraping services to s
 | SearchApi.io | From $40/mo | From 10,000/mo | Yes |
 | valueserp.com | Pay As You Go | $2.50/1000 req | No |
 | serper.dev | Pay As You Go | $1.00/1000 req | No |
+| hasdata.com | From $29/mo | From 10,000/mo | Yes |
 
 **Tech Stack**
 

From 3fc102452081f70adb3ebb5b4f24f05964e7f50c Mon Sep 17 00:00:00 2001
From: valka465 <46899675+valka465@users.noreply.github.com>
Date: Thu, 28 Mar 2024 11:54:38 +0300
Subject: [PATCH 2/3] Update index.ts

HasData added
---
 scrapers/index.ts | 2 ++
 1 file changed, 2 insertions(+)

diff --git a/scrapers/index.ts b/scrapers/index.ts
index a2ea0d9..0cfb09c 100644
--- a/scrapers/index.ts
+++ b/scrapers/index.ts
@@ -7,6 +7,7 @@ import proxy from './services/proxy';
 import searchapi from './services/searchapi';
 import valueSerp from './services/valueserp';
 import serper from './services/serper';
+import hasdata from './services/hasdata';
 
 export default [
    scrapingRobot,
@@ -18,4 +19,5 @@
    searchapi,
    valueSerp,
    serper,
+   hasdata,
 ];

From 79fc6b935c59ffdde9269086b56acf45f82cfa00 Mon Sep 17 00:00:00 2001
From: valka465 <46899675+valka465@users.noreply.github.com>
Date: Thu, 28 Mar 2024 11:57:15 +0300
Subject: [PATCH 3/3] HasData scraper added

---
 scrapers/services/hasdata.ts | 50 ++++++++++++++++++++++++++++++++++++
 1 file changed, 50 insertions(+)
 create mode 100644 scrapers/services/hasdata.ts

diff --git a/scrapers/services/hasdata.ts b/scrapers/services/hasdata.ts
new file mode 100644
index 0000000..353e718
--- /dev/null
+++ b/scrapers/services/hasdata.ts
@@ -0,0 +1,50 @@
+import countries from '../../utils/countries';
+
+// Shape of a single organic result item returned by the HasData SERP API.
+interface HasDataResult {
+  title: string,
+  link: string,
+  position: number,
+}
+
+// HasData scraper: fetches the top 100 Google results for a keyword via the
+// scrape-it.cloud SERP API; supports optional city-level location targeting.
+const hasdata:ScraperSettings = {
+  id: 'hasdata',
+  name: 'HasData',
+  website: 'hasdata.com',
+  allowsCity: true,
+  headers: (keyword, settings) => {
+    return {
+      'Content-Type': 'application/json',
+      'x-api-key': settings.scaping_api, // NOTE(review): 'scaping_api' (sic) — presumably the app's existing settings key; confirm against the other scrapers
+    };
+  },
+  scrapeURL: (keyword, settings) => {
+    const country = keyword.country || 'US';
+    const countryName = countries[country][0];
+    // encodeURIComponent (not encodeURI): '&', '+', '#' or '=' in the city or
+    // keyword are left unescaped by encodeURI and would corrupt the query string.
+    const location = keyword.city && countryName ? `&location=${encodeURIComponent(`${keyword.city},${countryName}`)}` : '';
+    return `https://api.scrape-it.cloud/scrape/google/serp?q=${encodeURIComponent(keyword.keyword)}${location}&num=100&gl=${country.toLowerCase()}&deviceType=${keyword.device}`;
+  },
+  resultObjectKey: 'organicResults',
+
+  serpExtractor: (content) => {
+    const extractedResult = [];
+    const results: HasDataResult[] = (typeof content === 'string') ? JSON.parse(content) : content as HasDataResult[];
+
+    for (const { link, title, position } of results) {
+      if (title && link) {
+        extractedResult.push({
+          title,
+          url: link,
+          position,
+        });
+      }
+    }
+    return extractedResult;
+  },
+};
+
+export default hasdata;