diff --git a/README.md b/README.md
index 39d2ee7..992da7c 100644
--- a/README.md
+++ b/README.md
@@ -38,8 +38,10 @@ The App uses third party website scrapers like ScrapingAnt, ScrapingRobot or You
| Serpwatcher.com | $49/mo| 3000/mo | No |
| whatsmyserp.com | $49/mo| 30,000/mo| No |
| serply.io | $49/mo | 5000/mo | Yes |
+| serpapi.com | From $50/mo** | From 5,000/mo** | Yes |
(*) Free up to a limit. If you are using ScrapingAnt you can lookup 10,000 times per month for free.
+(**) Free up to 100 per month. Paid from 5,000 to 10,000,000+ per month.
**Stack**
- Next.js for Frontend & Backend.
diff --git a/components/settings/Settings.tsx b/components/settings/Settings.tsx
index 42beb33..3602dbf 100644
--- a/components/settings/Settings.tsx
+++ b/components/settings/Settings.tsx
@@ -79,7 +79,7 @@ const Settings = ({ closeSettings }:SettingsProps) => {
}
}
- if (['scrapingant', 'scrapingrobot'].includes(settings.scraper_type) && !settings.scaping_api) {
+ if (['scrapingant', 'scrapingrobot', 'serply', 'serpapi'].includes(settings.scraper_type) && !settings.scaping_api) {
error = { type: 'no_api_key', msg: 'Insert a Valid API Key or Token for the Scraper Service.' };
}
@@ -106,6 +106,7 @@ const Settings = ({ closeSettings }:SettingsProps) => {
{ label: 'ScrapingAnt.com', value: 'scrapingant' },
{ label: 'ScrapingRobot.com', value: 'scrapingrobot' },
{ label: 'serply.io', value: 'serply' },
+ { label: 'serpapi.com', value: 'serpapi' },
];
const tabStyle = 'inline-block px-4 py-1 rounded-full mr-3 cursor-pointer text-sm';
@@ -151,7 +152,7 @@ const Settings = ({ closeSettings }:SettingsProps) => {
minWidth={270}
/>
- {['scrapingant', 'scrapingrobot', 'serply'].includes(settings.scraper_type) && (
+ {['scrapingant', 'scrapingrobot', 'serply', 'serpapi'].includes(settings.scraper_type) && (
diff --git a/utils/refresh.ts b/utils/refresh.ts
--- a/utils/refresh.ts
+++ b/utils/refresh.ts
@@ -54,7 +54,7 @@ const refreshParallel = async (keywords:KeywordType[], settings:SettingsType) : Promise<RefreshResult[]> => {
+const refreshParallel = async (keywords:KeywordType[], settings:SettingsType) : Promise<RefreshResult[]> => {
   const promises: Promise<RefreshResult>[] = keywords.map((keyword) => {
return scrapeKeywordFromGoogle(keyword, settings);
});
diff --git a/utils/scraper.ts b/utils/scraper.ts
index 494a843..8578ea9 100644
--- a/utils/scraper.ts
+++ b/utils/scraper.ts
@@ -32,6 +32,12 @@ interface SerplyResult {
realPosition: number,
}
+interface SerpApiResult {
+ title: string,
+ link: string,
+ position: number,
+}
+
/**
* Creates a SERP Scraper client promise based on the app settings.
* @param {KeywordType} keyword - the keyword to get the SERP for.
@@ -79,6 +85,11 @@ export const getScraperClient = (keyword:KeywordType, settings:SettingsType): Pr
apiURL = `https://api.serply.io/v1/search/q=${encodeURI(keyword.keyword)}&num=100&hl=${country}`;
}
+ // SerpApi docs: https://serpapi.com
+ if (settings && settings.scraper_type === 'serpapi' && settings.scaping_api) {
+ apiURL = `https://serpapi.com/search?q=${encodeURI(keyword.keyword)}&num=100&gl=${keyword.country}&device=${keyword.device}&api_key=${settings.scaping_api}`;
+ }
+
if (settings && settings.scraper_type === 'proxy' && settings.proxy) {
const axiosConfig: CreateAxiosDefaults = {};
axiosConfig.headers = headers;
@@ -128,8 +139,8 @@ export const scrapeKeywordFromGoogle = async (keyword:KeywordType, settings:Sett
res = await scraperClient.then((result:any) => result.json());
}
- if (res && (res.data || res.html || res.result || res.results)) {
- const extracted = extractScrapedResult(res.data || res.html || res.result || res.results, settings.scraper_type);
+ if (res && (res.data || res.html || res.result || res.results || res.organic_results)) {
+ const extracted = extractScrapedResult(res.data || res.html || res.result || res.results || res.organic_results, settings.scraper_type);
// await writeFile('result.txt', JSON.stringify(extracted), { encoding: 'utf-8' }).catch((err) => { console.log(err); });
const serp = getSerp(keyword.domain, extracted);
refreshedResults = { ID: keyword.ID, keyword: keyword.keyword, position: serp.postion, url: serp.url, result: extracted, error: false };
@@ -185,6 +196,19 @@ export const extractScrapedResult = (content: string, scraper_type:string): Sear
});
}
}
+ } else if (scraper_type === 'serpapi') {
+ // results already in json
+ const results: SerpApiResult[] = (typeof content === 'string') ? JSON.parse(content) : content as SerpApiResult[];
+
+ for (const { link, title, position } of results) {
+ if (title && link) {
+ extractedResult.push({
+ title: title,
+ url: link,
+ position: position,
+ });
+ }
+ }
} else {
for (let i = 0; i < searchResult.length; i += 1) {
if (searchResult[i]) {