{/* TODO: Insert Existing Tags as Suggestions */}
setNewKeywordsData({ ...newKeywordsData, tags: e.target.value })}
/>
+
diff --git a/components/keywords/Keyword.tsx b/components/keywords/Keyword.tsx
index 5d3500b..cf8bbb3 100644
--- a/components/keywords/Keyword.tsx
+++ b/components/keywords/Keyword.tsx
@@ -40,7 +40,7 @@ const Keyword = (props: KeywordProps) => {
scDataType = 'threeDays',
} = props;
const {
- keyword, domain, ID, position, url = '', lastUpdated, country, sticky, history = {}, updating = false, lastUpdateError = false,
+ keyword, domain, ID, city, position, url = '', lastUpdated, country, sticky, history = {}, updating = false, lastUpdateError = false,
} = keywordData;
const [showOptions, setShowOptions] = useState(false);
const [showPositionError, setPositionError] = useState(false);
@@ -85,12 +85,12 @@ const Keyword = (props: KeywordProps) => {
return (
-
+
showKeywordDetails()}>
- {keyword}
+
+ {keyword}{city ? ` (${city})` : ''}
{sticky &&
}
{lastUpdateError && lastUpdateError.date
diff --git a/components/keywords/KeywordsTable.tsx b/components/keywords/KeywordsTable.tsx
index 9f34f7a..d78378e 100644
--- a/components/keywords/KeywordsTable.tsx
+++ b/components/keywords/KeywordsTable.tsx
@@ -1,8 +1,6 @@
import React, { useState, useMemo } from 'react';
import { Toaster } from 'react-hot-toast';
-import { CSSTransition } from 'react-transition-group';
import { FixedSizeList as List, ListChildComponentProps } from 'react-window';
-import AddKeywords from './AddKeywords';
import { filterKeywords, keywordsByDevice, sortKeywords } from '../../utils/client/sortFilter';
import Icon from '../common/Icon';
import Keyword from './Keyword';
@@ -25,7 +23,7 @@ type KeywordsTableProps = {
}
const KeywordsTable = (props: KeywordsTableProps) => {
- const { domain, keywords = [], isLoading = true, showAddModal = false, setShowAddModal, isConsoleIntegrated = false } = props;
+ const { keywords = [], isLoading = true, isConsoleIntegrated = false } = props;
const showSCData = isConsoleIntegrated;
const [device, setDevice] = useState
('desktop');
const [selectedKeywords, setSelectedKeywords] = useState([]);
@@ -243,13 +241,6 @@ const KeywordsTable = (props: KeywordsTableProps) => {
)}
-
- setShowAddModal(false)}
- />
-
{showTagManager && (
{
- const { keyword, device, country, domain, tags } = kwrd;
+ const { keyword, device, country, domain, tags, city } = kwrd;
const tagsArray = tags ? tags.split(',').map((item:string) => item.trim()) : [];
const newKeyword = {
keyword,
device,
domain,
country,
+ city,
position: 0,
updating: true,
history: JSON.stringify({}),
diff --git a/pages/api/settings.ts b/pages/api/settings.ts
index 9b5e5a6..aed5f76 100644
--- a/pages/api/settings.ts
+++ b/pages/api/settings.ts
@@ -72,7 +72,7 @@ export const getAppSettings = async () : Promise => {
scaping_api,
smtp_password,
search_console_integrated: !!(process.env.SEARCH_CONSOLE_PRIVATE_KEY && process.env.SEARCH_CONSOLE_CLIENT_EMAIL),
- available_scapers: allScrapers.map((scraper) => ({ label: scraper.name, value: scraper.id })),
+ available_scapers: allScrapers.map((scraper) => ({ label: scraper.name, value: scraper.id, allowsCity: !!scraper.allowsCity })),
failed_queue: failedQueue,
screenshot_key: screenshotAPIKey,
};
diff --git a/pages/domain/[slug]/index.tsx b/pages/domain/[slug]/index.tsx
index 4d0f5c8..91cd413 100644
--- a/pages/domain/[slug]/index.tsx
+++ b/pages/domain/[slug]/index.tsx
@@ -1,9 +1,7 @@
-import React, { useEffect, useMemo, useState } from 'react';
+import React, { useMemo, useState } from 'react';
import type { NextPage } from 'next';
import Head from 'next/head';
import { useRouter } from 'next/router';
-// import { useQuery } from 'react-query';
-// import toast from 'react-hot-toast';
import { CSSTransition } from 'react-transition-group';
import Sidebar from '../../../components/common/Sidebar';
import TopBar from '../../../components/common/TopBar';
@@ -16,17 +14,20 @@ import Settings from '../../../components/settings/Settings';
import { useFetchDomains } from '../../../services/domains';
import { useFetchKeywords } from '../../../services/keywords';
import { useFetchSettings } from '../../../services/settings';
+import AddKeywords from '../../../components/keywords/AddKeywords';
const SingleDomain: NextPage = () => {
const router = useRouter();
- const [noScrapprtError, setNoScrapprtError] = useState(false);
const [showAddKeywords, setShowAddKeywords] = useState(false);
const [showAddDomain, setShowAddDomain] = useState(false);
const [showDomainSettings, setShowDomainSettings] = useState(false);
const [showSettings, setShowSettings] = useState(false);
const [keywordSPollInterval, setKeywordSPollInterval] = useState(undefined);
- const { data: appSettings } = useFetchSettings();
+ const { data: appSettingsData, isLoading: isAppSettingsLoading } = useFetchSettings();
const { data: domainsData } = useFetchDomains(router);
+ const appSettings: SettingsType = appSettingsData?.settings || {};
+ const { scraper_type = '', available_scapers = [] } = appSettings;
+ const activeScraper = useMemo(() => available_scapers.find((scraper) => scraper.value === scraper_type), [scraper_type, available_scapers]);
const activDomain: DomainType|null = useMemo(() => {
let active:DomainType|null = null;
@@ -40,18 +41,9 @@ const SingleDomain: NextPage = () => {
const theDomains: DomainType[] = (domainsData && domainsData.domains) || [];
const theKeywords: KeywordType[] = keywordsData && keywordsData.keywords;
- useEffect(() => {
- // console.log('appSettings.settings: ', appSettings && appSettings.settings);
- if (appSettings && appSettings.settings && (!appSettings.settings.scraper_type || (appSettings.settings.scraper_type === 'none'))) {
- setNoScrapprtError(true);
- }
- }, [appSettings]);
-
- // console.log('Websites Data:', router, activDomain, theKeywords);
-
return (
- {noScrapprtError && (
+ {((!scraper_type || (scraper_type === 'none')) && !isAppSettingsLoading) && (
A Scrapper/Proxy has not been set up Yet. Open Settings to set it up and start using the app.
@@ -80,7 +72,7 @@ const SingleDomain: NextPage = () => {
keywords={theKeywords}
showAddModal={showAddKeywords}
setShowAddModal={setShowAddKeywords}
- isConsoleIntegrated={!!(appSettings && appSettings?.settings?.search_console_integrated) }
+ isConsoleIntegrated={!!(appSettings && appSettings.search_console_integrated) }
/>
@@ -98,6 +90,15 @@ const SingleDomain: NextPage = () => {
setShowSettings(false)} />
+
+ setShowAddKeywords(false)}
+ />
+
);
};
diff --git a/scrapers/services/searchapi.ts b/scrapers/services/searchapi.ts
index 44b7489..fd43d34 100644
--- a/scrapers/services/searchapi.ts
+++ b/scrapers/services/searchapi.ts
@@ -1,7 +1,16 @@
+import countries from '../../utils/countries';
+
+interface SearchApiResult {
+ title: string,
+ link: string,
+ position: number,
+ }
+
const searchapi:ScraperSettings = {
id: 'searchapi',
name: 'SearchApi.io',
website: 'searchapi.io',
+ allowsCity: true,
headers: (keyword, settings) => {
return {
'Content-Type': 'application/json',
@@ -9,7 +18,10 @@ const searchapi:ScraperSettings = {
};
},
scrapeURL: (keyword) => {
- return `https://www.searchapi.io/api/v1/search?engine=google&q=${encodeURI(keyword.keyword)}&num=100&gl=${keyword.country}&device=${keyword.device}`;
+ const country = keyword.country || 'US';
+ const countryName = countries[country][0];
+ const location = keyword.city && countryName ? `&location=${encodeURI(`${keyword.city},${countryName}`)}` : '';
+ return `https://www.searchapi.io/api/v1/search?engine=google&q=${encodeURI(keyword.keyword)}&num=100&gl=${country}&device=${keyword.device}${location}`;
},
resultObjectKey: 'organic_results',
serpExtractor: (content) => {
@@ -29,10 +41,4 @@ const searchapi:ScraperSettings = {
},
};
-interface SearchApiResult {
- title: string,
- link: string,
- position: number,
-}
-
export default searchapi;
diff --git a/scrapers/services/serpapi.ts b/scrapers/services/serpapi.ts
index 5af8c3d..2d09a85 100644
--- a/scrapers/services/serpapi.ts
+++ b/scrapers/services/serpapi.ts
@@ -1,3 +1,5 @@
+import countries from '../../utils/countries';
+
interface SerpApiResult {
title: string,
link: string,
@@ -8,6 +10,7 @@ const serpapi:ScraperSettings = {
id: 'serpapi',
name: 'SerpApi.com',
website: 'serpapi.com',
+ allowsCity: true,
headers: (keyword, settings) => {
return {
'Content-Type': 'application/json',
@@ -15,7 +18,9 @@ const serpapi:ScraperSettings = {
};
},
scrapeURL: (keyword, settings) => {
- return `https://serpapi.com/search?q=${encodeURI(keyword.keyword)}&num=100&gl=${keyword.country}&device=${keyword.device}&api_key=${settings.scaping_api}`;
+ const countryName = countries[keyword.country || 'US'][0];
+ const location = keyword.city && keyword.country ? `&location=${encodeURI(`${keyword.city},${countryName}`)}` : '';
+ return `https://serpapi.com/search?q=${encodeURI(keyword.keyword)}&num=100&gl=${keyword.country}&device=${keyword.device}${location}&api_key=${settings.scaping_api}`;
},
resultObjectKey: 'organic_results',
serpExtractor: (content) => {
diff --git a/scrapers/services/spaceserp.ts b/scrapers/services/spaceserp.ts
index ae28aa3..47dcc25 100644
--- a/scrapers/services/spaceserp.ts
+++ b/scrapers/services/spaceserp.ts
@@ -1,3 +1,5 @@
+import countries from '../../utils/countries';
+
interface SpaceSerpResult {
title: string,
link: string,
@@ -9,10 +11,14 @@ const spaceSerp:ScraperSettings = {
id: 'spaceSerp',
name: 'Space Serp',
website: 'spaceserp.com',
+ allowsCity: true,
scrapeURL: (keyword, settings, countryData) => {
const country = keyword.country || 'US';
+ const countryName = countries[country][0];
+ const location = keyword.city ? `&location=${encodeURI(`${keyword.city},${countryName}`)}` : '';
+ const device = keyword.device === 'mobile' ? '&device=mobile' : '';
const lang = countryData[country][2];
- return `https://api.spaceserp.com/google/search?apiKey=${settings.scaping_api}&q=${encodeURI(keyword.keyword)}&pageSize=100&gl=${country}&hl=${lang}${keyword.device === 'mobile' ? '&device=mobile' : ''}&resultBlocks=`;
+ return `https://api.spaceserp.com/google/search?apiKey=${settings.scaping_api}&q=${encodeURI(keyword.keyword)}&pageSize=100&gl=${country}&hl=${lang}${location}${device}&resultBlocks=`;
},
resultObjectKey: 'organic_results',
serpExtractor: (content) => {
diff --git a/types.d.ts b/types.d.ts
index 957c1d8..cb2993f 100644
--- a/types.d.ts
+++ b/types.d.ts
@@ -39,6 +39,7 @@ type KeywordType = {
lastUpdateError: {date: string, error: string, scraper: string} | false,
scData?: KeywordSCData,
uid?: string
+ city?: string
}
type KeywordLastResult = {
@@ -78,7 +79,7 @@ type SettingsType = {
smtp_username?: string,
smtp_password?: string,
search_console_integrated?: boolean,
- available_scapers?: Array,
+ available_scapers?: { label: string, value: string, allowsCity?: boolean }[],
scrape_interval?: string,
scrape_delay?: string,
scrape_retry?: boolean,
@@ -108,7 +109,8 @@ type KeywordAddPayload = {
device: string,
country: string,
domain: string,
- tags: string,
+ tags?: string,
+ city?:string
}
type SearchAnalyticsRawItem = {
@@ -177,11 +179,23 @@ type scraperExtractedItem = {
position: number,
}
interface ScraperSettings {
+ /** A unique ID for the Scraper. e.g.: myScraper */
id:string,
+ /** The Name of the Scraper */
name:string,
+ /** The Website address of the Scraper */
website:string,
+ /** The result object's key that contains the results of the scraped data. For example,
+ * if your scraper API returns the data like this `{scraped:[item1,item2..]}`, the resultObjectKey should be "scraped" */
resultObjectKey: string,
+ /** If the Scraper allows setting a precise location or allows city-level scraping, set this to true. */
+ allowsCity?: boolean,
+ /** Set your own custom HTTP header properties when making the scraper API request.
+ * The function should return an object that contains all the header properties you want to pass to API request's header.
+ * Example: `{'Cache-Control': 'max-age=0', 'Content-Type': 'application/json'}` */
headers?(keyword:KeywordType, settings: SettingsType): Object,
+ /** Construct the API URL for scraping the data through your Scraper's API */
scrapeURL?(keyword:KeywordType, settings:SettingsType, countries:countryData): string,
+ /** Custom function to extract the SERP results from the scraped data. The extracted data should be returned as `scraperExtractedItem[]`. */
serpExtractor?(content:string): scraperExtractedItem[],
}