Mirror of https://github.com/towfiqi/serpbear (synced 2025-06-26 18:15:54 +00:00)
feat: adds the ability to add a URL as a domain.
You can now track specific marketplace/social URLs as domains, for example a reddit.com post, an amazon.com product, or a GitHub repository. Closes: #53, #90, #119
This commit is contained in:
parent e2ecdef10e
commit 3c2a1b8a5b
@@ -1,7 +1,7 @@
 import React, { useState } from 'react';
 import Modal from '../common/Modal';
 import { useAddDomain } from '../../services/domains';
-import { isValidDomain } from '../../utils/client/validators';
+import { isValidUrl } from '../../utils/client/validators';

 type AddDomainProps = {
    domains: DomainType[],
@@ -16,24 +16,30 @@ const AddDomain = ({ closeModal, domains = [] }: AddDomainProps) => {
    const addDomain = () => {
       setNewDomainError('');
       const existingDomains = domains.map((d) => d.domain);
-      const insertedDomains = newDomain.split('\n');
+      const insertedURLs = newDomain.split('\n');
       const domainsTobeAdded:string[] = [];
       const invalidDomains:string[] = [];
-      insertedDomains.forEach((dom) => {
-         const domain = dom.trim();
-         if (isValidDomain(domain)) {
-            if (!existingDomains.includes(domain)) {
-               domainsTobeAdded.push(domain);
+      insertedURLs.forEach((url) => {
+         const theURL = url.trim();
+         if (isValidUrl(theURL)) {
+            const domURL = new URL(theURL);
+            const isDomain = domURL.pathname === '/';
+            if (isDomain && !existingDomains.includes(domURL.host)) {
+               domainsTobeAdded.push(domURL.host);
+            }
+            if (!isDomain && !existingDomains.includes(domURL.href)) {
+               const cleanedURL = domURL.href.replace('https://', '').replace('http://', '').replace(/^\/+|\/+$/g, '');
+               domainsTobeAdded.push(cleanedURL);
             }
          } else {
-            invalidDomains.push(domain);
+            invalidDomains.push(theURL);
          }
       });
       if (invalidDomains.length > 0) {
-         setNewDomainError(`Please Insert Valid Domain names. Invalid Domains: ${invalidDomains.join(', ')}`);
+         setNewDomainError(`Please Insert Valid Domain URL. Invalid URLs: ${invalidDomains.join(', ')}`);
       } else if (domainsTobeAdded.length > 0) {
-         // TODO: Domain Action
-         addMutate(domainsTobeAdded);
+         console.log('domainsTobeAdded :', domainsTobeAdded);
+         addMutate(domainsTobeAdded);
       }
    };
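The hunk above is the heart of the feature: each textarea line is parsed as a URL, a plain domain (pathname `/`) is stored as its host, and a deeper URL is stored with the protocol and surrounding slashes stripped. Below is a minimal standalone sketch of that normalization. The function name `normalizeDomainInput` is made up for this sketch, and unlike the component (which requires full URLs via `isValidUrl`) the sketch also tolerates bare domains by prefixing `https://`.

```typescript
// Illustrative sketch only: mirrors the normalization added to the AddDomain
// component in this commit. The function name is hypothetical.
const normalizeDomainInput = (input: string, existing: string[] = []): { added: string[], invalid: string[] } => {
   const added: string[] = [];
   const invalid: string[] = [];
   input.split('\n').forEach((line) => {
      const value = line.trim();
      if (!value) { return; }
      let parsed: URL;
      try {
         // Bare domains (e.g. "reddit.com") are not valid URLs, so this sketch prefixes a protocol first.
         parsed = new URL(value.includes('://') ? value : `https://${value}`);
      } catch {
         invalid.push(value);
         return;
      }
      const isDomain = parsed.pathname === '/';
      // A plain domain is stored as its host; a deeper URL is stored without
      // the protocol and without leading/trailing slashes.
      const entry = isDomain
         ? parsed.host
         : parsed.href.replace('https://', '').replace('http://', '').replace(/^\/+|\/+$/g, '');
      if (!existing.includes(entry) && !added.includes(entry)) { added.push(entry); }
   });
   return { added, invalid };
};

// Example: a bare domain and a Reddit post URL become two trackable entries.
console.log(normalizeDomainInput('example.com\nhttps://www.reddit.com/r/SEO/comments/abc123/'));
// => { added: ['example.com', 'www.reddit.com/r/SEO/comments/abc123'], invalid: [] }
```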
@@ -45,11 +51,11 @@ const AddDomain = ({ closeModal, domains = [] }: AddDomainProps) => {
    return (
       <Modal closeModal={() => { closeModal(false); }} title={'Add New Domain'}>
          <div data-testid="adddomain_modal">
-            <h4 className='text-sm mt-4'>Domain Names</h4>
+            <h4 className='text-sm mt-4'>Domain URL</h4>
             <textarea
                className={`w-full h-40 border rounded border-gray-200 p-4 outline-none
                focus:border-indigo-300 ${newDomainError ? ' border-red-400 focus:border-red-400' : ''}`}
-               placeholder="Type or Paste Domains here. Insert Each Domain in a New line."
+               placeholder="Type or Paste URLs here. Insert Each URL in a New line."
                value={newDomain}
                autoFocus={true}
                onChange={handleDomainInput}>
@@ -38,7 +38,7 @@ const DomainItem = ({ domain, selected, isConsoleIntegrated = false, thumb, upda
          />
       </div>
       <div className="domain_details flex-1">
-         <h3 className='font-semibold text-base mb-2'>{domain.domain}</h3>
+         <h3 className='font-semibold text-base mb-2 max-w-[200px] text-ellipsis overflow-hidden' title={domain.domain}>{domain.domain}</h3>
          {keywordsUpdated && (
             <span className=' text-gray-600 text-xs'>
                Updated <TimeAgo title={dayjs(keywordsUpdated).format('DD-MMM-YYYY, hh:mm:ss A')} date={keywordsUpdated} />
@@ -31,7 +31,7 @@ const SCKeywordsTable = ({ domain, keywords = [], isLoading = true, isConsoleInt
    const [filterParams, setFilterParams] = useState<KeywordFilters>({ countries: [], tags: [], search: '' });
    const [sortBy, setSortBy] = useState<string>('imp_desc');
    const [SCListHeight, setSCListHeight] = useState(500);
-   const { keywordsData } = useFetchKeywords(router);
+   const { keywordsData } = useFetchKeywords(router, domain?.domain || '');
    const addedkeywords: string[] = keywordsData?.keywords?.map((key: KeywordType) => `${key.keyword}:${key.country}:${key.device}`) || [];
    const { mutate: addKeywords } = useAddKeywords(() => { if (domain && domain.slug) router.push(`/domain/${domain.slug}`); });
    const [isMobile] = useIsMobile();
@@ -69,7 +69,7 @@ const addDomain = async (req: NextApiRequest, res: NextApiResponse<DomainsAddRes
    domains.forEach((domain: string) => {
       domainsToAdd.push({
          domain: domain.trim(),
-         slug: domain.trim().replaceAll('-', '_').replaceAll('.', '-'),
+         slug: domain.trim().replaceAll('-', '_').replaceAll('.', '-').replaceAll('/', '-'),
          lastUpdated: new Date().toJSON(),
          added: new Date().toJSON(),
       });
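Since a tracked entry can now contain slashes, the slug also encodes `/`. A small sketch of the slug mapping follows; the helper name `domainToSlug` is illustrative, but the expression is taken from the hunk above. Note that once dots, dashes, and slashes are all folded into similar characters, the slug is no longer losslessly reversible, which fits the keywords API hunk further down, where the slug-decoding step is removed and the domain is passed through directly.

```typescript
// Illustrative helper: the expression matches the slug generation above.
const domainToSlug = (domain: string): string => domain
   .trim()
   .replaceAll('-', '_')
   .replaceAll('.', '-')
   .replaceAll('/', '-');

console.log(domainToSlug('example.com'));
// => 'example-com'
console.log(domainToSlug('www.reddit.com/r/SEO/comments/abc123'));
// => 'www-reddit-com-r-SEO-comments-abc123'
```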
@@ -37,12 +37,14 @@ const getDomainSearchConsoleInsight = async (req: NextApiRequest, res: NextApiRe

    // First try and read the Local SC Domain Data file.
    const localSCData = await readLocalSCData(domainname);
-   const oldFetchedDate = localSCData.lastFetched;
-   const fetchTimeDiff = new Date().getTime() - (oldFetchedDate ? new Date(oldFetchedDate as string).getTime() : 0);
-
-   if (localSCData && localSCData.stats && localSCData.stats.length && fetchTimeDiff <= 86400000) {
-      const response = getInsightFromSCData(localSCData);
-      return res.status(200).json({ data: response });
+   if (localSCData) {
+      const oldFetchedDate = localSCData.lastFetched;
+      const fetchTimeDiff = new Date().getTime() - (oldFetchedDate ? new Date(oldFetchedDate as string).getTime() : 0);
+      if (localSCData.stats && localSCData.stats.length && fetchTimeDiff <= 86400000) {
+         const response = getInsightFromSCData(localSCData);
+         return res.status(200).json({ data: response });
+      }
    }

    // If the Local SC Domain Data file does not exist, fetch from Google Search Console.
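For context, the `86400000` in the condition above is 24 hours in milliseconds, so locally cached Search Console data is reused for a day before a fresh fetch. A tiny illustration of that freshness check (the helper name `isFresh` is made up for this sketch):

```typescript
// Hypothetical helper mirroring the freshness check above:
// cached Search Console stats are considered fresh for 24 hours.
const DAY_IN_MS = 86_400_000; // 24 * 60 * 60 * 1000

const isFresh = (lastFetched?: string): boolean => {
   if (!lastFetched) { return false; }
   return Date.now() - new Date(lastFetched).getTime() <= DAY_IN_MS;
};

console.log(isFresh(new Date(Date.now() - 2 * 60 * 60 * 1000).toJSON()));  // fetched 2 hours ago => true
console.log(isFresh(new Date(Date.now() - 48 * 60 * 60 * 1000).toJSON())); // fetched 2 days ago  => false
```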
@@ -45,7 +45,7 @@ const getKeywords = async (req: NextApiRequest, res: NextApiResponse<KeywordsGet
    if (!req.query.domain && typeof req.query.domain !== 'string') {
       return res.status(400).json({ error: 'Domain is Required!' });
    }
-   const domain = (req.query.domain as string).replaceAll('-', '.').replaceAll('_', '-');
+   const domain = (req.query.domain as string);
    const integratedSC = process.env.SEARCH_CONSOLE_PRIVATE_KEY && process.env.SEARCH_CONSOLE_CLIENT_EMAIL;
    const domainSCData = integratedSC ? await readLocalSCData(domain) : false;
@@ -27,10 +27,6 @@ const SingleDomain: NextPage = () => {
    const [keywordSPollInterval, setKeywordSPollInterval] = useState<undefined|number>(undefined);
    const { data: appSettings } = useFetchSettings();
    const { data: domainsData } = useFetchDomains(router);
-   const { keywordsData, keywordsLoading } = useFetchKeywords(router, setKeywordSPollInterval, keywordSPollInterval);
-
-   const theDomains: DomainType[] = (domainsData && domainsData.domains) || [];
-   const theKeywords: KeywordType[] = keywordsData && keywordsData.keywords;

    const activDomain: DomainType|null = useMemo(() => {
       let active:DomainType|null = null;
@@ -40,6 +36,10 @@ const SingleDomain: NextPage = () => {
       return active;
    }, [router.query.slug, domainsData]);

+   const { keywordsData, keywordsLoading } = useFetchKeywords(router, activDomain?.domain || '', setKeywordSPollInterval, keywordSPollInterval);
+   const theDomains: DomainType[] = (domainsData && domainsData.domains) || [];
+   const theKeywords: KeywordType[] = keywordsData && keywordsData.keywords;
+
    useEffect(() => {
       // console.log('appSettings.settings: ', appSettings && appSettings.settings);
       if (appSettings && appSettings.settings && (!appSettings.settings.scraper_type || (appSettings.settings.scraper_type === 'none'))) {
@@ -47,7 +47,7 @@ const SingleDomain: NextPage = () => {
       }
    }, [appSettings]);

-   // console.log('Domains Data:', router, activDomain, theKeywords);
+   // console.log('Websites Data:', router, activDomain, theKeywords);

    return (
       <div className="Domain ">
@@ -2,16 +2,21 @@ import toast from 'react-hot-toast';
 import { NextRouter } from 'next/router';
 import { useMutation, useQuery, useQueryClient } from 'react-query';

-export const fetchKeywords = async (router: NextRouter) => {
-   if (!router.query.slug) { return []; }
-   const res = await fetch(`${window.location.origin}/api/keywords?domain=${router.query.slug}`, { method: 'GET' });
+export const fetchKeywords = async (router: NextRouter, domain: string) => {
+   if (!domain) { return []; }
+   const res = await fetch(`${window.location.origin}/api/keywords?domain=${domain}`, { method: 'GET' });
    return res.json();
 };

-export function useFetchKeywords(router: NextRouter, setKeywordSPollInterval?:Function, keywordSPollInterval:undefined|number = undefined) {
+export function useFetchKeywords(
+   router: NextRouter,
+   domain: string,
+   setKeywordSPollInterval?:Function,
+   keywordSPollInterval:undefined|number = undefined,
+) {
    const { data: keywordsData, isLoading: keywordsLoading, isError } = useQuery(
-      ['keywords', router.query.slug],
-      () => fetchKeywords(router),
+      ['keywords', domain],
+      () => fetchKeywords(router, domain),
       {
          refetchInterval: keywordSPollInterval,
          onSuccess: (data) => {
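Callers now pass the domain (or URL) itself rather than relying on the route slug, as seen in the SingleDomain and SCKeywordsTable hunks above. A minimal usage sketch of the updated hook signature; the component, prop type, and import path are assumptions for this sketch, not part of the commit:

```typescript
// Illustrative usage of the updated useFetchKeywords(router, domain, ...) signature.
import { useRouter } from 'next/router';
import { useFetchKeywords } from '../services/keywords'; // path assumed for this sketch

type Props = { activeDomain: { domain: string } | null };

const KeywordsPanel = ({ activeDomain }: Props) => {
   const router = useRouter();
   // The query is keyed by the domain string itself, so URL-style entries
   // (which contain slashes) fetch their own keyword list correctly.
   const { keywordsData, keywordsLoading } = useFetchKeywords(router, activeDomain?.domain || '');
   if (keywordsLoading) { return null; }
   console.log('keywords loaded:', keywordsData?.keywords?.length ?? 0);
   return null; // rendering omitted in this sketch
};

export default KeywordsPanel;
```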
@@ -33,3 +33,14 @@ export const isValidDomain = (domain:string): boolean => {

    return isValid;
 };
+
+export const isValidUrl = (str: string) => {
+   let url;
+
+   try {
+      url = new URL(str);
+   } catch (e) {
+      return false;
+   }
+   return url.protocol === 'http:' || url.protocol === 'https:';
+};
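A few example inputs for the new validator, to make the accepted format explicit: bare domains without a protocol are rejected, which is why the Add Domain modal now asks for full URLs. The import path below is assumed for this sketch.

```typescript
import { isValidUrl } from './utils/client/validators'; // path assumed for this sketch

console.log(isValidUrl('https://www.reddit.com/r/SEO/comments/abc123')); // true
console.log(isValidUrl('http://example.com'));                           // true
console.log(isValidUrl('example.com'));                                  // false: no protocol, new URL() throws
console.log(isValidUrl('ftp://example.com/file.txt'));                   // false: only http/https pass
```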
@@ -187,15 +187,17 @@ export const extractScrapedResult = (content: string, device: string): SearchRes

 /**
  * Find in the domain's position from the extracted search result.
- * @param {string} domain - Domain Name to look for.
+ * @param {string} domainURL - URL Name to look for.
  * @param {SearchResult[]} result - The search result array extracted from the Google Search result.
  * @returns {SERPObject}
  */
-export const getSerp = (domain:string, result:SearchResult[]) : SERPObject => {
-   if (result.length === 0 || !domain) { return { postion: 0, url: '' }; }
+export const getSerp = (domainURL:string, result:SearchResult[]) : SERPObject => {
+   if (result.length === 0 || !domainURL) { return { postion: 0, url: '' }; }
+   const URLToFind = new URL(domainURL.includes('https://') ? domainURL : `https://${domainURL}`);
+   const theURL = URLToFind.hostname + URLToFind.pathname;
    const foundItem = result.find((item) => {
-      const itemDomain = item.url.replace('www.', '').match(/^(?:https?:)?(?:\/\/)?([^/?]+)/i);
-      return itemDomain && itemDomain.includes(domain.replace('www.', ''));
+      const itemURL = new URL(item.url.includes('https://') ? item.url : `https://${item.url}`);
+      return theURL === itemURL.hostname + itemURL.pathname || `${theURL}/` === itemURL.hostname + itemURL.pathname;
    });
    return { postion: foundItem ? foundItem.position : 0, url: foundItem && foundItem.url ? foundItem.url : '' };
 };
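To illustrate the new matching rule: a search result counts as a hit when its hostname plus pathname equals the tracked entry, with or without a trailing slash, so a tracked URL only matches that exact page while a plain domain still matches its homepage. A standalone sketch of the comparison (the helper name is made up; the logic follows the hunk above):

```typescript
// Standalone sketch of the hostname + pathname comparison used by getSerp above.
const matchesTrackedEntry = (tracked: string, resultUrl: string): boolean => {
   const trackedURL = new URL(tracked.includes('https://') ? tracked : `https://${tracked}`);
   const parsedResult = new URL(resultUrl.includes('https://') ? resultUrl : `https://${resultUrl}`);
   const theURL = trackedURL.hostname + trackedURL.pathname;
   const itemURL = parsedResult.hostname + parsedResult.pathname;
   return theURL === itemURL || `${theURL}/` === itemURL;
};

console.log(matchesTrackedEntry('www.reddit.com/r/SEO/comments/abc123', 'https://www.reddit.com/r/SEO/comments/abc123/')); // true
console.log(matchesTrackedEntry('www.reddit.com/r/SEO/comments/abc123', 'https://www.reddit.com/r/SEO/'));                 // false: different path
console.log(matchesTrackedEntry('example.com', 'https://example.com/'));                                                   // true: homepage match
```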
@@ -139,9 +139,9 @@ export const integrateKeywordSCData = (keyword: KeywordType, SCData:SCDomainData
    const ctr:any = { yesterday: 0, threeDays: 0, sevenDays: 0, thirtyDays: 0, avgSevenDays: 0, avgThreeDays: 0, avgThirtyDays: 0 };
    const position:any = { yesterday: 0, threeDays: 0, sevenDays: 0, thirtyDays: 0, avgSevenDays: 0, avgThreeDays: 0, avgThirtyDays: 0 };

-   const threeDaysData = SCData.threeDays.find((item:SearchAnalyticsItem) => item.uid === kuid) || {};
-   const SevenDaysData = SCData.sevenDays.find((item:SearchAnalyticsItem) => item.uid === kuid) || {};
-   const ThirdyDaysData = SCData.thirtyDays.find((item:SearchAnalyticsItem) => item.uid === kuid) || {};
+   const threeDaysData = SCData?.threeDays?.find((item:SearchAnalyticsItem) => item.uid === kuid) || {};
+   const SevenDaysData = SCData?.sevenDays?.find((item:SearchAnalyticsItem) => item.uid === kuid) || {};
+   const ThirdyDaysData = SCData?.thirtyDays?.find((item:SearchAnalyticsItem) => item.uid === kuid) || {};
    const totalData:any = { threeDays: threeDaysData, sevenDays: SevenDaysData, thirtyDays: ThirdyDaysData };

    Object.keys(totalData).forEach((dataKey) => {
@@ -169,11 +169,15 @@ export const integrateKeywordSCData = (keyword: KeywordType, SCData:SCDomainData
  * @param {string} domain - The `domain` parameter is a string that represents the domain for which the SC data is being read.
  * @returns {Promise<SCDomainDataType>}
  */
-export const readLocalSCData = async (domain:string): Promise<SCDomainDataType> => {
-   const filePath = `${process.cwd()}/data/SC_${domain}.json`;
-   const currentQueueRaw = await readFile(filePath, { encoding: 'utf-8' }).catch(async () => { await updateLocalSCData(domain); return '{}'; });
-   const domainSCData = JSON.parse(currentQueueRaw);
-   return domainSCData;
+export const readLocalSCData = async (domain:string): Promise<SCDomainDataType|false> => {
+   try {
+      const filePath = `${process.cwd()}/data/SC_${domain.replaceAll('/', '-')}.json`;
+      const currentQueueRaw = await readFile(filePath, { encoding: 'utf-8' }).catch(async () => { await updateLocalSCData(domain); return '{}'; });
+      const domainSCData = JSON.parse(currentQueueRaw);
+      return domainSCData;
+   } catch (error) {
+      return false;
+   }
 };

 /**
@@ -183,10 +187,14 @@ export const readLocalSCData = async (domain:string): Promise<SCDomainDataType>
  * @returns {Promise<SCDomainDataType|false>}
  */
 export const updateLocalSCData = async (domain:string, scDomainData?:SCDomainDataType): Promise<SCDomainDataType|false> => {
-   const filePath = `${process.cwd()}/data/SC_${domain}.json`;
-   const emptyData:SCDomainDataType = { threeDays: [], sevenDays: [], thirtyDays: [], lastFetched: '', lastFetchError: '' };
-   await writeFile(filePath, JSON.stringify(scDomainData || emptyData), { encoding: 'utf-8' }).catch((err) => { console.log(err); });
-   return scDomainData || emptyData;
+   try {
+      const filePath = `${process.cwd()}/data/SC_${domain.replaceAll('/', '-')}.json`;
+      const emptyData:SCDomainDataType = { threeDays: [], sevenDays: [], thirtyDays: [], lastFetched: '', lastFetchError: '' };
+      await writeFile(filePath, JSON.stringify(scDomainData || emptyData), { encoding: 'utf-8' }).catch((err) => { console.log(err); });
+      return scDomainData || emptyData;
+   } catch (error) {
+      return false;
+   }
 };

 /**
@@ -195,7 +203,7 @@ export const updateLocalSCData = async (domain:string, scDomainData?:SCDomainDat
  * @returns {Promise<boolean>} - Returns true if file was removed, else returns false.
  */
 export const removeLocalSCData = async (domain:string): Promise<boolean> => {
-   const filePath = `${process.cwd()}/data/SC_${domain}.json`;
+   const filePath = `${process.cwd()}/data/SC_${domain.replaceAll('/', '-')}.json`;
    try {
       await unlink(filePath);
       return true;
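Because a tracked entry can now contain slashes, the local Search Console cache file name sanitizes them in the read, update, and remove helpers above; otherwise the path would point into non-existent subdirectories. A quick illustration of the resulting file names (the helper name is made up for this sketch; the template literal matches the hunks above):

```typescript
// Illustrative helper: mirrors the file-path construction in read/update/removeLocalSCData above.
const scDataFilePath = (domain: string): string => `${process.cwd()}/data/SC_${domain.replaceAll('/', '-')}.json`;

console.log(scDataFilePath('example.com'));
// => <cwd>/data/SC_example.com.json
console.log(scDataFilePath('www.reddit.com/r/SEO/comments/abc123'));
// => <cwd>/data/SC_www.reddit.com-r-SEO-comments-abc123.json
```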