38 Commits

Author SHA1 Message Date
towfiqi
c870250fbd chore(release): 0.3.2 2023-11-09 20:35:49 +06:00
towfiqi
da92f11afa chore: resolves linter nag. 2023-11-09 20:35:22 +06:00
towfiqi
9b9b74af4c fix: Resolves issue with adding long tld emails
closes #127
2023-11-09 20:35:01 +06:00
Towfiq I
291aa60bbb Merge pull request #129 from SearchApi/feature/integrate-searchapi
feat: Integrates SearchAPI
fix: Resolves build issue due to missing jest types.
fix: Resolves keyword SERP fetch issue.
2023-11-09 20:01:02 +06:00
SebastjanPrachovskij
8a35e358e6 Integrate SearchApi to SerpBear
Remove unnecessary spacing

Fix keyword.id & authorization for searchapi
2023-11-09 14:50:11 +02:00
towfiqi
f164b287be chore(release): 0.3.1 2023-11-04 22:05:59 +06:00
towfiqi
97dd0b131b fix: Updates vulnerable dependencies 2023-11-04 22:05:10 +06:00
towfiqi
454454a422 fix: Removes dev files from docker volumes 2023-11-04 11:25:56 +06:00
towfiqi
4620f11c4b Fixed: Resolves missing Scrapers list on new install 2023-11-04 10:47:22 +06:00
towfiqi
7ab435ed8b chore(release): 0.3.0 2023-11-03 22:24:46 +06:00
towfiqi
9feff13f18 fix: Fxies special character keyword scrape issue.
closes #113 #122
2023-11-03 21:44:23 +06:00
towfiqi
f57bca23da fix: Resolves missing keyword scrape spinner issue 2023-11-03 21:30:00 +06:00
towfiqi
392122a710 fix: Fixes the weekly cron day issue.
closes #118
2023-11-03 21:05:28 +06:00
towfiqi
fc183d246d feat: Displays the Best position of the keywords
closes #89
2023-11-03 20:45:36 +06:00
towfiqi
994afbcedb chore: changes toggle button ui 2023-11-03 13:51:50 +06:00
towfiqi
d3d336fa71 feat: Remembers last selected coutry
closes #101
2023-11-03 12:47:29 +06:00
towfiqi
6f34d64fd5 chore: opens keyword detail view on chart click 2023-11-03 12:43:56 +06:00
towfiqi
a0014c7650 chore: breakup Settings component 2023-11-03 12:20:02 +06:00
towfiqi
dc3c7a722b feat: Adds ability to disable/clear retry queue for failed keywords 2023-11-03 11:59:51 +06:00
towfiqi
8a949ce4c0 fix: Cron stopped on failing to parse failed queue
closes #116
2023-11-03 11:58:17 +06:00
towfiqi
312d12f589 chore: fixes tag menu overlap issue. 2023-11-02 22:46:55 +06:00
towfiqi
be80ed7ef3 fix: Fixes import order error in some instances.
closes #114
2023-11-02 22:35:25 +06:00
towfiqi
4748ffc382 feat: Adds ability to search w/o case sensitivity
closes #115
2023-11-02 22:21:07 +06:00
towfiqi
c0470cfa9d fix: Fixes issue with adding hyphenated subdomains. 2023-11-02 22:15:26 +06:00
towfiqi
1d6b2be95a feat: Refresh All feature now shows update real-time 2023-11-02 21:51:06 +06:00
towfiqi
0d846b29f1 chore(release): 0.2.6 2023-03-29 21:01:13 +06:00
towfiqi
3b96dab9cc chore: Adds Space Serp Details. 2023-03-29 20:54:55 +06:00
towfiqi
0a83924ffe feat: Add option to Delay Between scrapes.
fixes #87
2023-03-29 20:54:30 +06:00
towfiqi
d9505158c4 fix: Fixes first Keryword Error cut off issue. 2023-03-29 20:11:56 +06:00
towfiqi
9757fde02e fix: Fixes lags when tracking thousands of keywords
fixes #88
2023-03-29 12:59:22 +06:00
towfiqi
0538a8c016 feat: Integrates Space Serp. 2023-03-29 12:14:28 +06:00
Towfiq I
cace34f39a Merge pull request #85 from Teeth-Talk/hotfix-typo
fix(components): fix typo "Goolge" -> "Google"
2023-03-29 10:49:04 +06:00
Martin Silha
dce7c412e8 fix(components): fix typo "Goolge" -> "Google" 2023-03-16 21:44:59 +00:00
towfiqi
e61dfb5b90 chore(release): 0.2.5 2023-03-07 11:08:42 +06:00
towfiqi
b9d58a721d fix: Settings Update Toast was not showing up. 2023-03-05 19:39:14 +06:00
towfiqi
b83df5f3db feat: Adds current App version Number in Footer. 2023-03-05 19:23:07 +06:00
towfiqi
3b6d034d6f feat: Adds Keyword Scraping Interval Settings.
fixes #81, #76
2023-03-05 12:28:21 +06:00
towfiqi
5dd366b91e fix: Fixes Broken Image thumbnail loading issue. 2023-03-05 12:14:08 +06:00
38 changed files with 4607 additions and 11926 deletions

View File

@@ -2,6 +2,71 @@
All notable changes to this project will be documented in this file. See [standard-version](https://github.com/conventional-changelog/standard-version) for commit guidelines.
### [0.3.2](https://github.com/towfiqi/serpbear/compare/v0.3.1...v0.3.2) (2023-11-09)
### Bug Fixes
* Resolves issue with adding long tld emails ([9b9b74a](https://github.com/towfiqi/serpbear/commit/9b9b74af4c249e27458d29ba052e96ab2db8b640)), closes [#127](https://github.com/towfiqi/serpbear/issues/127)
### [0.3.1](https://github.com/towfiqi/serpbear/compare/v0.3.0...v0.3.1) (2023-11-04)
### Bug Fixes
* Removes dev files from docker volumes ([454454a](https://github.com/towfiqi/serpbear/commit/454454a422bab4d37a2d43ad95868e293a97b88e))
* Updates vulnerable dependencies ([97dd0b1](https://github.com/towfiqi/serpbear/commit/97dd0b131be4cec73d07f35062334dd1881f0013))
## [0.3.0](https://github.com/towfiqi/serpbear/compare/v0.2.6...v0.3.0) (2023-11-03)
### Features
* Adds ability to disable/clear retry queue for failed keywords ([dc3c7a7](https://github.com/towfiqi/serpbear/commit/dc3c7a722b18248115969c51f2495ccf1c43926d))
* Adds ability to search w/o case sensitivity ([4748ffc](https://github.com/towfiqi/serpbear/commit/4748ffc382161c5d861b8d43e8eba466a031e2bc)), closes [#115](https://github.com/towfiqi/serpbear/issues/115)
* Displays the Best position of the keywords ([fc183d2](https://github.com/towfiqi/serpbear/commit/fc183d246d55e0eecf43c91f6da8a59192e8e771)), closes [#89](https://github.com/towfiqi/serpbear/issues/89)
* Refresh All feature now shows update real-time ([1d6b2be](https://github.com/towfiqi/serpbear/commit/1d6b2be95aa133b7998f5cf098f15aa32f5badd2))
* Remembers last selected coutry ([d3d336f](https://github.com/towfiqi/serpbear/commit/d3d336fa71cc789624b10f3cdd1a2b5983053e6f)), closes [#101](https://github.com/towfiqi/serpbear/issues/101)
### Bug Fixes
* Resolves missing keyword scrape spinner issue ([f57bca2](https://github.com/towfiqi/serpbear/commit/f57bca23daa3fe888af4c19a681dcec6b6100d83))
* Cron stopped on failing to parse failed queue ([8a949ce](https://github.com/towfiqi/serpbear/commit/8a949ce4c078ff377e91a95c4b86ef2b15dae88b)), closes [#116](https://github.com/towfiqi/serpbear/issues/116)
* Fixes import order error in some instances. ([be80ed7](https://github.com/towfiqi/serpbear/commit/be80ed7ef3dd0a315c5ad67d17e61a4797dc274c)), closes [#114](https://github.com/towfiqi/serpbear/issues/114)
* Fixes issue with adding hyphenated subdomains. ([c0470cf](https://github.com/towfiqi/serpbear/commit/c0470cfa9d0dac86317c886065b461cfe82ffb16))
* Fixes the weekly cron day issue. ([392122a](https://github.com/towfiqi/serpbear/commit/392122a7101683342830e900c6f0c39f9272bb34)), closes [#118](https://github.com/towfiqi/serpbear/issues/118)
* Fxies special character keyword scrape issue. ([9feff13](https://github.com/towfiqi/serpbear/commit/9feff13f18a4d72203dde694a147831f990b37fb)), closes [#113](https://github.com/towfiqi/serpbear/issues/113) [#122](https://github.com/towfiqi/serpbear/issues/122)
### [0.2.6](https://github.com/towfiqi/serpbear/compare/v0.2.5...v0.2.6) (2023-03-29)
### Features
* Add option to Delay Between scrapes. ([0a83924](https://github.com/towfiqi/serpbear/commit/0a83924ffe2243c52849c167c6c15d9688ff1dc7)), closes [#87](https://github.com/towfiqi/serpbear/issues/87)
* Integrates Space Serp. ([0538a8c](https://github.com/towfiqi/serpbear/commit/0538a8c01601d2f6365848580591a248528e67c7))
### Bug Fixes
* **components:** fix typo "Goolge" -> "Google" ([dce7c41](https://github.com/towfiqi/serpbear/commit/dce7c412e813fc845973f36ad1c9fa91df4a6611))
* Fixes first Keryword Error cut off issue. ([d950515](https://github.com/towfiqi/serpbear/commit/d9505158c439a924a1c86eb8243faf2a15bed43e))
* Fixes lags when tracking thousands of keywords ([9757fde](https://github.com/towfiqi/serpbear/commit/9757fde02ec83405546733381104c54ed6510681)), closes [#88](https://github.com/towfiqi/serpbear/issues/88)
### [0.2.5](https://github.com/towfiqi/serpbear/compare/v0.2.4...v0.2.5) (2023-03-07)
### Features
* Adds current App version Number in Footer. ([b83df5f](https://github.com/towfiqi/serpbear/commit/b83df5f3dbd64db657d31f0526438e7165e1b475))
* Adds Keyword Scraping Interval Settings. ([3b6d034](https://github.com/towfiqi/serpbear/commit/3b6d034d6f7da0b4259070220fffff44184dd680)), closes [#81](https://github.com/towfiqi/serpbear/issues/81) [#76](https://github.com/towfiqi/serpbear/issues/76)
### Bug Fixes
* Fixes Broken Image thumbnail loading issue. ([5dd366b](https://github.com/towfiqi/serpbear/commit/5dd366b91e2a94e658bf5250a8a0fa64c09e1c11))
* Settings Update Toast was not showing up. ([b9d58a7](https://github.com/towfiqi/serpbear/commit/b9d58a721df12f3f34220a3ae5da6897e23c83ec))
### [0.2.4](https://github.com/towfiqi/serpbear/compare/v0.2.3...v0.2.4) (2023-02-15)

View File

@@ -10,6 +10,8 @@ COPY . .
FROM node:lts-alpine AS builder
WORKDIR /app
COPY --from=deps /app ./
RUN rm -rf /app/data
RUN rm -rf /app/__test__
RUN npm run build

View File

@@ -18,7 +18,7 @@ SerpBear is an Open Source Search Engine Position Tracking App. It allows you to
- **Zero Cost to RUN:** Run the App on mogenius.com or Fly.io for free.
#### How it Works
The App uses third party website scrapers like ScrapingAnt, ScrapingRobot, SerpApi or Your given Proxy ips to scrape google search results to see if your domain appears in the search result for the given keyword. Also, When you connect your Googel Search Console account, the app shows actual search visits for each tracked keywords. You can also discover new keywords, and find the most performing keywords, countries, pages.
The App uses third party website scrapers like ScrapingAnt, ScrapingRobot, SearchApi, SerpApi or Your given Proxy ips to scrape google search results to see if your domain appears in the search result for the given keyword. Also, When you connect your Googel Search Console account, the app shows actual search visits for each tracked keywords. You can also discover new keywords, and find the most performing keywords, countries, pages.
#### Getting Started
- **Step 1:** Deploy & Run the App.
@@ -40,6 +40,8 @@ The App uses third party website scrapers like ScrapingAnt, ScrapingRobot, SerpA
| whatsmyserp.com | $49/mo| 30,000/mo| No |
| serply.io | $49/mo | 5000/mo | Yes |
| serpapi.com | From $50/mo** | From 5,000/mo** | Yes |
| spaceserp.com | $59/lifetime | 15,000/mo | Yes |
| SearchApi.io | From $40/mo | From 10,000/mo | Yes |
(*) Free upto a limit. If you are using ScrapingAnt you can lookup 10,000 times per month for free.
(**) Free up to 100 per month. Paid from 5,000 to 10,000,000+ per month.

View File

@@ -35,7 +35,7 @@ const ChartSlim = ({ labels, sreies }:ChartProps) => {
},
};
return <div className='w-[120px] h-[30px] rounded border border-gray-200'>
return <div className='w-[100px] h-[30px] rounded border border-gray-200'>
<Line
datasetIdKey='XXX'
options={options}

View File

@@ -1,6 +1,7 @@
import React, { useState } from 'react';
import Modal from '../common/Modal';
import { useAddDomain } from '../../services/domains';
import { isValidDomain } from '../../utils/validators';
type AddDomainProps = {
closeModal: Function
@@ -13,7 +14,7 @@ const AddDomain = ({ closeModal }: AddDomainProps) => {
const addDomain = () => {
// console.log('ADD NEW DOMAIN', newDomain);
if (/^[a-zA-Z0-9][a-zA-Z0-9-]{0,61}[a-zA-Z0-9](?:\.[a-zA-Z]{2,})+$/.test(newDomain.trim())) {
if (isValidDomain(newDomain.trim())) {
setNewDomainError(false);
// TODO: Domain Action
addMutate(newDomain.trim());
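
The inline pattern moves into a shared `isValidDomain` helper in `utils/validators`, whose body is not part of this diff. A hypothetical sketch, assuming it loosens the old single-label pattern so hyphenated subdomains pass (per commit c0470cfa9d):

```ts
// Hypothetical sketch of utils/validators.isValidDomain; the real implementation
// is not shown in this diff. Each dot-separated label may contain hyphens,
// which lets hyphenated subdomains (e.g. "my-sub.example.com") validate.
export const isValidDomain = (domain: string): boolean =>
  /^([a-zA-Z0-9]([a-zA-Z0-9-]{0,61}[a-zA-Z0-9])?\.)+[a-zA-Z]{2,}$/.test(domain.trim());
```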

View File

@@ -9,10 +9,11 @@ import Icon from '../common/Icon';
type DomainItemProps = {
domain: DomainType,
selected: boolean,
isConsoleIntegrated: boolean
isConsoleIntegrated: boolean,
thumb: string,
}
const DomainItem = ({ domain, selected, isConsoleIntegrated = false }: DomainItemProps) => {
const DomainItem = ({ domain, selected, isConsoleIntegrated = false, thumb }: DomainItemProps) => {
const { keywordsUpdated, slug, keywordCount = 0, avgPosition = 0, scVisits = 0, scImpressions = 0, scPosition = 0 } = domain;
// const router = useRouter();
return (
@@ -21,10 +22,10 @@ const DomainItem = ({ domain, selected, isConsoleIntegrated = false }: DomainIte
<a className='flex flex-col lg:flex-row'>
<div className={`flex-1 p-6 flex ${!isConsoleIntegrated ? 'basis-1/3' : ''}`}>
<div className="domain_thumb w-20 h-20 mr-6 bg-slate-100 rounded border border-gray-200 overflow-hidden">
<img src={`https://image.thum.io/get/maxAge/96/width/200/https://${domain.domain}`} alt={domain.domain} />
{thumb && <img src={thumb} alt={domain.domain} />}
</div>
<div className="domain_details flex-1">
<h3 className='font-semibold text-base mb-2 capitalize'>{domain.domain}</h3>
<h3 className='font-semibold text-base mb-2'>{domain.domain}</h3>
{keywordsUpdated && (
<span className=' text-gray-600 text-xs'>
Updated <TimeAgo title={dayjs(keywordsUpdated).format('DD-MMM-YYYY, hh:mm:ss A')} date={keywordsUpdated} />

View File

@@ -41,7 +41,7 @@ const DomainSettings = ({ domain, closeModal }: DomainSettingsProps) => {
let error: DomainSettingsError | null = null;
if (domainSettings.notification_emails) {
const notification_emails = domainSettings.notification_emails.split(',');
const invalidEmails = notification_emails.find((x) => /^\w+([.-]?\w+)*@\w+([.-]?\w+)*(\.\w{2,3})+$/.test(x) === false);
const invalidEmails = notification_emails.find((x) => /^\w+([.-]?\w+)*@\w+([.-]?\w+)*(\.\w{2,15})+$/.test(x) === false);
console.log('invalidEmails: ', invalidEmails);
if (invalidEmails) {
error = { type: 'email', msg: 'Invalid Email' };
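
Widening the TLD quantifier from `\w{2,3}` to `\w{2,15}` is what closes #127 (notification emails with long TLDs). A minimal sketch of the check, reusing the pattern from the diff:

```ts
// Reuses the widened pattern from the diff; the helper name is illustrative.
const EMAIL_RE = /^\w+([.-]?\w+)*@\w+([.-]?\w+)*(\.\w{2,15})+$/;

const isValidNotificationEmail = (email: string): boolean => EMAIL_RE.test(email.trim());

isValidNotificationEmail('alerts@example.photography'); // true: an 11-character TLD now passes
isValidNotificationEmail('alerts@example.c');           // false: TLD shorter than 2 characters
```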

View File

@@ -117,7 +117,7 @@ const SCInsight = ({ insight, isLoading = true, isConsoleIntegrated = true }: SC
)}
{!isConsoleIntegrated && (
<p className=' p-9 pt-[10%] text-center text-gray-500'>
Goolge Search has not been Integrated yet. Please follow <a className='text-indigo-600 underline' href='https://docs.serpbear.com/miscellaneous/integrate-google-search-console' target="_blank" rel='noreferrer'>These Steps</a> to integrate Google Search Data for this Domain.
Google Search has not been Integrated yet. Please follow <a className='text-indigo-600 underline' href='https://docs.serpbear.com/miscellaneous/integrate-google-search-console' target="_blank" rel='noreferrer'>These Steps</a> to integrate Google Search Data for this Domain.
</p>
)}
</div>

View File

@@ -21,7 +21,8 @@ type KeywordsInput = {
const AddKeywords = ({ closeModal, domain, keywords }: AddKeywordsProps) => {
const [error, setError] = useState<string>('');
const [newKeywordsData, setNewKeywordsData] = useState<KeywordsInput>({ keywords: '', device: 'desktop', country: 'US', domain, tags: '' });
const defCountry = localStorage.getItem('default_country') || 'US';
const [newKeywordsData, setNewKeywordsData] = useState<KeywordsInput>({ keywords: '', device: 'desktop', country: defCountry, domain, tags: '' });
const { mutate: addMutate, isLoading: isAdding } = useAddKeywords(() => closeModal(false));
const deviceTabStyle = 'cursor-pointer px-3 py-2 rounded mr-2';
@@ -64,7 +65,10 @@ const AddKeywords = ({ closeModal, domain, keywords }: AddKeywordsProps) => {
selected={[newKeywordsData.country]}
options={Object.keys(countries).map((countryISO:string) => { return { label: countries[countryISO][0], value: countryISO }; })}
defaultLabel='All Countries'
updateField={(updated:string[]) => setNewKeywordsData({ ...newKeywordsData, country: updated[0] })}
updateField={(updated:string[]) => {
setNewKeywordsData({ ...newKeywordsData, country: updated[0] });
localStorage.setItem('default_country', updated[0]);
}}
rounded='rounded'
maxHeight={48}
flags={true}
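
The country dropdown now persists the last selection under the `default_country` localStorage key and reads it back as the initial value. A minimal sketch of that round trip:

```ts
// Sketch of the "remember last selected country" behavior; key name taken from the diff.
const DEFAULT_COUNTRY_KEY = 'default_country';

export const getDefaultCountry = (): string =>
  localStorage.getItem(DEFAULT_COUNTRY_KEY) || 'US';

export const rememberCountry = (countryISO: string): void => {
  localStorage.setItem(DEFAULT_COUNTRY_KEY, countryISO);
};
```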

View File

@@ -9,6 +9,7 @@ import { generateTheChartData } from '../common/generateChartData';
type KeywordProps = {
keywordData: KeywordType,
selected: boolean,
index: number,
refreshkeyword: Function,
favoriteKeyword: Function,
removeKeyword: Function,
@@ -18,6 +19,7 @@ type KeywordProps = {
lastItem?:boolean,
showSCData: boolean,
scDataType: string,
style: Object
}
const Keyword = (props: KeywordProps) => {
@@ -32,6 +34,8 @@ const Keyword = (props: KeywordProps) => {
manageTags,
lastItem,
showSCData = true,
style,
index,
scDataType = 'threeDays',
} = props;
const {
@@ -63,10 +67,23 @@ const Keyword = (props: KeywordProps) => {
return status;
}, [history, position]);
const bestPosition: false | {position: number, date: string} = useMemo(() => {
let bestPos;
if (Object.keys(history).length > 0) {
const historyArray = Object.keys(history).map((itemID) => ({ date: itemID, position: history[itemID] }))
.sort((a, b) => a.position - b.position);
if (historyArray[0]) {
bestPos = { ...historyArray[0] };
}
}
return bestPos || false;
}, [history]);
const optionsButtonStyle = 'block px-2 py-2 cursor-pointer hover:bg-indigo-50 hover:text-blue-700';
const renderPosition = (pos:number, type?:string) => {
if (pos === 0) {
if (!updating && pos === 0) {
return <span className='text-gray-400' title='Not in Top 100'>{'>100'}</span>;
}
if (updating && type !== 'sc') {
@@ -78,8 +95,10 @@ const Keyword = (props: KeywordProps) => {
return (
<div
key={keyword}
style={style}
className={`keyword relative py-5 px-4 text-gray-600 border-b-[1px] border-gray-200 lg:py-4 lg:px-6 lg:border-0
lg:flex lg:justify-between lg:items-center ${selected ? ' bg-indigo-50 keyword--selected' : ''} ${lastItem ? 'border-b-0' : ''}`}>
<div className=' w-3/4 lg:flex-1 lg:basis-20 lg:w-auto font-semibold cursor-pointer'>
<button
className={`p-0 mr-2 leading-[0px] inline-block rounded-sm pt-0 px-[1px] pb-[3px] border
@@ -100,18 +119,32 @@ const Keyword = (props: KeywordProps) => {
</button>
}
</div>
<div
className={`keyword_position absolute bg-[#f8f9ff] w-fit min-w-[50px] h-12 p-2 text-base mt-[-20px] rounded right-5 lg:relative
lg:bg-transparent lg:w-auto lg:h-auto lg:mt-0 lg:p-0 lg:text-sm lg:flex-1 lg:basis-40 lg:grow-0 lg:right-0 text-center font-semibold`}>
lg:bg-transparent lg:w-auto lg:h-auto lg:mt-0 lg:p-0 lg:text-sm lg:flex-1 lg:basis-24 lg:grow-0 lg:right-0 text-center font-semibold`}>
{renderPosition(position)}
{!updating && positionChange > 0 && <i className=' not-italic ml-1 text-xs text-[#5ed7c3]'> {positionChange}</i>}
{!updating && positionChange < 0 && <i className=' not-italic ml-1 text-xs text-red-300'> {positionChange}</i>}
</div>
<div
title={bestPosition && bestPosition.date
? new Date(bestPosition.date).toLocaleDateString('en-US', { weekday: 'long', year: 'numeric', month: 'short', day: 'numeric' }) : ''
}
className={`keyword_best hidden bg-[#f8f9ff] w-fit min-w-[50px] h-12 p-2 text-base mt-[-20px] rounded right-5 lg:relative lg:block
lg:bg-transparent lg:w-auto lg:h-auto lg:mt-0 lg:p-0 lg:text-sm lg:flex-1 lg:basis-16 lg:grow-0 lg:right-0 text-center font-semibold`}>
{bestPosition ? bestPosition.position || '-' : (position || '-')}
</div>
{chartData.labels.length > 0 && (
<div className='lg:flex-1 hidden lg:block'>
<div
className='hidden basis-32 grow-0 cursor-pointer lg:block'
onClick={() => showKeywordDetails()}>
<ChartSlim labels={chartData.labels} sreies={chartData.sreies} />
</div>
)}
<div
className={`keyword_url inline-block mt-4 mr-5 ml-5 lg:flex-1 text-gray-400 lg:m-0 max-w-[70px]
overflow-hidden text-ellipsis whitespace-nowrap lg:max-w-none lg:pr-5`}>
@@ -119,6 +152,7 @@ const Keyword = (props: KeywordProps) => {
<Icon type="link-alt" size={14} color="#999" /></span>{turncatedURL || '-'}
</a>
</div>
<div
className='inline-block mt-[4] top-[-5px] relative lg:flex-1 lg:m-0'>
<span className='mr-2 lg:hidden'><Icon type="clock" size={14} color="#999" /></span>
@@ -171,8 +205,10 @@ const Keyword = (props: KeywordProps) => {
</ul>
)}
</div>
{lastUpdateError && lastUpdateError.date && showPositionError && (
<div className=' absolute mt-[-70px] p-2 bg-white z-30 border border-red-200 rounded w-[220px] left-4 shadow-sm text-xs lg:bottom-12'>
<div className={`absolute p-2 bg-white z-30 border border-red-200 rounded w-[220px] left-4 shadow-sm text-xs
${index > 2 ? 'lg:bottom-12 mt-[-70px]' : ' top-12'}`}>
Error Updating Keyword position (Tried <TimeAgo
title={dayjs(lastUpdateError.date).format('DD-MMM-YYYY, hh:mm:ss A')}
date={lastUpdateError.date} />)
@@ -183,7 +219,8 @@ const Keyword = (props: KeywordProps) => {
{lastUpdateError.scraper && <strong className='capitalize'>{lastUpdateError.scraper}: </strong>}{lastUpdateError.error}
</div>
</div>
)}
)}
</div>
);
};

View File

@@ -1,6 +1,7 @@
import React, { useState, useMemo } from 'react';
import React, { useState, useMemo, useEffect } from 'react';
import { Toaster } from 'react-hot-toast';
import { CSSTransition } from 'react-transition-group';
import { FixedSizeList as List, ListChildComponentProps } from 'react-window';
import AddKeywords from './AddKeywords';
import { filterKeywords, keywordsByDevice, sortKeywords } from '../../utils/sortFilter';
import Icon from '../common/Icon';
@@ -30,6 +31,8 @@ const KeywordsTable = (props: KeywordsTableProps) => {
const [showRemoveModal, setShowRemoveModal] = useState<boolean>(false);
const [showTagManager, setShowTagManager] = useState<null|number>(null);
const [showAddTags, setShowAddTags] = useState<boolean>(false);
const [isMobile, setIsMobile] = useState<boolean>(false);
const [SCListHeight, setSCListHeight] = useState(500);
const [filterParams, setFilterParams] = useState<KeywordFilters>({ countries: [], tags: [], search: '' });
const [sortBy, setSortBy] = useState<string>('date_asc');
const [scDataType, setScDataType] = useState<string>('threeDays');
@@ -47,6 +50,16 @@ const KeywordsTable = (props: KeywordsTableProps) => {
avgThirtyDays: 'Last Thirty Days Avg',
};
useEffect(() => {
setIsMobile(!!(window.matchMedia('only screen and (max-width: 760px)').matches));
const resizeList = () => setSCListHeight(window.innerHeight - (isMobile ? 200 : 400));
resizeList();
window.addEventListener('resize', resizeList);
return () => {
window.removeEventListener('resize', resizeList);
};
}, [isMobile]);
const processedKeywords: {[key:string] : KeywordType[]} = useMemo(() => {
const procKeywords = keywords.filter((x) => x.device === device);
const filteredKeywords = filterKeywords(procKeywords, filterParams);
@@ -67,6 +80,27 @@ const KeywordsTable = (props: KeywordsTableProps) => {
}
setSelectedKeywords(updatedSelectd);
};
const Row = ({ data, index, style }:ListChildComponentProps) => {
const keyword = data[index];
return (
<Keyword
key={keyword.ID}
style={style}
index={index}
selected={selectedKeywords.includes(keyword.ID)}
selectKeyword={selectKeyword}
keywordData={keyword}
refreshkeyword={() => refreshMutate({ ids: [keyword.ID] })}
favoriteKeyword={favoriteMutate}
manageTags={() => setShowTagManager(keyword.ID)}
removeKeyword={() => { setSelectedKeywords([keyword.ID]); setShowRemoveModal(true); }}
showKeywordDetails={() => setShowKeyDetails(keyword)}
lastItem={index === (processedKeywords[device].length - 1)}
showSCData={showSCData}
scDataType={scDataType}
/>
);
};
const selectedAllItems = selectedKeywords.length === processedKeywords[device].length;
@@ -119,7 +153,7 @@ const KeywordsTable = (props: KeywordsTableProps) => {
<div className=' lg:min-w-[800px]'>
<div className={`domKeywords_head domKeywords_head--${sortBy} hidden lg:flex p-3 px-6 bg-[#FCFCFF]
text-gray-600 justify-between items-center font-semibold border-y`}>
<span className='domKeywords_head_keyword flex-1 basis-20 w-auto '>
<span className='domKeywords_head_keyword flex-1 basis-[4rem] w-auto '>
{processedKeywords[device].length > 0 && (
<button
className={`p-0 mr-2 leading-[0px] inline-block rounded-sm pt-0 px-[1px] pb-[3px] border border-slate-300
@@ -131,8 +165,9 @@ const KeywordsTable = (props: KeywordsTableProps) => {
)}
Keyword
</span>
<span className='domKeywords_head_position flex-1 basis-40 grow-0 text-center'>Position</span>
<span className='domKeywords_head_history flex-1'>History (7d)</span>
<span className='domKeywords_head_position flex-1 basis-24 grow-0 text-center'>Position</span>
<span className='domKeywords_head_best flex-1 basis-16 grow-0 text-center'>Best</span>
<span className='domKeywords_head_history flex-1 basis-32 grow-0 '>History (7d)</span>
<span className='domKeywords_head_url flex-1'>URL</span>
<span className='domKeywords_head_updated flex-1'>Updated</span>
{showSCData && (
@@ -141,7 +176,7 @@ const KeywordsTable = (props: KeywordsTableProps) => {
<div>
<div
className=' w-48 select-none cursor-pointer absolute bg-white rounded-full
px-2 py-[2px] mt-[-22px] ml-3 border border-gray-200 z-50'
px-2 py-[2px] mt-[-22px] ml-3 border border-gray-200 z-40'
onClick={() => setShowScDataTypes(!showScDataTypes)}>
<Icon type="google" size={13} /> {scDataObject[scDataType]}
<Icon classes="ml-2" type={showScDataTypes ? 'caret-up' : 'caret-down'} size={10} />
@@ -170,21 +205,19 @@ const KeywordsTable = (props: KeywordsTableProps) => {
)}
</div>
<div className='domKeywords_keywords border-gray-200 min-h-[55vh] relative'>
{processedKeywords[device] && processedKeywords[device].length > 0
&& processedKeywords[device].map((keyword, index) => <Keyword
key={keyword.ID}
selected={selectedKeywords.includes(keyword.ID)}
selectKeyword={selectKeyword}
keywordData={keyword}
refreshkeyword={() => refreshMutate({ ids: [keyword.ID] })}
favoriteKeyword={favoriteMutate}
manageTags={() => setShowTagManager(keyword.ID)}
removeKeyword={() => { setSelectedKeywords([keyword.ID]); setShowRemoveModal(true); }}
showKeywordDetails={() => setShowKeyDetails(keyword)}
lastItem={index === (processedKeywords[device].length - 1)}
showSCData={showSCData}
scDataType={scDataType}
/>)}
{processedKeywords[device] && processedKeywords[device].length > 0 && (
<List
innerElementType="div"
itemData={processedKeywords[device]}
itemCount={processedKeywords[device].length}
itemSize={isMobile ? 146 : 57}
height={SCListHeight}
width={'100%'}
className={'styled-scrollbar'}
>
{Row}
</List>
)}
{!isLoading && processedKeywords[device].length === 0 && (
<p className=' p-9 pt-[10%] text-center text-gray-500'>No Keywords Added for this Device Type.</p>
)}
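
The keyword rows are now rendered through react-window's `FixedSizeList`, which keeps only the visible rows mounted and addresses the lag with thousands of keywords (#88). A minimal, self-contained sketch of the pattern, with placeholder data types:

```tsx
import React from 'react';
import { FixedSizeList as List, ListChildComponentProps } from 'react-window';

type KeywordRow = { ID: number, keyword: string };

// `style` must be forwarded so react-window can absolutely position each row.
const Row = ({ data, index, style }: ListChildComponentProps<KeywordRow[]>) => (
  <div style={style}>{data[index].keyword}</div>
);

const KeywordList = ({ keywords }: { keywords: KeywordRow[] }) => (
  <List
    itemData={keywords}            // passed to every Row as `data`
    itemCount={keywords.length}
    itemSize={57}                  // fixed row height in px (desktop value from the diff)
    height={500}
    width={'100%'}
  >
    {Row}
  </List>
);

export default KeywordList;
```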

View File

@@ -214,7 +214,7 @@ const SCKeywordsTable = ({ domain, keywords = [], isLoading = true, isConsoleInt
)}
{!isConsoleIntegrated && (
<p className=' p-9 pt-[10%] text-center text-gray-500'>
Goolge Search has not been Integrated yet. Please follow <a className='text-indigo-600 underline' href='https://docs.serpbear.com/miscellaneous/integrate-google-search-console' target="_blank" rel='noreferrer'>These Steps</a> to integrate Google Search Data for this Domain.
Google Search has not been Integrated yet. Please follow <a className='text-indigo-600 underline' href='https://docs.serpbear.com/miscellaneous/integrate-google-search-console' target="_blank" rel='noreferrer'>These Steps</a> to integrate Google Search Data for this Domain.
</p>
)}
</div>

View File

@@ -0,0 +1,112 @@
import React from 'react';
import SelectField from '../common/SelectField';
type NotificationSettingsProps = {
settings: SettingsType,
settingsError: null | {
type: string,
msg: string
},
updateSettings: Function,
}
const NotificationSettings = ({ settings, settingsError, updateSettings }:NotificationSettingsProps) => {
const labelStyle = 'mb-2 font-semibold inline-block text-sm text-gray-700 capitalize';
return (
<div>
<div className='settings__content styled-scrollbar p-6 text-sm'>
<div className="settings__section__input mb-5">
<label className={labelStyle}>Notification Frequency</label>
<SelectField
multiple={false}
selected={[settings.notification_interval]}
options={[
{ label: 'Daily', value: 'daily' },
{ label: 'Weekly', value: 'weekly' },
{ label: 'Monthly', value: 'monthly' },
{ label: 'Never', value: 'never' },
]}
defaultLabel={'Notification Settings'}
updateField={(updated:string[]) => updated[0] && updateSettings('notification_interval', updated[0])}
rounded='rounded'
maxHeight={48}
minWidth={270}
/>
</div>
{settings.notification_interval !== 'never' && (
<>
<div className="settings__section__input mb-5">
<label className={labelStyle}>Notification Emails</label>
<input
className={`w-full p-2 border border-gray-200 rounded mb-3 focus:outline-none focus:border-blue-200
${settingsError?.type === 'no_email' ? ' border-red-400 focus:border-red-400' : ''} `}
type="text"
value={settings?.notification_email}
placeholder={'test@gmail.com'}
onChange={(event) => updateSettings('notification_email', event.target.value)}
/>
</div>
<div className="settings__section__input mb-5">
<label className={labelStyle}>SMTP Server</label>
<input
className={`w-full p-2 border border-gray-200 rounded mb-3 focus:outline-none focus:border-blue-200
${settingsError?.type === 'no_smtp_server' ? ' border-red-400 focus:border-red-400' : ''} `}
type="text"
value={settings?.smtp_server || ''}
onChange={(event) => updateSettings('smtp_server', event.target.value)}
/>
</div>
<div className="settings__section__input mb-5">
<label className={labelStyle}>SMTP Port</label>
<input
className={`w-full p-2 border border-gray-200 rounded mb-3 focus:outline-none focus:border-blue-200
${settingsError && settingsError.type === 'no_smtp_port' ? ' border-red-400 focus:border-red-400' : ''} `}
type="text"
value={settings?.smtp_port || ''}
onChange={(event) => updateSettings('smtp_port', event.target.value)}
/>
</div>
<div className="settings__section__input mb-5">
<label className={labelStyle}>SMTP Username</label>
<input
className={'w-full p-2 border border-gray-200 rounded mb-3 focus:outline-none focus:border-blue-200'}
type="text"
value={settings?.smtp_username || ''}
onChange={(event) => updateSettings('smtp_username', event.target.value)}
/>
</div>
<div className="settings__section__input mb-5">
<label className={labelStyle}>SMTP Password</label>
<input
className={'w-full p-2 border border-gray-200 rounded mb-3 focus:outline-none focus:border-blue-200'}
type="text"
value={settings?.smtp_password || ''}
onChange={(event) => updateSettings('smtp_password', event.target.value)}
/>
</div>
<div className="settings__section__input mb-5">
<label className={labelStyle}>From Email Address</label>
<input
className={`w-full p-2 border border-gray-200 rounded mb-3 focus:outline-none focus:border-blue-200
${settingsError?.type === 'no_smtp_from' ? ' border-red-400 focus:border-red-400' : ''} `}
type="text"
value={settings?.notification_email_from || ''}
placeholder="no-reply@mydomain.com"
onChange={(event) => updateSettings('notification_email_from', event.target.value)}
/>
</div>
</>
)}
</div>
{settingsError?.msg && (
<div className='absolute w-full bottom-16 text-center p-3 bg-red-100 text-red-600 text-sm font-semibold'>
{settingsError.msg}
</div>
)}
</div>
);
};
export default NotificationSettings;

View File

@@ -0,0 +1,149 @@
import React from 'react';
import { useClearFailedQueue } from '../../services/settings';
import Icon from '../common/Icon';
import SelectField, { SelectionOption } from '../common/SelectField';
type ScraperSettingsProps = {
settings: SettingsType,
settingsError: null | {
type: string,
msg: string
},
updateSettings: Function,
}
const ScraperSettings = ({ settings, settingsError, updateSettings }:ScraperSettingsProps) => {
const { mutate: clearFailedMutate, isLoading: clearingQueue } = useClearFailedQueue(() => {});
const scrapingOptions: SelectionOption[] = [
{ label: 'Daily', value: 'daily' },
{ label: 'Every Other Day', value: 'other_day' },
{ label: 'Weekly', value: 'weekly' },
{ label: 'Monthly', value: 'monthly' },
{ label: 'Never', value: 'never' },
];
const delayOptions: SelectionOption[] = [
{ label: 'No Delay', value: '0' },
{ label: '5 Seconds', value: '5000' },
{ label: '10 Seconds', value: '10000' },
{ label: '30 Seconds', value: '30000' },
{ label: '1 Minutes', value: '60000' },
{ label: '2 Minutes', value: '120000' },
{ label: '5 Minutes', value: '300000' },
{ label: '10 Minutes', value: '600000' },
{ label: '15 Minutes', value: '900000' },
{ label: '30 Minutes', value: '1800000' },
];
const allScrapers: SelectionOption[] = settings.available_scapers ? settings.available_scapers : [];
const scraperOptions: SelectionOption[] = [{ label: 'None', value: 'none' }, ...allScrapers];
const labelStyle = 'mb-2 font-semibold inline-block text-sm text-gray-700 capitalize';
return (
<div>
<div className='settings__content styled-scrollbar p-6 text-sm'>
<div className="settings__section__select mb-5">
<label className={labelStyle}>Scraping Method</label>
<SelectField
options={scraperOptions}
selected={[settings.scraper_type || 'none']}
defaultLabel="Select Scraper"
updateField={(updatedTime:[string]) => updateSettings('scraper_type', updatedTime[0])}
multiple={false}
rounded={'rounded'}
minWidth={270}
/>
</div>
{['scrapingant', 'scrapingrobot', 'serply', 'serpapi', 'spaceSerp', 'searchapi'].includes(settings.scraper_type) && (
<div className="settings__section__input mr-3">
<label className={labelStyle}>Scraper API Key or Token</label>
<input
className={`w-full p-2 border border-gray-200 rounded mt-2 mb-3 focus:outline-none focus:border-blue-200
${settingsError?.type === 'no_api_key' ? ' border-red-400 focus:border-red-400' : ''} `}
type="text"
value={settings?.scaping_api || ''}
placeholder={'API Key/Token'}
onChange={(event) => updateSettings('scaping_api', event.target.value)}
/>
</div>
)}
{settings.scraper_type === 'proxy' && (
<div className="settings__section__input mb-5">
<label className={labelStyle}>Proxy List</label>
<textarea
className={`w-full p-2 border border-gray-200 rounded mb-3 text-xs
focus:outline-none min-h-[160px] focus:border-blue-200
${settingsError?.type === 'no_email' ? ' border-red-400 focus:border-red-400' : ''} `}
value={settings?.proxy}
placeholder={'http://122.123.22.45:5049\nhttps://user:password@122.123.22.45:5049'}
onChange={(event) => updateSettings('proxy', event.target.value)}
/>
</div>
)}
{settings.scraper_type !== 'none' && (
<div className="settings__section__input mb-5">
<label className={labelStyle}>Scraping Frequency</label>
<SelectField
multiple={false}
selected={[settings?.scrape_interval || 'daily']}
options={scrapingOptions}
defaultLabel={'Notification Settings'}
updateField={(updated:string[]) => updated[0] && updateSettings('scrape_interval', updated[0])}
rounded='rounded'
maxHeight={48}
minWidth={270}
/>
<small className=' text-gray-500 pt-2 block'>This option requires Server/Docker Instance Restart to take Effect.</small>
</div>
)}
<div className="settings__section__input mb-5">
<label className={labelStyle}>Delay Between Each keyword Scrape</label>
<SelectField
multiple={false}
selected={[settings?.scrape_delay || '0']}
options={delayOptions}
defaultLabel={'Delay Settings'}
updateField={(updated:string[]) => updated[0] && updateSettings('scrape_delay', updated[0])}
rounded='rounded'
maxHeight={48}
minWidth={270}
/>
<small className=' text-gray-500 pt-2 block'>This option requires Server/Docker Instance Restart to take Effect.</small>
</div>
<div className="settings__section__input mb-5">
<label className="relative inline-flex items-center cursor-pointer w-full justify-between">
<span className="text-sm font-medium text-gray-900 dark:text-gray-300 w-56">Auto Retry Failed Keyword Scrape</span>
<input
type="checkbox"
value={settings?.scrape_retry ? 'true' : '' }
checked={settings.scrape_retry || false}
className="sr-only peer"
onChange={() => updateSettings('scrape_retry', !settings.scrape_retry)}
/>
<div className="relative rounded-3xl w-9 h-5 bg-gray-200 peer-focus:outline-none peer-focus:ring-4
peer-focus:ring-blue-300 dark:peer-focus:ring-blue-800rounded-full peer dark:bg-gray-700
peer-checked:after:translate-x-full peer-checked:after:border-white after:content-['']
after:absolute after:top-[2px] after:left-[2px] after:bg-white after:border-gray-300
after:border after:rounded-full after:h-4 after:w-4
after:transition-all dark:border-gray-600 peer-checked:bg-blue-600"></div>
</label>
</div>
{settings?.scrape_retry && (settings.failed_queue?.length || 0) > 0 && (
<div className="settings__section__input mb-5">
<label className={labelStyle}>Clear Failed Retry Queue</label>
<button
onClick={() => clearFailedMutate()}
className=' py-3 px-5 w-full rounded cursor-pointer bg-gray-100 text-gray-800
font-semibold text-sm hover:bg-gray-200'>
{clearingQueue && <Icon type="loading" size={14} />} Clear Failed Queue
({settings.failed_queue?.length || 0} Keywords)
</button>
</div>
)}
</div>
</div>
);
};
export default ScraperSettings;

View File

@@ -1,8 +1,9 @@
import React, { useEffect, useState } from 'react';
// import { useQuery } from 'react-query';
import { Toaster } from 'react-hot-toast';
import useUpdateSettings, { useFetchSettings } from '../../services/settings';
import Icon from '../common/Icon';
import SelectField, { SelectionOption } from '../common/SelectField';
import NotificationSettings from './NotificationSettings';
import ScraperSettings from './ScraperSettings';
type SettingsProps = {
closeSettings: Function,
@@ -16,6 +17,8 @@ type SettingsError = {
const defaultSettings = {
scraper_type: 'none',
scrape_delay: 'none',
scrape_retry: false,
notification_interval: 'daily',
notification_email: '',
smtp_server: '',
@@ -57,7 +60,7 @@ const Settings = ({ closeSettings }:SettingsProps) => {
if (e.target === e.currentTarget) { closeSettings(); }
};
const updateSettings = (key: string, value:string|number) => {
const updateSettings = (key: string, value:string|number|boolean) => {
setSettings({ ...settings, [key]: value });
};
@@ -89,17 +92,6 @@ const Settings = ({ closeSettings }:SettingsProps) => {
}
};
const labelStyle = 'mb-2 font-semibold inline-block text-sm text-gray-700 capitalize';
const notificationOptions: SelectionOption[] = [
{ label: 'Daily', value: 'daily' },
{ label: 'Weekly', value: 'weekly' },
{ label: 'Monthly', value: 'monthly' },
{ label: 'Never', value: 'never' },
];
const allScrapers: SelectionOption[] = settings.available_scapers ? settings.available_scapers : [];
const scraperOptions: SelectionOption[] = [{ label: 'None', value: 'none' }, ...allScrapers];
const tabStyle = 'inline-block px-4 py-1 rounded-full mr-3 cursor-pointer text-sm';
return (
<div className="settings fixed w-full h-screen top-0 left-0 z-50" onClick={closeOnBGClick}>
@@ -127,141 +119,13 @@ const Settings = ({ closeSettings }:SettingsProps) => {
</li>
</ul>
</div>
{currentTab === 'scraper' && (
<div>
<div className='settings__content styled-scrollbar p-6 text-sm'>
<div className="settings__section__select mb-5">
<label className={labelStyle}>Scraping Method</label>
<SelectField
options={scraperOptions}
selected={[settings.scraper_type || 'none']}
defaultLabel="Select Scraper"
updateField={(updatedTime:[string]) => updateSettings('scraper_type', updatedTime[0])}
multiple={false}
rounded={'rounded'}
minWidth={270}
/>
</div>
{['scrapingant', 'scrapingrobot', 'serply', 'serpapi'].includes(settings.scraper_type) && (
<div className="settings__section__input mr-3">
<label className={labelStyle}>Scraper API Key or Token</label>
<input
className={`w-full p-2 border border-gray-200 rounded mt-2 mb-3 focus:outline-none focus:border-blue-200
${settingsError && settingsError.type === 'no_api_key' ? ' border-red-400 focus:border-red-400' : ''} `}
type="text"
value={settings?.scaping_api || ''}
placeholder={'API Key/Token'}
onChange={(event) => updateSettings('scaping_api', event.target.value)}
/>
</div>
)}
{settings.scraper_type === 'proxy' && (
<div className="settings__section__input mb-5">
<label className={labelStyle}>Proxy List</label>
<textarea
className={`w-full p-2 border border-gray-200 rounded mb-3 text-xs
focus:outline-none min-h-[160px] focus:border-blue-200
${settingsError && settingsError.type === 'no_email' ? ' border-red-400 focus:border-red-400' : ''} `}
value={settings?.proxy}
placeholder={'http://122.123.22.45:5049\nhttps://user:password@122.123.22.45:5049'}
onChange={(event) => updateSettings('proxy', event.target.value)}
/>
</div>
)}
</div>
</div>
{currentTab === 'scraper' && settings && (
<ScraperSettings settings={settings} updateSettings={updateSettings} settingsError={settingsError} />
)}
{currentTab === 'notification' && (
<div>
<div className='settings__content styled-scrollbar p-6 text-sm'>
<div className="settings__section__input mb-5">
<label className={labelStyle}>Notification Frequency</label>
<SelectField
multiple={false}
selected={[settings.notification_interval]}
options={notificationOptions}
defaultLabel={'Notification Settings'}
updateField={(updated:string[]) => updated[0] && updateSettings('notification_interval', updated[0])}
rounded='rounded'
maxHeight={48}
minWidth={270}
/>
</div>
{settings.notification_interval !== 'never' && (
<>
<div className="settings__section__input mb-5">
<label className={labelStyle}>Notification Emails</label>
<input
className={`w-full p-2 border border-gray-200 rounded mb-3 focus:outline-none focus:border-blue-200
${settingsError && settingsError.type === 'no_email' ? ' border-red-400 focus:border-red-400' : ''} `}
type="text"
value={settings?.notification_email}
placeholder={'test@gmail.com'}
onChange={(event) => updateSettings('notification_email', event.target.value)}
/>
</div>
<div className="settings__section__input mb-5">
<label className={labelStyle}>SMTP Server</label>
<input
className={`w-full p-2 border border-gray-200 rounded mb-3 focus:outline-none focus:border-blue-200
${settingsError && settingsError.type === 'no_smtp_server' ? ' border-red-400 focus:border-red-400' : ''} `}
type="text"
value={settings?.smtp_server || ''}
onChange={(event) => updateSettings('smtp_server', event.target.value)}
/>
</div>
<div className="settings__section__input mb-5">
<label className={labelStyle}>SMTP Port</label>
<input
className={`w-full p-2 border border-gray-200 rounded mb-3 focus:outline-none focus:border-blue-200
${settingsError && settingsError.type === 'no_smtp_port' ? ' border-red-400 focus:border-red-400' : ''} `}
type="text"
value={settings?.smtp_port || ''}
onChange={(event) => updateSettings('smtp_port', event.target.value)}
/>
</div>
<div className="settings__section__input mb-5">
<label className={labelStyle}>SMTP Username</label>
<input
className={'w-full p-2 border border-gray-200 rounded mb-3 focus:outline-none focus:border-blue-200'}
type="text"
value={settings?.smtp_username || ''}
onChange={(event) => updateSettings('smtp_username', event.target.value)}
/>
</div>
<div className="settings__section__input mb-5">
<label className={labelStyle}>SMTP Password</label>
<input
className={'w-full p-2 border border-gray-200 rounded mb-3 focus:outline-none focus:border-blue-200'}
type="text"
value={settings?.smtp_password || ''}
onChange={(event) => updateSettings('smtp_password', event.target.value)}
/>
</div>
<div className="settings__section__input mb-5">
<label className={labelStyle}>From Email Address</label>
<input
className={`w-full p-2 border border-gray-200 rounded mb-3 focus:outline-none focus:border-blue-200
${settingsError && settingsError.type === 'no_smtp_from' ? ' border-red-400 focus:border-red-400' : ''} `}
type="text"
value={settings?.notification_email_from || ''}
placeholder="no-reply@mydomain.com"
onChange={(event) => updateSettings('notification_email_from', event.target.value)}
/>
</div>
</>
)}
</div>
{settingsError && (
<div className='absolute w-full bottom-16 text-center p-3 bg-red-100 text-red-600 text-sm font-semibold'>
{settingsError.msg}
</div>
)}
</div>
)}
{currentTab === 'notification' && settings && (
<NotificationSettings settings={settings} updateSettings={updateSettings} settingsError={settingsError} />
)}
<div className=' border-t-[1px] border-gray-200 p-2 px-3'>
<button
onClick={() => performUpdate()}
@@ -270,6 +134,7 @@ const Settings = ({ closeSettings }:SettingsProps) => {
</button>
</div>
</div>
<Toaster position='bottom-center' containerClassName="react_toaster" />
</div>
);
};

cron.js (100 lines changed)
View File

@@ -49,11 +49,14 @@ const generateCronTime = (interval) => {
if (interval === 'daily') {
cronTime = '0 0 0 * * *';
}
if (interval === 'other_day') {
cronTime = '0 0 2-30/2 * *';
}
if (interval === 'daily_morning') {
cronTime = '0 0 3 * * *';
}
if (interval === 'weekly') {
cronTime = '0 0 0 */7 * *';
cronTime = '0 0 * * 1';
}
if (interval === 'monthly') {
cronTime = '0 0 1 * *'; // Run every first day of the month at 00:00(midnight)
@@ -63,19 +66,43 @@ const generateCronTime = (interval) => {
};
const runAppCronJobs = () => {
// RUN SERP Scraping CRON (EveryDay at Midnight) 0 0 0 * *
const scrapeCronTime = generateCronTime('daily');
Cron(scrapeCronTime, () => {
// console.log('### Running Keyword Position Cron Job!');
const fetchOpts = { method: 'POST', headers: { Authorization: `Bearer ${process.env.APIKEY}` } };
fetch(`${process.env.NEXT_PUBLIC_APP_URL}/api/cron`, fetchOpts)
.then((res) => res.json())
// .then((data) =>{ console.log(data)})
.catch((err) => {
console.log('ERROR Making Daily Scraper Cron Request..');
console.log(err);
});
}, { scheduled: true });
getAppSettings().then((settings) => {
// RUN SERP Scraping CRON (EveryDay at Midnight) 0 0 0 * *
const scrape_interval = settings.scrape_interval || 'daily';
if (scrape_interval !== 'never') {
const scrapeCronTime = generateCronTime(scrape_interval);
Cron(scrapeCronTime, () => {
// console.log('### Running Keyword Position Cron Job!');
const fetchOpts = { method: 'POST', headers: { Authorization: `Bearer ${process.env.APIKEY}` } };
fetch(`${process.env.NEXT_PUBLIC_APP_URL}/api/cron`, fetchOpts)
.then((res) => res.json())
// .then((data) =>{ console.log(data)})
.catch((err) => {
console.log('ERROR Making SERP Scraper Cron Request..');
console.log(err);
});
}, { scheduled: true });
}
// RUN Email Notification CRON
const notif_interval = (!settings.notification_interval || settings.notification_interval === 'never') ? false : settings.notification_interval;
if (notif_interval) {
const cronTime = generateCronTime(notif_interval === 'daily' ? 'daily_morning' : notif_interval);
if (cronTime) {
Cron(cronTime, () => {
// console.log('### Sending Notification Email...');
const fetchOpts = { method: 'POST', headers: { Authorization: `Bearer ${process.env.APIKEY}` } };
fetch(`${process.env.NEXT_PUBLIC_APP_URL}/api/notify`, fetchOpts)
.then((res) => res.json())
.then((data) => console.log(data))
.catch((err) => {
console.log('ERROR Making Cron Email Notification Request..');
console.log(err);
});
}, { scheduled: true });
}
}
});
// Run Failed scraping CRON (Every Hour)
const failedCronTime = generateCronTime('hourly');
@@ -84,16 +111,20 @@ const runAppCronJobs = () => {
readFile(`${process.cwd()}/data/failed_queue.json`, { encoding: 'utf-8' }, (err, data) => {
if (data) {
const keywordsToRetry = data ? JSON.parse(data) : [];
if (keywordsToRetry.length > 0) {
const fetchOpts = { method: 'POST', headers: { Authorization: `Bearer ${process.env.APIKEY}` } };
fetch(`${process.env.NEXT_PUBLIC_APP_URL}/api/refresh?id=${keywordsToRetry.join(',')}`, fetchOpts)
.then((res) => res.json())
.then((refreshedData) => console.log(refreshedData))
.catch((fetchErr) => {
console.log('ERROR Making failed_queue Cron Request..');
console.log(fetchErr);
});
try {
const keywordsToRetry = data ? JSON.parse(data) : [];
if (keywordsToRetry.length > 0) {
const fetchOpts = { method: 'POST', headers: { Authorization: `Bearer ${process.env.APIKEY}` } };
fetch(`${process.env.NEXT_PUBLIC_APP_URL}/api/refresh?id=${keywordsToRetry.join(',')}`, fetchOpts)
.then((res) => res.json())
.then((refreshedData) => console.log(refreshedData))
.catch((fetchErr) => {
console.log('ERROR Making failed_queue Cron Request..');
console.log(fetchErr);
});
}
} catch (error) {
console.log('ERROR Reading Failed Scrapes Queue File..', error);
}
} else {
console.log('ERROR Reading Failed Scrapes Queue File..', err);
@@ -115,27 +146,6 @@ const runAppCronJobs = () => {
});
}, { scheduled: true });
}
// RUN Email Notification CRON
getAppSettings().then((settings) => {
const notif_interval = (!settings.notification_interval || settings.notification_interval === 'never') ? false : settings.notification_interval;
if (notif_interval) {
const cronTime = generateCronTime(notif_interval === 'daily' ? 'daily_morning' : notif_interval);
if (cronTime) {
Cron(cronTime, () => {
// console.log('### Sending Notification Email...');
const fetchOpts = { method: 'POST', headers: { Authorization: `Bearer ${process.env.APIKEY}` } };
fetch(`${process.env.NEXT_PUBLIC_APP_URL}/api/notify`, fetchOpts)
.then((res) => res.json())
.then((data) => console.log(data))
.catch((err) => {
console.log('ERROR Making Cron Email Notification Request..');
console.log(err);
});
}, { scheduled: true });
}
}
});
};
runAppCronJobs();
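
Two behavioral notes on the cron changes: the schedule strings are corrected as sketched below, and the failed-queue read is now wrapped in try/catch so a corrupt `failed_queue.json` logs an error instead of taking the cron process down (#116).

```ts
// Cron expressions from the diff, with their meanings spelled out.
// (Scheduler wiring omitted; cron.js passes these strings to its Cron helper.)
const WEEKLY = '0 0 * * 1';           // 5-field: minute 0, hour 0, every Monday
const OTHER_DAY = '0 0 2-30/2 * *';   // midnight on every even day of the month
const DAILY_MORNING = '0 0 3 * * *';  // 6-field (with seconds): 03:00 every day

// The old weekly value '0 0 0 */7 * *' stepped through days of the month
// (1, 8, 15, 22, 29), so the "weekly" job drifted at month boundaries
// instead of firing on a fixed weekday.
```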

View File

@@ -6,6 +6,6 @@ import './styles/globals.css';
// Used for __tests__/testing-library.js
// Learn more: https://github.com/testing-library/jest-dom
import '@testing-library/jest-dom/extend-expect';
import '@testing-library/jest-dom';
global.ResizeObserver = require('resize-observer-polyfill');

View File

@@ -1,8 +1,13 @@
/** @type {import('next').NextConfig} */
const { version } = require('./package.json');
const nextConfig = {
reactStrictMode: true,
swcMinify: false,
output: 'standalone',
publicRuntimeConfig: {
version,
},
};
module.exports = nextConfig;
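
Exposing `version` through `publicRuntimeConfig` is what lets the settings API and the footer report the running app version. A minimal sketch of reading it elsewhere in the app:

```ts
// Reads the version exposed by next.config.js; available on both server and client.
import getConfig from 'next/config';

const { publicRuntimeConfig } = getConfig();
const appVersion: string | undefined = publicRuntimeConfig?.version;
```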

package-lock.json (generated): 15,341 lines changed; diff suppressed because it is too large.

View File

@@ -1,6 +1,6 @@
{
"name": "serpbear",
"version": "0.2.4",
"version": "0.3.2",
"private": true,
"scripts": {
"dev": "next dev",
@@ -17,7 +17,6 @@
},
"dependencies": {
"@googleapis/searchconsole": "^1.0.0",
"@testing-library/react": "^13.4.0",
"@types/react-transition-group": "^4.4.5",
"axios": "^1.1.3",
"axios-retry": "^3.3.1",
@@ -31,10 +30,9 @@
"dotenv": "^16.0.3",
"https-proxy-agent": "^5.0.1",
"isomorphic-fetch": "^3.0.0",
"jsonwebtoken": "^8.5.1",
"jsonwebtoken": "^9.0.2",
"msw": "^0.49.0",
"next": "12.3.1",
"node-cron": "^3.0.2",
"next": "^12.3.4",
"nodemailer": "^6.8.0",
"react": "18.2.0",
"react-chartjs-2": "^4.3.1",
@@ -45,15 +43,17 @@
"react-transition-group": "^4.4.5",
"react-window": "^1.8.8",
"reflect-metadata": "^0.1.13",
"sequelize": "^6.25.2",
"sequelize": "^6.34.0",
"sequelize-typescript": "^2.1.5",
"sqlite3": "^5.1.2"
"sqlite3": "^5.1.6"
},
"devDependencies": {
"@testing-library/jest-dom": "^5.16.5",
"@testing-library/jest-dom": "^6.1.4",
"@testing-library/react": "^14.0.0",
"@types/cookies": "^0.7.7",
"@types/cryptr": "^4.0.1",
"@types/isomorphic-fetch": "^0.0.36",
"@types/jest": "^29.5.8",
"@types/jsonwebtoken": "^8.5.9",
"@types/node": "18.11.0",
"@types/nodemailer": "^6.4.6",
@@ -65,9 +65,9 @@
"eslint": "8.25.0",
"eslint-config-airbnb-base": "^15.0.0",
"eslint-config-next": "12.3.1",
"jest": "^29.3.1",
"jest-environment-jsdom": "^29.3.1",
"postcss": "^8.4.18",
"jest": "^29.7.0",
"jest-environment-jsdom": "^29.7.0",
"postcss": "^8.4.31",
"prettier": "^2.7.1",
"resize-observer-polyfill": "^1.5.1",
"sass": "^1.55.0",

pages/api/clearfailed.ts (new file, 29 lines)
View File

@@ -0,0 +1,29 @@
import { writeFile } from 'fs/promises';
import type { NextApiRequest, NextApiResponse } from 'next';
import verifyUser from '../../utils/verifyUser';
type SettingsGetResponse = {
cleared?: boolean,
error?: string,
}
export default async function handler(req: NextApiRequest, res: NextApiResponse) {
const authorized = verifyUser(req, res);
if (authorized !== 'authorized') {
return res.status(401).json({ error: authorized });
}
if (req.method === 'PUT') {
return clearFailedQueue(req, res);
}
return res.status(502).json({ error: 'Unrecognized Route.' });
}
const clearFailedQueue = async (req: NextApiRequest, res: NextApiResponse<SettingsGetResponse>) => {
try {
await writeFile(`${process.cwd()}/data/failed_queue.json`, JSON.stringify([]), { encoding: 'utf-8' });
return res.status(200).json({ cleared: true });
} catch (error) {
console.log('[ERROR] Cleraring Failed Queue File.', error);
return res.status(200).json({ error: 'Error Cleraring Failed Queue!' });
}
};
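
The new endpoint only answers `PUT` and relies on the usual `verifyUser` check. A hedged sketch of calling it from the client, roughly what `useClearFailedQueue` in `services/settings` would wrap (the exact hook body is not in this diff):

```ts
// Illustrative client call; authentication is whatever verifyUser accepts
// (session cookie in the browser, or the APIKEY bearer token used by cron.js).
const clearFailedQueue = async (): Promise<boolean> => {
  const res = await fetch('/api/clearfailed', { method: 'PUT' });
  const json: { cleared?: boolean, error?: string } = await res.json();
  return !!json.cleared;
};
```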

View File

@@ -3,7 +3,7 @@ import db from '../../database/database';
import Keyword from '../../database/models/keyword';
import { getAppSettings } from './settings';
import verifyUser from '../../utils/verifyUser';
import { refreshAndUpdateKeywords } from './refresh';
import refreshAndUpdateKeywords from '../../utils/refresh';
type CRONRefreshRes = {
started: boolean

View File

@@ -37,7 +37,10 @@ const getDomainSearchConsoleInsight = async (req: NextApiRequest, res: NextApiRe
// First try and read the Local SC Domain Data file.
const localSCData = await readLocalSCData(domainname);
if (localSCData && localSCData.stats && localSCData.stats.length) {
const oldFetchedDate = localSCData.lastFetched;
const fetchTimeDiff = new Date().getTime() - (oldFetchedDate ? new Date(oldFetchedDate as string).getTime() : 0);
if (localSCData && localSCData.stats && localSCData.stats.length && fetchTimeDiff <= 86400000) {
const response = getInsightFromSCData(localSCData);
return res.status(200).json({ data: response });
}
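
The new `fetchTimeDiff` guard caches the local Search Console data for 24 hours (86,400,000 ms) before re-fetching. The same check, isolated:

```ts
// 24-hour freshness check used for the cached Search Console insight data.
const DAY_MS = 24 * 60 * 60 * 1000; // 86400000

const isFresh = (lastFetched?: string): boolean =>
  !!lastFetched && Date.now() - new Date(lastFetched).getTime() <= DAY_MS;
```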

View File

@@ -2,11 +2,11 @@ import type { NextApiRequest, NextApiResponse } from 'next';
import { Op } from 'sequelize';
import db from '../../database/database';
import Keyword from '../../database/models/keyword';
import { refreshAndUpdateKeywords } from './refresh';
import { getAppSettings } from './settings';
import verifyUser from '../../utils/verifyUser';
import parseKeywords from '../../utils/parseKeywords';
import { integrateKeywordSCData, readLocalSCData } from '../../utils/searchConsole';
import refreshAndUpdateKeywords from '../../utils/refresh';
type KeywordsGetResponse = {
keywords?: KeywordType[],

View File

@@ -2,11 +2,10 @@ import type { NextApiRequest, NextApiResponse } from 'next';
import { Op } from 'sequelize';
import db from '../../database/database';
import Keyword from '../../database/models/keyword';
import refreshKeywords from '../../utils/refresh';
import refreshAndUpdateKeywords from '../../utils/refresh';
import { getAppSettings } from './settings';
import verifyUser from '../../utils/verifyUser';
import parseKeywords from '../../utils/parseKeywords';
import { removeFromRetryQueue, retryScrape } from '../../utils/scraper';
type KeywordsRefreshRes = {
keywords?: KeywordType[]
@@ -63,57 +62,3 @@ const refresTheKeywords = async (req: NextApiRequest, res: NextApiResponse<Keywo
return res.status(400).json({ error: 'Error refreshing keywords!' });
}
};
export const refreshAndUpdateKeywords = async (initKeywords:Keyword[], settings:SettingsType) => {
const formattedKeywords = initKeywords.map((el) => el.get({ plain: true }));
const refreshed: any = await refreshKeywords(formattedKeywords, settings);
// const fetchKeywords = await refreshKeywords(initialKeywords.map( k=> k.keyword ));
const updatedKeywords: KeywordType[] = [];
for (const keywordRaw of initKeywords) {
const keywordPrased = parseKeywords([keywordRaw.get({ plain: true })]);
const keyword = keywordPrased[0];
const udpatedkeyword = refreshed.find((item:any) => item.ID && item.ID === keyword.ID);
if (udpatedkeyword && keyword) {
const newPos = udpatedkeyword.position;
const newPosition = newPos !== false ? newPos : keyword.position;
const { history } = keyword;
const theDate = new Date();
history[`${theDate.getFullYear()}-${theDate.getMonth() + 1}-${theDate.getDate()}`] = newPosition;
const updatedVal = {
position: newPosition,
updating: false,
url: udpatedkeyword.url,
lastResult: udpatedkeyword.result,
history,
lastUpdated: udpatedkeyword.error ? keyword.lastUpdated : theDate.toJSON(),
lastUpdateError: udpatedkeyword.error
? JSON.stringify({ date: theDate.toJSON(), error: `${udpatedkeyword.error}`, scraper: settings.scraper_type })
: 'false',
};
updatedKeywords.push({ ...keyword, ...{ ...updatedVal, lastUpdateError: JSON.parse(updatedVal.lastUpdateError) } });
// If failed, Add to Retry Queue Cron
if (udpatedkeyword.error) {
await retryScrape(keyword.ID);
} else {
await removeFromRetryQueue(keyword.ID);
}
// Update the Keyword Position in Database
try {
await keywordRaw.update({
...updatedVal,
lastResult: Array.isArray(udpatedkeyword.result) ? JSON.stringify(udpatedkeyword.result) : udpatedkeyword.result,
history: JSON.stringify(history),
});
console.log('[SUCCESS] Updating the Keyword: ', keyword.keyword);
} catch (error) {
console.log('[ERROR] Updating SERP for Keyword', keyword.keyword, error);
}
}
}
return updatedKeywords;
};

View File

@@ -1,6 +1,7 @@
import { writeFile, readFile } from 'fs/promises';
import type { NextApiRequest, NextApiResponse } from 'next';
import Cryptr from 'cryptr';
import { writeFile, readFile } from 'fs/promises';
import getConfig from 'next/config';
import verifyUser from '../../utils/verifyUser';
import allScrapers from '../../scrapers/index';
@@ -26,7 +27,9 @@ export default async function handler(req: NextApiRequest, res: NextApiResponse)
const getSettings = async (req: NextApiRequest, res: NextApiResponse<SettingsGetResponse>) => {
const settings = await getAppSettings();
if (settings) {
return res.status(200).json({ settings });
const { publicRuntimeConfig } = getConfig();
const version = publicRuntimeConfig?.version;
return res.status(200).json({ settings: { ...settings, version } });
}
return res.status(400).json({ error: 'Error Loading Settings!' });
};
@@ -54,6 +57,8 @@ const updateSettings = async (req: NextApiRequest, res: NextApiResponse<Settings
export const getAppSettings = async () : Promise<SettingsType> => {
try {
const settingsRaw = await readFile(`${process.cwd()}/data/settings.json`, { encoding: 'utf-8' });
const failedQueueRaw = await readFile(`${process.cwd()}/data/failed_queue.json`, { encoding: 'utf-8' });
const failedQueue: string[] = failedQueueRaw ? JSON.parse(failedQueueRaw) : [];
const settings: SettingsType = settingsRaw ? JSON.parse(settingsRaw) : {};
let decryptedSettings = settings;
@@ -67,6 +72,7 @@ export const getAppSettings = async () : Promise<SettingsType> => {
smtp_password,
search_console_integrated: !!(process.env.SEARCH_CONSOLE_PRIVATE_KEY && process.env.SEARCH_CONSOLE_CLIENT_EMAIL),
available_scapers: allScrapers.map((scraper) => ({ label: scraper.name, value: scraper.id })),
failed_queue: failedQueue,
};
} catch (error) {
console.log('Error Decrypting Settings API Keys!');
@@ -84,8 +90,14 @@ export const getAppSettings = async () : Promise<SettingsType> => {
smtp_port: '',
smtp_username: '',
smtp_password: '',
scrape_retry: false,
};
const otherSettings = {
available_scapers: allScrapers.map((scraper) => ({ label: scraper.name, value: scraper.id })),
failed_queue: [],
};
await writeFile(`${process.cwd()}/data/settings.json`, JSON.stringify(settings), { encoding: 'utf-8' });
return settings;
await writeFile(`${process.cwd()}/data/failed_queue.json`, JSON.stringify([]), { encoding: 'utf-8' });
return { ...settings, ...otherSettings };
}
};
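To make the new fields concrete, here is a sketch of the kind of object getAppSettings could resolve to after this change. All values are placeholders; failed_queue is whatever IDs are currently stored in data/failed_queue.json, and version comes from publicRuntimeConfig.
// Illustrative only: placeholder values, typed as a Partial to avoid guessing at
// required fields that are not visible in this diff.
const exampleSettings: Partial<SettingsType> = {
   scraper_type: 'searchapi',
   scrape_delay: '0',
   scrape_retry: false,
   smtp_port: '',
   smtp_username: '',
   smtp_password: '',
   search_console_integrated: false,
   available_scapers: [{ label: 'SearchApi.io', value: 'searchapi' }],
   failed_queue: [],
   version: '0.3.2',
};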

View File

@@ -11,16 +11,43 @@ import { useFetchDomains } from '../../services/domains';
import DomainItem from '../../components/domains/DomainItem';
import Icon from '../../components/common/Icon';
type thumbImages = { [domain:string] : string }
const SingleDomain: NextPage = () => {
const router = useRouter();
const [noScrapprtError, setNoScrapprtError] = useState(false);
const [showSettings, setShowSettings] = useState(false);
const [showAddDomain, setShowAddDomain] = useState(false);
const [domainThumbs, setDomainThumbs] = useState<thumbImages>({});
const { data: appSettings } = useFetchSettings();
const { data: domainsData, isLoading } = useFetchDomains(router, true);
useEffect(() => {
console.log('Domains Data: ', domainsData);
// console.log('Domains Data: ', domainsData);
if (domainsData?.domains && domainsData.domains.length > 0) {
const domainThumbsRaw = localStorage.getItem('domainThumbs');
const domThumbs = domainThumbsRaw ? JSON.parse(domainThumbsRaw) : {};
domainsData.domains.forEach(async (domain:DomainType) => {
if (domain.domain) {
if (!domThumbs[domain.domain]) {
const domainImageBlob = await fetch(`https://image.thum.io/get/auth/66909-serpbear/maxAge/96/width/200/https://${domain.domain}`).then((res) => res.blob());
if (domainImageBlob) {
const reader = new FileReader();
await new Promise((resolve, reject) => {
reader.onload = resolve;
reader.onerror = reject;
reader.readAsDataURL(domainImageBlob);
});
const imageBase: string = reader.result && typeof reader.result === 'string' ? reader.result : '';
localStorage.setItem('domainThumbs', JSON.stringify({ ...domThumbs, [domain.domain]: imageBase }));
setDomainThumbs((currentThumbs) => ({ ...currentThumbs, [domain.domain]: imageBase }));
}
} else {
setDomainThumbs((currentThumbs) => ({ ...currentThumbs, [domain.domain]: domThumbs[domain.domain] }));
}
}
});
}
}, [domainsData]);
useEffect(() => {
@@ -31,7 +58,7 @@ const SingleDomain: NextPage = () => {
}, [appSettings]);
return (
<div className="Domain ">
<div className="Domain flex flex-col min-h-screen">
{noScrapprtError && (
<div className=' p-3 bg-red-600 text-white text-sm text-center'>
A Scraper/Proxy has not been set up yet. Open Settings to set it up and start using the app.
@@ -62,6 +89,7 @@ const SingleDomain: NextPage = () => {
domain={domain}
selected={false}
isConsoleIntegrated={!!(appSettings && appSettings?.settings?.search_console_integrated) }
thumb={domainThumbs[domain.domain]}
// isConsoleIntegrated={false}
/>;
})}
@@ -84,6 +112,9 @@ const SingleDomain: NextPage = () => {
<CSSTransition in={showSettings} timeout={300} classNames="settings_anim" unmountOnExit mountOnEnter>
<Settings closeSettings={() => setShowSettings(false)} />
</CSSTransition>
<footer className='text-center flex flex-1 justify-center pb-5 items-end'>
<span className='text-gray-500 text-xs'><a href='https://github.com/towfiqi/serpbear' target="_blank" rel='noreferrer'>SerpBear v{appSettings?.settings?.version || '0.0.0'}</a></span>
</footer>
</div>
);
};
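The thumbnail effect above wraps FileReader in a Promise to turn the fetched thum.io screenshot blob into a base64 data URL before caching it. A standalone sketch of that conversion; the helper name is hypothetical and not part of the codebase.
// Hypothetical helper illustrating the blob -> base64 data URL conversion used
// for the domain thumbnails.
const blobToDataURL = (blob: Blob): Promise<string> => new Promise((resolve, reject) => {
   const reader = new FileReader();
   reader.onload = () => resolve(typeof reader.result === 'string' ? reader.result : '');
   reader.onerror = reject;
   reader.readAsDataURL(blob);
});

// Usage sketch (thumbURL stands for the thum.io screenshot URL built above):
// const blob = await fetch(thumbURL).then((res) => res.blob());
// const imageBase = await blobToDataURL(blob);
// localStorage.setItem('domainThumbs', JSON.stringify({ ...domThumbs, [domain.domain]: imageBase }));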

View File

@@ -2,12 +2,16 @@ import scrapingAnt from './services/scrapingant';
import scrapingRobot from './services/scrapingrobot';
import serpapi from './services/serpapi';
import serply from './services/serply';
import spaceserp from './services/spaceserp';
import proxy from './services/proxy';
import searchapi from './services/searchapi';
export default [
scrapingRobot,
scrapingAnt,
serpapi,
serply,
spaceserp,
proxy,
searchapi,
];

View File

@@ -11,7 +11,8 @@ const scrapingAnt:ScraperSettings = {
const scraperCountries = ['AE', 'BR', 'CN', 'DE', 'ES', 'FR', 'GB', 'HK', 'PL', 'IN', 'IT', 'IL', 'JP', 'NL', 'RU', 'SA', 'US', 'CZ'];
const country = scraperCountries.includes(keyword.country.toUpperCase()) ? keyword.country : 'US';
const lang = countryData[country][2];
return `https://api.scrapingant.com/v2/extended?url=https%3A%2F%2Fwww.google.com%2Fsearch%3Fnum%3D100%26hl%3D${lang}%26q%3D${encodeURI(keyword.keyword)}&x-api-key=${settings.scaping_api}&proxy_country=${country}&browser=false`;
const url = encodeURI(`https://www.google.com/search?num=100&hl=${lang}&q=${keyword.keyword}`);
return `https://api.scrapingant.com/v2/extended?url=${url}&x-api-key=${settings.scaping_api}&proxy_country=${country}&browser=false`;
},
resultObjectKey: 'result',
};
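A small illustration of what the rewritten URL builder produces for a keyword containing special characters. The keyword, language and API key are placeholder values.
// encodeURI() is applied to the full Google search URL before it is embedded as
// the url parameter, so spaces and non-ASCII characters in the keyword are
// percent-encoded. Example output for a sample keyword:
const sampleKeyword = 'café near me';
const sampleURL = encodeURI(`https://www.google.com/search?num=100&hl=en&q=${sampleKeyword}`);
// sampleURL === 'https://www.google.com/search?num=100&hl=en&q=caf%C3%A9%20near%20me'
console.log(`https://api.scrapingant.com/v2/extended?url=${sampleURL}&x-api-key=API_KEY&proxy_country=US&browser=false`);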

View File

@@ -4,8 +4,10 @@ const scrapingRobot:ScraperSettings = {
website: 'scrapingrobot.com',
scrapeURL: (keyword, settings, countryData) => {
const country = keyword.country || 'US';
const device = keyword.device === 'mobile' ? '&mobile=true' : '';
const lang = countryData[country][2];
return `https://api.scrapingrobot.com/?token=${settings.scaping_api}&proxyCountry=${country}&render=false${keyword.device === 'mobile' ? '&mobile=true' : ''}&url=https%3A%2F%2Fwww.google.com%2Fsearch%3Fnum%3D100%26hl%3D${lang}%26q%3D${encodeURI(keyword.keyword)}`;
const url = encodeURI(`https://www.google.com/search?num=100&hl=${lang}&q=${keyword.keyword}`);
return `https://api.scrapingrobot.com/?token=${settings.scaping_api}&proxyCountry=${country}&render=false${device}&url=${url}`;
},
resultObjectKey: 'result',
};

View File

@@ -0,0 +1,38 @@
const searchapi:ScraperSettings = {
id: 'searchapi',
name: 'SearchApi.io',
website: 'searchapi.io',
headers: (keyword, settings) => {
return {
'Content-Type': 'application/json',
Authorization: `Bearer ${settings.scaping_api}`,
};
},
scrapeURL: (keyword) => {
return `https://www.searchapi.io/api/v1/search?engine=google&q=${encodeURI(keyword.keyword)}&num=100&gl=${keyword.country}&device=${keyword.device}`;
},
resultObjectKey: 'organic_results',
serpExtractor: (content) => {
const extractedResult = [];
const results: SearchApiResult[] = (typeof content === 'string') ? JSON.parse(content) : content as SearchApiResult[];
for (const { link, title, position } of results) {
if (title && link) {
extractedResult.push({
title,
url: link,
position,
});
}
}
return extractedResult;
},
};
interface SearchApiResult {
title: string,
link: string,
position: number,
}
export default searchapi;
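A quick illustration of what serpExtractor does with a SearchApi.io organic_results payload. The sample data is made up, not a real API response; the extractor accepts either the parsed array or its JSON string form.
// Entries without a link are skipped by the title && link guard above.
const sample = JSON.stringify([
   { title: 'Example Domain', link: 'https://example.com/', position: 1 },
   { title: 'No Link Entry', link: '', position: 2 },
]);
console.log(searchapi.serpExtractor(sample));
// -> [ { title: 'Example Domain', url: 'https://example.com/', position: 1 } ]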

View File

@@ -0,0 +1,34 @@
interface SpaceSerpResult {
title: string,
link: string,
domain: string,
position: number
}
const spaceSerp:ScraperSettings = {
id: 'spaceSerp',
name: 'Space Serp',
website: 'spaceserp.com',
scrapeURL: (keyword, settings, countryData) => {
const country = keyword.country || 'US';
const lang = countryData[country][2];
return `https://api.spaceserp.com/google/search?apiKey=${settings.scaping_api}&q=${encodeURI(keyword.keyword)}&pageSize=100&gl=${country}&hl=${lang}${keyword.device === 'mobile' ? '&device=mobile' : ''}&resultBlocks=`;
},
resultObjectKey: 'organic_results',
serpExtractor: (content) => {
const extractedResult = [];
const results: SpaceSerpResult[] = (typeof content === 'string') ? JSON.parse(content) : content as SpaceSerpResult[];
for (const result of results) {
if (result.title && result.link) {
extractedResult.push({
title: result.title,
url: result.link,
position: result.position,
});
}
}
return extractedResult;
},
};
export default spaceSerp;
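For illustration, the request URL this scraper builds for a sample mobile keyword. The keyword, API key and country data are placeholders; only index 2 of the country entry (the language code) is read by scrapeURL.
const sampleSpaceSerpURL = spaceSerp.scrapeURL(
   { keyword: 'serp tracker', country: 'US', device: 'mobile' } as any,
   { scaping_api: 'API_KEY' } as any,
   { US: ['United States', 'US', 'en'] } as any,
);
// -> https://api.spaceserp.com/google/search?apiKey=API_KEY&q=serp%20tracker&pageSize=100&gl=US&hl=en&device=mobile&resultBlocks=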

View File

@@ -38,4 +38,27 @@ const useUpdateSettings = (onSuccess:Function|undefined) => {
});
};
export function useClearFailedQueue(onSuccess:Function) {
const queryClient = useQueryClient();
return useMutation(async () => {
const headers = new Headers({ 'Content-Type': 'application/json', Accept: 'application/json' });
const fetchOpts = { method: 'PUT', headers };
const res = await fetch(`${window.location.origin}/api/clearfailed`, fetchOpts);
if (res.status >= 400 && res.status < 600) {
throw new Error('Bad response from server');
}
return res.json();
}, {
onSuccess: async () => {
onSuccess();
toast('Failed Queue Cleared', { icon: '✔️' });
queryClient.invalidateQueries(['settings']);
},
onError: () => {
console.log('Error Clearing Failed Queue!!!');
toast('Error Clearing Failed Queue.', { icon: '⚠️' });
},
});
}
export default useUpdateSettings;
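A hypothetical usage sketch of the new hook in a component. The component name, prop name and import path are assumptions for illustration; the hook itself PUTs to /api/clearfailed and invalidates the cached settings on success, as defined above.
import { useClearFailedQueue } from '../../services/settings';

const ClearFailedQueueButton = ({ onCleared }: { onCleared: () => void }) => {
   // mutate() triggers the PUT request; isLoading disables the button while it runs.
   const { mutate: clearFailedQueue, isLoading } = useClearFailedQueue(onCleared);
   return (
      <button disabled={isLoading} onClick={() => clearFailedQueue()}>
         Clear Failed Queue
      </button>
   );
};
export default ClearFailedQueueButton;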

types.d.ts vendored
View File

@@ -78,7 +78,12 @@ type SettingsType = {
smtp_username?: string,
smtp_password?: string,
search_console_integrated?: boolean,
available_scapers?: Array
available_scapers?: Array,
scrape_interval?: string,
scrape_delay?: string,
scrape_retry?: boolean,
failed_queue?: string[]
version?: string
}
type KeywordSCDataChild = {

View File

@@ -1,32 +1,117 @@
import { performance } from 'perf_hooks';
import { RefreshResult, scrapeKeywordFromGoogle } from './scraper';
import { setTimeout as sleep } from 'timers/promises';
import { RefreshResult, removeFromRetryQueue, retryScrape, scrapeKeywordFromGoogle } from './scraper';
import parseKeywords from './parseKeywords';
import Keyword from '../database/models/keyword';
/**
* Refreshes the keywords' positions by scraping Google search results,
* determining whether the keywords should be scraped in parallel or sequentially.
* @param {KeywordType[]} keywords - Keywords to scrape
* @param {Keyword[]} rawkeyword - Keywords to scrape
* @param {SettingsType} settings - The App Settings that contain the Scraper settings
* @returns {Promise}
*/
const refreshKeywords = async (keywords:KeywordType[], settings:SettingsType): Promise<RefreshResult[]> => {
if (!keywords || keywords.length === 0) { return []; }
const refreshAndUpdateKeywords = async (rawkeyword:Keyword[], settings:SettingsType): Promise<KeywordType[]> => {
const keywords:KeywordType[] = rawkeyword.map((el) => el.get({ plain: true }));
if (!rawkeyword || rawkeyword.length === 0) { return []; }
const start = performance.now();
const updatedKeywords: KeywordType[] = [];
let refreshedResults: RefreshResult[] = [];
if (['scrapingant', 'serpapi'].includes(settings.scraper_type)) {
refreshedResults = await refreshParallel(keywords, settings);
if (['scrapingant', 'serpapi', 'searchapi'].includes(settings.scraper_type)) {
const refreshedResults = await refreshParallel(keywords, settings);
if (refreshedResults.length > 0) {
for (const keyword of rawkeyword) {
const refreshedkeywordData = refreshedResults.find((k) => k && k.ID === keyword.ID);
if (refreshedkeywordData) {
const updatedkeyword = await updateKeywordPosition(keyword, refreshedkeywordData, settings);
updatedKeywords.push(updatedkeyword);
}
}
}
} else {
for (const keyword of keywords) {
for (const keyword of rawkeyword) {
console.log('START SCRAPE: ', keyword.keyword);
const refreshedkeywordData = await scrapeKeywordFromGoogle(keyword, settings);
refreshedResults.push(refreshedkeywordData);
const updatedkeyword = await refreshAndUpdateKeyword(keyword, settings);
updatedKeywords.push(updatedkeyword);
if (keywords.length > 0 && settings.scrape_delay && settings.scrape_delay !== '0') {
await sleep(parseInt(settings.scrape_delay, 10));
}
}
}
const end = performance.now();
console.log(`time taken: ${end - start}ms`);
return refreshedResults;
return updatedKeywords;
};
/**
* Scrapes the SERP for the given keyword and updates its position in the DB.
* @param {Keyword} keyword - Keyword to scrape
* @param {SettingsType} settings - The App Settings that contain the Scraper settings
* @returns {Promise<KeywordType>}
*/
const refreshAndUpdateKeyword = async (keyword: Keyword, settings: SettingsType): Promise<KeywordType> => {
const currentkeyword = keyword.get({ plain: true });
const refreshedkeywordData = await scrapeKeywordFromGoogle(currentkeyword, settings);
const updatedkeyword = refreshedkeywordData ? await updateKeywordPosition(keyword, refreshedkeywordData, settings) : currentkeyword;
return updatedkeyword;
};
/**
* Processes the scraped data for the given keyword and updates its SERP position in the DB.
* @param {Keyword} keywordRaw - Keyword to update
* @param {RefreshResult} udpatedkeyword - Scraped data for that keyword
* @param {SettingsType} settings - The App Settings that contain the Scraper settings
* @returns {Promise<KeywordType>}
*/
export const updateKeywordPosition = async (keywordRaw:Keyword, udpatedkeyword: RefreshResult, settings: SettingsType): Promise<KeywordType> => {
const keywordPrased = parseKeywords([keywordRaw.get({ plain: true })]);
const keyword = keywordPrased[0];
// const udpatedkeyword = refreshed;
let updated = keyword;
if (udpatedkeyword && keyword) {
const newPos = udpatedkeyword.position;
const newPosition = newPos !== 0 ? newPos : keyword.position;
const { history } = keyword;
const theDate = new Date();
const dateKey = `${theDate.getFullYear()}-${theDate.getMonth() + 1}-${theDate.getDate()}`;
history[dateKey] = newPosition;
const updatedVal = {
position: newPosition,
updating: false,
url: udpatedkeyword.url,
lastResult: udpatedkeyword.result,
history,
lastUpdated: udpatedkeyword.error ? keyword.lastUpdated : theDate.toJSON(),
lastUpdateError: udpatedkeyword.error
? JSON.stringify({ date: theDate.toJSON(), error: `${udpatedkeyword.error}`, scraper: settings.scraper_type })
: 'false',
};
// If failed, Add to Retry Queue Cron
if (udpatedkeyword.error && settings?.scrape_retry) {
await retryScrape(keyword.ID);
} else {
await removeFromRetryQueue(keyword.ID);
}
// Update the Keyword Position in Database
try {
await keywordRaw.update({
...updatedVal,
lastResult: Array.isArray(udpatedkeyword.result) ? JSON.stringify(udpatedkeyword.result) : udpatedkeyword.result,
history: JSON.stringify(history),
});
console.log('[SUCCESS] Updating the Keyword: ', keyword.keyword);
updated = { ...keyword, ...updatedVal, lastUpdateError: JSON.parse(updatedVal.lastUpdateError) };
} catch (error) {
console.log('[ERROR] Updating SERP for Keyword', keyword.keyword, error);
}
}
return updated;
};
/**
@@ -49,4 +134,4 @@ const refreshParallel = async (keywords:KeywordType[], settings:SettingsType) :
});
};
export default refreshKeywords;
export default refreshAndUpdateKeywords;
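A small worked example of the history entry updateKeywordPosition writes, using an assumed date and position to show the key format (local year-month-day, month 1-indexed, no zero padding).
// Assumed values: a scrape on 9 Nov 2023 that found the keyword at position 4.
const theDate = new Date(2023, 10, 9); // 9 Nov 2023
const dateKey = `${theDate.getFullYear()}-${theDate.getMonth() + 1}-${theDate.getDate()}`;
// dateKey === '2023-11-9'
const history: { [date: string]: number } = { '2023-11-8': 6 };
history[dateKey] = 4;
// history === { '2023-11-8': 6, '2023-11-9': 4 }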

View File

@@ -12,14 +12,14 @@ type SearchResult = {
}
type SERPObject = {
postion:number|boolean,
postion:number,
url:string
}
export type RefreshResult = false | {
ID: number,
keyword: string,
position:number | boolean,
position:number,
url: string,
result: SearchResult[],
error?: boolean | string
@@ -192,7 +192,7 @@ export const extractScrapedResult = (content: string, device: string): SearchRes
* @returns {SERPObject}
*/
export const getSerp = (domain:string, result:SearchResult[]) : SERPObject => {
if (result.length === 0 || !domain) { return { postion: false, url: '' }; }
if (result.length === 0 || !domain) { return { postion: 0, url: '' }; }
const foundItem = result.find((item) => {
const itemDomain = item.url.replace('www.', '').match(/^(?:https?:)?(?:\/\/)?([^/?]+)/i);
return itemDomain && itemDomain.includes(domain.replace('www.', ''));
@@ -207,7 +207,7 @@ export const getSerp = (domain:string, result:SearchResult[]) : SERPObject => {
* @returns {void}
*/
export const retryScrape = async (keywordID: number) : Promise<void> => {
if (!keywordID) { return; }
if (!keywordID && !Number.isInteger(keywordID)) { return; }
let currentQueue: number[] = [];
const filePath = `${process.cwd()}/data/failed_queue.json`;
@@ -215,7 +215,7 @@ export const retryScrape = async (keywordID: number) : Promise<void> => {
currentQueue = currentQueueRaw ? JSON.parse(currentQueueRaw) : [];
if (!currentQueue.includes(keywordID)) {
currentQueue.push(keywordID);
currentQueue.push(Math.abs(keywordID));
}
await writeFile(filePath, JSON.stringify(currentQueue), { encoding: 'utf-8' }).catch((err) => { console.log(err); return '[]'; });
@@ -227,13 +227,13 @@ export const retryScrape = async (keywordID: number) : Promise<void> => {
* @returns {void}
*/
export const removeFromRetryQueue = async (keywordID: number) : Promise<void> => {
if (!keywordID) { return; }
if (!keywordID && !Number.isInteger(keywordID)) { return; }
let currentQueue: number[] = [];
const filePath = `${process.cwd()}/data/failed_queue.json`;
const currentQueueRaw = await readFile(filePath, { encoding: 'utf-8' }).catch((err) => { console.log(err); return '[]'; });
currentQueue = currentQueueRaw ? JSON.parse(currentQueueRaw) : [];
currentQueue = currentQueue.filter((item) => item !== keywordID);
currentQueue = currentQueue.filter((item) => item !== Math.abs(keywordID));
await writeFile(filePath, JSON.stringify(currentQueue), { encoding: 'utf-8' }).catch((err) => { console.log(err); return '[]'; });
};
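An illustration of getSerp with made-up SERP data, assuming (per the SERPObject type) that the matched result's position and url are returned; when nothing matches or the result list is empty it falls back to { postion: 0, url: '' } as shown above.
const sampleResults = [
   { title: 'Other Site', url: 'https://other.example/page', position: 1 },
   { title: 'Tracked Site', url: 'https://www.example.com/pricing', position: 7 },
];
console.log(getSerp('example.com', sampleResults));
// -> { postion: 7, url: 'https://www.example.com/pricing' }
console.log(getSerp('missing.com', []));
// -> { postion: 0, url: '' }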

View File

@@ -98,7 +98,8 @@ export const filterKeywords = (keywords: KeywordType[], filterParams: KeywordFil
const filteredItems:KeywordType[] = [];
keywords.forEach((keywrd) => {
const countryMatch = filterParams.countries.length === 0 ? true : filterParams.countries && filterParams.countries.includes(keywrd.country);
const searchMatch = !filterParams.search ? true : filterParams.search && keywrd.keyword.includes(filterParams.search);
const searchMatch = !filterParams.search ? true : filterParams.search
&& keywrd.keyword.toLowerCase().includes(filterParams.search.toLowerCase());
const tagsMatch = filterParams.tags.length === 0 ? true : filterParams.tags && keywrd.tags.find((x) => filterParams.tags.includes(x));
if (countryMatch && searchMatch && tagsMatch) {

utils/validators.ts Normal file
View File

@@ -0,0 +1,35 @@
/* eslint-disable import/prefer-default-export */
export const isValidDomain = (domain:string): boolean => {
if (typeof domain !== 'string') return false;
if (!domain.includes('.')) return false;
let value = domain;
const validHostnameChars = /^[a-zA-Z0-9-.]{1,253}\.?$/g;
if (!validHostnameChars.test(value)) {
return false;
}
if (value.endsWith('.')) {
value = value.slice(0, value.length - 1);
}
if (value.length > 253) {
return false;
}
const labels = value.split('.');
const isValid = labels.every((label) => {
const validLabelChars = /^([a-zA-Z0-9-]+)$/g;
const validLabel = (
validLabelChars.test(label)
&& label.length < 64
&& !label.startsWith('-')
&& !label.endsWith('-')
);
return validLabel;
});
return isValid;
};
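A few quick calls illustrating the validator; the results follow directly from the rules above, and the sample domains are made up.
console.log(isValidDomain('my-sub.example.com')); // true  - hyphenated subdomains are allowed
console.log(isValidDomain('example'));            // false - no dot
console.log(isValidDomain('-bad.example.com'));   // false - label starts with a hyphen
console.log(isValidDomain('münchen.de'));         // false - non-ASCII domains need their punycode (xn--) form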