Mirror of https://github.com/towfiqi/serpbear (synced 2025-06-26 18:15:54 +00:00)
Compare commits
45 Commits
| SHA1 |
|---|
| f164b287be |
| 97dd0b131b |
| 454454a422 |
| 4620f11c4b |
| 7ab435ed8b |
| 9feff13f18 |
| f57bca23da |
| 392122a710 |
| fc183d246d |
| 994afbcedb |
| d3d336fa71 |
| 6f34d64fd5 |
| a0014c7650 |
| dc3c7a722b |
| 8a949ce4c0 |
| 312d12f589 |
| be80ed7ef3 |
| 4748ffc382 |
| c0470cfa9d |
| 1d6b2be95a |
| 0d846b29f1 |
| 3b96dab9cc |
| 0a83924ffe |
| d9505158c4 |
| 9757fde02e |
| 0538a8c016 |
| cace34f39a |
| dce7c412e8 |
| e61dfb5b90 |
| b9d58a721d |
| b83df5f3db |
| 3b6d034d6f |
| 5dd366b91e |
| 5fc1779783 |
| c5af94a146 |
| c406588953 |
| 3c48d130b6 |
| 99dbbd1dd9 |
| acc0b39d80 |
| a1108d240e |
| d9e0d0107a |
| 9e9dad7631 |
| 8139e399c1 |
| cb24696a1f |
| b50733defc |
CHANGELOG.md (87 lines changed)
@@ -2,6 +2,93 @@

All notable changes to this project will be documented in this file. See [standard-version](https://github.com/conventional-changelog/standard-version) for commit guidelines.

### [0.3.1](https://github.com/towfiqi/serpbear/compare/v0.3.0...v0.3.1) (2023-11-04)

### Bug Fixes

* Removes dev files from docker volumes ([454454a](https://github.com/towfiqi/serpbear/commit/454454a422bab4d37a2d43ad95868e293a97b88e))
* Updates vulnerable dependencies ([97dd0b1](https://github.com/towfiqi/serpbear/commit/97dd0b131be4cec73d07f35062334dd1881f0013))

## [0.3.0](https://github.com/towfiqi/serpbear/compare/v0.2.6...v0.3.0) (2023-11-03)

### Features

* Adds ability to disable/clear retry queue for failed keywords ([dc3c7a7](https://github.com/towfiqi/serpbear/commit/dc3c7a722b18248115969c51f2495ccf1c43926d))
* Adds ability to search w/o case sensitivity ([4748ffc](https://github.com/towfiqi/serpbear/commit/4748ffc382161c5d861b8d43e8eba466a031e2bc)), closes [#115](https://github.com/towfiqi/serpbear/issues/115)
* Displays the Best position of the keywords ([fc183d2](https://github.com/towfiqi/serpbear/commit/fc183d246d55e0eecf43c91f6da8a59192e8e771)), closes [#89](https://github.com/towfiqi/serpbear/issues/89)
* Refresh All feature now shows updates in real time ([1d6b2be](https://github.com/towfiqi/serpbear/commit/1d6b2be95aa133b7998f5cf098f15aa32f5badd2))
* Remembers last selected country ([d3d336f](https://github.com/towfiqi/serpbear/commit/d3d336fa71cc789624b10f3cdd1a2b5983053e6f)), closes [#101](https://github.com/towfiqi/serpbear/issues/101)

### Bug Fixes

* Resolves missing keyword scrape spinner issue ([f57bca2](https://github.com/towfiqi/serpbear/commit/f57bca23daa3fe888af4c19a681dcec6b6100d83))
* Cron stopped on failing to parse failed queue ([8a949ce](https://github.com/towfiqi/serpbear/commit/8a949ce4c078ff377e91a95c4b86ef2b15dae88b)), closes [#116](https://github.com/towfiqi/serpbear/issues/116)
* Fixes import order error in some instances. ([be80ed7](https://github.com/towfiqi/serpbear/commit/be80ed7ef3dd0a315c5ad67d17e61a4797dc274c)), closes [#114](https://github.com/towfiqi/serpbear/issues/114)
* Fixes issue with adding hyphenated subdomains. ([c0470cf](https://github.com/towfiqi/serpbear/commit/c0470cfa9d0dac86317c886065b461cfe82ffb16))
* Fixes the weekly cron day issue. ([392122a](https://github.com/towfiqi/serpbear/commit/392122a7101683342830e900c6f0c39f9272bb34)), closes [#118](https://github.com/towfiqi/serpbear/issues/118)
* Fixes special character keyword scrape issue. ([9feff13](https://github.com/towfiqi/serpbear/commit/9feff13f18a4d72203dde694a147831f990b37fb)), closes [#113](https://github.com/towfiqi/serpbear/issues/113) [#122](https://github.com/towfiqi/serpbear/issues/122)
### [0.2.6](https://github.com/towfiqi/serpbear/compare/v0.2.5...v0.2.6) (2023-03-29)

### Features

* Add option to Delay Between scrapes. ([0a83924](https://github.com/towfiqi/serpbear/commit/0a83924ffe2243c52849c167c6c15d9688ff1dc7)), closes [#87](https://github.com/towfiqi/serpbear/issues/87)
* Integrates Space Serp. ([0538a8c](https://github.com/towfiqi/serpbear/commit/0538a8c01601d2f6365848580591a248528e67c7))

### Bug Fixes

* **components:** fix typo "Goolge" -> "Google" ([dce7c41](https://github.com/towfiqi/serpbear/commit/dce7c412e813fc845973f36ad1c9fa91df4a6611))
* Fixes first Keyword Error cut off issue. ([d950515](https://github.com/towfiqi/serpbear/commit/d9505158c439a924a1c86eb8243faf2a15bed43e))
* Fixes lags when tracking thousands of keywords ([9757fde](https://github.com/towfiqi/serpbear/commit/9757fde02ec83405546733381104c54ed6510681)), closes [#88](https://github.com/towfiqi/serpbear/issues/88)

### [0.2.5](https://github.com/towfiqi/serpbear/compare/v0.2.4...v0.2.5) (2023-03-07)

### Features

* Adds current App version Number in Footer. ([b83df5f](https://github.com/towfiqi/serpbear/commit/b83df5f3dbd64db657d31f0526438e7165e1b475))
* Adds Keyword Scraping Interval Settings. ([3b6d034](https://github.com/towfiqi/serpbear/commit/3b6d034d6f7da0b4259070220fffff44184dd680)), closes [#81](https://github.com/towfiqi/serpbear/issues/81) [#76](https://github.com/towfiqi/serpbear/issues/76)

### Bug Fixes

* Fixes Broken Image thumbnail loading issue. ([5dd366b](https://github.com/towfiqi/serpbear/commit/5dd366b91e2a94e658bf5250a8a0fa64c09e1c11))
* Settings Update Toast was not showing up. ([b9d58a7](https://github.com/towfiqi/serpbear/commit/b9d58a721df12f3f34220a3ae5da6897e23c83ec))
### [0.2.4](https://github.com/towfiqi/serpbear/compare/v0.2.3...v0.2.4) (2023-02-15)

### Features

* Keyword ranking pages can now be clicked. ([c5af94a](https://github.com/towfiqi/serpbear/commit/c5af94a1469713ed4092253d26953ee0ed28c25d))

### Bug Fixes

* Fixes broken Login on windows ([c406588](https://github.com/towfiqi/serpbear/commit/c406588953035e4177a64011c13eb0e3aedffe89))
* Fixes Node Cron memory leak issue. ([3c48d13](https://github.com/towfiqi/serpbear/commit/3c48d130b6f229a4ac27ec43ef1ea3a6640cecf6))

### [0.2.3](https://github.com/towfiqi/serpbear/compare/v0.2.2...v0.2.3) (2023-01-12)

### Features

* Ability to tag multiple keywords at once ([9e9dad7](https://github.com/towfiqi/serpbear/commit/9e9dad7631691b2a836fdd4c522b1f933b17e285)), closes [#54](https://github.com/towfiqi/serpbear/issues/54)
* Set USERNAME as well as USER variable ([b50733d](https://github.com/towfiqi/serpbear/commit/b50733defc2c06e0f92ca3e88fd1f74684eee9c0))

### Bug Fixes

* Fixes Position and View Sort. ([8139e39](https://github.com/towfiqi/serpbear/commit/8139e399c13ab8be767facef9a19c67dec06ed64)), closes [#46](https://github.com/towfiqi/serpbear/issues/46)
* Fixes wrong CTR value for Search Console Data ([cb24696](https://github.com/towfiqi/serpbear/commit/cb24696a1f47b02a11c68cd1c673ea8b1bacd144)), closes [#48](https://github.com/towfiqi/serpbear/issues/48)
* Mobile Keyword Scraping not working. ([a1108d2](https://github.com/towfiqi/serpbear/commit/a1108d240ea38ab0886ef3722b0c937ec5a45591)), closes [#58](https://github.com/towfiqi/serpbear/issues/58)
* ScrapingAnt Mobile Keyword Scrape not working ([acc0b39](https://github.com/towfiqi/serpbear/commit/acc0b39d80d4f9371967a0d425ed205c5d866eea))

### [0.2.2](https://github.com/towfiqi/serpbear/compare/v0.2.1...v0.2.2) (2022-12-25)
@@ -10,6 +10,8 @@ COPY . .
FROM node:lts-alpine AS builder
WORKDIR /app
COPY --from=deps /app ./
RUN rm -rf /app/data
RUN rm -rf /app/__test__
RUN npm run build

@@ -29,7 +31,7 @@ COPY --from=builder --chown=nextjs:nodejs /app/cron.js ./
COPY --from=builder --chown=nextjs:nodejs /app/email ./email
RUN rm package.json
RUN npm init -y
RUN npm i cryptr dotenv node-cron @googleapis/searchconsole
RUN npm i cryptr dotenv croner @googleapis/searchconsole
RUN npm i -g concurrently

USER nextjs
@@ -40,6 +40,7 @@ The App uses third party website scrapers like ScrapingAnt, ScrapingRobot, SerpA
| whatsmyserp.com | $49/mo | 30,000/mo | No |
| serply.io | $49/mo | 5,000/mo | Yes |
| serpapi.com | From $50/mo** | From 5,000/mo** | Yes |
| spaceserp.com | $59/lifetime | 15,000/mo | Yes |

(*) Free up to a limit. If you are using ScrapingAnt, you can look up 10,000 times per month for free.
(**) Free up to 100 per month. Paid from 5,000 to 10,000,000+ per month.
@@ -35,7 +35,7 @@ const ChartSlim = ({ labels, sreies }:ChartProps) => {
},
};

return <div className='w-[120px] h-[30px] rounded border border-gray-200'>
return <div className='w-[100px] h-[30px] rounded border border-gray-200'>
<Line
datasetIdKey='XXX'
options={options}
@@ -1,6 +1,7 @@
import React, { useState } from 'react';
import Modal from '../common/Modal';
import { useAddDomain } from '../../services/domains';
import { isValidDomain } from '../../utils/validators';

type AddDomainProps = {
closeModal: Function
@@ -13,7 +14,7 @@ const AddDomain = ({ closeModal }: AddDomainProps) => {

const addDomain = () => {
// console.log('ADD NEW DOMAIN', newDomain);
if (/^[a-zA-Z0-9][a-zA-Z0-9-]{0,61}[a-zA-Z0-9](?:\.[a-zA-Z]{2,})+$/.test(newDomain.trim())) {
if (isValidDomain(newDomain.trim())) {
setNewDomainError(false);
// TODO: Domain Action
addMutate(newDomain.trim());
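The hunk above replaces AddDomain's inline regex test with an isValidDomain helper imported from ../../utils/validators. That helper is not part of this diff, so the sketch below is only a hypothetical reconstruction of its shape, reusing the regex the component previously used inline; the real implementation presumably differs (the 0.3.0 changelog notes a fix for hyphenated subdomains, which this pattern rejects in non-leading labels).

```ts
// Hypothetical sketch of utils/validators.ts -- not the actual file from the repo.
// It only illustrates extracting the former inline check into a reusable helper.
export const isValidDomain = (input: string): boolean => {
  // Pattern copied from the removed inline check in AddDomain.
  const domainPattern = /^[a-zA-Z0-9][a-zA-Z0-9-]{0,61}[a-zA-Z0-9](?:\.[a-zA-Z]{2,})+$/;
  return domainPattern.test(input.trim());
};
```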
@@ -9,10 +9,11 @@ import Icon from '../common/Icon';
type DomainItemProps = {
domain: DomainType,
selected: boolean,
isConsoleIntegrated: boolean
isConsoleIntegrated: boolean,
thumb: string,
}

const DomainItem = ({ domain, selected, isConsoleIntegrated = false }: DomainItemProps) => {
const DomainItem = ({ domain, selected, isConsoleIntegrated = false, thumb }: DomainItemProps) => {
const { keywordsUpdated, slug, keywordCount = 0, avgPosition = 0, scVisits = 0, scImpressions = 0, scPosition = 0 } = domain;
// const router = useRouter();
return (
@@ -21,10 +22,10 @@ const DomainItem = ({ domain, selected, isConsoleIntegrated = false }: DomainIte
<a className='flex flex-col lg:flex-row'>
<div className={`flex-1 p-6 flex ${!isConsoleIntegrated ? 'basis-1/3' : ''}`}>
<div className="domain_thumb w-20 h-20 mr-6 bg-slate-100 rounded border border-gray-200 overflow-hidden">
<img src={`https://image.thum.io/get/maxAge/96/width/200/https://${domain.domain}`} alt={domain.domain} />
{thumb && <img src={thumb} alt={domain.domain} />}
</div>
<div className="domain_details flex-1">
<h3 className='font-semibold text-base mb-2 capitalize'>{domain.domain}</h3>
<h3 className='font-semibold text-base mb-2'>{domain.domain}</h3>
{keywordsUpdated && (
<span className=' text-gray-600 text-xs'>
Updated <TimeAgo title={dayjs(keywordsUpdated).format('DD-MMM-YYYY, hh:mm:ss A')} date={keywordsUpdated} />
@@ -117,7 +117,7 @@ const SCInsight = ({ insight, isLoading = true, isConsoleIntegrated = true }: SC
)}
{!isConsoleIntegrated && (
<p className=' p-9 pt-[10%] text-center text-gray-500'>
Goolge Search has not been Integrated yet. Please follow <a className='text-indigo-600 underline' href='https://docs.serpbear.com/miscellaneous/integrate-google-search-console' target="_blank" rel='noreferrer'>These Steps</a> to integrate Google Search Data for this Domain.
Google Search has not been Integrated yet. Please follow <a className='text-indigo-600 underline' href='https://docs.serpbear.com/miscellaneous/integrate-google-search-console' target="_blank" rel='noreferrer'>These Steps</a> to integrate Google Search Data for this Domain.
</p>
)}
</div>
@@ -21,7 +21,8 @@ type KeywordsInput = {

const AddKeywords = ({ closeModal, domain, keywords }: AddKeywordsProps) => {
const [error, setError] = useState<string>('');
const [newKeywordsData, setNewKeywordsData] = useState<KeywordsInput>({ keywords: '', device: 'desktop', country: 'US', domain, tags: '' });
const defCountry = localStorage.getItem('default_country') || 'US';
const [newKeywordsData, setNewKeywordsData] = useState<KeywordsInput>({ keywords: '', device: 'desktop', country: defCountry, domain, tags: '' });
const { mutate: addMutate, isLoading: isAdding } = useAddKeywords(() => closeModal(false));
const deviceTabStyle = 'cursor-pointer px-3 py-2 rounded mr-2';

@@ -64,7 +65,10 @@ const AddKeywords = ({ closeModal, domain, keywords }: AddKeywordsProps) => {
selected={[newKeywordsData.country]}
options={Object.keys(countries).map((countryISO:string) => { return { label: countries[countryISO][0], value: countryISO }; })}
defaultLabel='All Countries'
updateField={(updated:string[]) => setNewKeywordsData({ ...newKeywordsData, country: updated[0] })}
updateField={(updated:string[]) => {
setNewKeywordsData({ ...newKeywordsData, country: updated[0] });
localStorage.setItem('default_country', updated[0]);
}}
rounded='rounded'
maxHeight={48}
flags={true}
components/keywords/AddTags.tsx (new file, 60 lines)
@@ -0,0 +1,60 @@
import { useState } from 'react';
import { useUpdateKeywordTags } from '../../services/keywords';
import Icon from '../common/Icon';
import Modal from '../common/Modal';

type AddTagsProps = {
keywords: KeywordType[],
closeModal: Function
}

const AddTags = ({ keywords = [], closeModal }: AddTagsProps) => {
const [tagInput, setTagInput] = useState('');
const [inputError, setInputError] = useState('');
const { mutate: updateMutate } = useUpdateKeywordTags(() => { setTagInput(''); });

const addTag = () => {
if (keywords.length === 0) { return; }
if (!tagInput) {
setInputError('Please Insert a Tag!');
setTimeout(() => { setInputError(''); }, 3000);
return;
}

const tagsArray = tagInput.split(',').map((t) => t.trim());
const tagsPayload:any = {};
keywords.forEach((keyword:KeywordType) => {
tagsPayload[keyword.ID] = [...keyword.tags, ...tagsArray];
});
updateMutate({ tags: tagsPayload });
};

return (
<Modal closeModal={() => { closeModal(false); }} title={`Add New Tags to ${keywords.length} Selected Keyword`}>
<div className="relative">
{inputError && <span className="absolute top-[-24px] text-red-400 text-sm font-semibold">{inputError}</span>}
<span className='absolute text-gray-400 top-3 left-2'><Icon type="tags" size={16} /></span>
<input
className='w-full border rounded border-gray-200 py-3 px-4 pl-8 outline-none focus:border-indigo-300'
placeholder='Insert Tags. eg: tag1, tag2'
value={tagInput}
onChange={(e) => setTagInput(e.target.value)}
onKeyDown={(e) => {
if (e.code === 'Enter') {
e.preventDefault();
addTag();
}
}}
/>
<button
className=" absolute right-2 top-2 cursor-pointer rounded p-2 px-4 bg-indigo-600 text-white font-semibold text-sm"
onClick={addTag}>
Apply
</button>
</div>
</Modal>
);
};

export default AddTags;
@@ -9,6 +9,7 @@ import { generateTheChartData } from '../common/generateChartData';
|
||||
type KeywordProps = {
|
||||
keywordData: KeywordType,
|
||||
selected: boolean,
|
||||
index: number,
|
||||
refreshkeyword: Function,
|
||||
favoriteKeyword: Function,
|
||||
removeKeyword: Function,
|
||||
@@ -18,6 +19,7 @@ type KeywordProps = {
|
||||
lastItem?:boolean,
|
||||
showSCData: boolean,
|
||||
scDataType: string,
|
||||
style: Object
|
||||
}
|
||||
|
||||
const Keyword = (props: KeywordProps) => {
|
||||
@@ -32,6 +34,8 @@ const Keyword = (props: KeywordProps) => {
|
||||
manageTags,
|
||||
lastItem,
|
||||
showSCData = true,
|
||||
style,
|
||||
index,
|
||||
scDataType = 'threeDays',
|
||||
} = props;
|
||||
const {
|
||||
@@ -63,10 +67,23 @@ const Keyword = (props: KeywordProps) => {
|
||||
return status;
|
||||
}, [history, position]);
|
||||
|
||||
const bestPosition: false | {position: number, date: string} = useMemo(() => {
|
||||
let bestPos;
|
||||
if (Object.keys(history).length > 0) {
|
||||
const historyArray = Object.keys(history).map((itemID) => ({ date: itemID, position: history[itemID] }))
|
||||
.sort((a, b) => a.position - b.position);
|
||||
if (historyArray[0]) {
|
||||
bestPos = { ...historyArray[0] };
|
||||
}
|
||||
}
|
||||
|
||||
return bestPos || false;
|
||||
}, [history]);
|
||||
|
||||
const optionsButtonStyle = 'block px-2 py-2 cursor-pointer hover:bg-indigo-50 hover:text-blue-700';
|
||||
|
||||
const renderPosition = (pos:number, type?:string) => {
|
||||
if (pos === 0) {
|
||||
if (!updating && pos === 0) {
|
||||
return <span className='text-gray-400' title='Not in Top 100'>{'>100'}</span>;
|
||||
}
|
||||
if (updating && type !== 'sc') {
|
||||
@@ -78,8 +95,10 @@ const Keyword = (props: KeywordProps) => {
|
||||
return (
|
||||
<div
|
||||
key={keyword}
|
||||
style={style}
|
||||
className={`keyword relative py-5 px-4 text-gray-600 border-b-[1px] border-gray-200 lg:py-4 lg:px-6 lg:border-0
|
||||
lg:flex lg:justify-between lg:items-center ${selected ? ' bg-indigo-50 keyword--selected' : ''} ${lastItem ? 'border-b-0' : ''}`}>
|
||||
|
||||
<div className=' w-3/4 lg:flex-1 lg:basis-20 lg:w-auto font-semibold cursor-pointer'>
|
||||
<button
|
||||
className={`p-0 mr-2 leading-[0px] inline-block rounded-sm pt-0 px-[1px] pb-[3px] border
|
||||
@@ -100,22 +119,40 @@ const Keyword = (props: KeywordProps) => {
|
||||
</button>
|
||||
}
|
||||
</div>
|
||||
|
||||
<div
|
||||
className={`keyword_position absolute bg-[#f8f9ff] w-fit min-w-[50px] h-12 p-2 text-base mt-[-20px] rounded right-5 lg:relative
|
||||
lg:bg-transparent lg:w-auto lg:h-auto lg:mt-0 lg:p-0 lg:text-sm lg:flex-1 lg:basis-40 lg:grow-0 lg:right-0 text-center font-semibold`}>
|
||||
lg:bg-transparent lg:w-auto lg:h-auto lg:mt-0 lg:p-0 lg:text-sm lg:flex-1 lg:basis-24 lg:grow-0 lg:right-0 text-center font-semibold`}>
|
||||
{renderPosition(position)}
|
||||
{!updating && positionChange > 0 && <i className=' not-italic ml-1 text-xs text-[#5ed7c3]'>▲ {positionChange}</i>}
|
||||
{!updating && positionChange < 0 && <i className=' not-italic ml-1 text-xs text-red-300'>▼ {positionChange}</i>}
|
||||
</div>
|
||||
|
||||
<div
|
||||
title={bestPosition && bestPosition.date
|
||||
? new Date(bestPosition.date).toLocaleDateString('en-US', { weekday: 'long', year: 'numeric', month: 'short', day: 'numeric' }) : ''
|
||||
}
|
||||
className={`keyword_best hidden bg-[#f8f9ff] w-fit min-w-[50px] h-12 p-2 text-base mt-[-20px] rounded right-5 lg:relative lg:block
|
||||
lg:bg-transparent lg:w-auto lg:h-auto lg:mt-0 lg:p-0 lg:text-sm lg:flex-1 lg:basis-16 lg:grow-0 lg:right-0 text-center font-semibold`}>
|
||||
{bestPosition ? bestPosition.position || '-' : (position || '-')}
|
||||
</div>
|
||||
|
||||
{chartData.labels.length > 0 && (
|
||||
<div className='lg:flex-1 hidden lg:block'>
|
||||
<div
|
||||
className='hidden basis-32 grow-0 cursor-pointer lg:block'
|
||||
onClick={() => showKeywordDetails()}>
|
||||
<ChartSlim labels={chartData.labels} sreies={chartData.sreies} />
|
||||
</div>
|
||||
)}
|
||||
|
||||
<div
|
||||
className={`keyword_url inline-block mt-4 mr-5 ml-5 lg:flex-1 text-gray-400 lg:m-0 max-w-[70px]
|
||||
overflow-hidden text-ellipsis whitespace-nowrap lg:max-w-none lg:pr-5`}>
|
||||
<span className='mr-3 lg:hidden'><Icon type="link-alt" size={14} color="#999" /></span>{turncatedURL || '-'}</div>
|
||||
<a href={url} target="_blank" rel="noreferrer"><span className='mr-3 lg:hidden'>
|
||||
<Icon type="link-alt" size={14} color="#999" /></span>{turncatedURL || '-'}
|
||||
</a>
|
||||
</div>
|
||||
|
||||
<div
|
||||
className='inline-block mt-[4] top-[-5px] relative lg:flex-1 lg:m-0'>
|
||||
<span className='mr-2 lg:hidden'><Icon type="clock" size={14} color="#999" /></span>
|
||||
@@ -168,8 +205,10 @@ const Keyword = (props: KeywordProps) => {
|
||||
</ul>
|
||||
)}
|
||||
</div>
|
||||
|
||||
{lastUpdateError && lastUpdateError.date && showPositionError && (
|
||||
<div className=' absolute mt-[-70px] p-2 bg-white z-30 border border-red-200 rounded w-[220px] left-4 shadow-sm text-xs lg:bottom-12'>
|
||||
<div className={`absolute p-2 bg-white z-30 border border-red-200 rounded w-[220px] left-4 shadow-sm text-xs
|
||||
${index > 2 ? 'lg:bottom-12 mt-[-70px]' : ' top-12'}`}>
|
||||
Error Updating Keyword position (Tried <TimeAgo
|
||||
title={dayjs(lastUpdateError.date).format('DD-MMM-YYYY, hh:mm:ss A')}
|
||||
date={lastUpdateError.date} />)
|
||||
@@ -180,7 +219,8 @@ const Keyword = (props: KeywordProps) => {
|
||||
{lastUpdateError.scraper && <strong className='capitalize'>{lastUpdateError.scraper}: </strong>}{lastUpdateError.error}
|
||||
</div>
|
||||
</div>
|
||||
)}
|
||||
)}
|
||||
|
||||
</div>
|
||||
);
|
||||
};
|
||||
|
||||
@@ -85,7 +85,7 @@ const KeywordDetails = ({ keyword, closeDetails }:KeywordDetailsProps) => {
<h3 className=' text-lg font-bold'>
<span title={countries[keyword.country][0]}
className={`fflag fflag-${keyword.country} w-[18px] h-[12px] mr-2`} /> {keyword.keyword}
<span className='py-1 px-2 rounded bg-blue-50 text-blue-700 text-xs font-bold'>{keyword.position}</span>
<span className='py-1 px-2 ml-2 rounded bg-blue-50 text-blue-700 text-xs font-bold'>{keyword.position}</span>
</h3>
<button
className='absolute top-2 right-2 p-2 px-3 text-gray-400 hover:text-gray-700 transition-all hover:rotate-90'
@@ -82,10 +82,10 @@ const KeywordFilters = (props: KeywordFilterProps) => {
{ value: 'alpha_desc', label: 'Alphabetically(Z-A)' },
];
if (integratedConsole) {
sortOptionChoices.push({ value: 'imp_asc', label: `Most Viewed${isConsole ? ' (Default)' : ''}` });
sortOptionChoices.push({ value: 'imp_desc', label: 'Least Viewed' });
sortOptionChoices.push({ value: 'visits_asc', label: 'Most Visited' });
sortOptionChoices.push({ value: 'visits_desc', label: 'Least Visited' });
sortOptionChoices.push({ value: 'imp_desc', label: `Most Viewed${isConsole ? ' (Default)' : ''}` });
sortOptionChoices.push({ value: 'imp_asc', label: 'Least Viewed' });
sortOptionChoices.push({ value: 'visits_desc', label: 'Most Visited' });
sortOptionChoices.push({ value: 'visits_asc', label: 'Least Visited' });
}
if (isConsole) {
sortOptionChoices.splice(2, 2);
@@ -2,6 +2,7 @@ import { useState } from 'react';
|
||||
import { useUpdateKeywordTags } from '../../services/keywords';
|
||||
import Icon from '../common/Icon';
|
||||
import Modal from '../common/Modal';
|
||||
import AddTags from './AddTags';
|
||||
|
||||
type keywordTagManagerProps = {
|
||||
keyword: KeywordType|undefined,
|
||||
@@ -10,9 +11,8 @@ type keywordTagManagerProps = {
|
||||
}
|
||||
|
||||
const KeywordTagManager = ({ keyword, closeModal }: keywordTagManagerProps) => {
|
||||
const [tagInput, setTagInput] = useState('');
|
||||
const [inputError, setInputError] = useState('');
|
||||
const { mutate: updateMutate } = useUpdateKeywordTags(() => { setTagInput(''); });
|
||||
const [showAddTag, setShowAddTag] = useState<boolean>(false);
|
||||
const { mutate: updateMutate } = useUpdateKeywordTags(() => { });
|
||||
|
||||
const removeTag = (tag:String) => {
|
||||
if (!keyword) { return; }
|
||||
@@ -20,24 +20,6 @@ const KeywordTagManager = ({ keyword, closeModal }: keywordTagManagerProps) => {
|
||||
updateMutate({ tags: { [keyword.ID]: newTags } });
|
||||
};
|
||||
|
||||
const addTag = () => {
|
||||
if (!keyword) { return; }
|
||||
if (!tagInput) {
|
||||
setInputError('Please Insert a Tag!');
|
||||
setTimeout(() => { setInputError(''); }, 3000);
|
||||
return;
|
||||
}
|
||||
if (keyword.tags.includes(tagInput)) {
|
||||
setInputError('Tag Exist!');
|
||||
setTimeout(() => { setInputError(''); }, 3000);
|
||||
return;
|
||||
}
|
||||
|
||||
console.log('New Tag: ', tagInput);
|
||||
const newTags = [...keyword.tags, tagInput.trim()];
|
||||
updateMutate({ tags: { [keyword.ID]: newTags } });
|
||||
};
|
||||
|
||||
return (
|
||||
<Modal closeModal={() => { closeModal(false); }} title={`Tags for Keyword "${keyword && keyword.keyword}"`}>
|
||||
<div className="text-sm my-8 ">
|
||||
@@ -53,31 +35,27 @@ const KeywordTagManager = ({ keyword, closeModal }: keywordTagManagerProps) => {
|
||||
</button>
|
||||
</li>;
|
||||
})}
|
||||
<li className='inline-block py-1 px-1'>
|
||||
<button
|
||||
title='Add New Tag'
|
||||
className="cursor-pointer rounded p-1 px-3 bg-indigo-600 text-white font-semibold text-sm"
|
||||
onClick={() => setShowAddTag(true)}>+</button>
|
||||
</li>
|
||||
</ul>
|
||||
)}
|
||||
{keyword && keyword.tags.length === 0 && (
|
||||
<div className="text-center w-full text-gray-500">No Tags Added to this Keyword.</div>
|
||||
<div className="text-center w-full text-gray-500">
|
||||
No Tags Added to this Keyword. <button className=' text-indigo-600' onClick={() => setShowAddTag(true)}>+ Add Tag</button>
|
||||
</div>
|
||||
)}
|
||||
</div>
|
||||
<div className="relative">
|
||||
{inputError && <span className="absolute top-[-24px] text-red-400 text-sm font-semibold">{inputError}</span>}
|
||||
<span className='absolute text-gray-400 top-3 left-2'><Icon type="tags" size={16} /></span>
|
||||
<input
|
||||
className='w-full border rounded border-gray-200 py-3 px-4 pl-8 outline-none focus:border-indigo-300'
|
||||
placeholder='Insert Tags'
|
||||
value={tagInput}
|
||||
onChange={(e) => setTagInput(e.target.value)}
|
||||
onKeyDown={(e) => {
|
||||
if (e.code === 'Enter') {
|
||||
e.preventDefault();
|
||||
addTag();
|
||||
}
|
||||
}}
|
||||
/>
|
||||
<button className=" absolute right-2 top-2 cursor-pointer rounded p-1 px-4 bg-blue-600 text-white font-bold" onClick={addTag}>+</button>
|
||||
</div>
|
||||
{showAddTag && keyword && (
|
||||
<AddTags
|
||||
keywords={[keyword]}
|
||||
closeModal={() => setShowAddTag(false)}
|
||||
/>
|
||||
)}
|
||||
</Modal>
|
||||
|
||||
);
|
||||
};
|
||||
|
||||
|
||||
@@ -1,6 +1,7 @@
|
||||
import React, { useState, useMemo } from 'react';
|
||||
import React, { useState, useMemo, useEffect } from 'react';
|
||||
import { Toaster } from 'react-hot-toast';
|
||||
import { CSSTransition } from 'react-transition-group';
|
||||
import { FixedSizeList as List, ListChildComponentProps } from 'react-window';
|
||||
import AddKeywords from './AddKeywords';
|
||||
import { filterKeywords, keywordsByDevice, sortKeywords } from '../../utils/sortFilter';
|
||||
import Icon from '../common/Icon';
|
||||
@@ -10,6 +11,7 @@ import KeywordFilters from './KeywordFilter';
|
||||
import Modal from '../common/Modal';
|
||||
import { useDeleteKeywords, useFavKeywords, useRefreshKeywords } from '../../services/keywords';
|
||||
import KeywordTagManager from './KeywordTagManager';
|
||||
import AddTags from './AddTags';
|
||||
|
||||
type KeywordsTableProps = {
|
||||
domain: DomainType | null,
|
||||
@@ -28,6 +30,9 @@ const KeywordsTable = (props: KeywordsTableProps) => {
|
||||
const [showKeyDetails, setShowKeyDetails] = useState<KeywordType|null>(null);
|
||||
const [showRemoveModal, setShowRemoveModal] = useState<boolean>(false);
|
||||
const [showTagManager, setShowTagManager] = useState<null|number>(null);
|
||||
const [showAddTags, setShowAddTags] = useState<boolean>(false);
|
||||
const [isMobile, setIsMobile] = useState<boolean>(false);
|
||||
const [SCListHeight, setSCListHeight] = useState(500);
|
||||
const [filterParams, setFilterParams] = useState<KeywordFilters>({ countries: [], tags: [], search: '' });
|
||||
const [sortBy, setSortBy] = useState<string>('date_asc');
|
||||
const [scDataType, setScDataType] = useState<string>('threeDays');
|
||||
@@ -45,6 +50,16 @@ const KeywordsTable = (props: KeywordsTableProps) => {
|
||||
avgThirtyDays: 'Last Thirty Days Avg',
|
||||
};
|
||||
|
||||
useEffect(() => {
|
||||
setIsMobile(!!(window.matchMedia('only screen and (max-width: 760px)').matches));
|
||||
const resizeList = () => setSCListHeight(window.innerHeight - (isMobile ? 200 : 400));
|
||||
resizeList();
|
||||
window.addEventListener('resize', resizeList);
|
||||
return () => {
|
||||
window.removeEventListener('resize', resizeList);
|
||||
};
|
||||
}, [isMobile]);
|
||||
|
||||
const processedKeywords: {[key:string] : KeywordType[]} = useMemo(() => {
|
||||
const procKeywords = keywords.filter((x) => x.device === device);
|
||||
const filteredKeywords = filterKeywords(procKeywords, filterParams);
|
||||
@@ -65,6 +80,27 @@ const KeywordsTable = (props: KeywordsTableProps) => {
|
||||
}
|
||||
setSelectedKeywords(updatedSelectd);
|
||||
};
|
||||
const Row = ({ data, index, style }:ListChildComponentProps) => {
|
||||
const keyword = data[index];
|
||||
return (
|
||||
<Keyword
|
||||
key={keyword.ID}
|
||||
style={style}
|
||||
index={index}
|
||||
selected={selectedKeywords.includes(keyword.ID)}
|
||||
selectKeyword={selectKeyword}
|
||||
keywordData={keyword}
|
||||
refreshkeyword={() => refreshMutate({ ids: [keyword.ID] })}
|
||||
favoriteKeyword={favoriteMutate}
|
||||
manageTags={() => setShowTagManager(keyword.ID)}
|
||||
removeKeyword={() => { setSelectedKeywords([keyword.ID]); setShowRemoveModal(true); }}
|
||||
showKeywordDetails={() => setShowKeyDetails(keyword)}
|
||||
lastItem={index === (processedKeywords[device].length - 1)}
|
||||
showSCData={showSCData}
|
||||
scDataType={scDataType}
|
||||
/>
|
||||
);
|
||||
};
|
||||
|
||||
const selectedAllItems = selectedKeywords.length === processedKeywords[device].length;
|
||||
|
||||
@@ -79,7 +115,7 @@ const KeywordsTable = (props: KeywordsTableProps) => {
|
||||
className='block px-2 py-2 cursor-pointer hover:text-indigo-600'
|
||||
onClick={() => { refreshMutate({ ids: selectedKeywords }); setSelectedKeywords([]); }}
|
||||
>
|
||||
<span className=' bg-indigo-100 text-blue-700 px-1 rounded'><Icon type="reload" size={11} /></span> Refresh Keyword
|
||||
<span className=' bg-indigo-100 text-blue-700 px-1 rounded'><Icon type="reload" size={11} /></span> Refresh Keywords
|
||||
</a>
|
||||
</li>
|
||||
<li className='inline-block mr-4'>
|
||||
@@ -87,7 +123,14 @@ const KeywordsTable = (props: KeywordsTableProps) => {
|
||||
className='block px-2 py-2 cursor-pointer hover:text-indigo-600'
|
||||
onClick={() => setShowRemoveModal(true)}
|
||||
>
|
||||
<span className=' bg-red-100 text-red-600 px-1 rounded'><Icon type="trash" size={14} /></span> Remove Keyword</a>
|
||||
<span className=' bg-red-100 text-red-600 px-1 rounded'><Icon type="trash" size={14} /></span> Remove Keywords</a>
|
||||
</li>
|
||||
<li className='inline-block mr-4'>
|
||||
<a
|
||||
className='block px-2 py-2 cursor-pointer hover:text-indigo-600'
|
||||
onClick={() => setShowAddTags(true)}
|
||||
>
|
||||
<span className=' bg-green-100 text-green-500 px-1 rounded'><Icon type="tags" size={14} /></span> Tag Keywords</a>
|
||||
</li>
|
||||
</ul>
|
||||
</div>
|
||||
@@ -110,7 +153,7 @@ const KeywordsTable = (props: KeywordsTableProps) => {
|
||||
<div className=' lg:min-w-[800px]'>
|
||||
<div className={`domKeywords_head domKeywords_head--${sortBy} hidden lg:flex p-3 px-6 bg-[#FCFCFF]
|
||||
text-gray-600 justify-between items-center font-semibold border-y`}>
|
||||
<span className='domKeywords_head_keyword flex-1 basis-20 w-auto '>
|
||||
<span className='domKeywords_head_keyword flex-1 basis-[4rem] w-auto '>
|
||||
{processedKeywords[device].length > 0 && (
|
||||
<button
|
||||
className={`p-0 mr-2 leading-[0px] inline-block rounded-sm pt-0 px-[1px] pb-[3px] border border-slate-300
|
||||
@@ -122,8 +165,9 @@ const KeywordsTable = (props: KeywordsTableProps) => {
|
||||
)}
|
||||
Keyword
|
||||
</span>
|
||||
<span className='domKeywords_head_position flex-1 basis-40 grow-0 text-center'>Position</span>
|
||||
<span className='domKeywords_head_history flex-1'>History (7d)</span>
|
||||
<span className='domKeywords_head_position flex-1 basis-24 grow-0 text-center'>Position</span>
|
||||
<span className='domKeywords_head_best flex-1 basis-16 grow-0 text-center'>Best</span>
|
||||
<span className='domKeywords_head_history flex-1 basis-32 grow-0 '>History (7d)</span>
|
||||
<span className='domKeywords_head_url flex-1'>URL</span>
|
||||
<span className='domKeywords_head_updated flex-1'>Updated</span>
|
||||
{showSCData && (
|
||||
@@ -132,7 +176,7 @@ const KeywordsTable = (props: KeywordsTableProps) => {
|
||||
<div>
|
||||
<div
|
||||
className=' w-48 select-none cursor-pointer absolute bg-white rounded-full
|
||||
px-2 py-[2px] mt-[-22px] ml-3 border border-gray-200 z-50'
|
||||
px-2 py-[2px] mt-[-22px] ml-3 border border-gray-200 z-40'
|
||||
onClick={() => setShowScDataTypes(!showScDataTypes)}>
|
||||
<Icon type="google" size={13} /> {scDataObject[scDataType]}
|
||||
<Icon classes="ml-2" type={showScDataTypes ? 'caret-up' : 'caret-down'} size={10} />
|
||||
@@ -161,21 +205,19 @@ const KeywordsTable = (props: KeywordsTableProps) => {
|
||||
)}
|
||||
</div>
|
||||
<div className='domKeywords_keywords border-gray-200 min-h-[55vh] relative'>
|
||||
{processedKeywords[device] && processedKeywords[device].length > 0
|
||||
&& processedKeywords[device].map((keyword, index) => <Keyword
|
||||
key={keyword.ID}
|
||||
selected={selectedKeywords.includes(keyword.ID)}
|
||||
selectKeyword={selectKeyword}
|
||||
keywordData={keyword}
|
||||
refreshkeyword={() => refreshMutate({ ids: [keyword.ID] })}
|
||||
favoriteKeyword={favoriteMutate}
|
||||
manageTags={() => setShowTagManager(keyword.ID)}
|
||||
removeKeyword={() => { setSelectedKeywords([keyword.ID]); setShowRemoveModal(true); }}
|
||||
showKeywordDetails={() => setShowKeyDetails(keyword)}
|
||||
lastItem={index === (processedKeywords[device].length - 1)}
|
||||
showSCData={showSCData}
|
||||
scDataType={scDataType}
|
||||
/>)}
|
||||
{processedKeywords[device] && processedKeywords[device].length > 0 && (
|
||||
<List
|
||||
innerElementType="div"
|
||||
itemData={processedKeywords[device]}
|
||||
itemCount={processedKeywords[device].length}
|
||||
itemSize={isMobile ? 146 : 57}
|
||||
height={SCListHeight}
|
||||
width={'100%'}
|
||||
className={'styled-scrollbar'}
|
||||
>
|
||||
{Row}
|
||||
</List>
|
||||
)}
|
||||
{!isLoading && processedKeywords[device].length === 0 && (
|
||||
<p className=' p-9 pt-[10%] text-center text-gray-500'>No Keywords Added for this Device Type.</p>
|
||||
)}
|
||||
@@ -222,6 +264,12 @@ const KeywordsTable = (props: KeywordsTableProps) => {
|
||||
closeModal={() => setShowTagManager(null)}
|
||||
/>
|
||||
)}
|
||||
{showAddTags && (
|
||||
<AddTags
|
||||
keywords={keywords.filter((k) => selectedKeywords.includes(k.ID))}
|
||||
closeModal={() => setShowAddTags(false)}
|
||||
/>
|
||||
)}
|
||||
<Toaster position='bottom-center' containerClassName="react_toaster" />
|
||||
</div>
|
||||
);
|
||||
|
||||
@@ -26,7 +26,7 @@ const SCKeywordsTable = ({ domain, keywords = [], isLoading = true, isConsoleInt
const [device, setDevice] = useState<string>('desktop');
const [selectedKeywords, setSelectedKeywords] = useState<string[]>([]);
const [filterParams, setFilterParams] = useState<KeywordFilters>({ countries: [], tags: [], search: '' });
const [sortBy, setSortBy] = useState<string>('imp_asc');
const [sortBy, setSortBy] = useState<string>('imp_desc');
const [isMobile, setIsMobile] = useState<boolean>(false);
const [SCListHeight, setSCListHeight] = useState(500);
const { keywordsData } = useFetchKeywords(router);
@@ -214,7 +214,7 @@ const SCKeywordsTable = ({ domain, keywords = [], isLoading = true, isConsoleInt
)}
{!isConsoleIntegrated && (
<p className=' p-9 pt-[10%] text-center text-gray-500'>
Goolge Search has not been Integrated yet. Please follow <a className='text-indigo-600 underline' href='https://docs.serpbear.com/miscellaneous/integrate-google-search-console' target="_blank" rel='noreferrer'>These Steps</a> to integrate Google Search Data for this Domain.
Google Search has not been Integrated yet. Please follow <a className='text-indigo-600 underline' href='https://docs.serpbear.com/miscellaneous/integrate-google-search-console' target="_blank" rel='noreferrer'>These Steps</a> to integrate Google Search Data for this Domain.
</p>
)}
</div>
components/settings/NotificationSettings.tsx (new file, 112 lines)
@@ -0,0 +1,112 @@
|
||||
import React from 'react';
|
||||
import SelectField from '../common/SelectField';
|
||||
|
||||
type NotificationSettingsProps = {
|
||||
settings: SettingsType,
|
||||
settingsError: null | {
|
||||
type: string,
|
||||
msg: string
|
||||
},
|
||||
updateSettings: Function,
|
||||
}
|
||||
|
||||
const NotificationSettings = ({ settings, settingsError, updateSettings }:NotificationSettingsProps) => {
|
||||
const labelStyle = 'mb-2 font-semibold inline-block text-sm text-gray-700 capitalize';
|
||||
|
||||
return (
|
||||
<div>
|
||||
<div className='settings__content styled-scrollbar p-6 text-sm'>
|
||||
<div className="settings__section__input mb-5">
|
||||
<label className={labelStyle}>Notification Frequency</label>
|
||||
<SelectField
|
||||
multiple={false}
|
||||
selected={[settings.notification_interval]}
|
||||
options={[
|
||||
{ label: 'Daily', value: 'daily' },
|
||||
{ label: 'Weekly', value: 'weekly' },
|
||||
{ label: 'Monthly', value: 'monthly' },
|
||||
{ label: 'Never', value: 'never' },
|
||||
]}
|
||||
defaultLabel={'Notification Settings'}
|
||||
updateField={(updated:string[]) => updated[0] && updateSettings('notification_interval', updated[0])}
|
||||
rounded='rounded'
|
||||
maxHeight={48}
|
||||
minWidth={270}
|
||||
/>
|
||||
</div>
|
||||
{settings.notification_interval !== 'never' && (
|
||||
<>
|
||||
<div className="settings__section__input mb-5">
|
||||
<label className={labelStyle}>Notification Emails</label>
|
||||
<input
|
||||
className={`w-full p-2 border border-gray-200 rounded mb-3 focus:outline-none focus:border-blue-200
|
||||
${settingsError?.type === 'no_email' ? ' border-red-400 focus:border-red-400' : ''} `}
|
||||
type="text"
|
||||
value={settings?.notification_email}
|
||||
placeholder={'test@gmail.com'}
|
||||
onChange={(event) => updateSettings('notification_email', event.target.value)}
|
||||
/>
|
||||
</div>
|
||||
<div className="settings__section__input mb-5">
|
||||
<label className={labelStyle}>SMTP Server</label>
|
||||
<input
|
||||
className={`w-full p-2 border border-gray-200 rounded mb-3 focus:outline-none focus:border-blue-200
|
||||
${settingsError?.type === 'no_smtp_server' ? ' border-red-400 focus:border-red-400' : ''} `}
|
||||
type="text"
|
||||
value={settings?.smtp_server || ''}
|
||||
onChange={(event) => updateSettings('smtp_server', event.target.value)}
|
||||
/>
|
||||
</div>
|
||||
<div className="settings__section__input mb-5">
|
||||
<label className={labelStyle}>SMTP Port</label>
|
||||
<input
|
||||
className={`w-full p-2 border border-gray-200 rounded mb-3 focus:outline-none focus:border-blue-200
|
||||
${settingsError && settingsError.type === 'no_smtp_port' ? ' border-red-400 focus:border-red-400' : ''} `}
|
||||
type="text"
|
||||
value={settings?.smtp_port || ''}
|
||||
onChange={(event) => updateSettings('smtp_port', event.target.value)}
|
||||
/>
|
||||
</div>
|
||||
<div className="settings__section__input mb-5">
|
||||
<label className={labelStyle}>SMTP Username</label>
|
||||
<input
|
||||
className={'w-full p-2 border border-gray-200 rounded mb-3 focus:outline-none focus:border-blue-200'}
|
||||
type="text"
|
||||
value={settings?.smtp_username || ''}
|
||||
onChange={(event) => updateSettings('smtp_username', event.target.value)}
|
||||
/>
|
||||
</div>
|
||||
<div className="settings__section__input mb-5">
|
||||
<label className={labelStyle}>SMTP Password</label>
|
||||
<input
|
||||
className={'w-full p-2 border border-gray-200 rounded mb-3 focus:outline-none focus:border-blue-200'}
|
||||
type="text"
|
||||
value={settings?.smtp_password || ''}
|
||||
onChange={(event) => updateSettings('smtp_password', event.target.value)}
|
||||
/>
|
||||
</div>
|
||||
<div className="settings__section__input mb-5">
|
||||
<label className={labelStyle}>From Email Address</label>
|
||||
<input
|
||||
className={`w-full p-2 border border-gray-200 rounded mb-3 focus:outline-none focus:border-blue-200
|
||||
${settingsError?.type === 'no_smtp_from' ? ' border-red-400 focus:border-red-400' : ''} `}
|
||||
type="text"
|
||||
value={settings?.notification_email_from || ''}
|
||||
placeholder="no-reply@mydomain.com"
|
||||
onChange={(event) => updateSettings('notification_email_from', event.target.value)}
|
||||
/>
|
||||
</div>
|
||||
</>
|
||||
)}
|
||||
|
||||
</div>
|
||||
{settingsError?.msg && (
|
||||
<div className='absolute w-full bottom-16 text-center p-3 bg-red-100 text-red-600 text-sm font-semibold'>
|
||||
{settingsError.msg}
|
||||
</div>
|
||||
)}
|
||||
</div>
|
||||
);
|
||||
};
|
||||
|
||||
export default NotificationSettings;
|
||||
components/settings/ScraperSettings.tsx (new file, 149 lines)
@@ -0,0 +1,149 @@
|
||||
import React from 'react';
|
||||
import { useClearFailedQueue } from '../../services/settings';
|
||||
import Icon from '../common/Icon';
|
||||
import SelectField, { SelectionOption } from '../common/SelectField';
|
||||
|
||||
type ScraperSettingsProps = {
|
||||
settings: SettingsType,
|
||||
settingsError: null | {
|
||||
type: string,
|
||||
msg: string
|
||||
},
|
||||
updateSettings: Function,
|
||||
}
|
||||
|
||||
const ScraperSettings = ({ settings, settingsError, updateSettings }:ScraperSettingsProps) => {
|
||||
const { mutate: clearFailedMutate, isLoading: clearingQueue } = useClearFailedQueue(() => {});
|
||||
|
||||
const scrapingOptions: SelectionOption[] = [
|
||||
{ label: 'Daily', value: 'daily' },
|
||||
{ label: 'Every Other Day', value: 'other_day' },
|
||||
{ label: 'Weekly', value: 'weekly' },
|
||||
{ label: 'Monthly', value: 'monthly' },
|
||||
{ label: 'Never', value: 'never' },
|
||||
];
|
||||
const delayOptions: SelectionOption[] = [
|
||||
{ label: 'No Delay', value: '0' },
|
||||
{ label: '5 Seconds', value: '5000' },
|
||||
{ label: '10 Seconds', value: '10000' },
|
||||
{ label: '30 Seconds', value: '30000' },
|
||||
{ label: '1 Minutes', value: '60000' },
|
||||
{ label: '2 Minutes', value: '120000' },
|
||||
{ label: '5 Minutes', value: '300000' },
|
||||
{ label: '10 Minutes', value: '600000' },
|
||||
{ label: '15 Minutes', value: '900000' },
|
||||
{ label: '30 Minutes', value: '1800000' },
|
||||
];
|
||||
const allScrapers: SelectionOption[] = settings.available_scapers ? settings.available_scapers : [];
|
||||
const scraperOptions: SelectionOption[] = [{ label: 'None', value: 'none' }, ...allScrapers];
|
||||
const labelStyle = 'mb-2 font-semibold inline-block text-sm text-gray-700 capitalize';
|
||||
|
||||
return (
|
||||
<div>
|
||||
<div className='settings__content styled-scrollbar p-6 text-sm'>
|
||||
|
||||
<div className="settings__section__select mb-5">
|
||||
<label className={labelStyle}>Scraping Method</label>
|
||||
<SelectField
|
||||
options={scraperOptions}
|
||||
selected={[settings.scraper_type || 'none']}
|
||||
defaultLabel="Select Scraper"
|
||||
updateField={(updatedTime:[string]) => updateSettings('scraper_type', updatedTime[0])}
|
||||
multiple={false}
|
||||
rounded={'rounded'}
|
||||
minWidth={270}
|
||||
/>
|
||||
</div>
|
||||
{['scrapingant', 'scrapingrobot', 'serply', 'serpapi', 'spaceSerp'].includes(settings.scraper_type) && (
|
||||
<div className="settings__section__input mr-3">
|
||||
<label className={labelStyle}>Scraper API Key or Token</label>
|
||||
<input
|
||||
className={`w-full p-2 border border-gray-200 rounded mt-2 mb-3 focus:outline-none focus:border-blue-200
|
||||
${settingsError?.type === 'no_api_key' ? ' border-red-400 focus:border-red-400' : ''} `}
|
||||
type="text"
|
||||
value={settings?.scaping_api || ''}
|
||||
placeholder={'API Key/Token'}
|
||||
onChange={(event) => updateSettings('scaping_api', event.target.value)}
|
||||
/>
|
||||
</div>
|
||||
)}
|
||||
{settings.scraper_type === 'proxy' && (
|
||||
<div className="settings__section__input mb-5">
|
||||
<label className={labelStyle}>Proxy List</label>
|
||||
<textarea
|
||||
className={`w-full p-2 border border-gray-200 rounded mb-3 text-xs
|
||||
focus:outline-none min-h-[160px] focus:border-blue-200
|
||||
${settingsError?.type === 'no_email' ? ' border-red-400 focus:border-red-400' : ''} `}
|
||||
value={settings?.proxy}
|
||||
placeholder={'http://122.123.22.45:5049\nhttps://user:password@122.123.22.45:5049'}
|
||||
onChange={(event) => updateSettings('proxy', event.target.value)}
|
||||
/>
|
||||
</div>
|
||||
)}
|
||||
{settings.scraper_type !== 'none' && (
|
||||
<div className="settings__section__input mb-5">
|
||||
<label className={labelStyle}>Scraping Frequency</label>
|
||||
<SelectField
|
||||
multiple={false}
|
||||
selected={[settings?.scrape_interval || 'daily']}
|
||||
options={scrapingOptions}
|
||||
defaultLabel={'Notification Settings'}
|
||||
updateField={(updated:string[]) => updated[0] && updateSettings('scrape_interval', updated[0])}
|
||||
rounded='rounded'
|
||||
maxHeight={48}
|
||||
minWidth={270}
|
||||
/>
|
||||
<small className=' text-gray-500 pt-2 block'>This option requires Server/Docker Instance Restart to take Effect.</small>
|
||||
</div>
|
||||
)}
|
||||
<div className="settings__section__input mb-5">
|
||||
<label className={labelStyle}>Delay Between Each keyword Scrape</label>
|
||||
<SelectField
|
||||
multiple={false}
|
||||
selected={[settings?.scrape_delay || '0']}
|
||||
options={delayOptions}
|
||||
defaultLabel={'Delay Settings'}
|
||||
updateField={(updated:string[]) => updated[0] && updateSettings('scrape_delay', updated[0])}
|
||||
rounded='rounded'
|
||||
maxHeight={48}
|
||||
minWidth={270}
|
||||
/>
|
||||
<small className=' text-gray-500 pt-2 block'>This option requires Server/Docker Instance Restart to take Effect.</small>
|
||||
</div>
|
||||
<div className="settings__section__input mb-5">
|
||||
<label className="relative inline-flex items-center cursor-pointer w-full justify-between">
|
||||
<span className="text-sm font-medium text-gray-900 dark:text-gray-300 w-56">Auto Retry Failed Keyword Scrape</span>
|
||||
<input
|
||||
type="checkbox"
|
||||
value={settings?.scrape_retry ? 'true' : '' }
|
||||
checked={settings.scrape_retry || false}
|
||||
className="sr-only peer"
|
||||
onChange={() => updateSettings('scrape_retry', !settings.scrape_retry)}
|
||||
/>
|
||||
<div className="relative rounded-3xl w-9 h-5 bg-gray-200 peer-focus:outline-none peer-focus:ring-4
|
||||
peer-focus:ring-blue-300 dark:peer-focus:ring-blue-800rounded-full peer dark:bg-gray-700
|
||||
peer-checked:after:translate-x-full peer-checked:after:border-white after:content-['']
|
||||
after:absolute after:top-[2px] after:left-[2px] after:bg-white after:border-gray-300
|
||||
after:border after:rounded-full after:h-4 after:w-4
|
||||
after:transition-all dark:border-gray-600 peer-checked:bg-blue-600"></div>
|
||||
|
||||
</label>
|
||||
</div>
|
||||
{settings?.scrape_retry && (settings.failed_queue?.length || 0) > 0 && (
|
||||
<div className="settings__section__input mb-5">
|
||||
<label className={labelStyle}>Clear Failed Retry Queue</label>
|
||||
<button
|
||||
onClick={() => clearFailedMutate()}
|
||||
className=' py-3 px-5 w-full rounded cursor-pointer bg-gray-100 text-gray-800
|
||||
font-semibold text-sm hover:bg-gray-200'>
|
||||
{clearingQueue && <Icon type="loading" size={14} />} Clear Failed Queue
|
||||
({settings.failed_queue?.length || 0} Keywords)
|
||||
</button>
|
||||
</div>
|
||||
)}
|
||||
</div>
|
||||
</div>
|
||||
);
|
||||
};
|
||||
|
||||
export default ScraperSettings;
|
||||
@@ -1,8 +1,9 @@
|
||||
import React, { useEffect, useState } from 'react';
|
||||
// import { useQuery } from 'react-query';
|
||||
import { Toaster } from 'react-hot-toast';
|
||||
import useUpdateSettings, { useFetchSettings } from '../../services/settings';
|
||||
import Icon from '../common/Icon';
|
||||
import SelectField, { SelectionOption } from '../common/SelectField';
|
||||
import NotificationSettings from './NotificationSettings';
|
||||
import ScraperSettings from './ScraperSettings';
|
||||
|
||||
type SettingsProps = {
|
||||
closeSettings: Function,
|
||||
@@ -16,6 +17,8 @@ type SettingsError = {
|
||||
|
||||
const defaultSettings = {
|
||||
scraper_type: 'none',
|
||||
scrape_delay: 'none',
|
||||
scrape_retry: false,
|
||||
notification_interval: 'daily',
|
||||
notification_email: '',
|
||||
smtp_server: '',
|
||||
@@ -57,7 +60,7 @@ const Settings = ({ closeSettings }:SettingsProps) => {
|
||||
if (e.target === e.currentTarget) { closeSettings(); }
|
||||
};
|
||||
|
||||
const updateSettings = (key: string, value:string|number) => {
|
||||
const updateSettings = (key: string, value:string|number|boolean) => {
|
||||
setSettings({ ...settings, [key]: value });
|
||||
};
|
||||
|
||||
@@ -89,17 +92,6 @@ const Settings = ({ closeSettings }:SettingsProps) => {
|
||||
}
|
||||
};
|
||||
|
||||
const labelStyle = 'mb-2 font-semibold inline-block text-sm text-gray-700 capitalize';
|
||||
|
||||
const notificationOptions: SelectionOption[] = [
|
||||
{ label: 'Daily', value: 'daily' },
|
||||
{ label: 'Weekly', value: 'weekly' },
|
||||
{ label: 'Monthly', value: 'monthly' },
|
||||
{ label: 'Never', value: 'never' },
|
||||
];
|
||||
const allScrapers: SelectionOption[] = settings.available_scapers ? settings.available_scapers : [];
|
||||
const scraperOptions: SelectionOption[] = [{ label: 'None', value: 'none' }, ...allScrapers];
|
||||
|
||||
const tabStyle = 'inline-block px-4 py-1 rounded-full mr-3 cursor-pointer text-sm';
|
||||
return (
|
||||
<div className="settings fixed w-full h-screen top-0 left-0 z-50" onClick={closeOnBGClick}>
|
||||
@@ -127,141 +119,13 @@ const Settings = ({ closeSettings }:SettingsProps) => {
|
||||
</li>
|
||||
</ul>
|
||||
</div>
|
||||
{currentTab === 'scraper' && (
|
||||
<div>
|
||||
<div className='settings__content styled-scrollbar p-6 text-sm'>
|
||||
|
||||
<div className="settings__section__select mb-5">
|
||||
<label className={labelStyle}>Scraping Method</label>
|
||||
<SelectField
|
||||
options={scraperOptions}
|
||||
selected={[settings.scraper_type || 'none']}
|
||||
defaultLabel="Select Scraper"
|
||||
updateField={(updatedTime:[string]) => updateSettings('scraper_type', updatedTime[0])}
|
||||
multiple={false}
|
||||
rounded={'rounded'}
|
||||
minWidth={270}
|
||||
/>
|
||||
</div>
|
||||
{['scrapingant', 'scrapingrobot', 'serply', 'serpapi'].includes(settings.scraper_type) && (
|
||||
<div className="settings__section__input mr-3">
|
||||
<label className={labelStyle}>Scraper API Key or Token</label>
|
||||
<input
|
||||
className={`w-full p-2 border border-gray-200 rounded mt-2 mb-3 focus:outline-none focus:border-blue-200
|
||||
${settingsError && settingsError.type === 'no_api_key' ? ' border-red-400 focus:border-red-400' : ''} `}
|
||||
type="text"
|
||||
value={settings?.scaping_api || ''}
|
||||
placeholder={'API Key/Token'}
|
||||
onChange={(event) => updateSettings('scaping_api', event.target.value)}
|
||||
/>
|
||||
</div>
|
||||
)}
|
||||
{settings.scraper_type === 'proxy' && (
|
||||
<div className="settings__section__input mb-5">
|
||||
<label className={labelStyle}>Proxy List</label>
|
||||
<textarea
|
||||
className={`w-full p-2 border border-gray-200 rounded mb-3 text-xs
|
||||
focus:outline-none min-h-[160px] focus:border-blue-200
|
||||
${settingsError && settingsError.type === 'no_email' ? ' border-red-400 focus:border-red-400' : ''} `}
|
||||
value={settings?.proxy}
|
||||
placeholder={'http://122.123.22.45:5049\nhttps://user:password@122.123.22.45:5049'}
|
||||
onChange={(event) => updateSettings('proxy', event.target.value)}
|
||||
/>
|
</div>
)}
</div>
</div>
{currentTab === 'scraper' && settings && (
<ScraperSettings settings={settings} updateSettings={updateSettings} settingsError={settingsError} />
)}

{currentTab === 'notification' && (
<div>
<div className='settings__content styled-scrollbar p-6 text-sm'>
<div className="settings__section__input mb-5">
<label className={labelStyle}>Notification Frequency</label>
<SelectField
multiple={false}
selected={[settings.notification_interval]}
options={notificationOptions}
defaultLabel={'Notification Settings'}
updateField={(updated:string[]) => updated[0] && updateSettings('notification_interval', updated[0])}
rounded='rounded'
maxHeight={48}
minWidth={270}
/>
</div>
{settings.notification_interval !== 'never' && (
<>
<div className="settings__section__input mb-5">
<label className={labelStyle}>Notification Emails</label>
<input
className={`w-full p-2 border border-gray-200 rounded mb-3 focus:outline-none focus:border-blue-200
${settingsError && settingsError.type === 'no_email' ? ' border-red-400 focus:border-red-400' : ''} `}
type="text"
value={settings?.notification_email}
placeholder={'test@gmail.com'}
onChange={(event) => updateSettings('notification_email', event.target.value)}
/>
</div>
<div className="settings__section__input mb-5">
<label className={labelStyle}>SMTP Server</label>
<input
className={`w-full p-2 border border-gray-200 rounded mb-3 focus:outline-none focus:border-blue-200
${settingsError && settingsError.type === 'no_smtp_server' ? ' border-red-400 focus:border-red-400' : ''} `}
type="text"
value={settings?.smtp_server || ''}
onChange={(event) => updateSettings('smtp_server', event.target.value)}
/>
</div>
<div className="settings__section__input mb-5">
<label className={labelStyle}>SMTP Port</label>
<input
className={`w-full p-2 border border-gray-200 rounded mb-3 focus:outline-none focus:border-blue-200
${settingsError && settingsError.type === 'no_smtp_port' ? ' border-red-400 focus:border-red-400' : ''} `}
type="text"
value={settings?.smtp_port || ''}
onChange={(event) => updateSettings('smtp_port', event.target.value)}
/>
</div>
<div className="settings__section__input mb-5">
<label className={labelStyle}>SMTP Username</label>
<input
className={'w-full p-2 border border-gray-200 rounded mb-3 focus:outline-none focus:border-blue-200'}
type="text"
value={settings?.smtp_username || ''}
onChange={(event) => updateSettings('smtp_username', event.target.value)}
/>
</div>
<div className="settings__section__input mb-5">
<label className={labelStyle}>SMTP Password</label>
<input
className={'w-full p-2 border border-gray-200 rounded mb-3 focus:outline-none focus:border-blue-200'}
type="text"
value={settings?.smtp_password || ''}
onChange={(event) => updateSettings('smtp_password', event.target.value)}
/>
</div>
<div className="settings__section__input mb-5">
<label className={labelStyle}>From Email Address</label>
<input
className={`w-full p-2 border border-gray-200 rounded mb-3 focus:outline-none focus:border-blue-200
${settingsError && settingsError.type === 'no_smtp_from' ? ' border-red-400 focus:border-red-400' : ''} `}
type="text"
value={settings?.notification_email_from || ''}
placeholder="no-reply@mydomain.com"
onChange={(event) => updateSettings('notification_email_from', event.target.value)}
/>
</div>
</>
)}

</div>
{settingsError && (
<div className='absolute w-full bottom-16 text-center p-3 bg-red-100 text-red-600 text-sm font-semibold'>
{settingsError.msg}
</div>
)}
</div>
)}
{currentTab === 'notification' && settings && (
<NotificationSettings settings={settings} updateSettings={updateSettings} settingsError={settingsError} />
)}
<div className=' border-t-[1px] border-gray-200 p-2 px-3'>
<button
onClick={() => performUpdate()}
@@ -270,6 +134,7 @@ const Settings = ({ closeSettings }:SettingsProps) => {
</button>
</div>
</div>
<Toaster position='bottom-center' containerClassName="react_toaster" />
</div>
);
};

cron.js (124 changed lines)
@@ -1,7 +1,7 @@
const Cryptr = require('cryptr');
const { promises } = require('fs');
const { readFile } = require('fs');
const cron = require('node-cron');
const Cron = require('croner');
require('dotenv').config({ path: './.env.local' });

const getAppSettings = async () => {
@@ -49,11 +49,14 @@ const generateCronTime = (interval) => {
if (interval === 'daily') {
cronTime = '0 0 0 * * *';
}
if (interval === 'other_day') {
cronTime = '0 0 2-30/2 * *';
}
if (interval === 'daily_morning') {
cronTime = '0 0 3 * * *';
}
if (interval === 'weekly') {
cronTime = '0 0 0 */7 * *';
cronTime = '0 0 * * 1';
}
if (interval === 'monthly') {
cronTime = '0 0 1 * *'; // Run every first day of the month at 00:00(midnight)
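For quick reference, the interval strings handled above map to standard cron schedules as follows. This is a comment-only note, not part of the diff; the six-field patterns carry a leading seconds field, the five-field ones do not.

// 'daily'         -> '0 0 0 * * *'    : every day at midnight
// 'other_day'     -> '0 0 2-30/2 * *' : every even-numbered day of the month at 00:00
// 'daily_morning' -> '0 0 3 * * *'    : every day at 03:00
// 'weekly'        -> '0 0 * * 1'      : every Monday at 00:00 (the replaced '0 0 0 */7 * *' fired on days 1, 8, 15, 22 and 29 of the month)
// 'monthly'       -> '0 0 1 * *'      : the 1st of every month at 00:00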
@@ -63,66 +66,30 @@
};

const runAppCronJobs = () => {
// RUN SERP Scraping CRON (EveryDay at Midnight) 0 0 0 * *
const scrapeCronTime = generateCronTime('daily');
cron.schedule(scrapeCronTime, () => {
// console.log('### Running Keyword Position Cron Job!');
const fetchOpts = { method: 'POST', headers: { Authorization: `Bearer ${process.env.APIKEY}` } };
fetch(`${process.env.NEXT_PUBLIC_APP_URL}/api/cron`, fetchOpts)
.then((res) => res.json())
.then((data) => console.log(data))
.catch((err) => {
console.log('ERROR Making Daily Scraper Cron Request..');
console.log(err);
});
}, { scheduled: true });

// Run Failed scraping CRON (Every Hour)
const failedCronTime = generateCronTime('hourly');
cron.schedule(failedCronTime, () => {
// console.log('### Retrying Failed Scrapes...');

readFile(`${process.cwd()}/data/failed_queue.json`, { encoding: 'utf-8' }, (err, data) => {
if (data) {
const keywordsToRetry = data ? JSON.parse(data) : [];
if (keywordsToRetry.length > 0) {
const fetchOpts = { method: 'POST', headers: { Authorization: `Bearer ${process.env.APIKEY}` } };
fetch(`${process.env.NEXT_PUBLIC_APP_URL}/api/refresh?id=${keywordsToRetry.join(',')}`, fetchOpts)
.then((res) => res.json())
.then((refreshedData) => console.log(refreshedData))
.catch((fetchErr) => {
console.log('ERROR Making failed_queue Cron Request..');
console.log(fetchErr);
});
}
} else {
console.log('ERROR Reading Failed Scrapes Queue File..', err);
}
});
}, { scheduled: true });

// Run Google Search Console Scraper Daily
if (process.env.SEARCH_CONSOLE_PRIVATE_KEY && process.env.SEARCH_CONSOLE_CLIENT_EMAIL) {
const searchConsoleCRONTime = generateCronTime('daily');
cron.schedule(searchConsoleCRONTime, () => {
const fetchOpts = { method: 'POST', headers: { Authorization: `Bearer ${process.env.APIKEY}` } };
fetch(`${process.env.NEXT_PUBLIC_APP_URL}/api/searchconsole`, fetchOpts)
.then((res) => res.json())
.then((data) => console.log(data))
.catch((err) => {
console.log('ERROR Making Google Search Console Scraper Cron Request..');
console.log(err);
});
}, { scheduled: true });
}

// RUN Email Notification CRON
getAppSettings().then((settings) => {
// RUN SERP Scraping CRON (EveryDay at Midnight) 0 0 0 * *
const scrape_interval = settings.scrape_interval || 'daily';
if (scrape_interval !== 'never') {
const scrapeCronTime = generateCronTime(scrape_interval);
Cron(scrapeCronTime, () => {
// console.log('### Running Keyword Position Cron Job!');
const fetchOpts = { method: 'POST', headers: { Authorization: `Bearer ${process.env.APIKEY}` } };
fetch(`${process.env.NEXT_PUBLIC_APP_URL}/api/cron`, fetchOpts)
.then((res) => res.json())
// .then((data) =>{ console.log(data)})
.catch((err) => {
console.log('ERROR Making SERP Scraper Cron Request..');
console.log(err);
});
}, { scheduled: true });
}

// RUN Email Notification CRON
const notif_interval = (!settings.notification_interval || settings.notification_interval === 'never') ? false : settings.notification_interval;
if (notif_interval) {
const cronTime = generateCronTime(notif_interval === 'daily' ? 'daily_morning' : notif_interval);
if (cronTime) {
cron.schedule(cronTime, () => {
Cron(cronTime, () => {
// console.log('### Sending Notification Email...');
const fetchOpts = { method: 'POST', headers: { Authorization: `Bearer ${process.env.APIKEY}` } };
fetch(`${process.env.NEXT_PUBLIC_APP_URL}/api/notify`, fetchOpts)
@@ -136,6 +103,49 @@ const runAppCronJobs = () => {
}
}
});

// Run Failed scraping CRON (Every Hour)
const failedCronTime = generateCronTime('hourly');
Cron(failedCronTime, () => {
// console.log('### Retrying Failed Scrapes...');

readFile(`${process.cwd()}/data/failed_queue.json`, { encoding: 'utf-8' }, (err, data) => {
if (data) {
try {
const keywordsToRetry = data ? JSON.parse(data) : [];
if (keywordsToRetry.length > 0) {
const fetchOpts = { method: 'POST', headers: { Authorization: `Bearer ${process.env.APIKEY}` } };
fetch(`${process.env.NEXT_PUBLIC_APP_URL}/api/refresh?id=${keywordsToRetry.join(',')}`, fetchOpts)
.then((res) => res.json())
.then((refreshedData) => console.log(refreshedData))
.catch((fetchErr) => {
console.log('ERROR Making failed_queue Cron Request..');
console.log(fetchErr);
});
}
} catch (error) {
console.log('ERROR Reading Failed Scrapes Queue File..', error);
}
} else {
console.log('ERROR Reading Failed Scrapes Queue File..', err);
}
});
}, { scheduled: true });

// Run Google Search Console Scraper Daily
if (process.env.SEARCH_CONSOLE_PRIVATE_KEY && process.env.SEARCH_CONSOLE_CLIENT_EMAIL) {
const searchConsoleCRONTime = generateCronTime('daily');
Cron(searchConsoleCRONTime, () => {
const fetchOpts = { method: 'POST', headers: { Authorization: `Bearer ${process.env.APIKEY}` } };
fetch(`${process.env.NEXT_PUBLIC_APP_URL}/api/searchconsole`, fetchOpts)
.then((res) => res.json())
.then((data) => console.log(data))
.catch((err) => {
console.log('ERROR Making Google Search Console Scraper Cron Request..');
console.log(err);
});
}, { scheduled: true });
}
};

runAppCronJobs();

@@ -6,7 +6,7 @@ import Keyword from './models/keyword';
const connection = new Sequelize({
dialect: 'sqlite',
host: '0.0.0.0',
username: process.env.USER,
username: process.env.USER_NAME ? process.env.USER_NAME : process.env.USER,
password: process.env.PASSWORD,
database: 'sequelize',
dialectModule: sqlite3,

@@ -6,6 +6,6 @@ import './styles/globals.css';

// Used for __tests__/testing-library.js
// Learn more: https://github.com/testing-library/jest-dom
import '@testing-library/jest-dom/extend-expect';
import '@testing-library/jest-dom';

global.ResizeObserver = require('resize-observer-polyfill');

@@ -1,8 +1,13 @@
/** @type {import('next').NextConfig} */
const { version } = require('./package.json');

const nextConfig = {
reactStrictMode: true,
swcMinify: false,
output: 'standalone',
publicRuntimeConfig: {
version,
},
};

module.exports = nextConfig;

package-lock.json (generated, 15366 changed lines): diff suppressed because it is too large.

package.json (22 changed lines)
@@ -1,6 +1,6 @@
{
"name": "serpbear",
"version": "0.2.2",
"version": "0.3.1",
"private": true,
"scripts": {
"dev": "next dev",
@@ -17,7 +17,6 @@
},
"dependencies": {
"@googleapis/searchconsole": "^1.0.0",
"@testing-library/react": "^13.4.0",
"@types/react-transition-group": "^4.4.5",
"axios": "^1.1.3",
"axios-retry": "^3.3.1",
@@ -25,15 +24,15 @@
"cheerio": "^1.0.0-rc.12",
"concurrently": "^7.6.0",
"cookies": "^0.8.0",
"croner": "^5.3.5",
"cryptr": "^6.0.3",
"dayjs": "^1.11.5",
"dotenv": "^16.0.3",
"https-proxy-agent": "^5.0.1",
"isomorphic-fetch": "^3.0.0",
"jsonwebtoken": "^8.5.1",
"jsonwebtoken": "^9.0.2",
"msw": "^0.49.0",
"next": "12.3.1",
"node-cron": "^3.0.2",
"next": "^12.3.4",
"nodemailer": "^6.8.0",
"react": "18.2.0",
"react-chartjs-2": "^4.3.1",
@@ -44,12 +43,13 @@
"react-transition-group": "^4.4.5",
"react-window": "^1.8.8",
"reflect-metadata": "^0.1.13",
"sequelize": "^6.25.2",
"sequelize": "^6.34.0",
"sequelize-typescript": "^2.1.5",
"sqlite3": "^5.1.2"
"sqlite3": "^5.1.6"
},
"devDependencies": {
"@testing-library/jest-dom": "^5.16.5",
"@testing-library/jest-dom": "^6.1.4",
"@testing-library/react": "^14.0.0",
"@types/cookies": "^0.7.7",
"@types/cryptr": "^4.0.1",
"@types/isomorphic-fetch": "^0.0.36",
@@ -64,9 +64,9 @@
"eslint": "8.25.0",
"eslint-config-airbnb-base": "^15.0.0",
"eslint-config-next": "12.3.1",
"jest": "^29.3.1",
"jest-environment-jsdom": "^29.3.1",
"postcss": "^8.4.18",
"jest": "^29.7.0",
"jest-environment-jsdom": "^29.7.0",
"postcss": "^8.4.31",
"prettier": "^2.7.1",
"resize-observer-polyfill": "^1.5.1",
"sass": "^1.55.0",

pages/api/clearfailed.ts (new file, 29 lines)
@@ -0,0 +1,29 @@
import { writeFile } from 'fs/promises';
import type { NextApiRequest, NextApiResponse } from 'next';
import verifyUser from '../../utils/verifyUser';

type SettingsGetResponse = {
cleared?: boolean,
error?: string,
}

export default async function handler(req: NextApiRequest, res: NextApiResponse) {
const authorized = verifyUser(req, res);
if (authorized !== 'authorized') {
return res.status(401).json({ error: authorized });
}
if (req.method === 'PUT') {
return clearFailedQueue(req, res);
}
return res.status(502).json({ error: 'Unrecognized Route.' });
}

const clearFailedQueue = async (req: NextApiRequest, res: NextApiResponse<SettingsGetResponse>) => {
try {
await writeFile(`${process.cwd()}/data/failed_queue.json`, JSON.stringify([]), { encoding: 'utf-8' });
return res.status(200).json({ cleared: true });
} catch (error) {
console.log('[ERROR] Cleraring Failed Queue File.', error);
return res.status(200).json({ error: 'Error Cleraring Failed Queue!' });
}
};

@@ -3,7 +3,7 @@ import db from '../../database/database';
import Keyword from '../../database/models/keyword';
import { getAppSettings } from './settings';
import verifyUser from '../../utils/verifyUser';
import { refreshAndUpdateKeywords } from './refresh';
import refreshAndUpdateKeywords from '../../utils/refresh';

type CRONRefreshRes = {
started: boolean

@@ -37,7 +37,10 @@ const getDomainSearchConsoleInsight = async (req: NextApiRequest, res: NextApiRe

// First try and read the Local SC Domain Data file.
const localSCData = await readLocalSCData(domainname);
if (localSCData && localSCData.stats && localSCData.stats.length) {
const oldFetchedDate = localSCData.lastFetched;
const fetchTimeDiff = new Date().getTime() - (oldFetchedDate ? new Date(oldFetchedDate as string).getTime() : 0);

if (localSCData && localSCData.stats && localSCData.stats.length && fetchTimeDiff <= 86400000) {
const response = getInsightFromSCData(localSCData);
return res.status(200).json({ data: response });
}

@@ -2,11 +2,11 @@ import type { NextApiRequest, NextApiResponse } from 'next';
import { Op } from 'sequelize';
import db from '../../database/database';
import Keyword from '../../database/models/keyword';
import { refreshAndUpdateKeywords } from './refresh';
import { getAppSettings } from './settings';
import verifyUser from '../../utils/verifyUser';
import parseKeywords from '../../utils/parseKeywords';
import { integrateKeywordSCData, readLocalSCData } from '../../utils/searchConsole';
import refreshAndUpdateKeywords from '../../utils/refresh';

type KeywordsGetResponse = {
keywords?: KeywordType[],
@@ -153,10 +153,13 @@ const updateKeywords = async (req: NextApiRequest, res: NextApiResponse<Keywords
}
if (tags) {
const tagsKeywordIDs = Object.keys(tags);
const multipleKeywords = tagsKeywordIDs.length > 1;
for (const keywordID of tagsKeywordIDs) {
const response = await Keyword.findOne({ where: { ID: keywordID } });
if (response) {
await response.update({ tags: JSON.stringify(tags[keywordID]) });
const selectedKeyword = await Keyword.findOne({ where: { ID: keywordID } });
const currentTags = selectedKeyword && selectedKeyword.tags ? JSON.parse(selectedKeyword.tags) : [];
const mergedTags = Array.from(new Set([...currentTags, ...tags[keywordID]]));
if (selectedKeyword) {
await selectedKeyword.update({ tags: JSON.stringify(multipleKeywords ? mergedTags : tags[keywordID]) });
}
}
return res.status(200).json({ keywords });
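To make the new tag behaviour above concrete, here is a small comment-only illustration (values are hypothetical, not from the diff):

// A keyword currently stored with tags ['brand'] receives tags[keywordID] = ['priority']:
// - payload targets several keywords (multipleKeywords === true): tags are merged   -> ['brand', 'priority']
// - payload targets a single keyword (multipleKeywords === false): tags are replaced -> ['priority']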
@@ -18,10 +18,11 @@ const loginUser = async (req: NextApiRequest, res: NextApiResponse<loginResponse
if (!req.body.username || !req.body.password) {
return res.status(401).json({ error: 'Username Password Missing' });
}
const userName = process.env.USER_NAME ? process.env.USER_NAME : process.env.USER;

if (req.body.username === process.env.USER
if (req.body.username === userName
&& req.body.password === process.env.PASSWORD && process.env.SECRET) {
const token = jwt.sign({ user: process.env.USER }, process.env.SECRET);
const token = jwt.sign({ user: userName }, process.env.SECRET);
const cookies = new Cookies(req, res);
const expireDate = new Date();
const sessDuration = process.env.SESSION_DURATION;
@@ -30,7 +31,7 @@ const loginUser = async (req: NextApiRequest, res: NextApiResponse<loginResponse
return res.status(200).json({ success: true, error: null });
}

const error = req.body.username !== process.env.USER ? 'Incorrect Username' : 'Incorrect Password';
const error = req.body.username !== userName ? 'Incorrect Username' : 'Incorrect Password';

return res.status(401).json({ success: false, error });
};

@@ -2,11 +2,10 @@ import type { NextApiRequest, NextApiResponse } from 'next';
import { Op } from 'sequelize';
import db from '../../database/database';
import Keyword from '../../database/models/keyword';
import refreshKeywords from '../../utils/refresh';
import refreshAndUpdateKeywords from '../../utils/refresh';
import { getAppSettings } from './settings';
import verifyUser from '../../utils/verifyUser';
import parseKeywords from '../../utils/parseKeywords';
import { removeFromRetryQueue, retryScrape } from '../../utils/scraper';

type KeywordsRefreshRes = {
keywords?: KeywordType[]
@@ -63,57 +62,3 @@ const refresTheKeywords = async (req: NextApiRequest, res: NextApiResponse<Keywo
return res.status(400).json({ error: 'Error refreshing keywords!' });
}
};

export const refreshAndUpdateKeywords = async (initKeywords:Keyword[], settings:SettingsType) => {
const formattedKeywords = initKeywords.map((el) => el.get({ plain: true }));
const refreshed: any = await refreshKeywords(formattedKeywords, settings);
// const fetchKeywords = await refreshKeywords(initialKeywords.map( k=> k.keyword ));
const updatedKeywords: KeywordType[] = [];

for (const keywordRaw of initKeywords) {
const keywordPrased = parseKeywords([keywordRaw.get({ plain: true })]);
const keyword = keywordPrased[0];
const udpatedkeyword = refreshed.find((item:any) => item.ID && item.ID === keyword.ID);

if (udpatedkeyword && keyword) {
const newPos = udpatedkeyword.position;
const newPosition = newPos !== false ? newPos : keyword.position;
const { history } = keyword;
const theDate = new Date();
history[`${theDate.getFullYear()}-${theDate.getMonth() + 1}-${theDate.getDate()}`] = newPosition;

const updatedVal = {
position: newPosition,
updating: false,
url: udpatedkeyword.url,
lastResult: udpatedkeyword.result,
history,
lastUpdated: udpatedkeyword.error ? keyword.lastUpdated : theDate.toJSON(),
lastUpdateError: udpatedkeyword.error
? JSON.stringify({ date: theDate.toJSON(), error: `${udpatedkeyword.error}`, scraper: settings.scraper_type })
: 'false',
};
updatedKeywords.push({ ...keyword, ...{ ...updatedVal, lastUpdateError: JSON.parse(updatedVal.lastUpdateError) } });

// If failed, Add to Retry Queue Cron
if (udpatedkeyword.error) {
await retryScrape(keyword.ID);
} else {
await removeFromRetryQueue(keyword.ID);
}

// Update the Keyword Position in Database
try {
await keywordRaw.update({
...updatedVal,
lastResult: Array.isArray(udpatedkeyword.result) ? JSON.stringify(udpatedkeyword.result) : udpatedkeyword.result,
history: JSON.stringify(history),
});
console.log('[SUCCESS] Updating the Keyword: ', keyword.keyword);
} catch (error) {
console.log('[ERROR] Updating SERP for Keyword', keyword.keyword, error);
}
}
}
return updatedKeywords;
};

@@ -1,6 +1,7 @@
import { writeFile, readFile } from 'fs/promises';
import type { NextApiRequest, NextApiResponse } from 'next';
import Cryptr from 'cryptr';
import { writeFile, readFile } from 'fs/promises';
import getConfig from 'next/config';
import verifyUser from '../../utils/verifyUser';
import allScrapers from '../../scrapers/index';

@@ -26,7 +27,9 @@ export default async function handler(req: NextApiRequest, res: NextApiResponse)
const getSettings = async (req: NextApiRequest, res: NextApiResponse<SettingsGetResponse>) => {
const settings = await getAppSettings();
if (settings) {
return res.status(200).json({ settings });
const { publicRuntimeConfig } = getConfig();
const version = publicRuntimeConfig?.version;
return res.status(200).json({ settings: { ...settings, version } });
}
return res.status(400).json({ error: 'Error Loading Settings!' });
};
@@ -54,6 +57,8 @@ const updateSettings = async (req: NextApiRequest, res: NextApiResponse<Settings
export const getAppSettings = async () : Promise<SettingsType> => {
try {
const settingsRaw = await readFile(`${process.cwd()}/data/settings.json`, { encoding: 'utf-8' });
const failedQueueRaw = await readFile(`${process.cwd()}/data/failed_queue.json`, { encoding: 'utf-8' });
const failedQueue: string[] = failedQueueRaw ? JSON.parse(failedQueueRaw) : [];
const settings: SettingsType = settingsRaw ? JSON.parse(settingsRaw) : {};
let decryptedSettings = settings;

@@ -67,6 +72,7 @@ export const getAppSettings = async () : Promise<SettingsType> => {
smtp_password,
search_console_integrated: !!(process.env.SEARCH_CONSOLE_PRIVATE_KEY && process.env.SEARCH_CONSOLE_CLIENT_EMAIL),
available_scapers: allScrapers.map((scraper) => ({ label: scraper.name, value: scraper.id })),
failed_queue: failedQueue,
};
} catch (error) {
console.log('Error Decrypting Settings API Keys!');
@@ -84,8 +90,14 @@ export const getAppSettings = async () : Promise<SettingsType> => {
smtp_port: '',
smtp_username: '',
smtp_password: '',
scrape_retry: false,
};
const otherSettings = {
available_scapers: allScrapers.map((scraper) => ({ label: scraper.name, value: scraper.id })),
failed_queue: [],
};
await writeFile(`${process.cwd()}/data/settings.json`, JSON.stringify(settings), { encoding: 'utf-8' });
return settings;
await writeFile(`${process.cwd()}/data/failed_queue.json`, JSON.stringify([]), { encoding: 'utf-8' });
return { ...settings, ...otherSettings };
}
};

@@ -11,16 +11,43 @@ import { useFetchDomains } from '../../services/domains';
import DomainItem from '../../components/domains/DomainItem';
import Icon from '../../components/common/Icon';

type thumbImages = { [domain:string] : string }

const SingleDomain: NextPage = () => {
const router = useRouter();
const [noScrapprtError, setNoScrapprtError] = useState(false);
const [showSettings, setShowSettings] = useState(false);
const [showAddDomain, setShowAddDomain] = useState(false);
const [domainThumbs, setDomainThumbs] = useState<thumbImages>({});
const { data: appSettings } = useFetchSettings();
const { data: domainsData, isLoading } = useFetchDomains(router, true);

useEffect(() => {
console.log('Domains Data: ', domainsData);
// console.log('Domains Data: ', domainsData);
if (domainsData?.domains && domainsData.domains.length > 0) {
const domainThumbsRaw = localStorage.getItem('domainThumbs');
const domThumbs = domainThumbsRaw ? JSON.parse(domainThumbsRaw) : {};
domainsData.domains.forEach(async (domain:DomainType) => {
if (domain.domain) {
if (!domThumbs[domain.domain]) {
const domainImageBlob = await fetch(`https://image.thum.io/get/auth/66909-serpbear/maxAge/96/width/200/https://${domain.domain}`).then((res) => res.blob());
if (domainImageBlob) {
const reader = new FileReader();
await new Promise((resolve, reject) => {
reader.onload = resolve;
reader.onerror = reject;
reader.readAsDataURL(domainImageBlob);
});
const imageBase: string = reader.result && typeof reader.result === 'string' ? reader.result : '';
localStorage.setItem('domainThumbs', JSON.stringify({ ...domThumbs, [domain.domain]: imageBase }));
setDomainThumbs((currentThumbs) => ({ ...currentThumbs, [domain.domain]: imageBase }));
}
} else {
setDomainThumbs((currentThumbs) => ({ ...currentThumbs, [domain.domain]: domThumbs[domain.domain] }));
}
}
});
}
}, [domainsData]);

useEffect(() => {
@@ -31,7 +58,7 @@ const SingleDomain: NextPage = () => {
}, [appSettings]);

return (
<div className="Domain ">
<div className="Domain flex flex-col min-h-screen">
{noScrapprtError && (
<div className=' p-3 bg-red-600 text-white text-sm text-center'>
A Scrapper/Proxy has not been set up Yet. Open Settings to set it up and start using the app.
@@ -62,6 +89,7 @@ const SingleDomain: NextPage = () => {
domain={domain}
selected={false}
isConsoleIntegrated={!!(appSettings && appSettings?.settings?.search_console_integrated) }
thumb={domainThumbs[domain.domain]}
// isConsoleIntegrated={false}
/>;
})}
@@ -84,6 +112,9 @@ const SingleDomain: NextPage = () => {
<CSSTransition in={showSettings} timeout={300} classNames="settings_anim" unmountOnExit mountOnEnter>
<Settings closeSettings={() => setShowSettings(false)} />
</CSSTransition>
<footer className='text-center flex flex-1 justify-center pb-5 items-end'>
<span className='text-gray-500 text-xs'><a href='https://github.com/towfiqi/serpbear' target="_blank" rel='noreferrer'>SerpBear v{appSettings?.settings?.version || '0.0.0'}</a></span>
</footer>
</div>
);
};

@@ -2,6 +2,7 @@ import scrapingAnt from './services/scrapingant';
import scrapingRobot from './services/scrapingrobot';
import serpapi from './services/serpapi';
import serply from './services/serply';
import spaceserp from './services/spaceserp';
import proxy from './services/proxy';

export default [
@@ -9,5 +10,6 @@ export default [
scrapingAnt,
serpapi,
serply,
spaceserp,
proxy,
];

@@ -2,11 +2,17 @@ const scrapingAnt:ScraperSettings = {
id: 'scrapingant',
name: 'ScrapingAnt',
website: 'scrapingant.com',
headers: (keyword) => {
// eslint-disable-next-line max-len
const mobileAgent = 'Mozilla/5.0 (Linux; Android 10; SM-G996U Build/QP1A.190711.020; wv) AppleWebKit/537.36 (KHTML, like Gecko) Version/4.0 Mobile Safari/537.36';
return keyword && keyword.device === 'mobile' ? { 'Ant-User-Agent': mobileAgent } : {};
},
scrapeURL: (keyword, settings, countryData) => {
const scraperCountries = ['AE', 'BR', 'CN', 'DE', 'ES', 'FR', 'GB', 'HK', 'PL', 'IN', 'IT', 'IL', 'JP', 'NL', 'RU', 'SA', 'US', 'CZ'];
const country = scraperCountries.includes(keyword.country.toUpperCase()) ? keyword.country : 'US';
const lang = countryData[country][2];
return `https://api.scrapingant.com/v2/extended?url=https%3A%2F%2Fwww.google.com%2Fsearch%3Fnum%3D100%26hl%3D${lang}%26q%3D${encodeURI(keyword.keyword)}&x-api-key=${settings.scaping_api}&proxy_country=${country}&browser=false`;
const url = encodeURI(`https://www.google.com/search?num=100&hl=${lang}&q=${keyword.keyword}`);
return `https://api.scrapingant.com/v2/extended?url=${url}&x-api-key=${settings.scaping_api}&proxy_country=${country}&browser=false`;
},
resultObjectKey: 'result',
};

@@ -4,8 +4,10 @@ const scrapingRobot:ScraperSettings = {
website: 'scrapingrobot.com',
scrapeURL: (keyword, settings, countryData) => {
const country = keyword.country || 'US';
const device = keyword.device === 'mobile' ? '&mobile=true' : '';
const lang = countryData[country][2];
return `https://api.scrapingrobot.com/?token=${settings.scaping_api}&proxyCountry=${country}&render=false${keyword.device === 'mobile' ? '&mobile=true' : ''}&url=https%3A%2F%2Fwww.google.com%2Fsearch%3Fnum%3D100%26hl%3D${lang}%26q%3D${encodeURI(keyword.keyword)}`;
const url = encodeURI(`https://www.google.com/search?num=100&hl=${lang}&q=${keyword.keyword}`);
return `https://api.scrapingrobot.com/?token=${settings.scaping_api}&proxyCountry=${country}&render=false${device}&url=${url}`;
},
resultObjectKey: 'result',
};

scrapers/services/spaceserp.ts (new file, 34 lines)
@@ -0,0 +1,34 @@
interface SpaceSerpResult {
title: string,
link: string,
domain: string,
position: number
}

const spaceSerp:ScraperSettings = {
id: 'spaceSerp',
name: 'Space Serp',
website: 'spaceserp.com',
scrapeURL: (keyword, settings, countryData) => {
const country = keyword.country || 'US';
const lang = countryData[country][2];
return `https://api.spaceserp.com/google/search?apiKey=${settings.scaping_api}&q=${encodeURI(keyword.keyword)}&pageSize=100&gl=${country}&hl=${lang}${keyword.device === 'mobile' ? '&device=mobile' : ''}&resultBlocks=`;
},
resultObjectKey: 'organic_results',
serpExtractor: (content) => {
const extractedResult = [];
const results: SpaceSerpResult[] = (typeof content === 'string') ? JSON.parse(content) : content as SpaceSerpResult[];
for (const result of results) {
if (result.title && result.link) {
extractedResult.push({
title: result.title,
url: result.link,
position: result.position,
});
}
}
return extractedResult;
},
};

export default spaceSerp;
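As an illustration of the serpExtractor above, a hand-made sample payload (not a real Space Serp response) would be reduced like this:

// Hypothetical input, shaped like the SpaceSerpResult interface above:
const sample = [
  { title: 'Example Domain', link: 'https://example.com/', domain: 'example.com', position: 1 },
  { title: 'Untitled', link: '', domain: 'other.com', position: 2 }, // dropped: an empty link fails the title && link check
];
// spaceSerp.serpExtractor(JSON.stringify(sample))
// -> [{ title: 'Example Domain', url: 'https://example.com/', position: 1 }]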
@@ -38,4 +38,27 @@ const useUpdateSettings = (onSuccess:Function|undefined) => {
});
};

export function useClearFailedQueue(onSuccess:Function) {
const queryClient = useQueryClient();
return useMutation(async () => {
const headers = new Headers({ 'Content-Type': 'application/json', Accept: 'application/json' });
const fetchOpts = { method: 'PUT', headers };
const res = await fetch(`${window.location.origin}/api/clearfailed`, fetchOpts);
if (res.status >= 400 && res.status < 600) {
throw new Error('Bad response from server');
}
return res.json();
}, {
onSuccess: async () => {
onSuccess();
toast('Failed Queue Cleared', { icon: '✔️' });
queryClient.invalidateQueries(['settings']);
},
onError: () => {
console.log('Error Clearing Failed Queue!!!');
toast('Error Clearing Failed Queue.', { icon: '⚠️' });
},
});
}

export default useUpdateSettings;
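A minimal sketch of how a component might wire up the new hook; the component context, state setter and button markup below are made up for illustration and are not part of the diff:

// Inside a React component that renders the failed-scrape queue:
// const { mutate: clearFailedQueue, isLoading } = useClearFailedQueue(() => setShowQueue(false));
// ...
// <button onClick={() => clearFailedQueue()} disabled={isLoading}>Clear Failed Queue</button>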

@@ -84,8 +84,8 @@ body {

.domKeywords_head--alpha_desc .domKeywords_head_keyword::after,
.domKeywords_head--pos_desc .domKeywords_head_position::after,
.domKeywords_head--imp_desc .domKeywords_head_imp::after,
.domKeywords_head--visits_desc .domKeywords_head_visits::after,
.domKeywords_head--imp_asc .domKeywords_head_imp::after,
.domKeywords_head--visits_asc .domKeywords_head_visits::after,
.domKeywords_head--ctr_desc .domKeywords_head_ctr::after {
content: "↓";
display: inline-block;
@@ -98,8 +98,8 @@ body {

.domKeywords_head--alpha_asc .domKeywords_head_keyword::after,
.domKeywords_head--pos_asc .domKeywords_head_position::after,
.domKeywords_head--imp_asc .domKeywords_head_imp::after,
.domKeywords_head--visits_asc .domKeywords_head_visits::after,
.domKeywords_head--imp_desc .domKeywords_head_imp::after,
.domKeywords_head--visits_desc .domKeywords_head_visits::after,
.domKeywords_head--ctr_asc .domKeywords_head_ctr::after {
content: "↑";
display: inline-block;

types.d.ts (vendored, 7 changed lines)
@@ -78,7 +78,12 @@ type SettingsType = {
smtp_username?: string,
smtp_password?: string,
search_console_integrated?: boolean,
available_scapers?: Array
available_scapers?: Array,
scrape_interval?: string,
scrape_delay?: string,
scrape_retry?: boolean,
failed_queue?: string[]
version?: string
}

type KeywordSCDataChild = {

@@ -9,16 +9,16 @@ export const SCsortKeywords = (theKeywords:SCKeywordType[], sortBy:string) : SCK
const keywords = theKeywords.map((k) => ({ ...k, position: k.position === 0 ? 111 : k.position }));
switch (sortBy) {
case 'imp_asc':
sortedItems = theKeywords.sort((a: SCKeywordType, b: SCKeywordType) => b.impressions - a.impressions);
sortedItems = theKeywords.sort((a: SCKeywordType, b: SCKeywordType) => (a.impressions > b.impressions ? 1 : -1));
break;
case 'imp_desc':
sortedItems = theKeywords.sort((a: SCKeywordType, b: SCKeywordType) => a.impressions - b.impressions);
sortedItems = theKeywords.sort((a: SCKeywordType, b: SCKeywordType) => (b.impressions > a.impressions ? 1 : -1));
break;
case 'visits_asc':
sortedItems = theKeywords.sort((a: SCKeywordType, b: SCKeywordType) => b.clicks - a.clicks);
sortedItems = theKeywords.sort((a: SCKeywordType, b: SCKeywordType) => (a.clicks > b.clicks ? 1 : -1));
break;
case 'visits_desc':
sortedItems = theKeywords.sort((a: SCKeywordType, b: SCKeywordType) => a.clicks - b.clicks);
sortedItems = theKeywords.sort((a: SCKeywordType, b: SCKeywordType) => (b.clicks > a.clicks ? 1 : -1));
break;
case 'ctr_asc':
sortedItems = theKeywords.sort((a: SCKeywordType, b: SCKeywordType) => b.ctr - a.ctr);
@@ -27,17 +27,17 @@ export const SCsortKeywords = (theKeywords:SCKeywordType[], sortBy:string) : SCK
sortedItems = theKeywords.sort((a: SCKeywordType, b: SCKeywordType) => a.ctr - b.ctr);
break;
case 'pos_asc':
sortedItems = keywords.sort((a: SCKeywordType, b: SCKeywordType) => (b.position > a.position ? 1 : -1));
sortedItems = keywords.sort((a: SCKeywordType, b: SCKeywordType) => (b.position < a.position ? 1 : -1));
sortedItems = sortedItems.map((k) => ({ ...k, position: k.position === 111 ? 0 : k.position }));
break;
case 'pos_desc':
sortedItems = keywords.sort((a: SCKeywordType, b: SCKeywordType) => (a.position > b.position ? 1 : -1));
sortedItems = keywords.sort((a: SCKeywordType, b: SCKeywordType) => (a.position < b.position ? 1 : -1));
sortedItems = sortedItems.map((k) => ({ ...k, position: k.position === 111 ? 0 : k.position }));
break;
case 'alpha_asc':
case 'alpha_desc':
sortedItems = theKeywords.sort((a: SCKeywordType, b: SCKeywordType) => (b.keyword > a.keyword ? 1 : -1));
break;
case 'alpha_desc':
case 'alpha_asc':
sortedItems = theKeywords.sort((a: SCKeywordType, b: SCKeywordType) => (a.keyword > b.keyword ? 1 : -1));
break;
default:

utils/refresh.ts (109 changed lines)
@@ -1,32 +1,117 @@
import { performance } from 'perf_hooks';
import { RefreshResult, scrapeKeywordFromGoogle } from './scraper';
import { setTimeout as sleep } from 'timers/promises';
import { RefreshResult, removeFromRetryQueue, retryScrape, scrapeKeywordFromGoogle } from './scraper';
import parseKeywords from './parseKeywords';
import Keyword from '../database/models/keyword';

/**
* Refreshes the Keywords position by Scraping Google Search Result by
* Determining whether the keywords should be scraped in Parallel or not
* @param {KeywordType[]} keywords - Keywords to scrape
* @param {Keyword[]} rawkeyword - Keywords to scrape
* @param {SettingsType} settings - The App Settings that contain the Scraper settings
* @returns {Promise}
*/
const refreshKeywords = async (keywords:KeywordType[], settings:SettingsType): Promise<RefreshResult[]> => {
if (!keywords || keywords.length === 0) { return []; }
const refreshAndUpdateKeywords = async (rawkeyword:Keyword[], settings:SettingsType): Promise<KeywordType[]> => {
const keywords:KeywordType[] = rawkeyword.map((el) => el.get({ plain: true }));
if (!rawkeyword || rawkeyword.length === 0) { return []; }
const start = performance.now();

let refreshedResults: RefreshResult[] = [];
const updatedKeywords: KeywordType[] = [];

if (['scrapingant', 'serpapi'].includes(settings.scraper_type)) {
refreshedResults = await refreshParallel(keywords, settings);
const refreshedResults = await refreshParallel(keywords, settings);
if (refreshedResults.length > 0) {
for (const keyword of rawkeyword) {
const refreshedkeywordData = refreshedResults.find((k) => k && k.ID === keyword.id);
if (refreshedkeywordData) {
const updatedkeyword = await updateKeywordPosition(keyword, refreshedkeywordData, settings);
updatedKeywords.push(updatedkeyword);
}
}
}
} else {
for (const keyword of keywords) {
for (const keyword of rawkeyword) {
console.log('START SCRAPE: ', keyword.keyword);
const refreshedkeywordData = await scrapeKeywordFromGoogle(keyword, settings);
refreshedResults.push(refreshedkeywordData);
const updatedkeyword = await refreshAndUpdateKeyword(keyword, settings);
updatedKeywords.push(updatedkeyword);
if (keywords.length > 0 && settings.scrape_delay && settings.scrape_delay !== '0') {
await sleep(parseInt(settings.scrape_delay, 10));
}
}
}

const end = performance.now();
console.log(`time taken: ${end - start}ms`);
return refreshedResults;
return updatedKeywords;
};

/**
* Scrape Serp for given keyword and update the position in DB.
* @param {Keyword} keyword - Keywords to scrape
* @param {SettingsType} settings - The App Settings that contain the Scraper settings
* @returns {Promise<KeywordType>}
*/
const refreshAndUpdateKeyword = async (keyword: Keyword, settings: SettingsType): Promise<KeywordType> => {
const currentkeyword = keyword.get({ plain: true });
const refreshedkeywordData = await scrapeKeywordFromGoogle(currentkeyword, settings);
const updatedkeyword = refreshedkeywordData ? await updateKeywordPosition(keyword, refreshedkeywordData, settings) : currentkeyword;
return updatedkeyword;
};

/**
* Processes the scraped data for the given keyword and updates the keyword serp position in DB.
* @param {Keyword} keywordRaw - Keywords to Update
* @param {RefreshResult} udpatedkeyword - scraped Data for that Keyword
* @param {SettingsType} settings - The App Settings that contain the Scraper settings
* @returns {Promise<KeywordType>}
*/
export const updateKeywordPosition = async (keywordRaw:Keyword, udpatedkeyword: RefreshResult, settings: SettingsType): Promise<KeywordType> => {
const keywordPrased = parseKeywords([keywordRaw.get({ plain: true })]);
const keyword = keywordPrased[0];
// const udpatedkeyword = refreshed;
let updated = keyword;

if (udpatedkeyword && keyword) {
const newPos = udpatedkeyword.position;
const newPosition = newPos !== 0 ? newPos : keyword.position;
const { history } = keyword;
const theDate = new Date();
const dateKey = `${theDate.getFullYear()}-${theDate.getMonth() + 1}-${theDate.getDate()}`;
history[dateKey] = newPosition;

const updatedVal = {
position: newPosition,
updating: false,
url: udpatedkeyword.url,
lastResult: udpatedkeyword.result,
history,
lastUpdated: udpatedkeyword.error ? keyword.lastUpdated : theDate.toJSON(),
lastUpdateError: udpatedkeyword.error
? JSON.stringify({ date: theDate.toJSON(), error: `${udpatedkeyword.error}`, scraper: settings.scraper_type })
: 'false',
};

// If failed, Add to Retry Queue Cron
if (udpatedkeyword.error && settings?.scrape_retry) {
await retryScrape(keyword.ID);
} else {
await removeFromRetryQueue(keyword.ID);
}

// Update the Keyword Position in Database
try {
await keywordRaw.update({
...updatedVal,
lastResult: Array.isArray(udpatedkeyword.result) ? JSON.stringify(udpatedkeyword.result) : udpatedkeyword.result,
history: JSON.stringify(history),
});
console.log('[SUCCESS] Updating the Keyword: ', keyword.keyword);
updated = { ...keyword, ...updatedVal, lastUpdateError: JSON.parse(updatedVal.lastUpdateError) };
} catch (error) {
console.log('[ERROR] Updating SERP for Keyword', keyword.keyword, error);
}
}

return updated;
};
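One detail of updateKeywordPosition worth spelling out: the history key is built from getFullYear, getMonth and getDate without zero-padding, so an entry written on 4 November 2023 for position 7 would look like this (illustrative values):

// history['2023-11-4'] = 7;   // note: getMonth() + 1 and getDate() produce no leading zeros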

/**
@@ -49,4 +134,4 @@ const refreshParallel = async (keywords:KeywordType[], settings:SettingsType) :
});
};

export default refreshKeywords;
export default refreshAndUpdateKeywords;

@@ -12,14 +12,14 @@ type SearchResult = {
}

type SERPObject = {
postion:number|boolean,
postion:number,
url:string
}

export type RefreshResult = false | {
ID: number,
keyword: string,
position:number | boolean,
position:number,
url: string,
result: SearchResult[],
error?: boolean | string
@@ -49,7 +49,7 @@ export const getScraperClient = (keyword:KeywordType, settings:SettingsType, scr
// Set Scraper Header
const scrapeHeaders = scraper.headers ? scraper.headers(keyword, settings) : null;
const scraperAPIURL = scraper.scrapeURL ? scraper.scrapeURL(keyword, settings, countries) : null;
if (scrapeHeaders) {
if (scrapeHeaders && Object.keys(scrapeHeaders).length > 0) {
Object.keys(scrapeHeaders).forEach((headerItemKey:string) => {
headers[headerItemKey] = scrapeHeaders[headerItemKey as keyof object];
});
@@ -114,7 +114,7 @@ export const scrapeKeywordFromGoogle = async (keyword:KeywordType, settings:Sett
const scraperResult = scraperObj?.resultObjectKey && res[scraperObj.resultObjectKey] ? res[scraperObj.resultObjectKey] : '';
const scrapeResult:string = (res.data || res.html || res.results || scraperResult || '');
if (res && scrapeResult) {
const extracted = scraperObj?.serpExtractor ? scraperObj.serpExtractor(scrapeResult) : extractScrapedResult(scrapeResult);
const extracted = scraperObj?.serpExtractor ? scraperObj.serpExtractor(scrapeResult) : extractScrapedResult(scrapeResult, keyword.device);
// await writeFile('result.txt', JSON.stringify(scrapeResult), { encoding: 'utf-8' }).catch((err) => { console.log(err); });
const serp = getSerp(keyword.domain, extracted);
refreshedResults = { ID: keyword.ID, keyword: keyword.keyword, position: serp.postion, url: serp.url, result: extracted, error: false };
@@ -141,9 +141,10 @@ export const scrapeKeywordFromGoogle = async (keyword:KeywordType, settings:Sett
/**
* Extracts the Google Search result as object array from the Google Search's HTML content
* @param {string} content - scraped google search page html data.
* @param {string} device - The device of the keyword.
* @returns {SearchResult[]}
*/
export const extractScrapedResult = (content: string): SearchResult[] => {
export const extractScrapedResult = (content: string, device: string): SearchResult[] => {
const extractedResult = [];

const $ = cheerio.load(content);
@@ -163,6 +164,24 @@ export const extractScrapedResult = (content: string): SearchResult[] => {
}
}

// Mobile Scraper
if (extractedResult.length === 0 && device === 'mobile') {
const items = $('body').find('#rso > div');
for (let i = 0; i < items.length; i += 1) {
const item = $(items[i]);
const linkDom = item.find('a[role="presentation"]');
if (linkDom) {
const url = linkDom.attr('href');
const titleDom = linkDom.find('[role="link"]');
const title = titleDom ? titleDom.text() : '';
if (title && url) {
lastPosition += 1;
extractedResult.push({ title, url, position: lastPosition });
}
}
}
}

return extractedResult;
};

@@ -173,7 +192,7 @@ export const extractScrapedResult = (content: string): SearchResult[] => {
* @returns {SERPObject}
*/
export const getSerp = (domain:string, result:SearchResult[]) : SERPObject => {
if (result.length === 0 || !domain) { return { postion: false, url: '' }; }
if (result.length === 0 || !domain) { return { postion: 0, url: '' }; }
const foundItem = result.find((item) => {
const itemDomain = item.url.replace('www.', '').match(/^(?:https?:)?(?:\/\/)?([^/?]+)/i);
return itemDomain && itemDomain.includes(domain.replace('www.', ''));
@@ -188,15 +207,15 @@ export const getSerp = (domain:string, result:SearchResult[]) : SERPObject => {
* @returns {void}
*/
export const retryScrape = async (keywordID: number) : Promise<void> => {
if (!keywordID) { return; }
if (!keywordID && !Number.isInteger(keywordID)) { return; }
let currentQueue: number[] = [];

const filePath = `${process.cwd()}/data/failed_queue.json`;
const currentQueueRaw = await readFile(filePath, { encoding: 'utf-8' }).catch((err) => { console.log(err); return '[]'; });
currentQueue = JSON.parse(currentQueueRaw);
currentQueue = currentQueueRaw ? JSON.parse(currentQueueRaw) : [];

if (!currentQueue.includes(keywordID)) {
currentQueue.push(keywordID);
currentQueue.push(Math.abs(keywordID));
}

await writeFile(filePath, JSON.stringify(currentQueue), { encoding: 'utf-8' }).catch((err) => { console.log(err); return '[]'; });
@@ -208,13 +227,13 @@ export const retryScrape = async (keywordID: number) : Promise<void> => {
* @returns {void}
*/
export const removeFromRetryQueue = async (keywordID: number) : Promise<void> => {
if (!keywordID) { return; }
if (!keywordID && !Number.isInteger(keywordID)) { return; }
let currentQueue: number[] = [];

const filePath = `${process.cwd()}/data/failed_queue.json`;
const currentQueueRaw = await readFile(filePath, { encoding: 'utf-8' }).catch((err) => { console.log(err); return '[]'; });
currentQueue = JSON.parse(currentQueueRaw);
currentQueue = currentQueue.filter((item) => item !== keywordID);
currentQueue = currentQueueRaw ? JSON.parse(currentQueueRaw) : [];
currentQueue = currentQueue.filter((item) => item !== Math.abs(keywordID));

await writeFile(filePath, JSON.stringify(currentQueue), { encoding: 'utf-8' }).catch((err) => { console.log(err); return '[]'; });
};

@@ -54,7 +54,7 @@ const fetchSearchConsoleData = async (domainName:string, days:number, type?:stri
date: row.keys[0],
clicks: row.clicks,
impressions: row.impressions,
ctr: row.ctr,
ctr: row.ctr * 100,
position: row.position,
});
});
@@ -101,7 +101,7 @@ export const parseSearchConsoleItem = (SCItem: SearchAnalyticsRawItem, domainNam
const page = SCItem.keys[3] ? SCItem.keys[3].replace('https://', '').replace('http://', '').replace('www', '').replace(domainName, '') : '';
const uid = `${country.toLowerCase()}:${device}:${keyword.replaceAll(' ', '_')}`;

return { keyword, uid, device, country, clicks, impressions, ctr, position, page };
return { keyword, uid, device, country, clicks, impressions, ctr: ctr * 100, position, page };
};

export const integrateKeywordSCData = (keyword: KeywordType, SCData:SCDomainDataType) : KeywordType => {

@@ -28,7 +28,7 @@ export const sortKeywords = (theKeywords:KeywordType[], sortBy:string, scDataTyp
case 'alpha_desc':
sortedItems = theKeywords.sort((a: KeywordType, b: KeywordType) => (a.keyword > b.keyword ? 1 : -1));
break;
case 'imp_asc':
case 'imp_desc':
if (scDataType) {
sortedItems = theKeywords.sort((a: KeywordType, b: KeywordType) => {
const bImpressionData = b.scData?.impressions[scDataType as keyof KeywordSCDataChild] || 0;
@@ -37,7 +37,7 @@ export const sortKeywords = (theKeywords:KeywordType[], sortBy:string, scDataTyp
});
}
break;
case 'imp_desc':
case 'imp_asc':
if (scDataType) {
sortedItems = theKeywords.sort((a: KeywordType, b: KeywordType) => {
const bImpressionData = b.scData?.impressions[scDataType as keyof KeywordSCDataChild] || 0;
@@ -46,21 +46,21 @@ export const sortKeywords = (theKeywords:KeywordType[], sortBy:string, scDataTyp
});
}
break;
case 'visits_asc':
if (scDataType) {
sortedItems = theKeywords.sort((a: KeywordType, b: KeywordType) => {
const bImpressionData = b.scData?.visits[scDataType as keyof KeywordSCDataChild] || 0;
const aImpressionData = a.scData?.visits[scDataType as keyof KeywordSCDataChild] || 0;
return aImpressionData > bImpressionData ? 1 : -1;
});
}
break;
case 'visits_desc':
if (scDataType) {
sortedItems = theKeywords.sort((a: KeywordType, b: KeywordType) => {
const bImpressionData = b.scData?.visits[scDataType as keyof KeywordSCDataChild] || 0;
const aImpressionData = a.scData?.visits[scDataType as keyof KeywordSCDataChild] || 0;
return bImpressionData > aImpressionData ? 1 : -1;
const bVisitsData = b.scData?.visits[scDataType as keyof KeywordSCDataChild] || 0;
const aVisitsData = a.scData?.visits[scDataType as keyof KeywordSCDataChild] || 0;
return aVisitsData > bVisitsData ? 1 : -1;
});
}
break;
case 'visits_asc':
if (scDataType) {
sortedItems = theKeywords.sort((a: KeywordType, b: KeywordType) => {
const bVisitsData = b.scData?.visits[scDataType as keyof KeywordSCDataChild] || 0;
const aVisitsData = a.scData?.visits[scDataType as keyof KeywordSCDataChild] || 0;
return bVisitsData > aVisitsData ? 1 : -1;
});
}
break;
@@ -98,7 +98,8 @@ export const filterKeywords = (keywords: KeywordType[], filterParams: KeywordFil
const filteredItems:KeywordType[] = [];
keywords.forEach((keywrd) => {
const countryMatch = filterParams.countries.length === 0 ? true : filterParams.countries && filterParams.countries.includes(keywrd.country);
const searchMatch = !filterParams.search ? true : filterParams.search && keywrd.keyword.includes(filterParams.search);
const searchMatch = !filterParams.search ? true : filterParams.search
&& keywrd.keyword.toLowerCase().includes(filterParams.search.toLowerCase());
const tagsMatch = filterParams.tags.length === 0 ? true : filterParams.tags && keywrd.tags.find((x) => filterParams.tags.includes(x));

if (countryMatch && searchMatch && tagsMatch) {

utils/validators.ts (new file, 35 lines)
@@ -0,0 +1,35 @@
/* eslint-disable import/prefer-default-export */
export const isValidDomain = (domain:string): boolean => {
if (typeof domain !== 'string') return false;
if (!domain.includes('.')) return false;
let value = domain;
const validHostnameChars = /^[a-zA-Z0-9-.]{1,253}\.?$/g;
if (!validHostnameChars.test(value)) {
return false;
}

if (value.endsWith('.')) {
value = value.slice(0, value.length - 1);
}

if (value.length > 253) {
return false;
}

const labels = value.split('.');

const isValid = labels.every((label) => {
const validLabelChars = /^([a-zA-Z0-9-]+)$/g;

const validLabel = (
validLabelChars.test(label)
&& label.length < 64
&& !label.startsWith('-')
&& !label.endsWith('-')
);

return validLabel;
});

return isValid;
};
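A quick usage sketch for the new validator; the import path and inputs are illustrative, not part of the diff:

import { isValidDomain } from './validators';

console.log(isValidDomain('my-site.example.com')); // true: hyphenated subdomains pass
console.log(isValidDomain('-bad-.example.com'));   // false: labels may not start or end with '-'
console.log(isValidDomain('localhost'));           // false: at least one dot is required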