mirror of https://github.com/towfiqi/serpbear
synced 2025-06-26 18:15:54 +00:00
Compare commits
7 Commits
| Author | SHA1 | Date |
|---|---|---|
|  | d01b65db04 |  |
|  | 691055811c |  |
|  | 6d7cfec953 |  |
|  | 8c8064f222 |  |
|  | 3d1c690076 |  |
|  | 1ed298f633 |  |
|  | 38dc164514 |  |
CHANGELOG.md (16 lines changed)
@@ -2,6 +2,22 @@
 All notable changes to this project will be documented in this file. See [standard-version](https://github.com/conventional-changelog/standard-version) for commit guidelines.

+### [0.1.4](https://github.com/towfiqi/serpbear/compare/v0.1.3...v0.1.4) (2022-12-01)
+
+### Features
+
+* Failed scrape now shows error details in UI. ([8c8064f](https://github.com/towfiqi/serpbear/commit/8c8064f222ea8177b26b6dd28866d1f421faca39))
+
+### Bug Fixes
+
+* Domains with www weren't loading keywords. ([3d1c690](https://github.com/towfiqi/serpbear/commit/3d1c690076a03598f0ac3f3663d905479d945897)), closes [#8](https://github.com/towfiqi/serpbear/issues/8)
+* Emails were sending serps of previous day. ([6910558](https://github.com/towfiqi/serpbear/commit/691055811c2ae70ce1b878346300048c1e23f2eb))
+* Fixes Broken ScrapingRobot Integration. ([1ed298f](https://github.com/towfiqi/serpbear/commit/1ed298f633a9ae5b402b431f1e50b35ffd44a6dc))
+* scraper fails if matched domain has www ([38dc164](https://github.com/towfiqi/serpbear/commit/38dc164514b066b2007f2f3b2ae68005621963cc)), closes [#6](https://github.com/towfiqi/serpbear/issues/6) [#7](https://github.com/towfiqi/serpbear/issues/7)
+* scraper fails when result has domain w/o www ([6d7cfec](https://github.com/towfiqi/serpbear/commit/6d7cfec95304fa7a61beaab07f7cd6af215255c3))
+
 ### [0.1.3](https://github.com/towfiqi/serpbear/compare/v0.1.2...v0.1.3) (2022-12-01)
@@ -21,7 +21,7 @@ type KeywordProps = {
 const Keyword = (props: KeywordProps) => {
    const { keywordData, refreshkeyword, favoriteKeyword, removeKeyword, selectKeyword, selected, showKeywordDetails, manageTags, lastItem } = props;
    const {
-      keyword, domain, ID, position, url = '', lastUpdated, country, sticky, history = {}, updating = false, lastUpdateError = 'false',
+      keyword, domain, ID, position, url = '', lastUpdated, country, sticky, history = {}, updating = false, lastUpdateError = false,
    } = keywordData;
    const [showOptions, setShowOptions] = useState(false);
    const [showPositionError, setPositionError] = useState(false);
@@ -77,7 +77,7 @@ const Keyword = (props: KeywordProps) => {
                <span className={`fflag fflag-${country} w-[18px] h-[12px] mr-2`} title={countries[country][0]} />{keyword}
             </a>
             {sticky && <button className='ml-2 relative top-[2px]' title='Favorite'><Icon type="star-filled" size={16} color="#fbd346" /></button>}
-            {lastUpdateError !== 'false'
+            {lastUpdateError && lastUpdateError.date
             && <button className='ml-2 relative top-[2px]' onClick={() => setPositionError(true)}>
                <Icon type="error" size={18} color="#FF3672" />
             </button>
@@ -133,16 +133,19 @@ const Keyword = (props: KeywordProps) => {
                </ul>
             )}
          </div>
-         {lastUpdateError !== 'false' && showPositionError
-         && <div className=' absolute mt-[-70px] p-2 bg-white z-30 border border-red-200 rounded w-[220px] left-4 shadow-sm text-xs'>
+         {lastUpdateError && lastUpdateError.date && showPositionError && (
+         <div className=' absolute mt-[-70px] p-2 bg-white z-30 border border-red-200 rounded w-[220px] left-4 shadow-sm text-xs lg:bottom-12'>
             Error Updating Keyword position (Tried <TimeAgo
-               title={dayjs(lastUpdateError).format('DD-MMM-YYYY, hh:mm:ss A')}
-               date={lastUpdateError} />)
+               title={dayjs(lastUpdateError.date).format('DD-MMM-YYYY, hh:mm:ss A')}
+               date={lastUpdateError.date} />)
             <i className='absolute top-0 right-0 ml-2 p-2 font-semibold not-italic cursor-pointer' onClick={() => setPositionError(false)}>
                <Icon type="close" size={16} color="#999" />
             </i>
+            <div className=' border-t-[1px] border-red-100 mt-2 pt-1'>
+               {lastUpdateError.scraper && <strong className='capitalize'>{lastUpdateError.scraper}: </strong>}{lastUpdateError.error}
+            </div>
          </div>
-         }
+         )}
       </div>
    );
 };
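The component-level change above boils down to the new shape of `lastUpdateError`: it is now either `false` or a parsed object, rather than a string that may hold `'false'` or a bare date. A minimal sketch of the render guard, assuming the shape defined later in types.d.ts (the helper name is illustrative, not from the project):

```typescript
type LastUpdateError = { date: string, error: string, scraper: string } | false;

// The error icon and popup only render when the last refresh actually failed.
const hasUpdateError = (lastUpdateError: LastUpdateError): boolean =>
  Boolean(lastUpdateError && lastUpdateError.date);

console.log(hasUpdateError(false)); // false
console.log(hasUpdateError({ date: '2022-12-01T07:00:00.000Z', error: 'Timeout', scraper: 'proxy' })); // true
```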
cron.js (5 lines changed)
@@ -50,6 +50,9 @@ const generateCronTime = (interval) => {
    if (interval === 'daily') {
       cronTime = '0 0 0 * * *';
    }
+   if (interval === 'daily_morning') {
+      cronTime = '0 0 0 7 * *';
+   }
    if (interval === 'weekly') {
       cronTime = '0 0 0 */7 * *';
    }
@@ -103,7 +106,7 @@ const runAppCronJobs = () => {
    getAppSettings().then((settings) => {
       const notif_interval = (!settings.notification_interval || settings.notification_interval === 'never') ? false : settings.notification_interval;
       if (notif_interval) {
-         const cronTime = generateCronTime(notif_interval);
+         const cronTime = generateCronTime(notif_interval === 'daily' ? 'daily_morning' : notif_interval);
          if (cronTime) {
             cron.schedule(cronTime, () => {
                // console.log('### Sending Notification Email...');
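For context, a minimal standalone sketch of the interval mapping and scheduling shown in the two hunks above, assuming node-cron; the project keeps this logic inline in cron.js, so the extracted function here is illustrative:

```typescript
import cron from 'node-cron';

// Standalone sketch of the logic in the hunks above; not the project's exact code.
const generateCronTime = (interval: string): string | false => {
  if (interval === 'daily') return '0 0 0 * * *';          // midnight every day
  if (interval === 'daily_morning') return '0 0 0 7 * *';  // value as it appears in the hunk above
  if (interval === 'weekly') return '0 0 0 */7 * *';       // midnight every 7th day of the month
  return false;
};

// 'daily' notification settings are remapped to 'daily_morning' so the email no longer
// fires at the same midnight tick as the scrape (per the "Emails were sending serps of
// previous day" fix in the changelog above).
const notifInterval: string = 'daily';
const cronTime = generateCronTime(notifInterval === 'daily' ? 'daily_morning' : notifInterval);
if (cronTime) {
  cron.schedule(cronTime, () => {
    // send the notification email here
  });
}
```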
package-lock.json (generated, 4 lines changed)
@@ -1,12 +1,12 @@
 {
    "name": "serpbear",
-   "version": "0.1.3",
+   "version": "0.1.4",
    "lockfileVersion": 2,
    "requires": true,
    "packages": {
       "": {
          "name": "serpbear",
-         "version": "0.1.3",
+         "version": "0.1.4",
          "dependencies": {
             "@testing-library/react": "^13.4.0",
             "@types/react-transition-group": "^4.4.5",
package.json

@@ -1,6 +1,6 @@
 {
    "name": "serpbear",
-   "version": "0.1.3",
+   "version": "0.1.4",
    "private": true,
    "scripts": {
       "dev": "next dev",
@@ -44,7 +44,8 @@ const getKeywords = async (req: NextApiRequest, res: NextApiResponse<KeywordsGet
    if (!req.query.domain && typeof req.query.domain !== 'string') {
       return res.status(400).json({ error: 'Domain is Required!' });
    }
-   const domain = (req.query.domain as string).replace('-', '.');
+   const domain = (req.query.domain as string).replaceAll('-', '.');
+
    try {
       const allKeywords:Keyword[] = await Keyword.findAll({ where: { domain } });
       const keywords: KeywordType[] = parseKeywords(allKeywords.map((e) => e.get({ plain: true })));
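Presumably the cause behind "Domains with www weren't loading keywords" (#8): the route receives the domain as a dash-encoded slug, and `replace` only swaps the first dash, so any host with more than one dot never matches the stored domain. A small illustration (the slug format is inferred from the diff, not spelled out in it):

```typescript
// Illustration only: dash-encoded domain slug, before and after the change.
const slug = 'www-example-com';

const firstOnly = slug.replace('-', '.');   // 'www.example-com'  -> never equals the stored domain
const all = slug.replaceAll('-', '.');      // 'www.example.com'  -> matches the stored domain
console.log(firstOnly, all);
```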
@@ -79,18 +79,21 @@ export const refreshAndUpdateKeywords = async (initKeywords:Keyword[], settings:
    const newPos = udpatedkeyword.position;
    const newPosition = newPos !== false ? newPos : keyword.position;
    const { history } = keyword;
-   const currentDate = new Date();
-   history[`${currentDate.getFullYear()}-${currentDate.getMonth() + 1}-${currentDate.getDate()}`] = newPosition;
+   const theDate = new Date();
+   history[`${theDate.getFullYear()}-${theDate.getMonth() + 1}-${theDate.getDate()}`] = newPosition;

    const updatedVal = {
       position: newPosition,
       updating: false,
       url: udpatedkeyword.url,
       lastResult: udpatedkeyword.result,
       history,
-      lastUpdated: udpatedkeyword.error ? keyword.lastUpdated : new Date().toJSON(),
-      lastUpdateError: udpatedkeyword.error ? new Date().toJSON() : 'false',
+      lastUpdated: udpatedkeyword.error ? keyword.lastUpdated : theDate.toJSON(),
+      lastUpdateError: udpatedkeyword.error
+         ? JSON.stringify({ date: theDate.toJSON(), error: `${udpatedkeyword.error}`, scraper: settings.scraper_type })
+         : 'false',
    };
-   updatedKeywords.push({ ...keyword, ...updatedVal });
+   updatedKeywords.push({ ...keyword, ...{ ...updatedVal, lastUpdateError: JSON.parse(updatedVal.lastUpdateError) } });

    // If failed, Add to Retry Queue Cron
    if (udpatedkeyword.error) {
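What the new `lastUpdateError` field stores, sketched in isolation: a JSON string carrying the failure date, the error message, and the scraper that produced it, or the literal string 'false' when the refresh succeeded. The helper name here is illustrative, not from the project:

```typescript
// Illustrative serializer mirroring the updatedVal logic above.
const serializeLastUpdateError = (error: string | false, scraperType: string): string => (
  error
    ? JSON.stringify({ date: new Date().toJSON(), error: `${error}`, scraper: scraperType })
    : 'false'
);

console.log(serializeLastUpdateError(false, 'proxy'));
// -> 'false'
console.log(serializeLastUpdateError('[429] Too Many Requests', 'scrapingrobot'));
// -> a JSON string like {"date":"...","error":"[429] Too Many Requests","scraper":"scrapingrobot"}
```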
types.d.ts (vendored, 2 lines changed)
@@ -31,7 +31,7 @@ type KeywordType = {
    url: string,
    tags: string[],
    updating: boolean,
-   lastUpdateError: string
+   lastUpdateError: {date: string, error: string, scraper: string} | false
 }

 type KeywordLastResult = {
@@ -11,6 +11,7 @@ const parseKeywords = (allKeywords: Keyword[]) : KeywordType[] => {
       history: JSON.parse(keywrd.history),
       tags: JSON.parse(keywrd.tags),
       lastResult: JSON.parse(keywrd.lastResult),
+      lastUpdateError: keywrd.lastUpdateError !== 'false' && keywrd.lastUpdateError.includes('{') ? JSON.parse(keywrd.lastUpdateError) : false,
    }));
    return parsedItems;
 };
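Reading the field back has to tolerate rows written before this release, where `lastUpdateError` held `'false'` or a bare date string; only values that look like JSON objects are parsed. A hedged sketch of that guard on its own (the function is not part of the project):

```typescript
// Illustrative helper mirroring the parseKeywords guard above.
const parseLastUpdateError = (raw: string): { date: string, error: string, scraper: string } | false => {
  // Older rows may hold 'false' or a bare date string; only JSON objects are parsed.
  return raw !== 'false' && raw.includes('{') ? JSON.parse(raw) : false;
};

console.log(parseLastUpdateError('false'));                    // false
console.log(parseLastUpdateError('2022-11-30T00:00:00.000Z')); // false (legacy value, no '{')
console.log(parseLastUpdateError('{"date":"2022-12-01T07:00:00.000Z","error":"Timeout","scraper":"proxy"}'));
```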
@@ -20,10 +20,10 @@ type SERPObject = {
 export type RefreshResult = false | {
    ID: number,
    keyword: string,
-   position:number|boolean,
+   position:number | boolean,
    url: string,
    result: SearchResult[],
-   error?: boolean
+   error?: boolean | string
 }

 /**
@@ -56,7 +56,7 @@ export const getScraperClient = (keyword:KeywordType, settings:SettingsType): Pr
    if (settings && settings.scraper_type === 'scrapingrobot' && settings.scaping_api) {
       const country = keyword.country || 'US';
       const lang = countries[country][2];
-      apiURL = `https://api.scrapingrobot.com/?url=https%3A%2F%2Fwww.google.com%2Fsearch%3Fnum%3D100%26hl%3D${lang}%26q%3D${encodeURI(keyword.keyword)}&token=${settings.scaping_api}&proxyCountry=${country}&render=false${keyword.device === 'mobile' ? '&mobile=true' : ''}`;
+      apiURL = `https://api.scrapingrobot.com/?token=${settings.scaping_api}&proxyCountry=${country}&render=false${keyword.device === 'mobile' ? '&mobile=true' : ''}&url=https%3A%2F%2Fwww.google.com%2Fsearch%3Fnum%3D100%26hl%3D${lang}%26q%3D${encodeURI(keyword.keyword)}`;
    }

    if (settings && settings.scraper_type === 'proxy' && settings.proxy) {
@@ -76,7 +76,7 @@ export const getScraperClient = (keyword:KeywordType, settings:SettingsType): Pr
       const axiosClient = axios.create(axiosConfig);
       client = axiosClient.get(`https://www.google.com/search?num=100&q=${encodeURI(keyword.keyword)}`);
    } else {
-      client = fetch(apiURL, { method: 'GET', headers }).then((res) => res.json());
+      client = fetch(apiURL, { method: 'GET', headers });
    }

    return client;
@@ -100,18 +100,26 @@ export const scrapeKeywordFromGoogle = async (keyword:KeywordType, settings:Sett
    const scraperClient = getScraperClient(keyword, settings);

    if (!scraperClient) { return false; }

+   let res:any = null; let scraperError:any = null;
    try {
-      const res:any = await scraperClient;
-      if (res && (res.data || res.html)) {
-         // writeFile('result.txt', res.data, { encoding: 'utf-8' });
-         const extracted = extractScrapedResult(res.data || res.html, settings.scraper_type);
+      if (settings && settings.scraper_type === 'proxy' && settings.proxy) {
+         res = await scraperClient;
+      } else {
+         res = await scraperClient.then((result:any) => result.json());
+      }
+
+      if (res && (res.data || res.html || res.result)) {
+         const extracted = extractScrapedResult(res.data || res.html || res.result, settings.scraper_type);
          const serp = getSerp(keyword.domain, extracted);
          refreshedResults = { ID: keyword.ID, keyword: keyword.keyword, position: serp.postion, url: serp.url, result: extracted, error: false };
          console.log('SERP: ', keyword.keyword, serp.postion, serp.url);
       } else {
+         scraperError = res.detail || res.error || 'Unknown Error';
          throw new Error(res);
       }
    } catch (error:any) {
-      console.log('#### SCRAPE ERROR: ', keyword.keyword, error?.code, error?.response?.status, error?.response?.data, error);
+      console.log('#### SCRAPE ERROR: ', keyword.keyword, '. Error: ', scraperError);
+      refreshedResults.error = scraperError;
    }

    return refreshedResults;
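The rewritten try block distinguishes two client types: proxy requests resolve to an axios-style response that already exposes `data`, while the hosted scraper APIs resolve to a fetch `Response` whose body still needs `.json()`, after which the payload may sit under `data`, `html`, or `result`. A minimal sketch of that normalization (the helper itself is not part of the project):

```typescript
// Illustrative only: normalizing the two client types into a single payload string.
const resolveScrapedContent = async (client: Promise<any>, scraperType: string): Promise<string | undefined> => {
  // Proxy requests go through axios and already resolve to { data }; the hosted
  // scrapers resolve to a fetch Response whose body still has to be parsed as JSON.
  const res = scraperType === 'proxy' ? await client : await client.then((r: any) => r.json());
  return res?.data || res?.html || res?.result;
};
```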
@@ -164,8 +172,8 @@ export const extractScrapedResult = (content:string, scraper_type:string): Searc
 export const getSerp = (domain:string, result:SearchResult[]) : SERPObject => {
    if (result.length === 0 || !domain) { return { postion: false, url: '' }; }
    const foundItem = result.find((item) => {
-      const itemDomain = item.url.match(/^(?:https?:)?(?:\/\/)?([^/?]+)/i);
-      return itemDomain && itemDomain.includes(domain);
+      const itemDomain = item.url.replace('www.', '').match(/^(?:https?:)?(?:\/\/)?([^/?]+)/i);
+      return itemDomain && itemDomain.includes(domain.replace('www.', ''));
    });
    return { postion: foundItem ? foundItem.position : 0, url: foundItem && foundItem.url ? foundItem.url : '' };
 };
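The getSerp fix normalizes both sides of the comparison by stripping `www.`, so a tracked domain with or without the prefix matches results written either way. Note that `match()` returns an array, so `includes()` checks for an exact element (the captured host), not a substring. A self-contained illustration of the matching logic:

```typescript
// Illustration of the comparison above, outside the project.
const matchesDomain = (url: string, domain: string): boolean => {
  const itemDomain = url.replace('www.', '').match(/^(?:https?:)?(?:\/\/)?([^/?]+)/i);
  // match() returns [fullMatch, capturedHost], so includes() is an exact element check.
  return Boolean(itemDomain && itemDomain.includes(domain.replace('www.', '')));
};

console.log(matchesDomain('https://www.example.com/page', 'example.com'));  // true
console.log(matchesDomain('https://example.com/page', 'www.example.com'));  // true (both sides normalized)
console.log(matchesDomain('https://another.com/page', 'example.com'));      // false
```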