mirror of https://github.com/towfiqi/serpbear (synced 2025-06-26 18:15:54 +00:00)
Compare commits
9 Commits
| Author | SHA1 | Date |
|---|---|---|
| | c870250fbd | |
| | da92f11afa | |
| | 9b9b74af4c | |
| | 291aa60bbb | |
| | 8a35e358e6 | |
| | f164b287be | |
| | 97dd0b131b | |
| | 454454a422 | |
| | 4620f11c4b | |
15  CHANGELOG.md
@@ -2,6 +2,21 @@
 All notable changes to this project will be documented in this file. See [standard-version](https://github.com/conventional-changelog/standard-version) for commit guidelines.
 
+### [0.3.2](https://github.com/towfiqi/serpbear/compare/v0.3.1...v0.3.2) (2023-11-09)
+
+
+### Bug Fixes
+
+* Resolves issue with adding long tld emails ([9b9b74a](https://github.com/towfiqi/serpbear/commit/9b9b74af4c249e27458d29ba052e96ab2db8b640)), closes [#127](https://github.com/towfiqi/serpbear/issues/127)
+
+### [0.3.1](https://github.com/towfiqi/serpbear/compare/v0.3.0...v0.3.1) (2023-11-04)
+
+
+### Bug Fixes
+
+* Removes dev files from docker volumes ([454454a](https://github.com/towfiqi/serpbear/commit/454454a422bab4d37a2d43ad95868e293a97b88e))
+* Updates vulnerable dependencies ([97dd0b1](https://github.com/towfiqi/serpbear/commit/97dd0b131be4cec73d07f35062334dd1881f0013))
+
 ## [0.3.0](https://github.com/towfiqi/serpbear/compare/v0.2.6...v0.3.0) (2023-11-03)
Dockerfile
@@ -10,6 +10,8 @@ COPY . .
 FROM node:lts-alpine AS builder
 WORKDIR /app
 COPY --from=deps /app ./
+RUN rm -rf /app/data
+RUN rm -rf /app/__test__
 RUN npm run build
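The two added `RUN rm -rf` steps keep the local SQLite data directory and the test suite out of the builder stage; this is the "Removes dev files from docker volumes" fix ([454454a](https://github.com/towfiqi/serpbear/commit/454454a422bab4d37a2d43ad95868e293a97b88e)) from the changelog above.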
README.md
@@ -18,7 +18,7 @@ SerpBear is an Open Source Search Engine Position Tracking App. It allows you to
 - **Zero Cost to RUN:** Run the App on mogenius.com or Fly.io for free.
 
 #### How it Works
-The App uses third party website scrapers like ScrapingAnt, ScrapingRobot, SerpApi or Your given Proxy ips to scrape google search results to see if your domain appears in the search result for the given keyword. Also, When you connect your Googel Search Console account, the app shows actual search visits for each tracked keywords. You can also discover new keywords, and find the most performing keywords, countries, pages.
+The App uses third party website scrapers like ScrapingAnt, ScrapingRobot, SearchApi, SerpApi or Your given Proxy ips to scrape google search results to see if your domain appears in the search result for the given keyword. Also, When you connect your Googel Search Console account, the app shows actual search visits for each tracked keywords. You can also discover new keywords, and find the most performing keywords, countries, pages.
 
 #### Getting Started
 - **Step 1:** Deploy & Run the App.
@@ -41,6 +41,7 @@ The App uses third party website scrapers like ScrapingAnt, ScrapingRobot, SerpA
 | serply.io | $49/mo | 5000/mo | Yes |
 | serpapi.com | From $50/mo** | From 5,000/mo** | Yes |
 | spaceserp.com | $59/lifetime | 15,000/mo | Yes |
+| SearchApi.io | From $40/mo | From 10,000/mo | Yes |
 
 (*) Free upto a limit. If you are using ScrapingAnt you can lookup 10,000 times per month for free.
 (**) Free up to 100 per month. Paid from 5,000 to 10,000,000+ per month.
@@ -41,7 +41,7 @@ const DomainSettings = ({ domain, closeModal }: DomainSettingsProps) => {
    let error: DomainSettingsError | null = null;
    if (domainSettings.notification_emails) {
       const notification_emails = domainSettings.notification_emails.split(',');
-      const invalidEmails = notification_emails.find((x) => /^\w+([.-]?\w+)*@\w+([.-]?\w+)*(\.\w{2,3})+$/.test(x) === false);
+      const invalidEmails = notification_emails.find((x) => /^\w+([.-]?\w+)*@\w+([.-]?\w+)*(\.\w{2,15})+$/.test(x) === false);
       console.log('invalidEmails: ', invalidEmails);
       if (invalidEmails) {
          error = { type: 'email', msg: 'Invalid Email' };
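The only functional change is the TLD quantifier, `{2,3}` to `{2,15}`, which lets addresses on longer TLDs pass validation (issue #127). A minimal before/after check; the sample address is illustrative, not from the codebase:

```ts
// Old and new validation patterns, copied from the diff above.
const oldPattern = /^\w+([.-]?\w+)*@\w+([.-]?\w+)*(\.\w{2,3})+$/;
const newPattern = /^\w+([.-]?\w+)*@\w+([.-]?\w+)*(\.\w{2,15})+$/;

const email = 'user@example.technology'; // hypothetical address with a 10-character TLD

console.log(oldPattern.test(email)); // false -- "technology" exceeds the {2,3} bound
console.log(newPattern.test(email)); // true  -- fits within the new {2,15} bound
```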
@@ -54,7 +54,7 @@ const ScraperSettings = ({ settings, settingsError, updateSettings }:ScraperSett
             minWidth={270}
          />
       </div>
-      {['scrapingant', 'scrapingrobot', 'serply', 'serpapi', 'spaceSerp'].includes(settings.scraper_type) && (
+      {['scrapingant', 'scrapingrobot', 'serply', 'serpapi', 'spaceSerp', 'searchapi'].includes(settings.scraper_type) && (
       <div className="settings__section__input mr-3">
          <label className={labelStyle}>Scraper API Key or Token</label>
          <input
@@ -6,6 +6,6 @@ import './styles/globals.css';
 
 // Used for __tests__/testing-library.js
 // Learn more: https://github.com/testing-library/jest-dom
-import '@testing-library/jest-dom/extend-expect';
+import '@testing-library/jest-dom';
 
 global.ResizeObserver = require('resize-observer-polyfill');
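`@testing-library/jest-dom` v6 dropped the `/extend-expect` entry point; the root import now registers the matchers itself, so this one-line change pairs with the v5 to v6 bump in package.json below.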
15341  package-lock.json (generated)
File diff suppressed because it is too large
22  package.json
@@ -1,6 +1,6 @@
 {
   "name": "serpbear",
-  "version": "0.3.0",
+  "version": "0.3.2",
   "private": true,
   "scripts": {
     "dev": "next dev",
@@ -17,7 +17,6 @@
   },
   "dependencies": {
     "@googleapis/searchconsole": "^1.0.0",
-    "@testing-library/react": "^13.4.0",
     "@types/react-transition-group": "^4.4.5",
     "axios": "^1.1.3",
     "axios-retry": "^3.3.1",
@@ -31,10 +30,9 @@
     "dotenv": "^16.0.3",
     "https-proxy-agent": "^5.0.1",
     "isomorphic-fetch": "^3.0.0",
-    "jsonwebtoken": "^8.5.1",
+    "jsonwebtoken": "^9.0.2",
     "msw": "^0.49.0",
-    "next": "12.3.1",
-    "node-cron": "^3.0.2",
+    "next": "^12.3.4",
     "nodemailer": "^6.8.0",
     "react": "18.2.0",
     "react-chartjs-2": "^4.3.1",
@@ -45,15 +43,17 @@
     "react-transition-group": "^4.4.5",
     "react-window": "^1.8.8",
     "reflect-metadata": "^0.1.13",
-    "sequelize": "^6.25.2",
+    "sequelize": "^6.34.0",
     "sequelize-typescript": "^2.1.5",
-    "sqlite3": "^5.1.2"
+    "sqlite3": "^5.1.6"
   },
   "devDependencies": {
-    "@testing-library/jest-dom": "^5.16.5",
+    "@testing-library/jest-dom": "^6.1.4",
+    "@testing-library/react": "^14.0.0",
     "@types/cookies": "^0.7.7",
     "@types/cryptr": "^4.0.1",
     "@types/isomorphic-fetch": "^0.0.36",
+    "@types/jest": "^29.5.8",
     "@types/jsonwebtoken": "^8.5.9",
     "@types/node": "18.11.0",
     "@types/nodemailer": "^6.4.6",
@@ -65,9 +65,9 @@
     "eslint": "8.25.0",
     "eslint-config-airbnb-base": "^15.0.0",
     "eslint-config-next": "12.3.1",
-    "jest": "^29.3.1",
-    "jest-environment-jsdom": "^29.3.1",
-    "postcss": "^8.4.18",
+    "jest": "^29.7.0",
+    "jest-environment-jsdom": "^29.7.0",
+    "postcss": "^8.4.31",
     "prettier": "^2.7.1",
     "resize-observer-polyfill": "^1.5.1",
     "sass": "^1.55.0",
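Read against the changelog above, these bumps are the 0.3.1 "Updates vulnerable dependencies" fix ([97dd0b1](https://github.com/towfiqi/serpbear/commit/97dd0b131be4cec73d07f35062334dd1881f0013)): jsonwebtoken 9.x and postcss 8.4.31 are the security-patched release lines of those packages, `@testing-library/react` moves from runtime dependencies into devDependencies, and `node-cron` drops out of the dependency list in this hunk.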
@@ -92,7 +92,12 @@ export const getAppSettings = async () : Promise<SettingsType> => {
       smtp_password: '',
       scrape_retry: false,
     };
+    const otherSettings = {
+      available_scapers: allScrapers.map((scraper) => ({ label: scraper.name, value: scraper.id })),
+      failed_queue: [],
+    };
     await writeFile(`${process.cwd()}/data/settings.json`, JSON.stringify(settings), { encoding: 'utf-8' });
-    return settings;
+    await writeFile(`${process.cwd()}/data/failed_queue.json`, JSON.stringify([]), { encoding: 'utf-8' });
+    return { ...settings, ...otherSettings };
   }
 };
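With this change `getAppSettings()` also seeds an empty `data/failed_queue.json` on first run and folds the scraper list and the failed queue into its return value. A rough sketch of what a caller now sees; the import path and logged values are illustrative:

```ts
import { getAppSettings } from './utils/settings'; // assumed path

const showSettings = async () => {
   const settings = await getAppSettings();
   // Built from the registered scraper modules ("available_scapers" is the codebase's spelling):
   console.log(settings.available_scapers); // e.g. [{ label: 'SearchApi.io', value: 'searchapi' }, ...]
   // Mirrors the freshly created data/failed_queue.json:
   console.log(settings.failed_queue); // []
};

showSettings();
```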
@@ -4,6 +4,7 @@ import serpapi from './services/serpapi';
 import serply from './services/serply';
 import spaceserp from './services/spaceserp';
 import proxy from './services/proxy';
+import searchapi from './services/searchapi';
 
 export default [
   scrapingRobot,
@@ -12,4 +13,5 @@ export default [
   serply,
   spaceserp,
   proxy,
+  searchapi,
 ];
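Registering the new scraper amounts to an import plus one entry in this exported array; the settings UI hunk above and the keyword-refresh hunk below then whitelist the module's `id` string (`'searchapi'`) wherever scraper types are switched on.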
38  scrapers/services/searchapi.ts (new file)
@@ -0,0 +1,38 @@
+const searchapi:ScraperSettings = {
+   id: 'searchapi',
+   name: 'SearchApi.io',
+   website: 'searchapi.io',
+   headers: (keyword, settings) => {
+      return {
+         'Content-Type': 'application/json',
+         Authorization: `Bearer ${settings.scaping_api}`,
+      };
+   },
+   scrapeURL: (keyword) => {
+      return `https://www.searchapi.io/api/v1/search?engine=google&q=${encodeURI(keyword.keyword)}&num=100&gl=${keyword.country}&device=${keyword.device}`;
+   },
+   resultObjectKey: 'organic_results',
+   serpExtractor: (content) => {
+      const extractedResult = [];
+      const results: SearchApiResult[] = (typeof content === 'string') ? JSON.parse(content) : content as SearchApiResult[];
+
+      for (const { link, title, position } of results) {
+         if (title && link) {
+            extractedResult.push({
+               title,
+               url: link,
+               position,
+            });
+         }
+      }
+      return extractedResult;
+   },
+};
+
+interface SearchApiResult {
+   title: string,
+   link: string,
+   position: number,
+}
+
+export default searchapi;
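To see how the pieces of a `ScraperSettings` module fit together, here is a rough sketch of driving the new scraper by hand. The `fetch` wiring and the trimmed-down keyword/settings objects are assumptions for illustration; SerpBear's actual refresh code lives elsewhere:

```ts
import searchapi from './scrapers/services/searchapi';

// Trimmed-down stand-ins for the app's keyword and settings shapes.
const keyword = { keyword: 'seo tools', country: 'US', device: 'desktop' };
const settings = { scaping_api: 'YOUR_SEARCHAPI_KEY' }; // "scaping_api" is the codebase's spelling

const scrapeOnce = async () => {
   // headers() supplies the Bearer token; scrapeURL() builds the API request.
   const res = await fetch(searchapi.scrapeURL(keyword), { headers: searchapi.headers(keyword, settings) });
   const body = await res.json();
   // resultObjectKey names the property holding the organic results...
   const organic = body[searchapi.resultObjectKey]; // 'organic_results'
   // ...and serpExtractor normalizes them to { title, url, position }.
   return searchapi.serpExtractor(organic);
};

scrapeOnce().then((results) => console.log(results));
```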
@@ -17,11 +17,11 @@ const refreshAndUpdateKeywords = async (rawkeyword:Keyword[], settings:SettingsT
    const start = performance.now();
    const updatedKeywords: KeywordType[] = [];
 
-   if (['scrapingant', 'serpapi'].includes(settings.scraper_type)) {
+   if (['scrapingant', 'serpapi', 'searchapi'].includes(settings.scraper_type)) {
      const refreshedResults = await refreshParallel(keywords, settings);
      if (refreshedResults.length > 0) {
        for (const keyword of rawkeyword) {
-         const refreshedkeywordData = refreshedResults.find((k) => k && k.ID === keyword.id);
+         const refreshedkeywordData = refreshedResults.find((k) => k && k.ID === keyword.ID);
          if (refreshedkeywordData) {
            const updatedkeyword = await updateKeywordPosition(keyword, refreshedkeywordData, settings);
            updatedKeywords.push(updatedkeyword);
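Two things change in this hunk: the parallel refresh path is enabled for the new scraper, and the lookup key is fixed from `keyword.id` to `keyword.ID`. The old comparison read a property that does not exist on the keyword model, so refreshed results never matched back to their keywords. For intuition, a hypothetical sketch of the shape a parallel refresh takes (an assumption about `refreshParallel`, not a copy of the app's implementation):

```ts
// Minimal result/keyword shapes assumed for the sketch.
type RefreshResult = { ID: number, position: number };

// Assumed per-keyword scrape helper, standing in for the app's real scraper call.
declare function scrapeKeyword(keyword: { ID: number }, settings: unknown): Promise<RefreshResult>;

// Fan all keywords out concurrently instead of scraping them one at a time;
// presumably restricted to scrapers whose APIs tolerate concurrent requests.
const refreshParallel = (keywords: { ID: number }[], settings: unknown): Promise<RefreshResult[]> =>
   Promise.all(keywords.map((keyword) => scrapeKeyword(keyword, settings)));
```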