9 Commits

Author · SHA1 · Message · Date

towfiqi · c870250fbd · chore(release): 0.3.2 · 2023-11-09 20:35:49 +06:00
towfiqi · da92f11afa · chore: resolves linter nag. · 2023-11-09 20:35:22 +06:00
towfiqi · 9b9b74af4c · fix: Resolves issue with adding long tld emails (closes #127) · 2023-11-09 20:35:01 +06:00
Towfiq I · 291aa60bbb · Merge pull request #129 from SearchApi/feature/integrate-searchapi · 2023-11-09 20:01:02 +06:00
    feat: Integrates SearchAPI
    fix: Resolves build issue due to missing jest types.
    fix: Resolves keyword SERP fetch issue.
SebastjanPrachovskij · 8a35e358e6 · Integrate SearchApi to SerpBear · 2023-11-09 14:50:11 +02:00
    Remove unnecessary spacing
    Fix keyword.id & authorization for searchapi
towfiqi · f164b287be · chore(release): 0.3.1 · 2023-11-04 22:05:59 +06:00
towfiqi · 97dd0b131b · fix: Updates vulnerable dependencies · 2023-11-04 22:05:10 +06:00
towfiqi · 454454a422 · fix: Removes dev files from docker volumes · 2023-11-04 11:25:56 +06:00
towfiqi · 4620f11c4b · Fixed: Resolves missing Scrapers list on new install · 2023-11-04 10:47:22 +06:00
12 changed files with 3825 additions and 11615 deletions

File: CHANGELOG.md

@@ -2,6 +2,21 @@
 All notable changes to this project will be documented in this file. See [standard-version](https://github.com/conventional-changelog/standard-version) for commit guidelines.
+### [0.3.2](https://github.com/towfiqi/serpbear/compare/v0.3.1...v0.3.2) (2023-11-09)
+### Bug Fixes
+* Resolves issue with adding long tld emails ([9b9b74a](https://github.com/towfiqi/serpbear/commit/9b9b74af4c249e27458d29ba052e96ab2db8b640)), closes [#127](https://github.com/towfiqi/serpbear/issues/127)
+### [0.3.1](https://github.com/towfiqi/serpbear/compare/v0.3.0...v0.3.1) (2023-11-04)
+### Bug Fixes
+* Removes dev files from docker volumes ([454454a](https://github.com/towfiqi/serpbear/commit/454454a422bab4d37a2d43ad95868e293a97b88e))
+* Updates vulnerable dependencies ([97dd0b1](https://github.com/towfiqi/serpbear/commit/97dd0b131be4cec73d07f35062334dd1881f0013))
 ## [0.3.0](https://github.com/towfiqi/serpbear/compare/v0.2.6...v0.3.0) (2023-11-03)

File: Dockerfile

@@ -10,6 +10,8 @@ COPY . .
 FROM node:lts-alpine AS builder
 WORKDIR /app
 COPY --from=deps /app ./
+RUN rm -rf /app/data
+RUN rm -rf /app/__test__
 RUN npm run build

File: README.md

@@ -18,7 +18,7 @@ SerpBear is an Open Source Search Engine Position Tracking App. It allows you to
 - **Zero Cost to RUN:** Run the App on mogenius.com or Fly.io for free.
 #### How it Works
-The App uses third party website scrapers like ScrapingAnt, ScrapingRobot, SerpApi or Your given Proxy ips to scrape google search results to see if your domain appears in the search result for the given keyword. Also, When you connect your Googel Search Console account, the app shows actual search visits for each tracked keywords. You can also discover new keywords, and find the most performing keywords, countries, pages.
+The App uses third-party website scrapers like ScrapingAnt, ScrapingRobot, SearchApi, SerpApi, or your given proxy IPs to scrape Google search results to see if your domain appears in the search results for the given keyword. Also, when you connect your Google Search Console account, the app shows actual search visits for each tracked keyword. You can also discover new keywords, and find the best-performing keywords, countries, and pages.
 #### Getting Started
 - **Step 1:** Deploy & Run the App.
#### Getting Started
- **Step 1:** Deploy & Run the App.
@@ -41,6 +41,7 @@ The App uses third party website scrapers like ScrapingAnt, ScrapingRobot, SerpA
 | serply.io | $49/mo | 5000/mo | Yes |
 | serpapi.com | From $50/mo** | From 5,000/mo** | Yes |
 | spaceserp.com | $59/lifetime | 15,000/mo | Yes |
+| SearchApi.io | From $40/mo | From 10,000/mo | Yes |
 (*) Free up to a limit. If you are using ScrapingAnt you can look up 10,000 times per month for free.
 (**) Free up to 100 per month. Paid from 5,000 to 10,000,000+ per month.
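As context for the scraper table, the rank check the README describes is simple: pull the top-100 results for a keyword through whichever scraper is configured, then look for the tracked domain among the returned positions. A minimal sketch of that matching step (the result shape mirrors what the scraper services below return; the hostname normalization is an assumption, not SerpBear's exact code):

```ts
// Minimal sketch of the position-matching step, assuming results were already
// extracted into { title, url, position } objects by a scraper service.
interface ExtractedResult { title: string; url: string; position: number }

// Returns the first SERP position whose result URL belongs to the tracked
// domain, or 0 when the domain is not in the top results.
function findDomainPosition(results: ExtractedResult[], domain: string): number {
  const target = domain.replace(/^www\./, '').toLowerCase();
  const hit = results.find((r) => {
    try {
      const host = new URL(r.url).hostname.replace(/^www\./, '').toLowerCase();
      return host === target || host.endsWith(`.${target}`);
    } catch {
      return false; // skip malformed URLs instead of failing the whole check
    }
  });
  return hit ? hit.position : 0;
}
```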

File: DomainSettings.tsx

@@ -41,7 +41,7 @@ const DomainSettings = ({ domain, closeModal }: DomainSettingsProps) => {
    let error: DomainSettingsError | null = null;
    if (domainSettings.notification_emails) {
       const notification_emails = domainSettings.notification_emails.split(',');
-      const invalidEmails = notification_emails.find((x) => /^\w+([.-]?\w+)*@\w+([.-]?\w+)*(\.\w{2,3})+$/.test(x) === false);
+      const invalidEmails = notification_emails.find((x) => /^\w+([.-]?\w+)*@\w+([.-]?\w+)*(\.\w{2,15})+$/.test(x) === false);
       console.log('invalidEmails: ', invalidEmails);
       if (invalidEmails) {
          error = { type: 'email', msg: 'Invalid Email' };
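Why this one-character change fixes #127: modern TLDs such as `.email` or `.engineering` are longer than three characters, so the old `\w{2,3}` quantifier rejected any address using them. A standalone illustration (not the component code):

```ts
// The old pattern caps the TLD at 3 chars; the new one allows up to 15.
const oldPattern = /^\w+([.-]?\w+)*@\w+([.-]?\w+)*(\.\w{2,3})+$/;
const newPattern = /^\w+([.-]?\w+)*@\w+([.-]?\w+)*(\.\w{2,15})+$/;

console.log(oldPattern.test('user@example.com'));         // true
console.log(oldPattern.test('user@example.engineering')); // false: long TLD rejected (issue #127)
console.log(newPattern.test('user@example.engineering')); // true
```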

File: ScraperSettings.tsx

@@ -54,7 +54,7 @@ const ScraperSettings = ({ settings, settingsError, updateSettings }:ScraperSett
       minWidth={270}
    />
 </div>
-{['scrapingant', 'scrapingrobot', 'serply', 'serpapi', 'spaceSerp'].includes(settings.scraper_type) && (
+{['scrapingant', 'scrapingrobot', 'serply', 'serpapi', 'spaceSerp', 'searchapi'].includes(settings.scraper_type) && (
 <div className="settings__section__input mr-3">
    <label className={labelStyle}>Scraper API Key or Token</label>
    <input

File: jest.setup.js

@@ -6,6 +6,6 @@ import './styles/globals.css';
 // Used for __tests__/testing-library.js
 // Learn more: https://github.com/testing-library/jest-dom
-import '@testing-library/jest-dom/extend-expect';
+import '@testing-library/jest-dom';
 global.ResizeObserver = require('resize-observer-polyfill');
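For context, `@testing-library/jest-dom` v6 removed the `/extend-expect` entry point; importing the package root registers the custom matchers, which is what the updated line does. A hedged sketch of a test that relies on this setup (the component and assertions are illustrative, not from the repo):

```tsx
// Illustrative test: jest-dom matchers like toBeInTheDocument() are available
// once jest.setup imports '@testing-library/jest-dom'.
import { render, screen } from '@testing-library/react';

function Badge({ label }: { label: string }) {
  return <span role="status">{label}</span>;
}

test('renders the badge label', () => {
  render(<Badge label="tracked" />);
  expect(screen.getByRole('status')).toBeInTheDocument();
  expect(screen.getByRole('status')).toHaveTextContent('tracked');
});
```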

File: package-lock.json (generated). 15341 changed lines; diff suppressed because it is too large.

File: package.json

@@ -1,6 +1,6 @@
 {
   "name": "serpbear",
-  "version": "0.3.0",
+  "version": "0.3.2",
   "private": true,
   "scripts": {
     "dev": "next dev",
@@ -17,7 +17,6 @@
   },
   "dependencies": {
     "@googleapis/searchconsole": "^1.0.0",
-    "@testing-library/react": "^13.4.0",
     "@types/react-transition-group": "^4.4.5",
     "axios": "^1.1.3",
     "axios-retry": "^3.3.1",
@@ -31,10 +30,9 @@
     "dotenv": "^16.0.3",
     "https-proxy-agent": "^5.0.1",
     "isomorphic-fetch": "^3.0.0",
-    "jsonwebtoken": "^8.5.1",
+    "jsonwebtoken": "^9.0.2",
     "msw": "^0.49.0",
-    "next": "12.3.1",
-    "node-cron": "^3.0.2",
+    "next": "^12.3.4",
     "nodemailer": "^6.8.0",
     "react": "18.2.0",
     "react-chartjs-2": "^4.3.1",
@@ -45,15 +43,17 @@
     "react-transition-group": "^4.4.5",
     "react-window": "^1.8.8",
     "reflect-metadata": "^0.1.13",
-    "sequelize": "^6.25.2",
+    "sequelize": "^6.34.0",
     "sequelize-typescript": "^2.1.5",
-    "sqlite3": "^5.1.2"
+    "sqlite3": "^5.1.6"
   },
   "devDependencies": {
-    "@testing-library/jest-dom": "^5.16.5",
+    "@testing-library/jest-dom": "^6.1.4",
+    "@testing-library/react": "^14.0.0",
     "@types/cookies": "^0.7.7",
     "@types/cryptr": "^4.0.1",
     "@types/isomorphic-fetch": "^0.0.36",
+    "@types/jest": "^29.5.8",
     "@types/jsonwebtoken": "^8.5.9",
     "@types/node": "18.11.0",
     "@types/nodemailer": "^6.4.6",
@@ -65,9 +65,9 @@
     "eslint": "8.25.0",
     "eslint-config-airbnb-base": "^15.0.0",
     "eslint-config-next": "12.3.1",
-    "jest": "^29.3.1",
-    "jest-environment-jsdom": "^29.3.1",
-    "postcss": "^8.4.18",
+    "jest": "^29.7.0",
+    "jest-environment-jsdom": "^29.7.0",
+    "postcss": "^8.4.31",
     "prettier": "^2.7.1",
     "resize-observer-polyfill": "^1.5.1",
     "sass": "^1.55.0",

File: settings utility (getAppSettings)

@@ -92,7 +92,12 @@ export const getAppSettings = async () : Promise<SettingsType> => {
         smtp_password: '',
         scrape_retry: false,
      };
+     const otherSettings = {
+        available_scapers: allScrapers.map((scraper) => ({ label: scraper.name, value: scraper.id })),
+        failed_queue: [],
+     };
      await writeFile(`${process.cwd()}/data/settings.json`, JSON.stringify(settings), { encoding: 'utf-8' });
-     return settings;
+     await writeFile(`${process.cwd()}/data/failed_queue.json`, JSON.stringify([]), { encoding: 'utf-8' });
+     return { ...settings, ...otherSettings };
   }
};
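Two things change on a fresh install here: the scraper dropdown options and the failed-scrape queue become runtime-only fields merged into the return value instead of being written into `data/settings.json`, and `data/failed_queue.json` is seeded so later reads find a valid (empty) queue. A hedged sketch of the resulting merge (the persisted field values are illustrative; `available_scapers` is the repo's actual, typo'd key):

```ts
// Sketch of the first-run merge, under the assumptions noted above.
const allScrapers = [
  { id: 'searchapi', name: 'SearchApi.io' },
  { id: 'serpapi', name: 'serpapi.com' },
];

const settings = { scraper_type: 'none', scrape_retry: false }; // persisted to settings.json
const otherSettings = {
  available_scapers: allScrapers.map((scraper) => ({ label: scraper.name, value: scraper.id })),
  failed_queue: [] as string[], // mirrored by the freshly written failed_queue.json
};

const merged = { ...settings, ...otherSettings };
console.log(merged.available_scapers); // [{ label: 'SearchApi.io', value: 'searchapi' }, ...]
```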

File: scrapers/index.ts

@@ -4,6 +4,7 @@ import serpapi from './services/serpapi';
 import serply from './services/serply';
 import spaceserp from './services/spaceserp';
 import proxy from './services/proxy';
+import searchapi from './services/searchapi';
 export default [
    scrapingRobot,
@@ -12,4 +13,5 @@
    serply,
    spaceserp,
    proxy,
+   searchapi,
 ];

File: services/searchapi.ts (new)

@@ -0,0 +1,38 @@
const searchapi:ScraperSettings = {
id: 'searchapi',
name: 'SearchApi.io',
website: 'searchapi.io',
headers: (keyword, settings) => {
return {
'Content-Type': 'application/json',
Authorization: `Bearer ${settings.scaping_api}`,
};
},
scrapeURL: (keyword) => {
return `https://www.searchapi.io/api/v1/search?engine=google&q=${encodeURI(keyword.keyword)}&num=100&gl=${keyword.country}&device=${keyword.device}`;
},
resultObjectKey: 'organic_results',
serpExtractor: (content) => {
const extractedResult = [];
const results: SearchApiResult[] = (typeof content === 'string') ? JSON.parse(content) : content as SearchApiResult[];
for (const { link, title, position } of results) {
if (title && link) {
extractedResult.push({
title,
url: link,
position,
});
}
}
return extractedResult;
},
};
interface SearchApiResult {
title: string,
link: string,
position: number,
}
export default searchapi;
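To see how the pieces of a `ScraperSettings` entry fit together, here is a hedged usage sketch that drives the three hooks manually. The `keyword` and `settings` literals are illustrative, and in the real app a shared scraper runner does this wiring rather than a standalone function:

```ts
// Illustrative driver for the searchapi scraper definition above.
import searchapi from './services/searchapi';

async function checkKeyword() {
  const keyword = { keyword: 'serp tracking tool', country: 'US', device: 'desktop' };
  const settings = { scaping_api: 'YOUR_SEARCHAPI_KEY' }; // note: repo's actual field name

  const url = searchapi.scrapeURL(keyword);
  // https://www.searchapi.io/api/v1/search?engine=google&q=serp%20tracking%20tool&num=100&gl=US&device=desktop

  const res = await fetch(url, { headers: searchapi.headers(keyword, settings) });
  const body = await res.json();

  // resultObjectKey tells the runner which response key holds the hits;
  // serpExtractor then normalizes them to { title, url, position }.
  const results = searchapi.serpExtractor(body[searchapi.resultObjectKey]);
  console.log(results.slice(0, 3));
}
```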

File: keyword refresh utility (refreshAndUpdateKeywords)

@@ -17,11 +17,11 @@ const refreshAndUpdateKeywords = async (rawkeyword:Keyword[], settings:SettingsT
   const start = performance.now();
   const updatedKeywords: KeywordType[] = [];
-  if (['scrapingant', 'serpapi'].includes(settings.scraper_type)) {
+  if (['scrapingant', 'serpapi', 'searchapi'].includes(settings.scraper_type)) {
      const refreshedResults = await refreshParallel(keywords, settings);
      if (refreshedResults.length > 0) {
         for (const keyword of rawkeyword) {
-           const refreshedkeywordData = refreshedResults.find((k) => k && k.ID === keyword.id);
+           const refreshedkeywordData = refreshedResults.find((k) => k && k.ID === keyword.ID);
            if (refreshedkeywordData) {
               const updatedkeyword = await updateKeywordPosition(keyword, refreshedkeywordData, settings);
               updatedKeywords.push(updatedkeyword);
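The second hunk is the "keyword.id" fix called out in the merge commit. The fix implies the raw keyword rows expose an uppercase `ID` property, so the old lookup compared against a property that does not exist and `find()` returned `undefined` for every keyword, silently dropping parallel-refreshed positions. A minimal sketch under that assumption:

```ts
// Why the one-letter fix matters: comparing against a property that does not
// exist makes Array.prototype.find() miss every refreshed result.
interface RefreshResult { ID: number; position: number }
interface RawKeyword { ID: number; keyword: string; id?: number }

const refreshedResults: RefreshResult[] = [{ ID: 42, position: 7 }];
const keyword: RawKeyword = { ID: 42, keyword: 'serp tracking tool' };

const before = refreshedResults.find((k) => k && k.ID === keyword.id); // undefined: never matches
const after = refreshedResults.find((k) => k && k.ID === keyword.ID);  // { ID: 42, position: 7 }
console.log(before, after);
```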