6 Commits

Author   SHA1        Date                        Message
towfiqi  d7279512cf  2024-11-15 10:43:12 +06:00  chore(release): 2.0.6
towfiqi  4fef1a9abc  2024-11-14 23:11:09 +06:00  fix: Ensures Docker build uses matching npm package versions from package.json
towfiqi  aeed1f8559  2024-11-14 23:08:54 +06:00  fix: Resolves broken Docker build due to croner package version mismatch. (closes #247)
towfiqi  12eac2b012  2024-11-14 18:31:40 +06:00  fix: Resolves Google Ads search volume data loading issue.
towfiqi  649f412303  2024-11-14 18:30:56 +06:00  fix: Resolves broken Proxy Scraper functionality. (closes #248)
towfiqi  a2edabbdf9  2024-11-14 18:28:43 +06:00  chore: Upgrades vulnerable dependecies.
8 changed files with 1827 additions and 1010 deletions

CHANGELOG.md

@@ -2,6 +2,16 @@
 All notable changes to this project will be documented in this file. See [standard-version](https://github.com/conventional-changelog/standard-version) for commit guidelines.
+### [2.0.6](https://github.com/towfiqi/serpbear/compare/v2.0.5...v2.0.6) (2024-11-15)
+### Bug Fixes
+* Ensures Docker build uses matching npm package versions from package.json ([4fef1a9](https://github.com/towfiqi/serpbear/commit/4fef1a9abc737da67ab1ea0c4efce8194890545e))
+* Resolves broken Docker build due to croner package version mismatch. ([aeed1f8](https://github.com/towfiqi/serpbear/commit/aeed1f8559e044bf658d930a22fa91f38cfedc6b)), closes [#247](https://github.com/towfiqi/serpbear/issues/247)
+* Resolves broken Proxy Scraper functionality. ([649f412](https://github.com/towfiqi/serpbear/commit/649f412303dd50127b3736740962863f735f76eb)), closes [#248](https://github.com/towfiqi/serpbear/issues/248)
+* Resolves Google Ads search volume data loading issue. ([12eac2b](https://github.com/towfiqi/serpbear/commit/12eac2b01235e9eae06882d6a2c50c793b890661))
 ### [2.0.5](https://github.com/towfiqi/serpbear/compare/v2.0.4...v2.0.5) (2024-11-12)

Dockerfile

@@ -40,7 +40,7 @@ COPY --from=builder --chown=nextjs:nodejs /app/.sequelizerc ./.sequelizerc
 COPY --from=builder --chown=nextjs:nodejs /app/entrypoint.sh ./entrypoint.sh
 RUN rm package.json
 RUN npm init -y
-RUN npm i cryptr dotenv croner @googleapis/searchconsole sequelize-cli @isaacs/ttlcache
+RUN npm i cryptr@6.0.3 dotenv@16.0.3 croner@9.0.0 @googleapis/searchconsole@1.0.5 sequelize-cli@6.6.2 @isaacs/ttlcache@1.4.1
 RUN npm i -g concurrently
 USER nextjs


@@ -1,22 +1,26 @@
 import { render } from '@testing-library/react';
-import { rest } from 'msw';
+import { http } from 'msw';
 import * as React from 'react';
 import { QueryClient, QueryClientProvider } from 'react-query';
 export const handlers = [
-  rest.get(
+  http.get(
     '*/react-query',
-    (req, res, ctx) => {
-      return res(
-        ctx.status(200),
-        ctx.json({
+    ({ request, params }) => {
+      return new Response(
+        JSON.stringify({
           name: 'mocked-react-query',
         }),
+        {
+          status: 200,
+          headers: {
+            'Content-Type': 'application/json',
+          },
+        },
       );
     },
   ),
 ];
 const createTestQueryClient = () => new QueryClient({
   defaultOptions: {
     queries: {
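Context for the change above: msw 2.x renamed the `rest` namespace to `http`, and resolvers now receive an object like `{ request, params }` and return a Fetch API `Response` instead of composing one with `res(ctx.status(), ctx.json())`. As a sketch (not part of this diff), the same mock can also be written with msw 2.x's `HttpResponse.json` helper, which sets the JSON body, a 200 status, and the Content-Type header in one call:

```ts
import { http, HttpResponse } from 'msw';

// Equivalent msw 2.x handler to the one in the diff above,
// using the HttpResponse.json shorthand instead of a raw Response.
export const handlers = [
  http.get('*/react-query', () => HttpResponse.json({ name: 'mocked-react-query' })),
];
```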

cron.js

@@ -1,7 +1,8 @@
+/* eslint-disable no-new */
 const Cryptr = require('cryptr');
-const { promises } = require('fs');
+const { readFile } = require('fs');
-const Cron = require('croner');
+const { Cron } = require('croner');
 require('dotenv').config({ path: './.env.local' });
 const getAppSettings = async () => {
@@ -71,7 +72,7 @@ const runAppCronJobs = () => {
   const scrape_interval = settings.scrape_interval || 'daily';
   if (scrape_interval !== 'never') {
     const scrapeCronTime = generateCronTime(scrape_interval);
-    Cron(scrapeCronTime, () => {
+    new Cron(scrapeCronTime, () => {
       // console.log('### Running Keyword Position Cron Job!');
       const fetchOpts = { method: 'POST', headers: { Authorization: `Bearer ${process.env.APIKEY}` } };
       fetch(`${process.env.NEXT_PUBLIC_APP_URL}/api/cron`, fetchOpts)
@@ -89,7 +90,7 @@ const runAppCronJobs = () => {
   if (notif_interval) {
     const cronTime = generateCronTime(notif_interval === 'daily' ? 'daily_morning' : notif_interval);
     if (cronTime) {
-      Cron(cronTime, () => {
+      new Cron(cronTime, () => {
         // console.log('### Sending Notification Email...');
         const fetchOpts = { method: 'POST', headers: { Authorization: `Bearer ${process.env.APIKEY}` } };
         fetch(`${process.env.NEXT_PUBLIC_APP_URL}/api/notify`, fetchOpts)
@@ -106,7 +107,7 @@ const runAppCronJobs = () => {
   // Run Failed scraping CRON (Every Hour)
   const failedCronTime = generateCronTime('hourly');
-  Cron(failedCronTime, () => {
+  new Cron(failedCronTime, () => {
     // console.log('### Retrying Failed Scrapes...');
     readFile(`${process.cwd()}/data/failed_queue.json`, { encoding: 'utf-8' }, (err, data) => {
@@ -135,7 +136,7 @@ const runAppCronJobs = () => {
   // Run Google Search Console Scraper Daily
   if (process.env.SEARCH_CONSOLE_PRIVATE_KEY && process.env.SEARCH_CONSOLE_CLIENT_EMAIL) {
     const searchConsoleCRONTime = generateCronTime('daily');
-    Cron(searchConsoleCRONTime, () => {
+    new Cron(searchConsoleCRONTime, () => {
       const fetchOpts = { method: 'POST', headers: { Authorization: `Bearer ${process.env.APIKEY}` } };
       fetch(`${process.env.NEXT_PUBLIC_APP_URL}/api/searchconsole`, fetchOpts)
         .then((res) => res.json())
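Context for the `new Cron` changes above: the croner 5.x line previously listed in package.json could be called as a plain function (as the old code did), while croner 9.x exposes `Cron` as a named export that is an ES class, so it must be invoked with `new` (hence the added `/* eslint-disable no-new */`). This API shift is what broke the Docker image when the unpinned `npm i croner` pulled 9.x against code written for 5.x (#247). A minimal sketch of the 9.x usage that cron.js now relies on; the cron pattern and callback are illustrative:

```js
const { Cron } = require('croner'); // croner 9.x: Cron is a named export

// Schedule an illustrative job for 09:00 every day; as an ES class,
// Cron throws if called without `new`.
const job = new Cron('0 9 * * *', () => {
  console.log('running scheduled task at', new Date().toISOString());
});

console.log('next run:', job.nextRun()); // nextRun() returns the next scheduled Date
```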

package-lock.json (generated)

Diff suppressed because it is too large.

package.json

@@ -1,6 +1,6 @@
 {
   "name": "serpbear",
-  "version": "2.0.5",
+  "version": "2.0.6",
   "private": true,
   "scripts": {
     "dev": "next dev",
@@ -18,16 +18,16 @@
     "release": "standard-version"
   },
   "dependencies": {
-    "@googleapis/searchconsole": "^1.0.0",
+    "@googleapis/searchconsole": "^1.0.5",
     "@isaacs/ttlcache": "^1.4.1",
     "@types/react-transition-group": "^4.4.5",
-    "axios": "^1.1.3",
+    "axios": "^1.7.7",
     "axios-retry": "^3.3.1",
     "chart.js": "^3.9.1",
     "cheerio": "^1.0.0",
     "concurrently": "^7.6.0",
     "cookies": "^0.8.0",
-    "croner": "^5.3.5",
+    "croner": "^9.0.0",
     "cryptr": "^6.0.3",
     "dayjs": "^1.11.5",
     "dotenv": "^16.0.3",
@@ -35,7 +35,6 @@
     "https-proxy-agent": "^5.0.1",
     "isomorphic-fetch": "^3.0.0",
     "jsonwebtoken": "^9.0.2",
-    "msw": "^0.49.0",
     "next": "^12.3.4",
     "nodemailer": "^6.9.9",
     "react": "18.2.0",
@@ -50,11 +49,11 @@
     "reflect-metadata": "^0.1.13",
     "sequelize": "^6.34.0",
     "sequelize-typescript": "^2.1.6",
-    "sqlite3": "^5.1.6",
-    "umzug": "^3.6.1"
+    "sqlite3": "^5.1.7",
+    "umzug": "^3.8.2"
   },
   "devDependencies": {
-    "@testing-library/jest-dom": "^6.1.4",
+    "@testing-library/jest-dom": "^6.6.3",
     "@testing-library/react": "^14.0.0",
     "@types/cookies": "^0.7.7",
     "@types/cryptr": "^4.0.1",
@@ -74,15 +73,16 @@
     "jest": "^29.7.0",
     "jest-environment-jsdom": "^29.7.0",
     "jest-fetch-mock": "^3.0.3",
+    "msw": "^2.6.4",
     "next-router-mock": "^0.9.10",
-    "postcss": "^8.4.31",
+    "postcss": "^8.4.49",
     "prettier": "^2.7.1",
     "resize-observer-polyfill": "^1.5.1",
-    "sass": "^1.55.0",
+    "sass": "^1.80.7",
     "sequelize-cli": "^6.6.2",
     "standard-version": "^9.5.0",
     "stylelint-config-standard": "^29.0.0",
-    "tailwindcss": "^3.1.8",
-    "typescript": "4.8.4"
+    "tailwindcss": "^3.4.14",
+    "typescript": "^4.8.4"
   }
 }


@@ -36,14 +36,13 @@ const updatekeywordVolume = async (req: NextApiRequest, res: NextApiResponse<Key
     keywordsToSend = parseKeywords(allKeywords.map((e) => e.get({ plain: true })));
   }
   if (domain) {
-    // const allDomain = domain === 'all';
-    // const allKeywords:Keyword[] = allDomain ? await Keyword.findAll() : await Keyword.findAll(allDomain ? {} : { where: { domain } });
-    // keywordsToSend = parseKeywords(allKeywords.map((e) => e.get({ plain: true })));
+    const allDomain = domain === 'all';
+    const allKeywords:Keyword[] = allDomain ? await Keyword.findAll() : await Keyword.findAll(allDomain ? {} : { where: { domain } });
+    keywordsToSend = parseKeywords(allKeywords.map((e) => e.get({ plain: true })));
   }
   if (keywordsToSend.length > 0) {
     const keywordsVolumeData = await getKeywordsVolume(keywordsToSend);
-    // console.log('keywordsVolumeData :', keywordsVolumeData);
     if (keywordsVolumeData.error) {
       return res.status(400).json({ keywords: [], error: keywordsVolumeData.error });
     }


@@ -1,4 +1,4 @@
-import cheerio from 'cheerio';
+import * as cheerio from 'cheerio';
 const proxy:ScraperSettings = {
   id: 'proxy',
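The import change reflects cheerio 1.0.0 dropping its default export: with the pinned `"cheerio": "^1.0.0"`, only the namespace import (or the named `load` export) works. A minimal sketch of the load-and-select pattern the scraper relies on; the markup and selector here are illustrative, not the actual proxy-list page:

```ts
import * as cheerio from 'cheerio';

// Parse some markup and pull out the text of each list item.
const $ = cheerio.load('<ul><li>203.0.113.1:8080</li><li>203.0.113.2:3128</li></ul>');
const proxies = $('li').map((_, el) => $(el).text().trim()).get();
console.log(proxies); // ['203.0.113.1:8080', '203.0.113.2:3128']
```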