Mirror of https://github.com/towfiqi/serpbear (synced 2025-06-26 18:15:54 +00:00)
Compare commits
4 Commits
| Author | SHA1 | Date |
|---|---|---|
| | 5fc1779783 | |
| | c5af94a146 | |
| | c406588953 | |
| | 3c48d130b6 | |
CHANGELOG.md (13 changed lines)
@@ -2,6 +2,19 @@
 All notable changes to this project will be documented in this file. See [standard-version](https://github.com/conventional-changelog/standard-version) for commit guidelines.
 
+### [0.2.4](https://github.com/towfiqi/serpbear/compare/v0.2.3...v0.2.4) (2023-02-15)
+
+
+### Features
+
+* Keyword ranking pages can now be clicked. ([c5af94a](https://github.com/towfiqi/serpbear/commit/c5af94a1469713ed4092253d26953ee0ed28c25d))
+
+
+### Bug Fixes
+
+* Fixes broken Login on windows ([c406588](https://github.com/towfiqi/serpbear/commit/c406588953035e4177a64011c13eb0e3aedffe89))
+* Fixes Node Cron memory leak issue. ([3c48d13](https://github.com/towfiqi/serpbear/commit/3c48d130b6f229a4ac27ec43ef1ea3a6640cecf6))
+
 ### [0.2.3](https://github.com/towfiqi/serpbear/compare/v0.2.2...v0.2.3) (2023-01-12)
@@ -29,7 +29,7 @@ COPY --from=builder --chown=nextjs:nodejs /app/cron.js ./
 COPY --from=builder --chown=nextjs:nodejs /app/email ./email
 RUN rm package.json
 RUN npm init -y
-RUN npm i cryptr dotenv node-cron @googleapis/searchconsole
+RUN npm i cryptr dotenv croner @googleapis/searchconsole
 RUN npm i -g concurrently
 
 USER nextjs
@@ -115,7 +115,10 @@ const Keyword = (props: KeywordProps) => {
             <div
             className={`keyword_url inline-block mt-4 mr-5 ml-5 lg:flex-1 text-gray-400 lg:m-0 max-w-[70px]
             overflow-hidden text-ellipsis whitespace-nowrap lg:max-w-none lg:pr-5`}>
-               <span className='mr-3 lg:hidden'><Icon type="link-alt" size={14} color="#999" /></span>{turncatedURL || '-'}</div>
+               <a href={url} target="_blank" rel="noreferrer"><span className='mr-3 lg:hidden'>
+               <Icon type="link-alt" size={14} color="#999" /></span>{turncatedURL || '-'}
+               </a>
+            </div>
             <div
             className='inline-block mt-[4] top-[-5px] relative lg:flex-1 lg:m-0'>
                <span className='mr-2 lg:hidden'><Icon type="clock" size={14} color="#999" /></span>
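This hunk implements the 0.2.4 feature "Keyword ranking pages can now be clicked": the truncated ranking URL in each keyword row is wrapped in an anchor that opens the page in a new tab (`target="_blank"` with `rel="noreferrer"` so no referrer is sent). A minimal sketch of that markup, reduced to its essentials; the `KeywordLink` name and the `Icon` stub below are illustrative, only `url` and `turncatedURL` (spelled as in the source) come from the diff:

```js
import React from 'react';

// Icon is stubbed here so the sketch is self-contained; the real component comes from
// the repo's own Icon module.
const Icon = ({ type }) => <span aria-hidden="true">{type}</span>;

// url and turncatedURL are the values already computed by the Keyword component;
// the Tailwind classes are trimmed for brevity.
const KeywordLink = ({ url, turncatedURL }) => (
   <a href={url} target="_blank" rel="noreferrer" className="keyword_url text-gray-400">
      <Icon type="link-alt" size={14} color="#999" />
      {turncatedURL || '-'}
   </a>
);

export default KeywordLink;
```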
cron.js (12 changed lines)
@@ -1,7 +1,7 @@
 const Cryptr = require('cryptr');
 const { promises } = require('fs');
 const { readFile } = require('fs');
-const cron = require('node-cron');
+const Cron = require('croner');
 require('dotenv').config({ path: './.env.local' });
 
 const getAppSettings = async () => {
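The require swap above is the "Node Cron memory leak" fix listed in the changelog: cron.js now loads croner instead of node-cron, and every scheduler below changes from `cron.schedule(pattern, fn)` to a direct `Cron(pattern, fn)` call. A minimal sketch of the new call shape, using a plain midnight pattern rather than the output of the repo's generateCronTime helper:

```js
// Illustrative only: croner is invoked as a function that takes a cron pattern and a
// callback, and returns a job handle.
const Cron = require('croner');

const job = Cron('0 0 * * *', () => {
   // runs every day at midnight
   console.log('daily scrape triggered');
});

// The handle can be cancelled later if needed.
// job.stop();
```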
@@ -65,12 +65,12 @@ const generateCronTime = (interval) => {
 const runAppCronJobs = () => {
    // RUN SERP Scraping CRON (EveryDay at Midnight) 0 0 0 * *
    const scrapeCronTime = generateCronTime('daily');
-   cron.schedule(scrapeCronTime, () => {
+   Cron(scrapeCronTime, () => {
       // console.log('### Running Keyword Position Cron Job!');
       const fetchOpts = { method: 'POST', headers: { Authorization: `Bearer ${process.env.APIKEY}` } };
       fetch(`${process.env.NEXT_PUBLIC_APP_URL}/api/cron`, fetchOpts)
         .then((res) => res.json())
         .then((data) => console.log(data))
         // .then((data) =>{ console.log(data)})
         .catch((err) => {
            console.log('ERROR Making Daily Scraper Cron Request..');
            console.log(err);
@@ -79,7 +79,7 @@ const runAppCronJobs = () => {
 
    // Run Failed scraping CRON (Every Hour)
    const failedCronTime = generateCronTime('hourly');
-   cron.schedule(failedCronTime, () => {
+   Cron(failedCronTime, () => {
       // console.log('### Retrying Failed Scrapes...');
 
       readFile(`${process.cwd()}/data/failed_queue.json`, { encoding: 'utf-8' }, (err, data) => {
@@ -104,7 +104,7 @@ const runAppCronJobs = () => {
    // Run Google Search Console Scraper Daily
    if (process.env.SEARCH_CONSOLE_PRIVATE_KEY && process.env.SEARCH_CONSOLE_CLIENT_EMAIL) {
       const searchConsoleCRONTime = generateCronTime('daily');
-      cron.schedule(searchConsoleCRONTime, () => {
+      Cron(searchConsoleCRONTime, () => {
         const fetchOpts = { method: 'POST', headers: { Authorization: `Bearer ${process.env.APIKEY}` } };
         fetch(`${process.env.NEXT_PUBLIC_APP_URL}/api/searchconsole`, fetchOpts)
            .then((res) => res.json())
@@ -122,7 +122,7 @@ const runAppCronJobs = () => {
    if (notif_interval) {
       const cronTime = generateCronTime(notif_interval === 'daily' ? 'daily_morning' : notif_interval);
       if (cronTime) {
-         cron.schedule(cronTime, () => {
+         Cron(cronTime, () => {
            // console.log('### Sending Notification Email...');
            const fetchOpts = { method: 'POST', headers: { Authorization: `Bearer ${process.env.APIKEY}` } };
            fetch(`${process.env.NEXT_PUBLIC_APP_URL}/api/notify`, fetchOpts)
@@ -6,7 +6,7 @@ import Keyword from './models/keyword';
 const connection = new Sequelize({
    dialect: 'sqlite',
    host: '0.0.0.0',
-   username: process.env.USERNAME ? process.env.USERNAME : process.env.USER,
+   username: process.env.USER_NAME ? process.env.USER_NAME : process.env.USER,
    password: process.env.PASSWORD,
    database: 'sequelize',
    dialectModule: sqlite3,
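Together with the matching change in the login route further down, this rename is the "broken Login on windows" fix. The probable cause, stated here as an inference from the diff rather than from the commits: Windows always defines a `USERNAME` environment variable for the OS account, and dotenv leaves already-defined variables untouched, so the value from .env.local was shadowed and logins were checked against the Windows account name. Renaming the app's variable to `USER_NAME` avoids the collision, with `USER` kept as the fallback. A small sketch of the lookup:

```js
// Illustrative sketch, not repo code. After loading .env.local, the app reads USER_NAME
// first and falls back to USER; neither name collides with Windows' built-in USERNAME.
require('dotenv').config({ path: './.env.local' });

const userName = process.env.USER_NAME ? process.env.USER_NAME : process.env.USER;
console.log('SerpBear login user resolves to:', userName);
```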
package-lock.json (18 changed lines, generated)
@@ -1,12 +1,12 @@
 {
   "name": "serpbear",
-  "version": "0.2.3",
+  "version": "0.2.4",
   "lockfileVersion": 2,
   "requires": true,
   "packages": {
     "": {
       "name": "serpbear",
-      "version": "0.2.3",
+      "version": "0.2.4",
       "dependencies": {
         "@googleapis/searchconsole": "^1.0.0",
         "@testing-library/react": "^13.4.0",
@@ -17,6 +17,7 @@
         "cheerio": "^1.0.0-rc.12",
         "concurrently": "^7.6.0",
         "cookies": "^0.8.0",
+        "croner": "^5.3.5",
         "cryptr": "^6.0.3",
         "dayjs": "^1.11.5",
         "dotenv": "^16.0.3",
@@ -3707,6 +3708,14 @@
         "node": ">=10"
       }
     },
+    "node_modules/croner": {
+      "version": "5.3.5",
+      "resolved": "https://registry.npmjs.org/croner/-/croner-5.3.5.tgz",
+      "integrity": "sha512-VqaplJOVtaGuAxhsw2HM9GG0DLpVi3W9IsV7bKMAC12O7wMIOcZpCYHBw+xkFABzT3xp5MvUqTfbTewCgxgN+A==",
+      "engines": {
+        "node": ">=6.0"
+      }
+    },
     "node_modules/cross-spawn": {
       "version": "7.0.3",
       "resolved": "https://registry.npmjs.org/cross-spawn/-/cross-spawn-7.0.3.tgz",
@@ -15220,6 +15229,11 @@
         "yaml": "^1.10.0"
       }
     },
+    "croner": {
+      "version": "5.3.5",
+      "resolved": "https://registry.npmjs.org/croner/-/croner-5.3.5.tgz",
+      "integrity": "sha512-VqaplJOVtaGuAxhsw2HM9GG0DLpVi3W9IsV7bKMAC12O7wMIOcZpCYHBw+xkFABzT3xp5MvUqTfbTewCgxgN+A=="
+    },
     "cross-spawn": {
       "version": "7.0.3",
       "resolved": "https://registry.npmjs.org/cross-spawn/-/cross-spawn-7.0.3.tgz",
@@ -1,6 +1,6 @@
 {
   "name": "serpbear",
-  "version": "0.2.3",
+  "version": "0.2.4",
   "private": true,
   "scripts": {
     "dev": "next dev",
@@ -25,6 +25,7 @@
     "cheerio": "^1.0.0-rc.12",
     "concurrently": "^7.6.0",
     "cookies": "^0.8.0",
+    "croner": "^5.3.5",
     "cryptr": "^6.0.3",
     "dayjs": "^1.11.5",
     "dotenv": "^16.0.3",
@@ -18,7 +18,8 @@ const loginUser = async (req: NextApiRequest, res: NextApiResponse<loginResponse
    if (!req.body.username || !req.body.password) {
       return res.status(401).json({ error: 'Username Password Missing' });
    }
-   const userName = process.env.USERNAME ? process.env.USERNAME : process.env.USER;
+   const userName = process.env.USER_NAME ? process.env.USER_NAME : process.env.USER;
+
    if (req.body.username === userName
    && req.body.password === process.env.PASSWORD && process.env.SECRET) {
       const token = jwt.sign({ user: userName }, process.env.SECRET);
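For context, a stripped-down sketch of the credential check in the login route after the rename. Only the env lookup, the comparison, and the `jwt.sign` call are taken from the diff; the function shape and the 'Invalid Credentials' message are illustrative:

```js
// Illustrative sketch of the credential check; Next.js request/response plumbing omitted.
const jwt = require('jsonwebtoken');

const checkLogin = (username, password) => {
   const userName = process.env.USER_NAME ? process.env.USER_NAME : process.env.USER;
   if (username === userName && password === process.env.PASSWORD && process.env.SECRET) {
      // Credentials match the configured values: issue a signed token.
      return { token: jwt.sign({ user: userName }, process.env.SECRET) };
   }
   return { error: 'Invalid Credentials' };
};
```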
@@ -212,7 +212,7 @@ export const retryScrape = async (keywordID: number) : Promise<void> => {
 
    const filePath = `${process.cwd()}/data/failed_queue.json`;
    const currentQueueRaw = await readFile(filePath, { encoding: 'utf-8' }).catch((err) => { console.log(err); return '[]'; });
-   currentQueue = JSON.parse(currentQueueRaw);
+   currentQueue = currentQueueRaw ? JSON.parse(currentQueueRaw) : [];
 
    if (!currentQueue.includes(keywordID)) {
       currentQueue.push(keywordID);
@@ -232,7 +232,7 @@ export const removeFromRetryQueue = async (keywordID: number) : Promise<void> =>
 
    const filePath = `${process.cwd()}/data/failed_queue.json`;
    const currentQueueRaw = await readFile(filePath, { encoding: 'utf-8' }).catch((err) => { console.log(err); return '[]'; });
-   currentQueue = JSON.parse(currentQueueRaw);
+   currentQueue = currentQueueRaw ? JSON.parse(currentQueueRaw) : [];
    currentQueue = currentQueue.filter((item) => item !== keywordID);
 
    await writeFile(filePath, JSON.stringify(currentQueue), { encoding: 'utf-8' }).catch((err) => { console.log(err); return '[]'; });
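Both queue helpers above get the same hardening: the read of data/failed_queue.json already falls back to the string '[]' when the file cannot be read, and the new ternary additionally keeps currentQueue a valid array when the raw value comes back empty, instead of letting JSON.parse throw. A self-contained sketch of that read path; the `loadQueue` name is illustrative, not from the repo:

```js
// Illustrative sketch of the guarded queue read; mirrors the pattern in the diff.
const { readFile } = require('fs/promises');

const loadQueue = async (filePath) => {
   // A missing or unreadable file rejects the promise; the catch turns that into '[]'.
   const raw = await readFile(filePath, { encoding: 'utf-8' }).catch((err) => { console.log(err); return '[]'; });
   // An empty string would still make JSON.parse throw, so guard before parsing.
   return raw ? JSON.parse(raw) : [];
};

// loadQueue(`${process.cwd()}/data/failed_queue.json`).then((queue) => console.log(queue));
```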