Skip to content

Commit

Permalink
Integrate SearchApi to SerpBear
Browse files Browse the repository at this point in the history
Remove unnecessary spacing

Fix keyword.id & authorization for searchapi
  • Loading branch information
SebastjanPrachovskij committed Nov 9, 2023
1 parent f164b28 commit 8a35e35
Show file tree
Hide file tree
Showing 7 changed files with 89 additions and 4 deletions.
3 changes: 2 additions & 1 deletion README.md
Original file line number Diff line number Diff line change
Expand Up @@ -18,7 +18,7 @@ SerpBear is an Open Source Search Engine Position Tracking App. It allows you to
- **Zero Cost to RUN:** Run the App on mogenius.com or Fly.io for free.

#### How it Works
The App uses third party website scrapers like ScrapingAnt, ScrapingRobot, SerpApi or Your given Proxy ips to scrape google search results to see if your domain appears in the search result for the given keyword. Also, When you connect your Googel Search Console account, the app shows actual search visits for each tracked keywords. You can also discover new keywords, and find the most performing keywords, countries, pages.
The App uses third-party website scrapers like ScrapingAnt, ScrapingRobot, SearchApi, SerpApi, or your given proxy IPs to scrape Google search results to see if your domain appears in the search results for the given keyword. Also, when you connect your Google Search Console account, the app shows actual search visits for each tracked keyword. You can also discover new keywords, and find the best performing keywords, countries, and pages.

#### Getting Started
- **Step 1:** Deploy & Run the App.
Expand All @@ -41,6 +41,7 @@ The App uses third party website scrapers like ScrapingAnt, ScrapingRobot, SerpA
| serply.io | $49/mo | 5000/mo | Yes |
| serpapi.com | From $50/mo** | From 5,000/mo** | Yes |
| spaceserp.com | $59/lifetime | 15,000/mo | Yes |
| SearchApi.io | From $40/mo | From 10,000/mo | Yes |

(*) Free up to a limit. If you are using ScrapingAnt you can look up 10,000 times per month for free.
(**) Free up to 100 per month. Paid from 5,000 to 10,000,000+ per month.
Expand Down
2 changes: 1 addition & 1 deletion components/settings/ScraperSettings.tsx
Original file line number Diff line number Diff line change
Expand Up @@ -54,7 +54,7 @@ const ScraperSettings = ({ settings, settingsError, updateSettings }:ScraperSett
minWidth={270}
/>
</div>
{['scrapingant', 'scrapingrobot', 'serply', 'serpapi', 'spaceSerp'].includes(settings.scraper_type) && (
{['scrapingant', 'scrapingrobot', 'serply', 'serpapi', 'spaceSerp', 'searchapi'].includes(settings.scraper_type) && (
<div className="settings__section__input mr-3">
<label className={labelStyle}>Scraper API Key or Token</label>
<input
Expand Down
43 changes: 43 additions & 0 deletions package-lock.json

Some generated files are not rendered by default. Learn more about how customized files appear on GitHub.

1 change: 1 addition & 0 deletions package.json
Original file line number Diff line number Diff line change
Expand Up @@ -53,6 +53,7 @@
"@types/cookies": "^0.7.7",
"@types/cryptr": "^4.0.1",
"@types/isomorphic-fetch": "^0.0.36",
"@types/jest": "^29.5.8",
"@types/jsonwebtoken": "^8.5.9",
"@types/node": "18.11.0",
"@types/nodemailer": "^6.4.6",
Expand Down
2 changes: 2 additions & 0 deletions scrapers/index.ts
Original file line number Diff line number Diff line change
Expand Up @@ -4,6 +4,7 @@ import serpapi from './services/serpapi';
import serply from './services/serply';
import spaceserp from './services/spaceserp';
import proxy from './services/proxy';
import searchapi from './services/searchapi';

export default [
scrapingRobot,
Expand All @@ -12,4 +13,5 @@ export default [
serply,
spaceserp,
proxy,
searchapi,
];
38 changes: 38 additions & 0 deletions scrapers/services/searchapi.ts
Original file line number Diff line number Diff line change
@@ -0,0 +1,38 @@
/**
 * Scraper service configuration for SearchApi.io.
 *
 * Follows the shape shared by the other entries in scrapers/services/*:
 * `headers` + `scrapeURL` build the HTTP request, `resultObjectKey`
 * names the JSON field holding the organic results, and `serpExtractor`
 * normalizes each result into `{ title, url, position }`.
 */
const searchapi:ScraperSettings = {
   id: 'searchapi',
   name: 'SearchApi.io',
   website: 'searchapi.io',
   headers: (keyword, settings) => {
      return {
         'Content-Type': 'application/json',
         // NOTE(review): `scaping_api` (sic) is the existing settings field
         // name used project-wide — do not "fix" the spelling here alone.
         'Authorization': `Bearer ${settings.scaping_api}`,
      };
   },
   scrapeURL: (keyword, settings) => {
      // encodeURIComponent (not encodeURI): keywords may contain '&', '+',
      // '=' or '#', which encodeURI leaves unescaped and which would
      // truncate/corrupt the q= query parameter.
      const query = encodeURIComponent(keyword.keyword);
      return `https://www.searchapi.io/api/v1/search?engine=google&q=${query}&num=100&gl=${keyword.country}&device=${keyword.device}`;
   },
   resultObjectKey: 'organic_results',
   serpExtractor: (content) => {
      // `content` is either the raw JSON string or the already-extracted
      // `organic_results` array (selected via resultObjectKey above).
      const extractedResult = [];
      const results: SearchApiResult[] = (typeof content === 'string') ? JSON.parse(content) : content as SearchApiResult[];

      for (const { link, title, position } of results) {
         // Skip malformed entries missing a title or link.
         if (title && link) {
            extractedResult.push({
               title,
               url: link,
               position,
            });
         }
      }
      return extractedResult;
   },
};

// Minimal shape of one entry in SearchApi.io's `organic_results` array;
// only the fields serpExtractor reads are declared.
interface SearchApiResult {
   title: string,
   link: string,
   position: number,
}

export default searchapi;
4 changes: 2 additions & 2 deletions utils/refresh.ts
Original file line number Diff line number Diff line change
Expand Up @@ -17,11 +17,11 @@ const refreshAndUpdateKeywords = async (rawkeyword:Keyword[], settings:SettingsT
const start = performance.now();
const updatedKeywords: KeywordType[] = [];

if (['scrapingant', 'serpapi'].includes(settings.scraper_type)) {
if (['scrapingant', 'serpapi', 'searchapi'].includes(settings.scraper_type)) {
const refreshedResults = await refreshParallel(keywords, settings);
if (refreshedResults.length > 0) {
for (const keyword of rawkeyword) {
const refreshedkeywordData = refreshedResults.find((k) => k && k.ID === keyword.id);
const refreshedkeywordData = refreshedResults.find((k) => k && k.ID === keyword.ID);
if (refreshedkeywordData) {
const updatedkeyword = await updateKeywordPosition(keyword, refreshedkeywordData, settings);
updatedKeywords.push(updatedkeyword);
Expand Down

0 comments on commit 8a35e35

Please sign in to comment.