Merged
updateProject.mjs: 156 changes (97 additions, 59 deletions)
@@ -28,15 +28,16 @@ async function fetchCollaborators(url, githubToken) {

// Function to get collaborators of a repository
async function getCollaborators(repoData, githubToken) {

if (repoData.fork && repoData.parent?.contributors_url) {
// If the repository is a fork and has a parent, fetch collaborators from both
const [c1, c2] = await Promise.all([
fetchCollaborators(repoData.contributors_url, githubToken),
fetchCollaborators(repoData.parent.contributors_url, githubToken)
fetchCollaborators(repoData.parent.contributors_url, githubToken),
]);
// Filter out collaborators from the repository who are also in the parent
return c1.filter(collab1 => !c2.some(collab2 => collab1.login === collab2.login));
return c1.filter(
(collab1) => !c2.some((collab2) => collab1.login === collab2.login)
);
}
// Otherwise, fetch collaborators directly from the repository
return fetchCollaborators(repoData.contributors_url, githubToken);
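For context, a minimal standalone sketch of what the fork branch above computes, assuming fetchCollaborators resolves to an array of { login, ... } objects (the sample data below is hypothetical, not from the repository):

// Sketch only: keep contributors unique to the fork, i.e. not also listed on the parent repo.
const forkContributors = [{ login: "alice" }, { login: "bob" }];
const parentContributors = [{ login: "bob" }, { login: "carol" }];

const uniqueToFork = forkContributors.filter(
  (c1) => !parentContributors.some((c2) => c1.login === c2.login)
);

console.log(uniqueToFork); // [{ login: "alice" }]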
@@ -49,13 +50,14 @@ async function updateProjects() {
try {
process.env.NODE_TLS_REJECT_UNAUTHORIZED = 0;
// Fetch data for current projects and upcoming projects
const [currentProjectsData, upcomingProjectsData, repositories] = await Promise.all([
// Fetch current projects data
fetchData("https://directus.ourgoalplan.co.in/graphql", {
method: "POST",
headers: { "Content-Type": "application/json" },
body: JSON.stringify({
query: `query getCurrentProjects {
const [currentProjectsData, upcomingProjectsData, repositories] =
await Promise.all([
// Fetch current projects data
fetchData("https://directus.ourgoalplan.co.in/graphql", {
method: "POST",
headers: { "Content-Type": "application/json" },
body: JSON.stringify({
query: `query getCurrentProjects {
foss_projects(filter: {project_type: { _eq: "current" }}) {
id,
title,
@@ -67,15 +69,15 @@ async function updateProjects() {
date_updated,
status,
}
}`
}`,
}),
}),
}),
// Fetch upcoming projects data
fetchData("https://directus.ourgoalplan.co.in/graphql", {
method: "POST",
headers: { "Content-Type": "application/json" },
body: JSON.stringify({
query: `query getUpcomingProjects {
// Fetch upcoming projects data
fetchData("https://directus.ourgoalplan.co.in/graphql", {
method: "POST",
headers: { "Content-Type": "application/json" },
body: JSON.stringify({
query: `query getUpcomingProjects {
foss_projects(filter: {project_type: { _eq: "upcoming" }}) {
id,
title,
@@ -87,66 +89,102 @@ async function updateProjects() {
date_updated,
status,
}
}`
}`,
}),
}),
}),
// Fetch repositories data from GitHub
fetchData("https://api.github.com/users/mindfiredigital/repos", {
headers: {
Authorization: `token ${githubToken}`,
Accept: "application/vnd.github.v3+json",
},
})
]);
// Fetch repositories data from GitHub
fetchData("https://api.github.com/users/mindfiredigital/repos", {
headers: {
Authorization: `token ${githubToken}`,
Accept: "application/vnd.github.v3+json",
},
}),
]);

// Process and write data for current projects
const currentProjects = currentProjectsData.data.foss_projects.map(entry => ({
...entry,
id: parseInt(entry.id),
shortDescription: entry.short_description,
githubUrl: entry.github_repository_link,
documentationUrl: entry.documentation_link,
}));
fs.writeFileSync(path.join(__dirname, "src/app/projects/assets/projects.json"), JSON.stringify(currentProjects, null, 2));
const currentProjects = currentProjectsData.data.foss_projects.map(
(entry) => ({
...entry,
id: parseInt(entry.id),
shortDescription: entry.short_description,
githubUrl: entry.github_repository_link,
documentationUrl: entry.documentation_link,
})
);
fs.writeFileSync(
path.join(__dirname, "src/app/projects/assets/projects.json"),
JSON.stringify(currentProjects, null, 2)
);
console.log("Current projects updated successfully.");

// Process and write data for upcoming projects
const upcomingProjects = upcomingProjectsData.data.foss_projects.map(entry => ({
...entry,
id: parseInt(entry.id),
shortDescription: entry.short_description,
githubUrl: entry.github_repository_link,
documentationUrl: entry.documentation_link,
}));
fs.writeFileSync(path.join(__dirname, "src/app/projects/assets/upcomingProjects.json"), JSON.stringify(upcomingProjects, null, 2));
const upcomingProjects = upcomingProjectsData.data.foss_projects.map(
(entry) => ({
...entry,
id: parseInt(entry.id),
shortDescription: entry.short_description,
githubUrl: entry.github_repository_link,
documentationUrl: entry.documentation_link,
})
);
fs.writeFileSync(
path.join(__dirname, "src/app/projects/assets/upcomingProjects.json"),
JSON.stringify(upcomingProjects, null, 2)
);
console.log("Upcoming projects updated successfully.");

// Fetch and process contributors data for repositories
const repoNames = repositories.map(repo => repo.name);
const repoNames = repositories.map((repo) => repo.name);
const contributorsObject = {};
for (const repoName of repoNames) {
const repoData = await fetchData(`https://api.github.com/repos/mindfiredigital/${repoName}`, {
headers: {
Authorization: `token ${githubToken}`,
Accept: "application/vnd.github.v3+json",
},
})
contributorsObject[repoName] = await getCollaborators(repoData, githubToken);
const repoData = await fetchData(
`https://api.github.com/repos/mindfiredigital/${repoName}`,
{
headers: {
Authorization: `token ${githubToken}`,
Accept: "application/vnd.github.v3+json",
},
}
);
contributorsObject[repoName] = await getCollaborators(
repoData,
githubToken
);
}

// Aggregate contributions per contributor across repositories
const contributionsMap = {};

for (const repo in contributorsObject) {
contributorsObject[repo].forEach(contributor => {
const { login, contributions, id, avatar_url, html_url } = contributor;
contributionsMap[login] = contributionsMap[login] || { id, contributions: 0, html_url, avatar_url, login };
contributionsMap[login].contributions += contributions;
});
if (contributorsObject.hasOwnProperty(repo)) {
contributorsObject[repo].forEach((contributor) => {
if (contributor.login === "github-actions[bot]") {
// Skip processing GitHub Actions bot contributions
return;
}
const { login, contributions, id, avatar_url, html_url } =
contributor;
// Update contributions map
contributionsMap[login] = {
id,
contributions:
(contributionsMap[login]?.contributions || 0) + contributions,
html_url,
avatar_url,
login,
};
});
}
}

// Sort contributions and write data to file
const sortedContributions = Object.values(contributionsMap).sort((a, b) => b.contributions - a.contributions);
fs.writeFileSync(path.join(__dirname, "src/app/projects/assets/contributors.json"), JSON.stringify(sortedContributions, null, 2));
const sortedContributions = Object.values(contributionsMap).sort(
(a, b) => b.contributions - a.contributions
);
fs.writeFileSync(
path.join(__dirname, "src/app/projects/assets/contributors.json"),
JSON.stringify(sortedContributions, null, 2)
);
console.log("Contributors list updated successfully.");
} catch (error) {
console.error("An error occurred:", error);
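For context, the main behavioral change in the last hunk is that contributions from github-actions[bot] are now skipped, and per-login totals are accumulated with optional chaining instead of the earlier seed-then-increment pattern. A minimal standalone sketch of that aggregation, using hypothetical sample data:

// Sketch only: aggregate contributions per login across repos, ignoring the GitHub Actions bot.
const contributorsObject = {
  "repo-a": [
    { id: 1, login: "alice", contributions: 5, avatar_url: "", html_url: "" },
    { id: 2, login: "github-actions[bot]", contributions: 9, avatar_url: "", html_url: "" },
  ],
  "repo-b": [{ id: 1, login: "alice", contributions: 3, avatar_url: "", html_url: "" }],
};

const contributionsMap = {};
for (const repo in contributorsObject) {
  for (const contributor of contributorsObject[repo]) {
    if (contributor.login === "github-actions[bot]") continue; // bot commits are excluded
    const { login, contributions, id, avatar_url, html_url } = contributor;
    contributionsMap[login] = {
      id,
      contributions: (contributionsMap[login]?.contributions || 0) + contributions,
      html_url,
      avatar_url,
      login,
    };
  }
}

console.log(Object.values(contributionsMap));
// [{ id: 1, login: "alice", contributions: 8, ... }] (the bot never appears)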