Skip to content

Commit

Permalink
Implemented the collaborators section list based on the top contribut…
Browse files Browse the repository at this point in the history
…ors over a week
  • Loading branch information
ayushhunt committed Dec 1, 2024
1 parent b6be48d commit e886863
Show file tree
Hide file tree
Showing 15 changed files with 440 additions and 230 deletions.
6 changes: 3 additions & 3 deletions apps/triggers/package.json
Original file line number Diff line number Diff line change
Expand Up @@ -12,9 +12,9 @@
"@repo/cache": "workspace:*",
"@repo/db": "workspace:*",
"@supabase/supabase-js": "^2.46.1",
"@trigger.dev/sdk": "^3.1.2"
"@trigger.dev/sdk": "3.3.1"
},
"devDependencies": {
"@trigger.dev/build": "^3.1.2"
"@trigger.dev/build": "3.3.1"
}
}
}
68 changes: 68 additions & 0 deletions apps/triggers/src/collaborator.status.ts
Original file line number Diff line number Diff line change
@@ -0,0 +1,68 @@
import { cache } from '@repo/cache';
import { logger, schedules } from '@trigger.dev/sdk/v3';

/**
 * Scheduled task that fetches the full contributor list for the configured
 * GitHub repository, filters out bot accounts, and publishes a compact
 * projection (login, id, avatar_url, html_url) to Redis under the fixed
 * key "contributors". Runs weekly; any failure is logged and the previous
 * cached list is left untouched (the cache is only cleared after a
 * successful fetch).
 */
export const publishContributorsTask = schedules.task({
  id: "publish-contributors",
  cron: "0 0 * * 0", // Runs every Sunday at midnight
  maxDuration: 60,
  run: async () => {
    const owner = 'SkidGod4444'; // Replace with the repository owner's username
    const repo = 'plura'; // Replace with the repository name
    const GITHUB_TOKEN = process.env.GITHUB_TOKEN;

    // Fail fast instead of sending "Authorization: token undefined",
    // which GitHub rejects with 401 on every page request.
    if (!GITHUB_TOKEN) {
      logger.error('GITHUB_TOKEN is not set; skipping contributors publish');
      return;
    }

    // Subset of the GitHub contributors payload this task reads.
    type Contributor = {
      login: string;
      id: number;
      avatar_url: string;
      html_url: string;
      type?: string; // "User" | "Bot" | ... per the GitHub API
    };

    let contributors: Contributor[] = []; // was implicit any[] under strict mode
    let page = 1;

    try {
      // Page through the contributors endpoint until an empty page is returned.
      do {
        const response = await fetch(
          `https://api.github.com/repos/${owner}/${repo}/contributors?per_page=100&page=${page}`,
          {
            headers: {
              Authorization: `token ${GITHUB_TOKEN}`,
              Accept: 'application/vnd.github.v3+json',
            },
          }
        );

        if (!response.ok) {
          logger.error(`GitHub API request failed with status ${response.status}`);
          return;
        }

        const data: unknown = await response.json();

        // Defensive: a non-array body (e.g. an error object that slipped
        // through with a 2xx status) must not be concat'ed into the list.
        if (!Array.isArray(data) || data.length === 0) {
          break;
        }

        contributors = contributors.concat(data as Contributor[]);
        page += 1;
      } while (true);

      // Filter out bots based on type or if 'bot' appears in their login.
      // Entries without a string login are dropped rather than crashing on
      // .toLowerCase().
      const filteredContributors = contributors.filter(
        (contributor) =>
          contributor.type !== 'Bot' &&
          typeof contributor.login === 'string' &&
          !contributor.login.toLowerCase().includes('bot')
      );

      // Prepare data: list of { login, id, avatar_url, html_url }
      const contributorData = filteredContributors.map((contributor) => ({
        login: contributor.login,
        id: contributor.id,
        avatar_url: contributor.avatar_url,
        html_url: contributor.html_url,
      }));

      // Store data in Redis under a fixed key. Only cleared after a
      // successful fetch so consumers never see an empty list on API failure.
      const redisKey = 'contributors';
      await cache.del(redisKey); // Clear existing data
      if (contributorData.length > 0) {
        // Guard: RPUSH with zero values is a Redis protocol error
        // ("wrong number of arguments"), so skip the call when empty.
        await cache.rpush(redisKey, ...contributorData.map((c) => JSON.stringify(c)));
      }

      logger.log('Published contributors data', { contributorData });
    } catch (error) {
      logger.error('Error fetching contributors from GitHub', { error });
    }
  },
});
166 changes: 83 additions & 83 deletions apps/triggers/src/db.status.ts
Original file line number Diff line number Diff line change
@@ -1,98 +1,98 @@
import { cache } from "@repo/cache";
import { logger, schedules, wait } from "@trigger.dev/sdk/v3";
import { createClient } from '@supabase/supabase-js';
// import { cache } from "@repo/cache";
// import { logger, schedules, wait } from "@trigger.dev/sdk/v3";
// import { createClient } from '@supabase/supabase-js';

const supabaseUrl = process.env.SUPABASE_URL!;
const supabaseKey = process.env.SUPABASE_KEY!;
const db = createClient(supabaseUrl, supabaseKey);
// const supabaseUrl = process.env.SUPABASE_URL!;
// const supabaseKey = process.env.SUPABASE_KEY!;
// const db = createClient(supabaseUrl, supabaseKey);

export const dbStatusTask = schedules.task({
id: "db-status",
cron: "*/12 * * * *",
maxDuration: 600,
run: async (payload, { ctx }) => {
const latencies: Record<string, number | null> = {};
let totalLatency = 0;
let operationCount = 0;
// export const dbStatusTask = schedules.task({
// id: "db-status",
// cron: "*/12 * * * *",
// maxDuration: 600,
// run: async (payload, { ctx }) => {
// const latencies: Record<string, number | null> = {};
// let totalLatency = 0;
// let operationCount = 0;

const measureAndCacheLatency = async (operationName: string, operation: () => Promise<any>) => {
const startTime = Date.now();
try {
await operation();
const latency = Date.now() - startTime;
latencies[operationName] = latency;
totalLatency += latency;
operationCount++;
// const measureAndCacheLatency = async (operationName: string, operation: () => Promise<any>) => {
// const startTime = Date.now();
// try {
// await operation();
// const latency = Date.now() - startTime;
// latencies[operationName] = latency;
// totalLatency += latency;
// operationCount++;

logger.log(`Latency for ${operationName}`, { latency });
} catch (error) {
logger.error(`${operationName} failed`, { error });
latencies[operationName] = null;
}
};
// logger.log(`Latency for ${operationName}`, { latency });
// } catch (error) {
// logger.error(`${operationName} failed`, { error });
// latencies[operationName] = null;
// }
// };

const massOperationsCount = 100;
// const massOperationsCount = 100;

await measureAndCacheLatency("mass_create", async () => {
const createPromises = Array.from({ length: massOperationsCount }, (_, i) =>
db
.from('triggers')
.insert([{ id: `mass-${i}`, name: `Mass Test ${i}`, email: `mass${i}@test.com`, email_verified: true }])
);
await Promise.all(createPromises);
});
// await measureAndCacheLatency("mass_create", async () => {
// const createPromises = Array.from({ length: massOperationsCount }, (_, i) =>
// db
// .from('triggers')
// .insert([{ id: `mass-${i}`, name: `Mass Test ${i}`, email: `mass${i}@test.com`, email_verified: true }])
// );
// await Promise.all(createPromises);
// });

await wait.for({ seconds: 10 });
// await wait.for({ seconds: 10 });

await measureAndCacheLatency("mass_read", async () => {
const { data, error } = await db
.from('triggers')
.select('*')
if (error) {
logger.error('Error fetching data:', { error });
return null;
}
logger.log('Read data', { data });
return data;
});
// await measureAndCacheLatency("mass_read", async () => {
// const { data, error } = await db
// .from('triggers')
// .select('*')
// if (error) {
// logger.error('Error fetching data:', { error });
// return null;
// }
// logger.log('Read data', { data });
// return data;
// });

await wait.for({ seconds: 15 });
// await wait.for({ seconds: 15 });

await measureAndCacheLatency("mass_update", async () => {
const updatePromises = Array.from({ length: massOperationsCount }, (_, i) =>
db
.from('triggers')
.update({ name: `Updated Mass Test ${i}`, email_verified: false })
.eq('id', `mass-${i}`)
);
await Promise.all(updatePromises);
});
// await measureAndCacheLatency("mass_update", async () => {
// const updatePromises = Array.from({ length: massOperationsCount }, (_, i) =>
// db
// .from('triggers')
// .update({ name: `Updated Mass Test ${i}`, email_verified: false })
// .eq('id', `mass-${i}`)
// );
// await Promise.all(updatePromises);
// });

await wait.for({ seconds: 10 });
// await wait.for({ seconds: 10 });

await measureAndCacheLatency("mass_delete", async () => {
const deletePromises = Array.from({ length: massOperationsCount }, (_, i) =>
db
.from('triggers')
.delete()
.eq('id', `mass-${i}`)
);
await Promise.all(deletePromises);
});
// await measureAndCacheLatency("mass_delete", async () => {
// const deletePromises = Array.from({ length: massOperationsCount }, (_, i) =>
// db
// .from('triggers')
// .delete()
// .eq('id', `mass-${i}`)
// );
// await Promise.all(deletePromises);
// });

await wait.for({ seconds: 5 });
// await wait.for({ seconds: 5 });

const averageLatency = operationCount > 0 ? totalLatency / operationCount : null;
const latencyRecord = {
timestamp: new Date().toISOString(),
latencies,
totalLatency,
operationCount,
averageLatency,
massOperationsCount,
};
// const averageLatency = operationCount > 0 ? totalLatency / operationCount : null;
// const latencyRecord = {
// timestamp: new Date().toISOString(),
// latencies,
// totalLatency,
// operationCount,
// averageLatency,
// massOperationsCount,
// };

await cache.rpush("db-latency:history", JSON.stringify(latencyRecord));
await cache.ltrim("db-latency:history", -120, -1);
},
});
// await cache.rpush("db-latency:history", JSON.stringify(latencyRecord));
// await cache.ltrim("db-latency:history", -120, -1);
// },
// });
Loading

0 comments on commit e886863

Please sign in to comment.