-
-
Notifications
You must be signed in to change notification settings - Fork 32
Commit
This commit does not belong to any branch on this repository, and may belong to a fork outside of the repository.
Implemented the collaborators section list based on the top contributors over a week
- Loading branch information
Showing
15 changed files
with
440 additions
and
230 deletions.
There are no files selected for viewing
This file contains bidirectional Unicode text that may be interpreted or compiled differently than what appears below. To review, open the file in an editor that reveals hidden Unicode characters.
Learn more about bidirectional Unicode characters
This file contains bidirectional Unicode text that may be interpreted or compiled differently than what appears below. To review, open the file in an editor that reveals hidden Unicode characters.
Learn more about bidirectional Unicode characters
Original file line number | Diff line number | Diff line change |
---|---|---|
@@ -0,0 +1,68 @@ | ||
import { cache } from '@repo/cache'; | ||
import { logger, schedules } from '@trigger.dev/sdk/v3'; | ||
|
||
export const publishContributorsTask = schedules.task({ | ||
id: "publish-contributors", | ||
cron: "0 0 * * 0", // Runs every Sunday at midnight | ||
maxDuration: 60, | ||
run: async () => { | ||
const owner = 'SkidGod4444'; // Replace with the repository owner's username | ||
const repo = 'plura'; // Replace with the repository name | ||
const GITHUB_TOKEN = process.env.GITHUB_TOKEN; | ||
|
||
let contributors = []; | ||
let page = 1; | ||
|
||
try { | ||
do { | ||
const response = await fetch( | ||
`https://api.github.com/repos/${owner}/${repo}/contributors?per_page=100&page=${page}`, | ||
{ | ||
headers: { | ||
Authorization: `token ${GITHUB_TOKEN}`, | ||
Accept: 'application/vnd.github.v3+json', | ||
}, | ||
} | ||
); | ||
|
||
if (!response.ok) { | ||
logger.error(`GitHub API request failed with status ${response.status}`); | ||
return; | ||
} | ||
|
||
const data = await response.json(); | ||
|
||
if (data.length === 0) { | ||
break; | ||
} | ||
|
||
contributors = contributors.concat(data); | ||
page += 1; | ||
} while (true); | ||
|
||
// Filter out bots based on type or if 'bot' appears in their login | ||
const filteredContributors = contributors.filter( | ||
(contributor) => | ||
contributor.type !== 'Bot' && | ||
!contributor.login.toLowerCase().includes('bot') | ||
); | ||
|
||
// Prepare data: list of { login, id, avatar_url, html_url } | ||
const contributorData = filteredContributors.map((contributor) => ({ | ||
login: contributor.login, | ||
id: contributor.id, | ||
avatar_url: contributor.avatar_url, | ||
html_url: contributor.html_url, | ||
})); | ||
|
||
// Store data in Redis under a fixed key | ||
const redisKey = 'contributors'; | ||
await cache.del(redisKey); // Clear existing data | ||
await cache.rpush(redisKey, ...contributorData.map((c) => JSON.stringify(c))); | ||
|
||
logger.log('Published contributors data', { contributorData }); | ||
} catch (error) { | ||
logger.error('Error fetching contributors from GitHub', { error }); | ||
} | ||
}, | ||
}); |
This file contains bidirectional Unicode text that may be interpreted or compiled differently than what appears below. To review, open the file in an editor that reveals hidden Unicode characters.
Learn more about bidirectional Unicode characters
Original file line number | Diff line number | Diff line change |
---|---|---|
@@ -1,98 +1,98 @@ | ||
import { cache } from "@repo/cache"; | ||
import { logger, schedules, wait } from "@trigger.dev/sdk/v3"; | ||
import { createClient } from '@supabase/supabase-js'; | ||
// import { cache } from "@repo/cache"; | ||
// import { logger, schedules, wait } from "@trigger.dev/sdk/v3"; | ||
// import { createClient } from '@supabase/supabase-js'; | ||
|
||
const supabaseUrl = process.env.SUPABASE_URL!; | ||
const supabaseKey = process.env.SUPABASE_KEY!; | ||
const db = createClient(supabaseUrl, supabaseKey); | ||
// const supabaseUrl = process.env.SUPABASE_URL!; | ||
// const supabaseKey = process.env.SUPABASE_KEY!; | ||
// const db = createClient(supabaseUrl, supabaseKey); | ||
|
||
export const dbStatusTask = schedules.task({ | ||
id: "db-status", | ||
cron: "*/12 * * * *", | ||
maxDuration: 600, | ||
run: async (payload, { ctx }) => { | ||
const latencies: Record<string, number | null> = {}; | ||
let totalLatency = 0; | ||
let operationCount = 0; | ||
// export const dbStatusTask = schedules.task({ | ||
// id: "db-status", | ||
// cron: "*/12 * * * *", | ||
// maxDuration: 600, | ||
// run: async (payload, { ctx }) => { | ||
// const latencies: Record<string, number | null> = {}; | ||
// let totalLatency = 0; | ||
// let operationCount = 0; | ||
|
||
const measureAndCacheLatency = async (operationName: string, operation: () => Promise<any>) => { | ||
const startTime = Date.now(); | ||
try { | ||
await operation(); | ||
const latency = Date.now() - startTime; | ||
latencies[operationName] = latency; | ||
totalLatency += latency; | ||
operationCount++; | ||
// const measureAndCacheLatency = async (operationName: string, operation: () => Promise<any>) => { | ||
// const startTime = Date.now(); | ||
// try { | ||
// await operation(); | ||
// const latency = Date.now() - startTime; | ||
// latencies[operationName] = latency; | ||
// totalLatency += latency; | ||
// operationCount++; | ||
|
||
logger.log(`Latency for ${operationName}`, { latency }); | ||
} catch (error) { | ||
logger.error(`${operationName} failed`, { error }); | ||
latencies[operationName] = null; | ||
} | ||
}; | ||
// logger.log(`Latency for ${operationName}`, { latency }); | ||
// } catch (error) { | ||
// logger.error(`${operationName} failed`, { error }); | ||
// latencies[operationName] = null; | ||
// } | ||
// }; | ||
|
||
const massOperationsCount = 100; | ||
// const massOperationsCount = 100; | ||
|
||
await measureAndCacheLatency("mass_create", async () => { | ||
const createPromises = Array.from({ length: massOperationsCount }, (_, i) => | ||
db | ||
.from('triggers') | ||
.insert([{ id: `mass-${i}`, name: `Mass Test ${i}`, email: `mass${i}@test.com`, email_verified: true }]) | ||
); | ||
await Promise.all(createPromises); | ||
}); | ||
// await measureAndCacheLatency("mass_create", async () => { | ||
// const createPromises = Array.from({ length: massOperationsCount }, (_, i) => | ||
// db | ||
// .from('triggers') | ||
// .insert([{ id: `mass-${i}`, name: `Mass Test ${i}`, email: `mass${i}@test.com`, email_verified: true }]) | ||
// ); | ||
// await Promise.all(createPromises); | ||
// }); | ||
|
||
await wait.for({ seconds: 10 }); | ||
// await wait.for({ seconds: 10 }); | ||
|
||
await measureAndCacheLatency("mass_read", async () => { | ||
const { data, error } = await db | ||
.from('triggers') | ||
.select('*') | ||
if (error) { | ||
logger.error('Error fetching data:', { error }); | ||
return null; | ||
} | ||
logger.log('Read data', { data }); | ||
return data; | ||
}); | ||
// await measureAndCacheLatency("mass_read", async () => { | ||
// const { data, error } = await db | ||
// .from('triggers') | ||
// .select('*') | ||
// if (error) { | ||
// logger.error('Error fetching data:', { error }); | ||
// return null; | ||
// } | ||
// logger.log('Read data', { data }); | ||
// return data; | ||
// }); | ||
|
||
await wait.for({ seconds: 15 }); | ||
// await wait.for({ seconds: 15 }); | ||
|
||
await measureAndCacheLatency("mass_update", async () => { | ||
const updatePromises = Array.from({ length: massOperationsCount }, (_, i) => | ||
db | ||
.from('triggers') | ||
.update({ name: `Updated Mass Test ${i}`, email_verified: false }) | ||
.eq('id', `mass-${i}`) | ||
); | ||
await Promise.all(updatePromises); | ||
}); | ||
// await measureAndCacheLatency("mass_update", async () => { | ||
// const updatePromises = Array.from({ length: massOperationsCount }, (_, i) => | ||
// db | ||
// .from('triggers') | ||
// .update({ name: `Updated Mass Test ${i}`, email_verified: false }) | ||
// .eq('id', `mass-${i}`) | ||
// ); | ||
// await Promise.all(updatePromises); | ||
// }); | ||
|
||
await wait.for({ seconds: 10 }); | ||
// await wait.for({ seconds: 10 }); | ||
|
||
await measureAndCacheLatency("mass_delete", async () => { | ||
const deletePromises = Array.from({ length: massOperationsCount }, (_, i) => | ||
db | ||
.from('triggers') | ||
.delete() | ||
.eq('id', `mass-${i}`) | ||
); | ||
await Promise.all(deletePromises); | ||
}); | ||
// await measureAndCacheLatency("mass_delete", async () => { | ||
// const deletePromises = Array.from({ length: massOperationsCount }, (_, i) => | ||
// db | ||
// .from('triggers') | ||
// .delete() | ||
// .eq('id', `mass-${i}`) | ||
// ); | ||
// await Promise.all(deletePromises); | ||
// }); | ||
|
||
await wait.for({ seconds: 5 }); | ||
// await wait.for({ seconds: 5 }); | ||
|
||
const averageLatency = operationCount > 0 ? totalLatency / operationCount : null; | ||
const latencyRecord = { | ||
timestamp: new Date().toISOString(), | ||
latencies, | ||
totalLatency, | ||
operationCount, | ||
averageLatency, | ||
massOperationsCount, | ||
}; | ||
// const averageLatency = operationCount > 0 ? totalLatency / operationCount : null; | ||
// const latencyRecord = { | ||
// timestamp: new Date().toISOString(), | ||
// latencies, | ||
// totalLatency, | ||
// operationCount, | ||
// averageLatency, | ||
// massOperationsCount, | ||
// }; | ||
|
||
await cache.rpush("db-latency:history", JSON.stringify(latencyRecord)); | ||
await cache.ltrim("db-latency:history", -120, -1); | ||
}, | ||
}); | ||
// await cache.rpush("db-latency:history", JSON.stringify(latencyRecord)); | ||
// await cache.ltrim("db-latency:history", -120, -1); | ||
// }, | ||
// }); |
Oops, something went wrong.