diff --git a/src/utils/db/db.ts b/src/utils/db/db.ts
index 5da5d4c2..4e7ac923 100644
--- a/src/utils/db/db.ts
+++ b/src/utils/db/db.ts
@@ -632,15 +632,10 @@ export class DB {
         distinctId: string
     ): Promise {
         const insertResult = await client.query(
-            'INSERT INTO posthog_persondistinctid (distinct_id, person_id, team_id) VALUES ($1, $2, $3) ON CONFLICT DO NOTHING RETURNING *',
+            'INSERT INTO posthog_persondistinctid (distinct_id, person_id, team_id) VALUES ($1, $2, $3) RETURNING *',
             [distinctId, person.id, person.team_id]
         )
 
-        // some other thread already added this ID
-        if (insertResult.rows.length === 0) {
-            return
-        }
-
         const personDistinctIdCreated = insertResult.rows[0] as PersonDistinctId
         if (this.kafkaProducer) {
             return {
diff --git a/src/worker/ingestion/process-event.ts b/src/worker/ingestion/process-event.ts
index 4ad7a837..d1dab8e5 100644
--- a/src/worker/ingestion/process-event.ts
+++ b/src/worker/ingestion/process-event.ts
@@ -313,8 +313,7 @@ export class EventsProcessor {
             try {
                 await this.db.addDistinctId(oldPerson, distinctId)
                 // Catch race case when somebody already added this distinct_id between .get and .addDistinctId
-            } catch (error) {
-                Sentry.captureException(error)
+            } catch {
                 // integrity error
                 if (retryIfFailed) {
                     // run everything again to merge the users if needed
@@ -328,8 +327,7 @@
             try {
                 await this.db.addDistinctId(newPerson, previousDistinctId)
                 // Catch race case when somebody already added this distinct_id between .get and .addDistinctId
-            } catch (error) {
-                Sentry.captureException(error)
+            } catch {
                 // integrity error
                 if (retryIfFailed) {
                     // run everything again to merge the users if needed
@@ -345,8 +343,7 @@
                 distinctId,
                 previousDistinctId,
             ])
-        } catch (error) {
-            Sentry.captureException(error)
+        } catch {
            // Catch race condition where in between getting and creating,
            // another request already created this person
            if (retryIfFailed) {
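
For readers outside this codebase, the pattern the diff moves to can be sketched in isolation: with `ON CONFLICT DO NOTHING` removed, inserting an already-claimed `distinct_id` surfaces as a unique-constraint violation (Postgres error code `23505`), and the caller decides whether to rerun the merge. The sketch below is a minimal standalone illustration using node-postgres; the names `addDistinctIdOrRetry`, the `retry` callback, and the default `Pool()` (reading `PG*` env vars) are assumptions for illustration, not code from this PR, and unlike the bare `catch {}` in process-event.ts it checks the error code explicitly to make the race-case assumption visible.

```ts
import { Pool } from 'pg'

// Illustrative only: connection settings are assumed to come from PG* env vars.
const pool = new Pool()

// Hypothetical helper mirroring the pattern in this diff: plain INSERT, and
// treat a unique-constraint violation as "another worker won the race".
async function addDistinctIdOrRetry(
    personId: number,
    teamId: number,
    distinctId: string,
    retryIfFailed: boolean,
    retry: () => Promise<void>
): Promise<void> {
    try {
        await pool.query(
            'INSERT INTO posthog_persondistinctid (distinct_id, person_id, team_id) VALUES ($1, $2, $3) RETURNING *',
            [distinctId, personId, teamId]
        )
    } catch (error) {
        // 23505 = unique_violation; anything else is a genuine failure.
        if ((error as { code?: string }).code !== '23505') {
            throw error
        }
        if (retryIfFailed) {
            // Re-run the surrounding merge logic, as process-event.ts does above;
            // the row inserted by the other worker is now visible.
            await retry()
        }
    }
}
```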