Commit 21cf4c4

Merge pull request #149 from premieroctet/feature/coolify
Feature/coolify
tlenclos authored Oct 10, 2024
2 parents 7d61019 + 141dbce commit 21cf4c4
Showing 3 changed files with 80 additions and 4 deletions.
75 changes: 75 additions & 0 deletions Dockerfile
@@ -0,0 +1,75 @@
FROM node:18-alpine AS base

# Install dependencies only when needed
FROM base AS deps

# Check https://github.com/nodejs/docker-node/tree/b4117f9333da4138b03a546ec926ef50a31506c3#nodealpine to understand why libc6-compat might be needed.
RUN apk add --no-cache libc6-compat
WORKDIR /app

# Install Python and other necessary packages
RUN apk add --no-cache \
    python3 \
    py3-pip \
    py3-setuptools \
    py3-wheel \
    bash \
    g++ \
    linux-headers \
    make

# Create a symlink for python3 as python
RUN ln -sf python3 /usr/bin/python

# Install dependencies based on the preferred package manager
COPY package.json yarn.lock* package-lock.json* pnpm-lock.yaml* ./
RUN yarn --frozen-lockfile

# Rebuild the source code only when needed
FROM base AS builder
WORKDIR /app

COPY --from=deps /app/node_modules ./node_modules
COPY . .

# Next.js collects completely anonymous telemetry data about general usage.
# Learn more here: https://nextjs.org/telemetry
# Disable telemetry during the build.
ENV NEXT_TELEMETRY_DISABLED 1

RUN yarn --frozen-lockfile
RUN npx prisma generate
RUN yarn run build

# Production image, copy all the files and run next
FROM base AS runner
WORKDIR /app

ENV NODE_ENV production
# Uncomment the following line in case you want to disable telemetry during runtime.
# ENV NEXT_TELEMETRY_DISABLED 1

RUN addgroup --system --gid 1001 nodejs
RUN adduser --system --uid 1001 nextjs

COPY --from=builder /app/public ./public

# Set the correct permission for prerender cache
RUN mkdir .next
RUN chown nextjs:nodejs .next

# Automatically leverage output traces to reduce image size
# https://nextjs.org/docs/advanced-features/output-file-tracing
COPY --from=builder --chown=nextjs:nodejs /app/.next/standalone ./
COPY --from=builder --chown=nextjs:nodejs /app/.next/static ./.next/static
COPY --from=builder --chown=nextjs:nodejs /app/prisma ./prisma/

USER nextjs

EXPOSE 3000

ENV PORT 3000

# server.js is created by next build from the standalone output
# https://nextjs.org/docs/pages/api-reference/next-config-js/output
CMD HOSTNAME="0.0.0.0" node server.js
1 change: 1 addition & 0 deletions next.config.js
@@ -7,6 +7,7 @@ const { withContentlayer } = require('next-contentlayer');

/** @type {import('next').NextConfig} */
const nextConfig = {
+  output: 'standalone',
  experimental: {
    serverActions: true,
    serverComponentsExternalPackages: ['mjml', 'mjml-react'],
8 changes: 4 additions & 4 deletions src/utils/openai.ts
@@ -1,16 +1,16 @@
import OpenAI from 'openai';

-const openai = new OpenAI({
-  apiKey: process.env.OPENAI_API_KEY!,
-});
-
export const openAiCompletion = async ({
  prompt,
  model = 'gpt-4o',
}: {
  prompt: string;
  model?: 'gpt-4o' | 'gpt-4-turbo';
}) => {
+  const openai = new OpenAI({
+    apiKey: process.env.OPENAI_API_KEY!,
+  });
+
  const chatCompletion = await openai.chat.completions.create({
    messages: [{ role: 'user', content: prompt }],
    model,
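
Side note on the openai.ts change: constructing the OpenAI client inside openAiCompletion instead of at module scope means OPENAI_API_KEY is only read when the helper is actually called, presumably so that evaluating the module during `next build` in the Docker builder stage (where no API key is available) does not fail at client construction. Below is a minimal caller sketch; the route path, the `@/` import alias, and the returned shape are assumptions for illustration and are not part of this commit.

// Hypothetical app-router route handler, for illustration only.
import { NextResponse } from 'next/server';

import { openAiCompletion } from '@/utils/openai';

export async function POST(request: Request) {
  const { prompt } = (await request.json()) as { prompt: string };

  // The OpenAI client is created inside openAiCompletion, so the API key is
  // resolved from the runtime environment of the container (e.g. a variable
  // set in Coolify), not at build time.
  const completion = await openAiCompletion({ prompt, model: 'gpt-4o' });

  return NextResponse.json(completion);
}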
