4 changes: 0 additions & 4 deletions .github/workflows/ci.yml
@@ -74,9 +74,5 @@ jobs:
if: matrix.task == 'test-run'
run: pnpm test:setup

- name: Run migrations
if: matrix.task == 'test-run'
run: pnpm --filter core migrate-test

- name: Run task
run: NODE_ENV=test pnpm ${{ matrix.task }}
38 changes: 19 additions & 19 deletions .github/workflows/deploy-template.yml
@@ -104,19 +104,19 @@ jobs:
# Complication when the number of containers in the task is unknown:
# we have to know where to get the inputs for each step, including the upload
# step.
- name: Fill in the new image ID in the Amazon ECS task definition for migrations
id: task-def-migration
if: inputs.service == 'core'
uses: aws-actions/amazon-ecs-render-task-definition@c804dfbdd57f713b6c079302a4c01db7017a36fc
with:
task-definition: ${{ steps.task-def-service.outputs.task-definition }}
container-name: migrations
image: ${{ steps.label.outputs.base_label }}
# - name: Fill in the new image ID in the Amazon ECS task definition for migrations
# id: task-def-migration
# if: inputs.service == 'core'
# uses: aws-actions/amazon-ecs-render-task-definition@c804dfbdd57f713b6c079302a4c01db7017a36fc
# with:
# task-definition: ${{ steps.task-def-service.outputs.task-definition }}
# container-name: migrations
# image: ${{ steps.label.outputs.base_label }}

- name: Deploy Amazon ECS task definition
id: deploy-service-only
# This one is different. The single-image case is when not deploying core.
if: inputs.service != 'core'
# if: inputs.service != 'core'
uses: aws-actions/amazon-ecs-deploy-task-definition@16f052ed696e6e5bf88c208a8e5ba1af7ced3310
with:
# it is because of this line that the two steps need different if conditions
@@ -125,13 +125,13 @@
cluster: ${{ env.ECS_CLUSTER }}
wait-for-service-stability: true

- name: Deploy Amazon ECS task definition including migrations
id: deploy-service-and-migrations
if: inputs.service == 'core'
uses: aws-actions/amazon-ecs-deploy-task-definition@16f052ed696e6e5bf88c208a8e5ba1af7ced3310
with:
# it is because of this line that the two steps need different if conditions
task-definition: ${{ steps.task-def-migration.outputs.task-definition }}
service: ${{ env.ECS_SERVICE }}
cluster: ${{ env.ECS_CLUSTER }}
wait-for-service-stability: true
# - name: Deploy Amazon ECS task definition including migrations
# id: deploy-service-and-migrations
# if: inputs.service == 'core'
# uses: aws-actions/amazon-ecs-deploy-task-definition@16f052ed696e6e5bf88c208a8e5ba1af7ced3310
# with:
# # it is because of this line that the two steps need different if conditions
# task-definition: ${{ steps.task-def-migration.outputs.task-definition }}
# service: ${{ env.ECS_SERVICE }}
# cluster: ${{ env.ECS_CLUSTER }}
# wait-for-service-stability: true
3 changes: 2 additions & 1 deletion .github/workflows/e2e.yml
@@ -106,7 +106,7 @@ jobs:
- name: Run migrations and seed
run: pnpm --filter core reset-base
env:
# 20241126: this prevents the arcadia seed from running, which contains a ton of pubs that could slow down the tests
# 20241126: this prevents the legacy seed from running, which contains a ton of pubs that could slow down the tests
MINIMAL_SEED: true
SKIP_VALIDATION: true

@@ -124,6 +124,7 @@
run: echo "CONTAINER_ID=$(docker compose -f docker-compose.test.yml ps integration-tests -q)" >> $GITHUB_OUTPUT

- name: Wait until container is healthy
timeout-minutes: 3
run: while [ "`docker inspect -f {{.State.Health.Status}} ${{steps.log-container-id.outputs.CONTAINER_ID}}`" != "healthy" ]; do sleep .2; done

- name: Run integration tests
1 change: 0 additions & 1 deletion .github/workflows/on_main.yml
@@ -62,7 +62,6 @@ jobs:
with:
PLATFORM_IMAGE: ${{ needs.build-all.outputs.core-image }}
JOBS_IMAGE: ${{ needs.build-all.outputs.jobs-image }}
MIGRATIONS_IMAGE: ${{ needs.build-all.outputs.base-image }}
SITE_BUILDER_IMAGE: ${{ needs.build-all.outputs.site-builder-image }}
AWS_REGION: "us-east-1"
ALWAYS_ON: "main"
3 changes: 0 additions & 3 deletions .github/workflows/on_pr.yml
@@ -63,11 +63,9 @@ jobs:
with:
# PLATFORM_IMAGE: 246372085946.dkr.ecr.us-east-1.amazonaws.com/pubpub-v7-core:2b9a81a279c4e405bbedcdbb697c897ded52fbc0
# JOBS_IMAGE: 246372085946.dkr.ecr.us-east-1.amazonaws.com/pubpub-v7-jobs:c786662f4899de16a621e366a485eca5adda4d6a
# MIGRATIONS_IMAGE: 246372085946.dkr.ecr.us-east-1.amazonaws.com/pubpub-v7:c786662f4899de16a621e366a485eca5adda4d6a
# SITE_BUILDER_IMAGE: 246372085946.dkr.ecr.us-east-1.amazonaws.com/pubpub-v7-site-builder:c786662f4899de16a621e366a485eca5adda4d6a
PLATFORM_IMAGE: ${{ needs.build-all.outputs.core-image }}
JOBS_IMAGE: ${{ needs.build-all.outputs.jobs-image }}
MIGRATIONS_IMAGE: ${{ needs.build-all.outputs.base-image }}
SITE_BUILDER_IMAGE: ${{ needs.build-all.outputs.site-builder-image }}
AWS_REGION: "us-east-1"
COMPOSE_FILES: docker-compose.preview.pr.yml
@@ -89,7 +87,6 @@
with:
PLATFORM_IMAGE: "x" # not used
JOBS_IMAGE: "x" # not used
MIGRATIONS_IMAGE: "x" # not used
SITE_BUILDER_IMAGE: "x" # not used
AWS_REGION: "us-east-1"
secrets:
5 changes: 0 additions & 5 deletions .github/workflows/pull-preview.yml
@@ -7,9 +7,6 @@ on:
JOBS_IMAGE:
required: true
type: string
MIGRATIONS_IMAGE:
required: true
type: string
SITE_BUILDER_IMAGE:
required: true
type: string
@@ -54,12 +51,10 @@ jobs:
env:
PLATFORM_IMAGE: ${{ inputs.PLATFORM_IMAGE }}
JOBS_IMAGE: ${{ inputs.JOBS_IMAGE }}
MIGRATIONS_IMAGE: ${{ inputs.MIGRATIONS_IMAGE }}
SITE_BUILDER_IMAGE: ${{ inputs.SITE_BUILDER_IMAGE }}
run: |
sed -i "s|image: PLATFORM_IMAGE|image: $PLATFORM_IMAGE|" docker-compose.preview.yml
sed -i "s|image: JOBS_IMAGE|image: $JOBS_IMAGE|" docker-compose.preview.yml
sed -i "s|image: MIGRATIONS_IMAGE|image: $MIGRATIONS_IMAGE|" docker-compose.preview.yml
sed -i "s|image: SITE_BUILDER_IMAGE|image: $SITE_BUILDER_IMAGE|" docker-compose.preview.yml
sed -i "s|DATACITE_REPOSITORY_ID: DATACITE_REPOSITORY_ID|DATACITE_REPOSITORY_ID: ${{ secrets.PREVIEW_DATACITE_REPOSITORY_ID }}|" docker-compose.preview.yml
sed -i "s|DATACITE_PASSWORD: DATACITE_PASSWORD|DATACITE_PASSWORD: ${{ secrets.PREVIEW_DATACITE_PASSWORD }}|" docker-compose.preview.yml
6 changes: 4 additions & 2 deletions Dockerfile
@@ -4,8 +4,8 @@
# If you need more help, visit the Dockerfile reference guide at
# https://docs.docker.com/go/dockerfile-reference/

ARG NODE_VERSION=22.13.1
ARG ALPINE_VERSION=3.20
ARG NODE_VERSION=24.6.0
ARG ALPINE_VERSION=3.21

ARG PACKAGE
ARG PORT=3000
@@ -136,5 +136,7 @@ COPY --from=withpackage --chown=node:node /usr/src/app/core/.next/static ./core/
COPY --from=withpackage --chown=node:node /usr/src/app/core/public ./core/public
# needed to set the database url correctly based on PGHOST variables
COPY --from=withpackage --chown=node:node /usr/src/app/core/.env.docker ./core/.env
# needed to run migrations
COPY --from=withpackage --chown=node:node /usr/src/app/core/prisma ./core/prisma

CMD ["node", "core/server.js"]
24 changes: 24 additions & 0 deletions core/instrumentation.migrate.ts
@@ -0,0 +1,24 @@
import { logger } from "logger";

export async function runMigrations() {
const { execSync } = await import("child_process");

try {
logger.info("Running Prisma migrations...");
execSync("npx prisma migrate deploy --config prisma/prisma.config.ts", {
stdio: "pipe",
// eslint-disable-next-line no-restricted-properties
env: { ...process.env },
});

logger.info("Prisma migrations completed successfully");
} catch (error) {
logger.error({ msg: "Failed to run Prisma migrations:", err: error });
throw error;
}
}

runMigrations().catch((error) => {
logger.error({ msg: "Migration auto-run failed:", err: error });
process.exit(1);
});
82 changes: 46 additions & 36 deletions core/instrumentation.node.mts
@@ -6,45 +6,55 @@ import { logger } from "logger";

import { env } from "./lib/env/env";

// function hook() {
logger.info("Running instrumentation hook for nodejs...");

if (env.NODE_ENV === "production") {
logger.info("Instrumenting Sentry...");
Sentry.init({
dsn: "https://[email protected]/4505959187480576",

// Adjust this value in production, or use tracesSampler for greater control
tracesSampleRate: 1,

// Setting this option to true will print useful information to the console while you're setting up Sentry.
debug: false,
integrations: [
Sentry.redisIntegration({
cachePrefixes: ["nextjs:"],
async function main() {
if (process.env.NODE_ENV === "development") {
logger.info(
"NEXT_RUNTIME is `nodejs` and NODE_ENV is `development`; skipping OTEL + Sentry registration."
);
return;
}

logger.info("Running instrumentation hook for nodejs...");

if (env.NODE_ENV === "production") {
logger.info("Instrumenting Sentry...");
Sentry.init({
dsn: "https://[email protected]/4505959187480576",

// Adjust this value in production, or use tracesSampler for greater control
tracesSampleRate: 1,

// Setting this option to true will print useful information to the console while you're setting up Sentry.
debug: false,
integrations: [
Sentry.redisIntegration({
cachePrefixes: ["nextjs:"],
}),
],
});
logger.info("✅ Successfully instrumented Sentry");
}

logger.info("Instrumenting Honeycomb...");
const sdk = new HoneycombSDK({
instrumentations: [
getNodeAutoInstrumentations({
// We recommend disabling fs automatic instrumentation because
// it can be noisy and expensive during startup
"@opentelemetry/instrumentation-fs": {
enabled: false,
},
}),
],
});
logger.info("✅ Successfully instrumented Sentry");
}

logger.info("Instrumenting Honeycomb...");
const sdk = new HoneycombSDK({
instrumentations: [
getNodeAutoInstrumentations({
// We recommend disabling fs automatic instrumentation because
// it can be noisy and expensive during startup
"@opentelemetry/instrumentation-fs": {
enabled: false,
},
}),
],
});

sdk.start();
logger.info("✅ Successfully instrumented Honeycomb");
sdk.start();
logger.info("✅ Successfully instrumented Honeycomb");

logger.info("instrumentation hooked in for nodejs.");
// }
logger.info("instrumentation hooked in for nodejs.");
}

// hook();
main().catch((error) => {
logger.error("Instrumentation hook failed:", error);
process.exit(1);
});
7 changes: 1 addition & 6 deletions core/instrumentation.ts
@@ -17,13 +17,8 @@ export async function register() {

logger.info(`Registering instrumentation hook for ${process.env.NEXT_RUNTIME}`);
if (process.env.NEXT_RUNTIME === "nodejs") {
if (process.env.NODE_ENV === "development") {
logger.info(
"NEXT_RUNTIME is `nodejs` and NODE_ENV is `development`; skipping OTEL + Sentry registration."
);
return;
}
await import("./instrumentation.node.mts");
await import("./instrumentation.migrate.ts");
} else {
logger.info("NEXT_RUNTIME is not `nodejs`; skipping OTEL registration.");
}
60 changes: 56 additions & 4 deletions core/next.config.ts
@@ -1,17 +1,18 @@
// @ts-check

import type { NextConfig, normalizeConfig } from "next/dist/server/config";

import { PHASE_PRODUCTION_BUILD } from "next/dist/shared/lib/constants.js";
import withPreconstruct from "@preconstruct/next";
import { withSentryConfig } from "@sentry/nextjs";
import { nodeFileTrace } from "@vercel/nft";

import { env } from "./lib/env/env";

// import { PHASE_DEVELOPMENT_SERVER } from "next/constants";

const nextConfig: NextConfig = {
output: "standalone",
outputFileTracingRoot: new URL("./..", import.meta.url).pathname,

typescript: {
// this gets checked in CI already
ignoreBuildErrors: true,
@@ -123,7 +124,7 @@ const modifiedConfig = withPreconstruct(
})
);

const config: typeof normalizeConfig = async (phase, { defaultConfig }) => {
const config: typeof normalizeConfig = async (phase, { defaultConfig }): Promise<NextConfig> => {
if (!env.SENTRY_AUTH_TOKEN) {
console.warn("⚠️ SENTRY_AUTH_TOKEN is not set");
}
@@ -136,7 +137,58 @@ const config: typeof normalizeConfig = async (phase, { defaultConfig }) => {
}
console.log("✅ SENTRY_AUTH_TOKEN is successfully set");
}
return modifiedConfig;

// see https://github.com/vercel/next.js/discussions/66327#discussioncomment-13247142
const {
fileList: additionalTracedFiles,
esmFileList,
reasons,
warnings,
} = await nodeFileTrace(
// add entry points for the missing packages or any additional scripts you need here
[
require.resolve("prisma/build/index.js"),
require.resolve("@prisma/engines"),
require.resolve("@prisma/config"),
require.resolve("@prisma/driver-adapter-utils"),
require.resolve("@prisma/engines-version"),
require.resolve("@prisma/fetch-engine"),
require.resolve("@prisma/get-platform"),
require.resolve("effect"),
],
{
analysis: {
emitGlobs: true,
},

conditions: ["import", "require", "node"],
}
);
console.log(additionalTracedFiles, esmFileList, warnings);

reasons.forEach((reason, key) =>
console.log(key, reason, Array.from(reason.parents.entries()))
);

return {
...modifiedConfig,
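// ensure the Prisma CLI, engines, and migration files are traced into the standalone
// output so `prisma migrate deploy` can run at container startup (see instrumentation.migrate.ts)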
outputFileTracingIncludes: {
"**": [
...additionalTracedFiles,
"./node_modules/.bin/prisma",
"./node_modules/prisma/**",
"./node_modules/@prisma*/**",
"./prisma/migrations/*",
"./prisma/prisma.config.ts",
"./prisma/schema/*",
"./prisma/create-admin-user.cts",
"./node_modules/effect/**",
"./node_modules/empathic/**",
"./node_modules/deepmerge-ts/**",
"./node_modules/c12/**",
],
},
};
};

export default config;