113 changes: 113 additions & 0 deletions .github/workflows/docs-preview-links.yml
@@ -0,0 +1,113 @@
name: Docs Preview Links

on:
pull_request:
types:
- opened
- synchronize
- reopened
- ready_for_review

permissions:
contents: read
pull-requests: write

jobs:
comment-preview-links:
runs-on: ubuntu-latest
steps:
- name: Check out repository
uses: actions/checkout@v4
with:
fetch-depth: 0

- name: Set up Node.js
uses: actions/setup-node@v4
with:
node-version: '20'

- name: Compute preview base URL
id: preview-url
env:
REPO_PREVIEW_BASE_URL: ${{ vars.DOCS_PREVIEW_BASE_URL }}
REPO_PREVIEW_TEMPLATE: ${{ vars.DOCS_PREVIEW_BASE_URL_TEMPLATE }}
run: |
BRANCH_NAME="${GITHUB_HEAD_REF:-${GITHUB_REF_NAME}}"
BRANCH_SLUG=$(echo "$BRANCH_NAME" | tr '[:upper:]' '[:lower:]' | sed 's/[^a-z0-9]/-/g' | sed 's/-\+/-/g' | sed 's/^-//' | sed 's/-$//')

if [ -n "$REPO_PREVIEW_BASE_URL" ]; then
BASE_URL="$REPO_PREVIEW_BASE_URL"
elif [ -n "$REPO_PREVIEW_TEMPLATE" ]; then
BASE_URL="${REPO_PREVIEW_TEMPLATE//\{branch\}/$BRANCH_SLUG}"
else
BASE_URL="https://temporal-documentation-git-${BRANCH_SLUG}.preview.thundergun.io"
fi

echo "DOCS_PREVIEW_BASE_URL=$BASE_URL" >> "$GITHUB_ENV"
echo "BRANCH_SLUG=$BRANCH_SLUG" >> "$GITHUB_ENV"
echo "base_url=$BASE_URL" >> "$GITHUB_OUTPUT"

- name: Generate docs preview list
env:
BASE_SHA: ${{ github.event.pull_request.base.sha }}
DOCS_PREVIEW_BASE_URL: ${{ env.DOCS_PREVIEW_BASE_URL }}
run: |
SUMMARY_FILE="temp/doc-preview-summary.md"
mkdir -p "$(dirname "$SUMMARY_FILE")"
node bin/generate-docs-preview-list.js > "$SUMMARY_FILE"
if [ -s "$SUMMARY_FILE" ]; then
echo "has_changes=true" >> "$GITHUB_ENV"
else
echo "This PR does not change any pages in /docs. If you make updates, links to the modified pages will appear here." > "$SUMMARY_FILE"
echo "has_changes=false" >> "$GITHUB_ENV"
fi
echo "SUMMARY_FILE_PATH=$SUMMARY_FILE" >> "$GITHUB_ENV"

- name: Comment with changed docs
uses: actions/github-script@v7
with:
github-token: ${{ secrets.GITHUB_TOKEN }}
script: |
const fs = require('fs');
const marker = '<!-- docs-preview-links -->';
const summaryPath = process.env.SUMMARY_FILE_PATH || 'temp/doc-preview-summary.md';
const summary = fs.readFileSync(summaryPath, 'utf8').trim();
const hasChanges = process.env.has_changes === 'true';
core.info(`Docs changes detected: ${hasChanges}`);

const body = [
marker,
'',
'### Docs preview links',
'',
summary,
].join('\n');

const { data: comments } = await github.rest.issues.listComments({
owner: context.repo.owner,
repo: context.repo.repo,
issue_number: context.issue.number,
per_page: 100,
});

const existing = comments.find(
(comment) => comment.user?.login === 'github-actions[bot]' && comment.body?.includes(marker),
);

if (existing) {
await github.rest.issues.updateComment({
owner: context.repo.owner,
repo: context.repo.repo,
comment_id: existing.id,
body,
});
core.info(`Updated existing preview comment (${existing.id}).`);
} else {
const { data: newComment } = await github.rest.issues.createComment({
owner: context.repo.owner,
repo: context.repo.repo,
issue_number: context.issue.number,
body,
});
core.info(`Created new preview comment (${newComment.id}).`);
}
3 changes: 2 additions & 1 deletion .gitignore
@@ -8,6 +8,7 @@ build
# Generated files
.docusaurus
.cache-loader
temp/

# Misc
.DS_Store
@@ -40,4 +41,4 @@ package-lock.json
/assembly/.env

# Tests
test-results/*
test-results/*
199 changes: 199 additions & 0 deletions bin/generate-docs-preview-list.js
@@ -0,0 +1,199 @@
#!/usr/bin/env node

const { execSync } = require('child_process');
const fs = require('fs');
const path = require('path');

const DOCS_DIR = path.join(process.cwd(), 'docs');
const BASE_SHA = process.env.BASE_SHA;
const PREVIEW_BASE_URL = process.env.DOCS_PREVIEW_BASE_URL || '';

if (!BASE_SHA) {
console.error('BASE_SHA environment variable is required.');
process.exit(1);
}

function getChangedDocFiles(baseBranch = 'origin/main') {
// Find the common ancestor (merge base)
const mergeBase = execSync(`git merge-base HEAD ${baseBranch}`, { encoding: 'utf8' }).trim();

const diffCommand = ['git', 'diff', '--name-only', `${mergeBase}..HEAD`, '--', 'docs/'].join(' ');

const output = execSync(diffCommand, { encoding: 'utf8' });

return output
.split('\n')
.map((line) => line.trim())
.filter((line) => line.length > 0)
.filter((line) => /\.(mdx|md)$/.test(line))
.filter((line) => fs.existsSync(line));
}

function extractFrontMatter(filePath) {
const content = fs.readFileSync(filePath, 'utf8');
const match = content.match(/^---\n([\s\S]*?)\n---\n?/);
if (!match) {
return {};
}

const block = match[1];
const result = {};

['slug', 'title', 'sidebar_label'].forEach((key) => {
const pattern = new RegExp(`^${key}:\\s*(.+)$`, 'm');
const valueMatch = block.match(pattern);
if (valueMatch) {
result[key] = valueMatch[1].trim().replace(/^['"]|['"]$/g, '');
}
});

return result;
}

function relativeSlugFromPath(filePath) {
const relativePath = path.relative(DOCS_DIR, filePath);
const withoutExtension = relativePath.replace(/\.[^.]+$/, '');
const parts = withoutExtension.split(path.sep);

if (parts[parts.length - 1] === 'index') {
parts.pop();
}

return parts.join('/');
}

function normalizeSlug(slug, filePath) {
if (typeof slug === 'string' && slug.trim().length > 0) {
const trimmed = slug.trim();
return trimmed.startsWith('/') ? trimmed.slice(1) : trimmed;
}

return relativeSlugFromPath(filePath);
}

function humanizeSegment(segment) {
return segment
.split(/[-_]/)
.filter(Boolean)
.map((word) => word.charAt(0).toUpperCase() + word.slice(1))
.join(' ');
}

function buildUrlForSlug(slug) {
const normalized = slug.length > 0 ? `/${slug}` : '/';
if (!PREVIEW_BASE_URL) {
return normalized;
}

const base = PREVIEW_BASE_URL.endsWith('/') ? PREVIEW_BASE_URL.slice(0, -1) : PREVIEW_BASE_URL;
return `${base}${normalized}`;
}

function collectDocInfo(filePath) {
const frontMatter = extractFrontMatter(filePath);
const slug = normalizeSlug(frontMatter.slug, filePath);
const segments = slug.split('/').filter((segment) => segment.length > 0);
const label =
frontMatter.sidebar_label ||
frontMatter.title ||
humanizeSegment(segments[segments.length - 1] || path.basename(filePath, path.extname(filePath)));
const url = buildUrlForSlug(slug);

return { filePath, slug, segments, label, url };
}

function insertIntoTree(tree, docInfo) {
let currentLevel = tree;

docInfo.segments.forEach((segment, index) => {
if (!currentLevel.children.has(segment)) {
currentLevel.children.set(segment, {
segment,
label: humanizeSegment(segment),
children: new Map(),
});
}

currentLevel = currentLevel.children.get(segment);

if (index === docInfo.segments.length - 1) {
currentLevel.label = docInfo.label;
currentLevel.url = docInfo.url;
currentLevel.filePath = docInfo.filePath;
}
});

if (docInfo.segments.length === 0) {
tree.roots.push({
label: docInfo.label,
url: docInfo.url,
filePath: docInfo.filePath,
children: new Map(),
});
}
}

function renderNode(node, depth) {
const indentation = ' '.repeat(depth);
const label = node.url ? `[${node.label}](${node.url})` : node.label;
const lines = [`${indentation}- ${label}`];

const sortedChildren = Array.from(node.children.values()).sort((a, b) => a.label.localeCompare(b.label));

sortedChildren.forEach((child) => {
lines.push(...renderNode(child, depth + 1));
});

return lines;
}

function renderTree(tree) {
const lines = [];

Array.from(tree.children.values())
.sort((a, b) => a.label.localeCompare(b.label))
.forEach((child) => {
lines.push(...renderNode(child, 0));
});

tree.roots
.sort((a, b) => a.label.localeCompare(b.label))
.forEach((root) => {
lines.push(...renderNode(root, 0));
});

return lines.join('\n');
}

function main() {
const changedFiles = getChangedDocFiles('origin/main');

if (changedFiles.length === 0) {
process.stdout.write('');
return;
}

const tree = { children: new Map(), roots: [] };

changedFiles
.map((filePath) => collectDocInfo(path.resolve(filePath)))
.forEach((docInfo) => {
insertIntoTree(tree, docInfo);
});

const rendered = renderTree(tree);
process.stdout.write(rendered);
}

if (require.main === module) {
main();
} else {
module.exports = {
getChangedDocFiles,
extractFrontMatter,
collectDocInfo,
insertIntoTree,
renderTree,
main,
};
}
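
A quick way to sanity-check the helpers above locally is to require the module rather than run it, since it exports its functions when it is not the entry point. The sketch below is not part of the PR; it assumes it is saved at the repository root, that `BASE_SHA` is set only to satisfy the module-level guard, and it points at a docs page this PR already touches.

```js
// sketch.js — exercise the exported helpers locally (hypothetical, not part of the PR).
// Set BASE_SHA before requiring so the module-level guard passes; the value is
// not used by the functions called here.
process.env.BASE_SHA = process.env.BASE_SHA || 'origin/main';

const path = require('path');
const {
  collectDocInfo,
  insertIntoTree,
  renderTree,
} = require('./bin/generate-docs-preview-list.js');

const tree = { children: new Map(), roots: [] };

// Any existing docs page works; this one is modified by this PR.
['docs/encyclopedia/activities/activities.mdx']
  .map((filePath) => collectDocInfo(path.resolve(filePath)))
  .forEach((docInfo) => insertIntoTree(tree, docInfo));

// With DOCS_PREVIEW_BASE_URL unset, this should print something like:
//   - [Activities](/activities)
console.log(renderTree(tree));
```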
27 changes: 16 additions & 11 deletions docs/encyclopedia/activities/activities.mdx
@@ -2,7 +2,9 @@
id: activities
title: What is a Temporal Activity?
sidebar_label: Activities
description: Understand Temporal Activities, including Activity Definitions, Types, Executions, idempotency, cancellations, and Local Activities.
description:
Understand Temporal Activities, including Activity Definitions, Types, Executions, idempotency, cancellations, and
Local Activities.
slug: /activities
toc_max_heading_level: 4
keywords:
@@ -15,16 +17,19 @@ tags:
- Durable Execution
---

This guide provides a comprehensive overview of Temporal Activities including [Activity Definition](/activity-definition), [Activity Type](/activity-definition#activity-type), [Activity Execution](/activity-execution), and [Local Activity](/local-activity).
This guide provides a comprehensive overview of Temporal Activities including
[Activity Definition](/activity-definition), [Activity Type](/activity-definition#activity-type),
[Activity Execution](/activity-execution), and [Local Activity](/local-activity).

An Activity is a normal function or method that executes a single, well-defined action (either short or long running), such as calling another service, transcoding a media file, or sending an email message.
Activity code can be non-deterministic.
We recommend that it be [idempotent](/activity-definition#idempotency).
An Activity is a normal function or method that executes a single, well-defined action, such as calling another service,
transcoding a media file, or sending an email message. An Activity can either be short or long-running. Activity code
can be non-deterministic. We recommend that it be [idempotent](/activity-definition#idempotency).

Workflow code orchestrates the execution of Activities, persisting the results.
If an Activity Function Execution fails, any future execution starts from initial state (except [Heartbeats](/encyclopedia/detecting-activity-failures#activity-heartbeat)).
Workflow code orchestrates the execution of Activities, persisting the results. If an Activity Function Execution fails,
any future execution starts from initial state (except
[Heartbeats](/encyclopedia/detecting-activity-failures#activity-heartbeat)).

Activity Functions are executed by Worker Processes.
When the Activity Function returns, the Worker sends the results back to the Temporal Service as part of the [ActivityTaskCompleted](/references/events#activitytaskcompleted) Event.
The Event is added to the Workflow Execution's Event History.
For other Activity-related Events, see [Activity Events](/workflow-execution/event#activity-events).
Activity Functions are executed by Worker Processes. When the Activity Function returns, the Worker sends the results
back to the Temporal Service as part of the [ActivityTaskCompleted](/references/events#activitytaskcompleted) Event. The
Event is added to the Workflow Execution's Event History. For other Activity-related Events, see
[Activity Events](/workflow-execution/event#activity-events).
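
To ground the wording above, here is a minimal sketch of what such an Activity might look like in plain JavaScript, assuming the Temporal TypeScript/JavaScript SDK, where an Activity is just an exported function registered with a Worker; the email endpoint and function name are hypothetical.

```js
// activities.js — a hypothetical Activity: one well-defined action.
// Non-deterministic work such as a network call is allowed here, unlike in Workflow code.
async function sendWelcomeEmail(address) {
  // Passing the recipient address as an idempotency key lets the downstream
  // service deduplicate retries, so a failed execution can safely restart
  // from initial state.
  const response = await fetch('https://example.com/send-email', {
    method: 'POST',
    headers: { 'content-type': 'application/json' },
    body: JSON.stringify({ to: address, idempotencyKey: address }),
  });
  return `status: ${response.status}`;
}

module.exports = { sendWelcomeEmail };
```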