From 0d0c747fca0334f9648c691a60f5e77094edd9d7 Mon Sep 17 00:00:00 2001 From: Josh Johanning Date: Wed, 1 Oct 2025 09:59:23 -0500 Subject: [PATCH 01/32] feat: add copy-discussions shell script --- gh-cli/copy-discussions.sh | 1369 ++++++++++++++++++++++++++++++++++++ 1 file changed, 1369 insertions(+) create mode 100755 gh-cli/copy-discussions.sh diff --git a/gh-cli/copy-discussions.sh b/gh-cli/copy-discussions.sh new file mode 100755 index 0000000..bc3fd9b --- /dev/null +++ b/gh-cli/copy-discussions.sh @@ -0,0 +1,1369 @@ +#!/bin/bash + +# Copy Discussions between repositories in different enterprises +# This script copies discussions from a source repository to a target repository +# using different GitHub tokens for authentication to support cross-enterprise copying +# +# Usage: ./copy-discussions.sh +# Example: ./copy-discussions.sh source-org repo1 target-org repo2 +# +# Prerequisites: +# - SOURCE_TOKEN environment variable with read access to source repository discussions +# - TARGET_TOKEN environment variable with write access to target repository discussions +# - Both tokens must have the 'public_repo' or 'repo' scope +# - GitHub CLI (gh) must be installed +# +# Note: This script copies discussion content, comments, replies, and basic metadata. +# Reactions and other advanced interactions are not copied. +# Attachments (images and files) will not copy over - they need manual handling. + +# TODO: Polls don't copy options +# TODO: mark as answers? +# TODO: copy closed discussions and mark as closed in target? 
set -e

# ANSI color codes used by the logging helpers below
RED='\033[0;31m'
GREEN='\033[0;32m'
YELLOW='\033[1;33m'
NC='\033[0m' # No Color

# Print usage information and exit non-zero.
# NOTE(review): the argument placeholders were stripped from the original
# string by angle-bracket removal; restored from the positional parameters
# ($1..$4 -> SOURCE_ORG SOURCE_REPO TARGET_ORG TARGET_REPO) and the example.
usage() {
    echo "Usage: $0 <source-org> <source-repo> <target-org> <target-repo>"
    echo ""
    echo "Copy discussions from source repository to target repository"
    echo ""
    echo "Required environment variables:"
    echo " SOURCE_TOKEN - GitHub token with read access to source repository"
    echo " TARGET_TOKEN - GitHub token with write access to target repository"
    echo ""
    echo "Example:"
    echo " $0 source-org repo1 target-org repo2"
    exit 1
}

# Log an informational message (timestamped, green) to stderr.
# stderr is used so that functions whose stdout is command-substituted
# (e.g. create_or_get_label_id) can still emit log lines.
log() {
    echo -e "${GREEN}[$(date +'%Y-%m-%d %H:%M:%S')]${NC} $1" >&2
}

# Log a warning message (timestamped, yellow) to stderr.
warn() {
    echo -e "${YELLOW}[$(date +'%Y-%m-%d %H:%M:%S')] WARNING:${NC} $1" >&2
}

# Log an error message (timestamped, red) to stderr.
error() {
    echo -e "${RED}[$(date +'%Y-%m-%d %H:%M:%S')] ERROR:${NC} $1" >&2
}

# Sleep between API calls to stay under GitHub rate limits.
# $1 - seconds to wait (default 2).
rate_limit_sleep() {
    local seconds=${1:-2}
    log "Waiting ${seconds}s to avoid rate limiting..."
    sleep "$seconds"
}

# Detect a rate-limit error in an API response and back off linearly.
# $1 - raw API response text; $2 - attempt number (default 1).
# Returns 0 after sleeping attempt*60 seconds when a rate-limit message is
# found (caller should retry); returns 1 when it is not a rate-limit error.
handle_rate_limit_error() {
    local response="$1"
    local attempt=${2:-1}

    if echo "$response" | grep -q "exceeded a secondary rate limit\|rate limit"; then
        local wait_time=$((attempt * 60)) # Start with 1 minute, then 2, 3, etc.
        warn "Hit rate limit! Waiting ${wait_time} seconds before retrying (attempt $attempt)"
        sleep "$wait_time"
        return 0 # Indicates we should retry
    fi

    return 1 # Not a rate limit error
}

# Exit with an error message if the given executable is not on PATH.
check_command() {
    if ! command -v "$1" &> /dev/null; then
        error "$1 is required but not installed. Please install $1 and try again."
        exit 1
    fi
}

# Check for required dependencies
log "Checking for required dependencies..."
+check_command "gh" +check_command "jq" +log "✓ All required dependencies are installed" + +# Validate input parameters +if [ $# -ne 4 ]; then + usage +fi + +SOURCE_ORG="$1" +SOURCE_REPO="$2" +TARGET_ORG="$3" +TARGET_REPO="$4" + +# Initialize tracking variables +missing_categories=() + +# Validate required environment variables +if [ -z "$SOURCE_TOKEN" ]; then + error "SOURCE_TOKEN environment variable is required" + exit 1 +fi + +if [ -z "$TARGET_TOKEN" ]; then + error "TARGET_TOKEN environment variable is required" + exit 1 +fi + +log "Starting discussion copy process..." +log "Source: $SOURCE_ORG/$SOURCE_REPO" +log "Target: $TARGET_ORG/$TARGET_REPO" +log "" +log "⚡ This script uses conservative rate limiting to avoid GitHub API limits" +log " If you encounter rate limit errors, the script will automatically retry" +log "" + +# GraphQL query to fetch discussions from source repository +fetch_discussions_query=' +query($owner: String!, $name: String!, $cursor: String) { + repository(owner: $owner, name: $name) { + discussions(first: 100, after: $cursor, orderBy: {field: CREATED_AT, direction: ASC}) { + pageInfo { + hasNextPage + endCursor + } + nodes { + id + title + body + category { + id + name + slug + description + emoji + } + labels(first: 100) { + nodes { + id + name + color + description + } + } + author { + login + } + createdAt + closed + locked + upvoteCount + url + number + + } + } + } +}' + +# GraphQL query to fetch discussion categories from target repository +fetch_categories_query=' +query($owner: String!, $name: String!) { + repository(owner: $owner, name: $name) { + discussionCategories(first: 100) { + nodes { + id + name + slug + emoji + description + } + } + } +}' + +# GraphQL query to check if discussions are enabled +check_discussions_enabled_query=' +query($owner: String!, $name: String!) 
{ + repository(owner: $owner, name: $name) { + hasDiscussionsEnabled + discussionCategories(first: 1) { + nodes { + id + } + } + } +}' + +# GraphQL query to fetch comments for a specific discussion +fetch_discussion_comments_query=' +query($discussionId: ID!) { + node(id: $discussionId) { + ... on Discussion { + comments(first: 100) { + nodes { + id + body + author { + login + } + createdAt + upvoteCount + replies(first: 50) { + nodes { + id + body + author { + login + } + createdAt + upvoteCount + } + } + } + } + } + } +}' + +# GraphQL query to fetch labels from target repository +fetch_labels_query=' +query($owner: String!, $name: String!) { + repository(owner: $owner, name: $name) { + labels(first: 100) { + nodes { + id + name + color + description + } + } + } +}' + +# GraphQL mutation to create label in target repository +create_label_mutation=' +mutation($repositoryId: ID!, $name: String!, $color: String!, $description: String) { + createLabel(input: { + repositoryId: $repositoryId, + name: $name, + color: $color, + description: $description + }) { + label { + id + name + } + } +}' + +# GraphQL mutation to create discussion in target repository +create_discussion_mutation=' +mutation($repositoryId: ID!, $categoryId: ID!, $title: String!, $body: String!) { + createDiscussion(input: { + repositoryId: $repositoryId, + categoryId: $categoryId, + title: $title, + body: $body + }) { + clientMutationId + discussion { + id + title + url + number + } + } +}' + +# GraphQL mutation to add labels to discussion +add_labels_to_discussion_mutation=' +mutation($labelableId: ID!, $labelIds: [ID!]!) { + addLabelsToLabelable(input: { + labelableId: $labelableId, + labelIds: $labelIds + }) { + labelable { + labels(first: 100) { + nodes { + name + } + } + } + } +}' + +# GraphQL mutation to add comment to discussion +add_discussion_comment_mutation=' +mutation($discussionId: ID!, $body: String!) 
{ + addDiscussionComment(input: { + discussionId: $discussionId, + body: $body + }) { + comment { + id + body + createdAt + } + } +}' + +# GraphQL mutation to add reply to discussion comment +add_discussion_comment_reply_mutation=' +mutation($discussionId: ID!, $replyToId: ID!, $body: String!) { + addDiscussionComment(input: { + discussionId: $discussionId, + replyToId: $replyToId, + body: $body + }) { + comment { + id + body + createdAt + } + } +}' + +# Function to get repository ID +get_repository_id() { + local org=$1 + local repo=$2 + local token=$3 + + local query=' + query($owner: String!, $name: String!) { + repository(owner: $owner, name: $name) { + id + } + }' + + GH_TOKEN="$token" gh api graphql \ + -f query="$query" \ + -f owner="$org" \ + -f name="$repo" \ + --jq '.data.repository.id' +} + +# Function to fetch discussion categories from target repository +# Function to check if discussions are enabled in target repository +check_discussions_enabled() { + log "Checking if discussions are enabled in target repository..." + + rate_limit_sleep 4 + + local response + response=$(GH_TOKEN="$TARGET_TOKEN" gh api graphql \ + -f query="$check_discussions_enabled_query" \ + -f owner="$TARGET_ORG" \ + -f name="$TARGET_REPO" 2>&1) + + if [ $? -ne 0 ]; then + error "Failed to check discussions status: $response" + return 1 + fi + + local has_discussions_enabled + has_discussions_enabled=$(echo "$response" | jq -r '.data.repository.hasDiscussionsEnabled // false') + + if [ "$has_discussions_enabled" != "true" ]; then + error "Discussions are not enabled in the target repository: $TARGET_ORG/$TARGET_REPO" + error "Please enable discussions in the repository settings before running this script." + return 1 + fi + + log "✓ Discussions are enabled in target repository" + return 0 +} + +# Function to fetch available categories from target repository +fetch_target_categories() { + log "Fetching available categories from target repository..." 
+ + rate_limit_sleep 4 + + local response + response=$(GH_TOKEN="$TARGET_TOKEN" gh api graphql \ + -f query="$fetch_categories_query" \ + -f owner="$TARGET_ORG" \ + -f name="$TARGET_REPO" 2>&1) + + if [ $? -ne 0 ]; then + error "Failed to fetch categories: $response" + return 1 + fi + + # Check for GraphQL errors + if echo "$response" | jq -e '.errors // empty' > /dev/null 2>&1; then + error "GraphQL error in fetch categories: $(echo "$response" | jq -r '.errors[0].message // "Unknown error"')" + return 1 + fi + + target_categories=$(echo "$response" | jq -c '.data.repository.discussionCategories.nodes[]?' 2>/dev/null) + + if [ -z "$target_categories" ]; then + warn "No discussion categories found in target repository" + else + local category_count + category_count=$(echo "$target_categories" | wc -l | tr -d ' ') + log "Found $category_count categories in target repository" + fi +} + +# Function to find matching category ID by name or slug +find_category_id() { + local source_category_name="$1" + local source_category_slug="$2" + + echo "$target_categories" | jq -r --arg name "$source_category_name" --arg slug "$source_category_slug" ' + select(.name == $name or .slug == $slug) | .id + ' | head -1 +} + +# Function to create discussion category if it doesn't exist +create_or_get_category_id() { + local category_name="$1" + local category_slug="$2" + local category_description="$3" + local category_emoji="$4" + + # First try to find existing category + + # Validate target_categories JSON + if ! echo "$target_categories" | jq . 
> /dev/null 2>&1; then + error "target_categories contains invalid JSON:" + error "$target_categories" + return 1 + fi + + local existing_id + existing_id=$(echo "$target_categories" | jq -r --arg name "$category_name" --arg slug "$category_slug" ' + select(.name == $name or .slug == $slug) | .id + ' | head -1) + + if [ -n "$existing_id" ] && [ "$existing_id" != "null" ]; then + echo "$existing_id" + return 0 + fi + + # Category doesn't exist - GitHub doesn't support creating discussion categories via API + warn "Category '$category_name' ($category_slug) not found in target repository" + + # Track missing category for summary + local found=false + for existing_cat in "${missing_categories[@]}"; do + if [ "$existing_cat" = "$category_name" ]; then + found=true + break + fi + done + if [ "$found" = false ]; then + missing_categories+=("$category_name") + fi + + # Try to find "General" category as fallback + local general_id + general_id=$(echo "$target_categories" | jq -r ' + select(.name == "General" or .slug == "general") | .id + ' | head -1) + + if [ -n "$general_id" ] && [ "$general_id" != "null" ]; then + warn "Using 'General' category as fallback for '$category_name'" + echo "$general_id" + return 0 + fi + + # If no General category, use the first available category + local first_category_id + first_category_id=$(echo "$target_categories" | jq -r '.id' | head -1) + + if [ -n "$first_category_id" ] && [ "$first_category_id" != "null" ]; then + local first_category_name + first_category_name=$(echo "$target_categories" | jq -r '.name' | head -1) + warn "Using '$first_category_name' category as fallback for '$category_name'" + echo "$first_category_id" + return 0 + fi + + error "No available categories found in target repository to use as fallback" + return 1 +} + +# Function to fetch labels from target repository +fetch_target_labels() { + log "Fetching labels from target repository..." 
+ + local max_retries=3 + local attempt=1 + + while [ $attempt -le $max_retries ]; do + rate_limit_sleep 3 # Increased default wait time + + local response + response=$(GH_TOKEN="$TARGET_TOKEN" gh api graphql \ + -f query="$fetch_labels_query" \ + -f owner="$TARGET_ORG" \ + -f name="$TARGET_REPO" 2>&1) + + local exit_code=$? + + if [ $exit_code -eq 0 ]; then + # Success, process the response + break + else + # Check if it's a rate limit error + if handle_rate_limit_error "$response" "$attempt"; then + attempt=$((attempt + 1)) + log "Retrying labels fetch (attempt $attempt/$max_retries)..." + continue + else + error "Failed to fetch labels: $response" + return 1 + fi + fi + done + + if [ $attempt -gt $max_retries ]; then + error "Failed to fetch labels after $max_retries attempts due to rate limiting" + return 1 + fi + + # Check if response is valid JSON + if ! echo "$response" | jq . > /dev/null 2>&1; then + error "Invalid JSON response from labels API: $response" + return 1 + fi + + # Check for GraphQL errors + if echo "$response" | jq -e '.errors // empty' > /dev/null 2>&1; then + error "GraphQL error in fetch labels: $(echo "$response" | jq -r '.errors[0].message // "Unknown error"')" + return 1 + fi + + echo "$response" | jq -c '.data.repository.labels.nodes[]?' 2>/dev/null +} + +# Function to fetch comments for a specific discussion +fetch_discussion_comments() { + local discussion_id="$1" + + log "Fetching comments for discussion $discussion_id..." + + rate_limit_sleep 2 + + local response + response=$(GH_TOKEN="$SOURCE_TOKEN" gh api graphql \ + -f query="$fetch_discussion_comments_query" \ + -f discussionId="$discussion_id" 2>&1) + + if [ $? 
-ne 0 ]; then + error "Failed to fetch comments for discussion $discussion_id: $response" + return 1 + fi + + # Check for GraphQL errors + if echo "$response" | jq -e '.errors // empty' > /dev/null 2>&1; then + error "GraphQL error in fetch comments: $(echo "$response" | jq -r '.errors[0].message // "Unknown error"')" + return 1 + fi + + # Extract comments + local comments + comments=$(echo "$response" | jq -c '.data.node.comments.nodes // []' 2>/dev/null) + + if [ -z "$comments" ]; then + log "No comments found for discussion" + echo "[]" + else + echo "$comments" + fi +} + +# Function to find matching label ID by name +find_label_id() { + local label_name="$1" + + echo "$target_labels" | jq -r --arg name "$label_name" ' + select(.name == $name) | .id + ' | head -1 +} + +# Function to create label if it doesn't exist +create_or_get_label_id() { + local label_name="$1" + local label_color="$2" + local label_description="$3" + + # First try to find existing label + local existing_id + existing_id=$(find_label_id "$label_name") + + if [ -n "$existing_id" ] && [ "$existing_id" != "null" ]; then + echo "$existing_id" + return 0 + fi + + # Label doesn't exist, create it + log "Creating new label: '$label_name'" + + rate_limit_sleep 3 + + local response + response=$(GH_TOKEN="$TARGET_TOKEN" gh api graphql \ + -f query="$create_label_mutation" \ + -f repositoryId="$target_repo_id" \ + -f name="$label_name" \ + -f color="$label_color" \ + -f description="$label_description" 2>&1) + + if [ $? 
-eq 0 ]; then + local new_label_id + new_label_id=$(echo "$response" | jq -r '.data.createLabel.label.id') + + if [ -n "$new_label_id" ] && [ "$new_label_id" != "null" ]; then + log "✓ Created label '$label_name' with ID: $new_label_id" + + # Update our local cache of target labels (if target_labels is an array) + if echo "$target_labels" | jq -e 'type == "array"' >/dev/null 2>&1; then + target_labels=$(echo "$target_labels" | jq --arg id "$new_label_id" --arg name "$label_name" --arg color "$label_color" --arg desc "$label_description" '. + [{id: $id, name: $name, color: $color, description: $desc}]') + else + # If target_labels is not an array, convert it + target_labels=$(jq -n --arg id "$new_label_id" --arg name "$label_name" --arg color "$label_color" --arg desc "$label_description" '[{id: $id, name: $name, color: $color, description: $desc}]') + fi + + echo "$new_label_id" + return 0 + fi + fi + + error "Failed to create label '$label_name': $response" + return 1 +} + +# Function to add labels to a discussion +add_labels_to_discussion() { + local discussion_id="$1" + shift + local label_ids=("$@") + + if [ ${#label_ids[@]} -eq 0 ]; then + return 0 + fi + + # Convert array to JSON array format for GraphQL + local label_ids_json + label_ids_json=$(printf '%s\n' "${label_ids[@]}" | jq -R . | jq -s . | jq -c .) 
+ + log "Adding ${#label_ids[@]} labels to discussion" + log "Discussion ID: $discussion_id" + log "Label IDs (compact JSON): $label_ids_json" + + rate_limit_sleep 2 + + # Construct the full GraphQL request with variables + local graphql_request + graphql_request=$(jq -n \ + --arg query "$add_labels_to_discussion_mutation" \ + --arg labelableId "$discussion_id" \ + --argjson labelIds "$label_ids_json" \ + '{ + query: $query, + variables: { + labelableId: $labelableId, + labelIds: $labelIds + } + }') + + log "GraphQL request: $graphql_request" + + local response + response=$(GH_TOKEN="$TARGET_TOKEN" gh api graphql --input - <<< "$graphql_request" 2>&1) + + local api_exit_code=$? + + log "GraphQL API exit code: $api_exit_code" + log "GraphQL API response: $response" + + if [ $api_exit_code -eq 0 ]; then + # Check if there are any errors in the response + local errors + errors=$(echo "$response" | jq -r '.errors // empty | .[] | .message' 2>/dev/null) + if [ -n "$errors" ]; then + error "GraphQL errors in response: $errors" + return 1 + fi + + log "✓ Successfully added labels to discussion" + return 0 + else + error "Failed to add labels to discussion (exit code: $api_exit_code): $response" + return 1 + fi +} + +# Function to add comment to discussion +add_discussion_comment() { + local discussion_id="$1" + local comment_body="$2" + local original_author="$3" + local original_created="$4" + + # Add metadata to comment body with collapsible section + local enhanced_body="$comment_body"$'\n\n'"---"$'\n\n'"
<details>"$'\n'"<summary>Original comment details</summary>"$'\n\n'"**Original author:** @$original_author"$'\n'"**Created:** $original_created"$'\n\n'"</details>
" + + log "Adding comment to discussion" + + rate_limit_sleep 2 + + local response + response=$(GH_TOKEN="$TARGET_TOKEN" gh api graphql \ + -f query="$add_discussion_comment_mutation" \ + -f discussionId="$discussion_id" \ + -f body="$enhanced_body" 2>&1) + + local exit_code=$? + + if [ $exit_code -eq 0 ]; then + local comment_id + comment_id=$(echo "$response" | jq -r '.data.addDiscussionComment.comment.id // empty') + + if [ -n "$comment_id" ] && [ "$comment_id" != "null" ]; then + log "✓ Added comment with ID: $comment_id" + echo "$comment_id" + return 0 + else + error "Failed to extract comment ID from response: $response" + return 1 + fi + else + error "Failed to add comment: $response" + return 1 + fi +} + +# Function to add reply to discussion comment +add_discussion_comment_reply() { + local discussion_id="$1" + local parent_comment_id="$2" + local reply_body="$3" + local original_author="$4" + local original_created="$5" + + # Add metadata to reply body with collapsible section + local enhanced_body="$reply_body"$'\n\n'"---"$'\n\n'"
<details>"$'\n'"<summary>Original reply details</summary>"$'\n\n'"**Original author:** @$original_author"$'\n'"**Created:** $original_created"$'\n\n'"</details>
" + + log "Adding reply to comment $parent_comment_id" + + rate_limit_sleep 2 + + local response + response=$(GH_TOKEN="$TARGET_TOKEN" gh api graphql \ + -f query="$add_discussion_comment_reply_mutation" \ + -f discussionId="$discussion_id" \ + -f replyToId="$parent_comment_id" \ + -f body="$enhanced_body" 2>&1) + + local exit_code=$? + + if [ $exit_code -eq 0 ]; then + local reply_id + reply_id=$(echo "$response" | jq -r '.data.addDiscussionComment.comment.id // empty') + + if [ -n "$reply_id" ] && [ "$reply_id" != "null" ]; then + log "✓ Added reply with ID: $reply_id" + echo "$reply_id" + return 0 + else + error "Failed to extract reply ID from response: $response" + return 1 + fi + else + error "Failed to add reply: $response" + return 1 + fi +} + +# Function to copy discussion comments +copy_discussion_comments() { + local discussion_id="$1" + local comments_json="$2" + + if [ -z "$comments_json" ] || [ "$comments_json" = "null" ]; then + log "No comments to copy for this discussion" + return 0 + fi + + local comment_count + comment_count=$(echo "$comments_json" | jq -r 'length // 0') + + if [ "$comment_count" -eq 0 ]; then + log "No comments to copy for this discussion" + return 0 + fi + + log "Copying $comment_count comments..." 
+ total_comments=$((total_comments + comment_count)) + + local comment_index=0 + while [ $comment_index -lt "$comment_count" ]; do + local comment + comment=$(echo "$comments_json" | jq -r ".[$comment_index]") + + if [ "$comment" != "null" ]; then + local comment_body author created_at replies + comment_body=$(echo "$comment" | jq -r '.body // ""') + author=$(echo "$comment" | jq -r '.author.login // "unknown"') + created_at=$(echo "$comment" | jq -r '.createdAt // ""') + replies=$(echo "$comment" | jq -c '.replies.nodes // []') + + if [ -n "$comment_body" ]; then + log "Copying comment by @$author" + + # Add the comment + set +e # Don't exit on error + local new_comment_id + new_comment_id=$(add_discussion_comment "$discussion_id" "$comment_body" "$author" "$created_at") + local comment_result=$? + set -e + + if [ $comment_result -eq 0 ] && [ -n "$new_comment_id" ]; then + copied_comments=$((copied_comments + 1)) + # Copy replies if any exist + local reply_count + reply_count=$(echo "$replies" | jq -r 'length // 0') + + if [ "$reply_count" -gt 0 ]; then + log "Copying $reply_count replies to comment..." 
+ + local reply_index=0 + while [ $reply_index -lt "$reply_count" ]; do + local reply + reply=$(echo "$replies" | jq -r ".[$reply_index]") + + if [ "$reply" != "null" ]; then + local reply_body reply_author reply_created + reply_body=$(echo "$reply" | jq -r '.body // ""') + reply_author=$(echo "$reply" | jq -r '.author.login // "unknown"') + reply_created=$(echo "$reply" | jq -r '.createdAt // ""') + + if [ -n "$reply_body" ]; then + log "Copying reply by @$reply_author" + + set +e + add_discussion_comment_reply "$discussion_id" "$new_comment_id" "$reply_body" "$reply_author" "$reply_created" >/dev/null + set -e + fi + fi + + reply_index=$((reply_index + 1)) + done + fi + else + warn "Failed to copy comment by @$author, skipping replies" + fi + fi + fi + + comment_index=$((comment_index + 1)) + done + + log "✓ Finished copying comments" +} + +# Function to create discussion +create_discussion() { + local repo_id="$1" + local category_id="$2" + local title="$3" + local body="$4" + local source_url="$5" + local source_author="$6" + local source_created="$7" + + # Add metadata to body with collapsible section + local enhanced_body="$body"$'\n\n'"---"$'\n\n'"
<details>"$'\n'"<summary>Original discussion details</summary>"$'\n\n'"**Original author:** @$source_author"$'\n'"**Created:** $source_created"$'\n'"**Source:** $source_url"$'\n\n'"</details>
" + + log "Creating discussion: '$title'" + + rate_limit_sleep 3 + + local response + response=$(GH_TOKEN="$TARGET_TOKEN" gh api graphql \ + -f query="$create_discussion_mutation" \ + -f repositoryId="$repo_id" \ + -f categoryId="$category_id" \ + -f title="$title" \ + -f body="$enhanced_body" 2>&1) + + local exit_code=$? + + if [ $exit_code -eq 0 ]; then + echo "$response" + return 0 + else + error "Failed to create discussion: $response" + return $exit_code + fi +} + +# Get source repository ID to verify access +log "Verifying access to source repository..." +source_repo_id=$(get_repository_id "$SOURCE_ORG" "$SOURCE_REPO" "$SOURCE_TOKEN") +if [ -z "$source_repo_id" ]; then + error "Failed to get source repository ID. Check if repository exists and SOURCE_TOKEN has access." + exit 1 +fi +log "Source repository ID: $source_repo_id" + +# Check if discussions are enabled in source repository +log "Checking if discussions are enabled in source repository..." +rate_limit_sleep 2 + +source_discussions_check=$(GH_TOKEN="$SOURCE_TOKEN" gh api graphql \ + -f query="$check_discussions_enabled_query" \ + -f owner="$SOURCE_ORG" \ + -f name="$SOURCE_REPO" 2>&1) + +if [ $? -ne 0 ]; then + error "Failed to check discussions status in source repository: $source_discussions_check" + exit 1 +fi + +source_has_discussions=$(echo "$source_discussions_check" | jq -r '.data.repository.hasDiscussionsEnabled // false') +if [ "$source_has_discussions" != "true" ]; then + error "Discussions are not enabled in the source repository: $SOURCE_ORG/$SOURCE_REPO" + exit 1 +fi +log "✓ Discussions are enabled in source repository" + +# Get target repository ID +log "Getting target repository ID..." +target_repo_id=$(get_repository_id "$TARGET_ORG" "$TARGET_REPO" "$TARGET_TOKEN") +if [ -z "$target_repo_id" ]; then + error "Failed to get target repository ID. Check if repository exists and token has access." 
+ exit 1 +fi +log "Target repository ID: $target_repo_id" + +# Check if discussions are enabled in target repository +if ! check_discussions_enabled; then + exit 1 +fi + +# Fetch target repository categories +if ! fetch_target_categories; then + exit 1 +fi + +if [ -z "$target_categories" ]; then + error "Failed to fetch discussion categories from target repository" + exit 1 +fi + +log "Available categories in target repository:" +echo "$target_categories" | jq -r '" " + .name + " (" + .slug + ")"' + +# Fetch target repository labels +target_labels=$(fetch_target_labels) +if [ $? -ne 0 ] || [ -z "$target_labels" ]; then + warn "Failed to fetch labels or no labels found in target repository" + target_labels="[]" + log "Available labels in target repository: 0 labels" +else + # Count labels properly + label_count=$(echo "$target_labels" | jq -s 'length' 2>/dev/null || echo "0") + log "Available labels in target repository: $label_count labels" +fi + +# Initialize counters +total_discussions=0 +created_discussions=0 +skipped_discussions=0 +total_comments=0 +copied_comments=0 + +# Function to process discussions page +process_discussions_page() { + local cursor="$1" + + # Build cursor parameter + local cursor_param="" + if [ -n "$cursor" ]; then + cursor_param="-f cursor=$cursor" + fi + + log "Fetching discussions page (cursor: ${cursor:-"null"})..." + + rate_limit_sleep 3 + + # Fetch discussions from source repository + log "Executing GraphQL query with parameters:" + log " owner: $SOURCE_ORG" + log " name: $SOURCE_REPO" + log " cursor: ${cursor:-"null"}" + + local response + response=$(GH_TOKEN="$SOURCE_TOKEN" gh api graphql \ + -f query="$fetch_discussions_query" \ + -f owner="$SOURCE_ORG" \ + -f name="$SOURCE_REPO" \ + $cursor_param 2>&1) + + local api_exit_code=$? + log "API call exit code: $api_exit_code" + log "Response length: ${#response} characters" + log "First 200 chars of response: ${response:0:200}" + + # Debug: Show what we got back + if ! 
echo "$response" | jq . > /dev/null 2>&1; then + error "Invalid JSON response from source discussions API!" + error "Full response:" + error "$response" + error "---" + error "API exit code was: $api_exit_code" + error "This could be:" + error " 1. Authentication issue with SOURCE_TOKEN" + error " 2. Repository access permissions" + error " 3. Repository doesn't exist or discussions disabled" + error " 4. Network/API connectivity issue" + error " 5. GraphQL query syntax error" + return 1 + fi + + # Check for GraphQL errors + if echo "$response" | jq -e '.errors // empty' > /dev/null 2>&1; then + error "GraphQL error in fetch discussions: $(echo "$response" | jq -r '.errors[0].message // "Unknown error"')" + return 1 + fi + + local discussions + discussions=$(echo "$response" | jq -c '.data.repository.discussions.nodes[]' 2>&1) + local jq_extract_exit_code=$? + + log "JQ extraction exit code: $jq_extract_exit_code" + log "Extracted discussions from response" + log "Discussions data length: ${#discussions} characters" + + if [ $jq_extract_exit_code -ne 0 ]; then + error "Failed to extract discussions with jq:" + error "$discussions" + return 1 + fi + + if [ -z "$discussions" ]; then + log "No discussions found on this page" + log "Checking response structure:" + echo "$response" | jq '.data.repository.discussions' 2>/dev/null || log "Failed to parse discussions structure" + return 1 + fi + + local discussion_count + discussion_count=$(echo "$discussions" | wc -l | tr -d ' ') + log "Found $discussion_count discussions to process on this page" + + # Process each discussion + local discussion_counter=0 + log "Starting to iterate through discussions..." 
+ log "About to process discussions with while loop" + + while IFS= read -r discussion; do + discussion_counter=$((discussion_counter + 1)) + log "=== DISCUSSION $discussion_counter ===" + + if [ -z "$discussion" ]; then + log "Skipping empty discussion entry at position $discussion_counter" + continue + fi + + total_discussions=$((total_discussions + 1)) + + log "Processing discussion $discussion_counter of this page (total: $total_discussions)" + + # Show the COMPLETE JSON for debugging + log "=== COMPLETE DISCUSSION JSON ===" + printf '%s\n' "$discussion" + log "=== END COMPLETE JSON ===" + + # Debug: Show what we're trying to parse + log "Discussion data length: ${#discussion} characters" + log "Discussion data (first 200 chars): ${discussion:0:200}" + log "Discussion data (last 200 chars): ${discussion: -200}" + + # Try to identify the exact jq error + local jq_error + jq_error=$(echo "$discussion" | jq . 2>&1) + local jq_exit_code=$? + + if [ $jq_exit_code -ne 0 ]; then + error "JSON parsing failed with exit code: $jq_exit_code" + error "JQ error message: $jq_error" + error "Full discussion data:" + error "$discussion" + error "---" + error "Hexdump of first 50 bytes:" + echo "$discussion" | head -c 50 | hexdump -C + error "---" + skipped_discussions=$((skipped_discussions + 1)) + continue + fi + + log "✓ Discussion JSON is valid" + + # Extract discussion details with error handling + local title body category_name category_slug category_description category_emoji author created_at source_url number + + log "Extracting title..." + title=$(echo "$discussion" | jq -r '.title' 2>&1) + if [ $? -ne 0 ]; then + error "Failed to extract title: $title" + skipped_discussions=$((skipped_discussions + 1)) + continue + fi + log "Title: $title" + + log "Extracting body..." + body=$(echo "$discussion" | jq -r '.body // ""' 2>&1) + if [ $? 
-ne 0 ]; then + error "Failed to extract body: $body" + skipped_discussions=$((skipped_discussions + 1)) + continue + fi + + log "Extracting category details..." + category_name=$(echo "$discussion" | jq -r '.category.name' 2>&1) + category_slug=$(echo "$discussion" | jq -r '.category.slug' 2>&1) + category_description=$(echo "$discussion" | jq -r '.category.description // ""' 2>&1) + category_emoji=$(echo "$discussion" | jq -r '.category.emoji // ":speech_balloon:"' 2>&1) + + log "Extracting author..." + author=$(echo "$discussion" | jq -r '.author.login // "unknown"' 2>&1) + if [ $? -ne 0 ]; then + error "Failed to extract author: $author" + skipped_discussions=$((skipped_discussions + 1)) + continue + fi + + log "Extracting createdAt..." + created_at=$(echo "$discussion" | jq -r '.createdAt' 2>&1) + if [ $? -ne 0 ]; then + error "Failed to extract createdAt: $created_at" + skipped_discussions=$((skipped_discussions + 1)) + continue + fi + + log "Extracting url..." + source_url=$(echo "$discussion" | jq -r '.url' 2>&1) + if [ $? -ne 0 ]; then + error "Failed to extract url: $source_url" + skipped_discussions=$((skipped_discussions + 1)) + continue + fi + + log "Extracting number..." + number=$(echo "$discussion" | jq -r '.number' 2>&1) + if [ $? -ne 0 ]; then + error "Failed to extract number: $number" + skipped_discussions=$((skipped_discussions + 1)) + continue + fi + + # Get or create category in target repository + log "Getting/creating category: '$category_name' ($category_slug)" + local target_category_id + set +e # Temporarily disable exit on error + target_category_id=$(create_or_get_category_id "$category_name" "$category_slug" "$category_description" "$category_emoji") + local category_exit_code=$? 
+ set -e # Re-enable exit on error + + if [ $category_exit_code -ne 0 ]; then + error "create_or_get_category_id failed with exit code: $category_exit_code" + error "Output was: $target_category_id" + skipped_discussions=$((skipped_discussions + 1)) + continue + fi + + if [ -z "$target_category_id" ] || [ "$target_category_id" == "null" ]; then + error "Failed to get or create category '$category_name' ($category_slug). Skipping discussion #$number: '$title'" + skipped_discussions=$((skipped_discussions + 1)) + continue + fi + + # Create the discussion + local new_discussion_id new_discussion_response + new_discussion_response=$(create_discussion "$target_repo_id" "$target_category_id" "$title" "$body" "$source_url" "$author" "$created_at") + + if [ $? -eq 0 ]; then + # Extract the discussion ID from the response + new_discussion_id=$(echo "$new_discussion_response" | jq -r '.data.createDiscussion.discussion.id // empty') + + if [ -n "$new_discussion_id" ]; then + created_discussions=$((created_discussions + 1)) + log "✓ Created discussion #$number: '$title'" + + # Process labels if any + local labels + labels=$(echo "$discussion" | jq -c '.labels.nodes[]?') + + if [ -n "$labels" ]; then + local label_ids=() + + # Process each label + while IFS= read -r label; do + if [ -n "$label" ]; then + local label_name label_color label_description label_id + label_name=$(echo "$label" | jq -r '.name') + label_color=$(echo "$label" | jq -r '.color') + label_description=$(echo "$label" | jq -r '.description // ""') + + # Get or create label + log "Processing label: '$label_name' (color: $label_color)" + set +e # Temporarily disable exit on error + label_id=$(create_or_get_label_id "$label_name" "$label_color" "$label_description") + local label_exit_code=$? 
+ set -e # Re-enable exit on error + log "Label ID result: '$label_id' (exit code: $label_exit_code)" + + if [ $label_exit_code -eq 0 ] && [ -n "$label_id" ] && [ "$label_id" != "null" ]; then + label_ids+=("$label_id") + log "Added label ID to array: $label_id" + else + log "Skipping invalid label ID: '$label_id' (exit code: $label_exit_code)" + fi + fi + done <<< "$labels" + + log "Finished processing labels. Total label IDs collected: ${#label_ids[@]}" + + # Add labels to the discussion if we have any + if [ ${#label_ids[@]} -gt 0 ]; then + if add_labels_to_discussion "$new_discussion_id" "${label_ids[@]}"; then + log "Completed adding labels to discussion" + else + error "Failed to add labels to discussion, but continuing..." + fi + else + log "No valid labels to add to discussion" + fi + fi + + # Copy discussion comments (always run regardless of labels) + log "Processing comments for discussion..." + local source_discussion_id + source_discussion_id=$(echo "$discussion" | jq -r '.id') + + if [ -n "$source_discussion_id" ] && [ "$source_discussion_id" != "null" ]; then + set +e # Don't exit on error for comment fetching + local comments + comments=$(fetch_discussion_comments "$source_discussion_id") + local fetch_result=$? 
+ set -e + + if [ $fetch_result -eq 0 ] && [ -n "$comments" ] && [ "$comments" != "null" ] && [ "$comments" != "[]" ]; then + copy_discussion_comments "$new_discussion_id" "$comments" + else + log "No comments to copy for this discussion" + fi + else + warn "Could not extract source discussion ID for comment fetching" + fi + else + warn "Discussion created but couldn't extract ID from response" + created_discussions=$((created_discussions + 1)) + fi + else + error "Failed to create discussion #$number: '$title'" + skipped_discussions=$((skipped_discussions + 1)) + fi + + log "✅ Finished processing discussion #$number: '$title'" + + # Delay between discussions to avoid rate limiting + sleep 5 + + done <<< "$discussions" + + # Check if there are more pages + local has_next_page next_cursor + has_next_page=$(echo "$response" | jq -r '.data.repository.discussions.pageInfo.hasNextPage') + next_cursor=$(echo "$response" | jq -r '.data.repository.discussions.pageInfo.endCursor') + + log "Pagination info:" + log " hasNextPage: $has_next_page" + log " endCursor: ${next_cursor:-"null"}" + + if [ "$has_next_page" = "true" ]; then + log "Processing next page with cursor: $next_cursor" + process_discussions_page "$next_cursor" + else + log "No more pages to process" + fi +} + +# Test discussions access first +log "Testing discussions access..." +rate_limit_sleep 2 + +test_discussions_query=' +query($owner: String!, $name: String!) { + repository(owner: $owner, name: $name) { + discussions(first: 1) { + totalCount + nodes { + title + } + } + } +}' + +test_response=$(GH_TOKEN="$SOURCE_TOKEN" gh api graphql \ + -f query="$test_discussions_query" \ + -f owner="$SOURCE_ORG" \ + -f name="$SOURCE_REPO" 2>&1) + +if ! echo "$test_response" | jq . 
> /dev/null 2>&1; then + error "Failed to test discussions access:" + error "Raw response: $test_response" + exit 1 +fi + +discussion_count=$(echo "$test_response" | jq -r '.data.repository.discussions.totalCount // 0') +log "Found $discussion_count total discussions in source repository" + +if [ "$discussion_count" -eq 0 ]; then + log "No discussions found in source repository. Nothing to copy." + exit 0 +fi + +# Start processing discussions +log "Starting to fetch and copy discussions..." +process_discussions_page "" + +# Summary +log "Discussion copy completed!" +log "Total discussions found: $total_discussions" +log "Discussions created: $created_discussions" +log "Discussions skipped: $skipped_discussions" +log "Total comments found: $total_comments" +log "Comments copied: $copied_comments" + +if [ ${#missing_categories[@]} -gt 0 ]; then + warn "The following categories were missing and need to be created manually:" + for missing_cat in "${missing_categories[@]}"; do + warn " - $missing_cat" + done + warn "" + warn "To create categories manually:" + warn "1. Go to https://github.com/$TARGET_ORG/$TARGET_REPO/discussions" + warn "2. Click 'New discussion'" + warn "3. Look for category management options" + warn "4. Create the missing categories with appropriate names and descriptions" +fi + +if [ $skipped_discussions -gt 0 ]; then + warn "Some discussions were skipped. Please check the categories in the target repository." +fi + +log "All done! 
✨" \ No newline at end of file From 669e7a8b2cacd7f32a65683f205407dbd783d86e Mon Sep 17 00:00:00 2001 From: Josh Johanning Date: Wed, 1 Oct 2025 10:00:28 -0500 Subject: [PATCH 02/32] feat: add copy-discussions node script --- scripts/copy-discussions.js | 820 ++++++++++++++++++++++++++++++++++++ 1 file changed, 820 insertions(+) create mode 100644 scripts/copy-discussions.js diff --git a/scripts/copy-discussions.js b/scripts/copy-discussions.js new file mode 100644 index 0000000..ab203a9 --- /dev/null +++ b/scripts/copy-discussions.js @@ -0,0 +1,820 @@ +#!/usr/bin/env node + +// +// Copy Discussions between repositories in different enterprises +// This script copies discussions from a source repository to a target repository +// using different GitHub tokens for authentication to support cross-enterprise copying +// +// Usage: +// node copy-discussions.js +// +// Example: +// node copy-discussions.js source-org repo1 target-org repo2 +// +// Prerequisites: +// - SOURCE_TOKEN environment variable with read access to source repository discussions +// - TARGET_TOKEN environment variable with write access to target repository discussions +// - Both tokens must have the 'repo' scope +// - Dependencies installed via `npm i octokit` +// +// Note: This script copies discussion content, comments, replies, and basic metadata. +// Reactions and other advanced interactions are not copied. +// Attachments (images and files) will not copy over - they need manual handling. 
+// +// TODO: Polls don't copy options +// TODO: Mark as answers +// TODO: Copy closed discussions and mark as closed in target + +const { Octokit } = require("octokit"); + +// Parse command line arguments +const args = process.argv.slice(2); +if (args.length !== 4) { + console.error("Usage: node copy-discussions.js "); + console.error("\nExample:"); + console.error(" node copy-discussions.js source-org repo1 target-org repo2"); + process.exit(1); +} + +const [SOURCE_ORG, SOURCE_REPO, TARGET_ORG, TARGET_REPO] = args; + +// Validate environment variables +if (!process.env.SOURCE_TOKEN) { + console.error("ERROR: SOURCE_TOKEN environment variable is required"); + process.exit(1); +} + +if (!process.env.TARGET_TOKEN) { + console.error("ERROR: TARGET_TOKEN environment variable is required"); + process.exit(1); +} + +// Initialize Octokit instances +const sourceOctokit = new Octokit({ + auth: process.env.SOURCE_TOKEN +}); + +const targetOctokit = new Octokit({ + auth: process.env.TARGET_TOKEN +}); + +// Tracking variables +let missingCategories = []; +let totalDiscussions = 0; +let createdDiscussions = 0; +let skippedDiscussions = 0; +let totalComments = 0; +let copiedComments = 0; + +// Helper functions +function log(message) { + const timestamp = new Date().toISOString().replace('T', ' ').split('.')[0]; + console.log(`\x1b[32m[${timestamp}]\x1b[0m ${message}`); +} + +function warn(message) { + const timestamp = new Date().toISOString().replace('T', ' ').split('.')[0]; + console.warn(`\x1b[33m[${timestamp}] WARNING:\x1b[0m ${message}`); +} + +function error(message) { + const timestamp = new Date().toISOString().replace('T', ' ').split('.')[0]; + console.error(`\x1b[31m[${timestamp}] ERROR:\x1b[0m ${message}`); +} + +async function sleep(seconds) { + return new Promise(resolve => setTimeout(resolve, seconds * 1000)); +} + +async function rateLimitSleep(seconds = 2) { + log(`Waiting ${seconds}s to avoid rate limiting...`); + await sleep(seconds); +} + +// GraphQL Queries 
and Mutations +const CHECK_DISCUSSIONS_ENABLED_QUERY = ` + query($owner: String!, $repo: String!) { + repository(owner: $owner, name: $repo) { + hasDiscussionsEnabled + id + } + } +`; + +const FETCH_CATEGORIES_QUERY = ` + query($owner: String!, $repo: String!) { + repository(owner: $owner, name: $repo) { + discussionCategories(first: 100) { + nodes { + id + name + slug + emoji + description + } + } + } + } +`; + +const FETCH_LABELS_QUERY = ` + query($owner: String!, $repo: String!) { + repository(owner: $owner, name: $repo) { + labels(first: 100) { + nodes { + id + name + color + description + } + } + } + } +`; + +const FETCH_DISCUSSIONS_QUERY = ` + query($owner: String!, $repo: String!, $cursor: String) { + repository(owner: $owner, name: $repo) { + discussions(first: 100, after: $cursor, orderBy: {field: CREATED_AT, direction: ASC}) { + pageInfo { + hasNextPage + endCursor + } + nodes { + id + title + body + category { + id + name + slug + description + emoji + } + labels(first: 100) { + nodes { + id + name + color + description + } + } + author { + login + } + createdAt + closed + locked + upvoteCount + url + number + } + } + } + } +`; + +const FETCH_DISCUSSION_COMMENTS_QUERY = ` + query($discussionId: ID!) { + node(id: $discussionId) { + ... on Discussion { + comments(first: 100) { + nodes { + id + body + author { + login + } + createdAt + upvoteCount + replies(first: 50) { + nodes { + id + body + author { + login + } + createdAt + upvoteCount + } + } + } + } + } + } + } +`; + +const CREATE_DISCUSSION_MUTATION = ` + mutation($repositoryId: ID!, $categoryId: ID!, $title: String!, $body: String!) 
{ + createDiscussion(input: { + repositoryId: $repositoryId, + categoryId: $categoryId, + title: $title, + body: $body + }) { + discussion { + id + title + url + number + } + } + } +`; + +const CREATE_LABEL_MUTATION = ` + mutation($repositoryId: ID!, $name: String!, $color: String!, $description: String) { + createLabel(input: { + repositoryId: $repositoryId, + name: $name, + color: $color, + description: $description + }) { + label { + id + name + } + } + } +`; + +const ADD_LABELS_MUTATION = ` + mutation($labelableId: ID!, $labelIds: [ID!]!) { + addLabelsToLabelable(input: { + labelableId: $labelableId, + labelIds: $labelIds + }) { + labelable { + labels(first: 100) { + nodes { + name + } + } + } + } + } +`; + +const ADD_DISCUSSION_COMMENT_MUTATION = ` + mutation($discussionId: ID!, $body: String!) { + addDiscussionComment(input: { + discussionId: $discussionId, + body: $body + }) { + comment { + id + body + createdAt + } + } + } +`; + +const ADD_DISCUSSION_COMMENT_REPLY_MUTATION = ` + mutation($discussionId: ID!, $replyToId: ID!, $body: String!) 
{ + addDiscussionComment(input: { + discussionId: $discussionId, + replyToId: $replyToId, + body: $body + }) { + comment { + id + body + createdAt + } + } + } +`; + +// Main functions +async function checkDiscussionsEnabled(octokit, owner, repo) { + log(`Checking if discussions are enabled in ${owner}/${repo}...`); + + await rateLimitSleep(2); + + try { + const response = await octokit.graphql(CHECK_DISCUSSIONS_ENABLED_QUERY, { + owner, + repo + }); + + if (!response.repository.hasDiscussionsEnabled) { + error(`Discussions are not enabled in ${owner}/${repo}`); + return null; + } + + log(`✓ Discussions are enabled in ${owner}/${repo}`); + return response.repository.id; + } catch (err) { + error(`Failed to check discussions status: ${err.message}`); + throw err; + } +} + +async function fetchCategories(octokit, owner, repo) { + log(`Fetching categories from ${owner}/${repo}...`); + + await rateLimitSleep(2); + + try { + const response = await octokit.graphql(FETCH_CATEGORIES_QUERY, { + owner, + repo + }); + + const categories = response.repository.discussionCategories.nodes; + log(`Found ${categories.length} categories`); + + return categories; + } catch (err) { + error(`Failed to fetch categories: ${err.message}`); + throw err; + } +} + +async function fetchLabels(octokit, owner, repo) { + log(`Fetching labels from ${owner}/${repo}...`); + + await rateLimitSleep(2); + + try { + const response = await octokit.graphql(FETCH_LABELS_QUERY, { + owner, + repo + }); + + const labels = response.repository.labels.nodes; + log(`Found ${labels.length} labels`); + + return labels; + } catch (err) { + error(`Failed to fetch labels: ${err.message}`); + throw err; + } +} + +function findCategoryId(categories, categoryName, categorySlug) { + const category = categories.find(c => + c.name === categoryName || c.slug === categorySlug + ); + + return category ? 
category.id : null; +} + +function getCategoryIdOrFallback(categories, categoryName, categorySlug) { + let categoryId = findCategoryId(categories, categoryName, categorySlug); + + if (categoryId) { + return categoryId; + } + + warn(`Category '${categoryName}' (${categorySlug}) not found in target repository`); + + // Track missing category + if (!missingCategories.includes(categoryName)) { + missingCategories.push(categoryName); + } + + // Try to find "General" category + const generalCategory = categories.find(c => + c.name === "General" || c.slug === "general" + ); + + if (generalCategory) { + warn(`Using 'General' category as fallback for '${categoryName}'`); + return generalCategory.id; + } + + // Use first category as last resort + if (categories.length > 0) { + warn(`Using '${categories[0].name}' category as fallback for '${categoryName}'`); + return categories[0].id; + } + + error("No available categories found in target repository"); + return null; +} + +function findLabelId(labels, labelName) { + const label = labels.find(l => l.name === labelName); + return label ? 
label.id : null; +} + +async function createLabel(octokit, repositoryId, name, color, description, targetLabels) { + log(`Creating new label: '${name}'`); + + await rateLimitSleep(2); + + try { + const response = await octokit.graphql(CREATE_LABEL_MUTATION, { + repositoryId, + name, + color, + description + }); + + const newLabel = response.createLabel.label; + log(`✓ Created label '${name}' with ID: ${newLabel.id}`); + + // Update local cache + targetLabels.push({ + id: newLabel.id, + name, + color, + description + }); + + return newLabel.id; + } catch (err) { + error(`Failed to create label '${name}': ${err.message}`); + return null; + } +} + +async function getOrCreateLabelId(octokit, repositoryId, labelName, labelColor, labelDescription, targetLabels) { + let labelId = findLabelId(targetLabels, labelName); + + if (labelId) { + return labelId; + } + + return await createLabel(octokit, repositoryId, labelName, labelColor, labelDescription, targetLabels); +} + +async function addLabelsToDiscussion(octokit, discussionId, labelIds) { + if (labelIds.length === 0) { + return true; + } + + log(`Adding ${labelIds.length} labels to discussion`); + + await rateLimitSleep(2); + + try { + await octokit.graphql(ADD_LABELS_MUTATION, { + labelableId: discussionId, + labelIds + }); + + log("✓ Successfully added labels to discussion"); + return true; + } catch (err) { + error(`Failed to add labels to discussion: ${err.message}`); + return false; + } +} + +async function createDiscussion(octokit, repositoryId, categoryId, title, body, sourceUrl, sourceAuthor, sourceCreated) { + const enhancedBody = `${body}\n\n---\n
\nOriginal discussion metadata\n\n_Original discussion by @${sourceAuthor} on ${sourceCreated}_\n_Source: ${sourceUrl}_\n
`; + + log(`Creating discussion: '${title}'`); + + await rateLimitSleep(3); + + try { + const response = await octokit.graphql(CREATE_DISCUSSION_MUTATION, { + repositoryId, + categoryId, + title, + body: enhancedBody + }); + + return response.createDiscussion.discussion; + } catch (err) { + error(`Failed to create discussion: ${err.message}`); + throw err; + } +} + +async function fetchDiscussionComments(octokit, discussionId) { + log(`Fetching comments for discussion ${discussionId}...`); + + await rateLimitSleep(2); + + try { + const response = await octokit.graphql(FETCH_DISCUSSION_COMMENTS_QUERY, { + discussionId + }); + + return response.node.comments.nodes || []; + } catch (err) { + error(`Failed to fetch comments: ${err.message}`); + return []; + } +} + +async function addDiscussionComment(octokit, discussionId, body, originalAuthor, originalCreated) { + const enhancedBody = `${body}\n\n---\n
\nOriginal comment metadata\n\n_Original comment by @${originalAuthor} on ${originalCreated}_\n
`; + + log("Adding comment to discussion"); + + await rateLimitSleep(2); + + try { + const response = await octokit.graphql(ADD_DISCUSSION_COMMENT_MUTATION, { + discussionId, + body: enhancedBody + }); + + const commentId = response.addDiscussionComment.comment.id; + log(`✓ Added comment with ID: ${commentId}`); + return commentId; + } catch (err) { + error(`Failed to add comment: ${err.message}`); + return null; + } +} + +async function addDiscussionCommentReply(octokit, discussionId, replyToId, body, originalAuthor, originalCreated) { + const enhancedBody = `${body}\n\n---\n_Original reply by @${originalAuthor} on ${originalCreated}_`; + + log(`Adding reply to comment ${replyToId}`); + + await rateLimitSleep(2); + + try { + const response = await octokit.graphql(ADD_DISCUSSION_COMMENT_REPLY_MUTATION, { + discussionId, + replyToId, + body: enhancedBody + }); + + const replyId = response.addDiscussionComment.comment.id; + log(`✓ Added reply with ID: ${replyId}`); + return replyId; + } catch (err) { + error(`Failed to add reply: ${err.message}`); + return null; + } +} + +async function copyDiscussionComments(octokit, discussionId, comments) { + if (!comments || comments.length === 0) { + log("No comments to copy for this discussion"); + return; + } + + log(`Copying ${comments.length} comments...`); + totalComments += comments.length; + + for (const comment of comments) { + if (!comment.body) continue; + + const author = comment.author?.login || "unknown"; + const createdAt = comment.createdAt || ""; + + log(`Copying comment by @${author}`); + + const newCommentId = await addDiscussionComment( + octokit, + discussionId, + comment.body, + author, + createdAt + ); + + if (newCommentId) { + copiedComments++; + + // Copy replies if any + const replies = comment.replies?.nodes || []; + if (replies.length > 0) { + log(`Copying ${replies.length} replies to comment...`); + + for (const reply of replies) { + if (!reply.body) continue; + + const replyAuthor = 
reply.author?.login || "unknown"; + const replyCreated = reply.createdAt || ""; + + log(`Copying reply by @${replyAuthor}`); + + await addDiscussionCommentReply( + octokit, + discussionId, + newCommentId, + reply.body, + replyAuthor, + replyCreated + ); + } + } + } else { + warn(`Failed to copy comment by @${author}, skipping replies`); + } + } + + log("✓ Finished copying comments"); +} + +async function processDiscussionsPage(sourceOctokit, targetOctokit, owner, repo, targetRepoId, targetCategories, targetLabels, cursor = null) { + log(`Fetching discussions page (cursor: ${cursor || "null"})...`); + + await rateLimitSleep(3); + + try { + const response = await sourceOctokit.graphql(FETCH_DISCUSSIONS_QUERY, { + owner, + repo, + cursor + }); + + const discussions = response.repository.discussions.nodes; + const pageInfo = response.repository.discussions.pageInfo; + + log(`Found ${discussions.length} discussions to process on this page`); + + for (const discussion of discussions) { + totalDiscussions++; + + log(`\n=== Processing discussion #${discussion.number}: '${discussion.title}' ===`); + + // Get or fallback category + const targetCategoryId = getCategoryIdOrFallback( + targetCategories, + discussion.category.name, + discussion.category.slug + ); + + if (!targetCategoryId) { + error(`No valid category found for discussion #${discussion.number}`); + skippedDiscussions++; + continue; + } + + // Create discussion + try { + const newDiscussion = await createDiscussion( + targetOctokit, + targetRepoId, + targetCategoryId, + discussion.title, + discussion.body || "", + discussion.url, + discussion.author?.login || "unknown", + discussion.createdAt + ); + + createdDiscussions++; + log(`✓ Created discussion #${discussion.number}: '${discussion.title}'`); + + // Process labels + if (discussion.labels.nodes.length > 0) { + const labelIds = []; + + for (const label of discussion.labels.nodes) { + log(`Processing label: '${label.name}' (color: ${label.color})`); + + const 
labelId = await getOrCreateLabelId( + targetOctokit, + targetRepoId, + label.name, + label.color, + label.description || "", + targetLabels + ); + + if (labelId) { + labelIds.push(labelId); + } + } + + if (labelIds.length > 0) { + await addLabelsToDiscussion(targetOctokit, newDiscussion.id, labelIds); + } + } + + // Copy comments + log("Processing comments for discussion..."); + const comments = await fetchDiscussionComments(sourceOctokit, discussion.id); + await copyDiscussionComments(targetOctokit, newDiscussion.id, comments); + + log(`✅ Finished processing discussion #${discussion.number}: '${discussion.title}'`); + + // Delay between discussions + await sleep(5); + + } catch (err) { + error(`Failed to create discussion #${discussion.number}: '${discussion.title}' - ${err.message}`); + skippedDiscussions++; + } + } + + // Process next page if exists + if (pageInfo.hasNextPage) { + log(`Processing next page with cursor: ${pageInfo.endCursor}`); + await processDiscussionsPage( + sourceOctokit, + targetOctokit, + owner, + repo, + targetRepoId, + targetCategories, + targetLabels, + pageInfo.endCursor + ); + } else { + log("No more pages to process"); + } + + } catch (err) { + error(`Failed to fetch discussions: ${err.message}`); + throw err; + } +} + +// Main execution +async function main() { + try { + log("Starting discussion copy process..."); + log(`Source: ${SOURCE_ORG}/${SOURCE_REPO}`); + log(`Target: ${TARGET_ORG}/${TARGET_REPO}`); + log(""); + log("⚡ This script uses conservative rate limiting to avoid GitHub API limits"); + log(""); + + // Verify source repository + log("Verifying access to source repository..."); + const sourceRepoId = await checkDiscussionsEnabled(sourceOctokit, SOURCE_ORG, SOURCE_REPO); + if (!sourceRepoId) { + process.exit(1); + } + log(`Source repository ID: ${sourceRepoId}`); + + // Verify target repository + log("Getting target repository ID..."); + const targetRepoId = await checkDiscussionsEnabled(targetOctokit, TARGET_ORG, 
TARGET_REPO); + if (!targetRepoId) { + process.exit(1); + } + log(`Target repository ID: ${targetRepoId}`); + + // Fetch target categories + const targetCategories = await fetchCategories(targetOctokit, TARGET_ORG, TARGET_REPO); + if (targetCategories.length === 0) { + error("No categories found in target repository"); + process.exit(1); + } + + log("Available categories in target repository:"); + targetCategories.forEach(cat => { + log(` ${cat.name} (${cat.slug})`); + }); + + // Fetch target labels + const targetLabels = await fetchLabels(targetOctokit, TARGET_ORG, TARGET_REPO); + log(`Available labels in target repository: ${targetLabels.length} labels`); + + // Start processing discussions + log("\nStarting to fetch and copy discussions..."); + await processDiscussionsPage( + sourceOctokit, + targetOctokit, + SOURCE_ORG, + SOURCE_REPO, + targetRepoId, + targetCategories, + targetLabels + ); + + // Summary + log("\n"); + log("=".repeat(60)); + log("Discussion copy completed!"); + log(`Total discussions found: ${totalDiscussions}`); + log(`Discussions created: ${createdDiscussions}`); + log(`Discussions skipped: ${skippedDiscussions}`); + log(`Total comments found: ${totalComments}`); + log(`Comments copied: ${copiedComments}`); + + if (missingCategories.length > 0) { + warn("\nThe following categories were missing and need to be created manually:"); + missingCategories.forEach(cat => { + warn(` - ${cat}`); + }); + warn(""); + warn("To create categories manually:"); + warn(`1. Go to https://github.com/${TARGET_ORG}/${TARGET_REPO}/discussions`); + warn("2. Click 'New discussion'"); + warn("3. Look for category management options"); + warn("4. Create the missing categories with appropriate names and descriptions"); + } + + if (skippedDiscussions > 0) { + warn("\nSome discussions were skipped. Please check the categories in the target repository."); + } + + log("\nAll done! 
✨"); + + } catch (err) { + error(`Fatal error: ${err.message}`); + if (err.stack) { + console.error(err.stack); + } + process.exit(1); + } +} + +// Run main function +main(); From 684a95a1a1b3875b614f96c5b90211124d53e099 Mon Sep 17 00:00:00 2001 From: Josh Johanning Date: Wed, 1 Oct 2025 10:35:53 -0500 Subject: [PATCH 03/32] feat: add functionality to close discussions in target repository if closed in source --- scripts/copy-discussions.js | 40 ++++++++++++++++++++++++++++++++++++- 1 file changed, 39 insertions(+), 1 deletion(-) diff --git a/scripts/copy-discussions.js b/scripts/copy-discussions.js index ab203a9..0e639e7 100644 --- a/scripts/copy-discussions.js +++ b/scripts/copy-discussions.js @@ -23,7 +23,6 @@ // // TODO: Polls don't copy options // TODO: Mark as answers -// TODO: Copy closed discussions and mark as closed in target const { Octokit } = require("octokit"); @@ -287,6 +286,20 @@ const ADD_DISCUSSION_COMMENT_REPLY_MUTATION = ` } `; +const CLOSE_DISCUSSION_MUTATION = ` + mutation($discussionId: ID!, $reason: DiscussionCloseReason) { + closeDiscussion(input: { + discussionId: $discussionId, + reason: $reason + }) { + discussion { + id + closed + } + } + } +`; + // Main functions async function checkDiscussionsEnabled(octokit, owner, repo) { log(`Checking if discussions are enabled in ${owner}/${repo}...`); @@ -549,6 +562,25 @@ async function addDiscussionCommentReply(octokit, discussionId, replyToId, body, } } +async function closeDiscussion(octokit, discussionId) { + log("Closing discussion..."); + + await rateLimitSleep(2); + + try { + await octokit.graphql(CLOSE_DISCUSSION_MUTATION, { + discussionId, + reason: "RESOLVED" + }); + + log("✓ Discussion closed"); + return true; + } catch (err) { + error(`Failed to close discussion: ${err.message}`); + return false; + } +} + async function copyDiscussionComments(octokit, discussionId, comments) { if (!comments || comments.length === 0) { log("No comments to copy for this discussion"); @@ -690,6 +722,12 
@@ async function processDiscussionsPage(sourceOctokit, targetOctokit, owner, repo, const comments = await fetchDiscussionComments(sourceOctokit, discussion.id); await copyDiscussionComments(targetOctokit, newDiscussion.id, comments); + // Close discussion if it was closed in source + if (discussion.closed) { + log("Source discussion is closed, closing target discussion..."); + await closeDiscussion(targetOctokit, newDiscussion.id); + } + log(`✅ Finished processing discussion #${discussion.number}: '${discussion.title}'`); // Delay between discussions From 877676192a4ad787bb9c45a3a33f84d5009d6416 Mon Sep 17 00:00:00 2001 From: Josh Johanning Date: Wed, 1 Oct 2025 10:51:09 -0500 Subject: [PATCH 04/32] feat: add functionality to mark discussion comments as answers during copy --- scripts/copy-discussions.js | 65 ++++++++++++++++++++++++++++++++++--- 1 file changed, 61 insertions(+), 4 deletions(-) diff --git a/scripts/copy-discussions.js b/scripts/copy-discussions.js index 0e639e7..3d67c61 100644 --- a/scripts/copy-discussions.js +++ b/scripts/copy-discussions.js @@ -22,7 +22,6 @@ // Attachments (images and files) will not copy over - they need manual handling. // // TODO: Polls don't copy options -// TODO: Mark as answers const { Octokit } = require("octokit"); @@ -158,6 +157,9 @@ const FETCH_DISCUSSIONS_QUERY = ` description } } + answer { + id + } author { login } @@ -300,6 +302,18 @@ const CLOSE_DISCUSSION_MUTATION = ` } `; +const MARK_DISCUSSION_COMMENT_AS_ANSWER_MUTATION = ` + mutation($commentId: ID!) 
{ + markDiscussionCommentAsAnswer(input: { + id: $commentId + }) { + discussion { + id + } + } + } +`; + // Main functions async function checkDiscussionsEnabled(octokit, owner, repo) { log(`Checking if discussions are enabled in ${owner}/${repo}...`); @@ -581,15 +595,36 @@ async function closeDiscussion(octokit, discussionId) { } } -async function copyDiscussionComments(octokit, discussionId, comments) { +async function markCommentAsAnswer(octokit, commentId) { + log("Marking comment as answer..."); + + await rateLimitSleep(2); + + try { + await octokit.graphql(MARK_DISCUSSION_COMMENT_AS_ANSWER_MUTATION, { + commentId + }); + + log("✓ Comment marked as answer"); + return true; + } catch (err) { + error(`Failed to mark comment as answer: ${err.message}`); + return false; + } +} + +async function copyDiscussionComments(octokit, discussionId, comments, answerCommentId = null) { if (!comments || comments.length === 0) { log("No comments to copy for this discussion"); - return; + return null; } log(`Copying ${comments.length} comments...`); totalComments += comments.length; + // Map to track source comment ID to target comment ID + const commentIdMap = new Map(); + for (const comment of comments) { if (!comment.body) continue; @@ -609,6 +644,9 @@ async function copyDiscussionComments(octokit, discussionId, comments) { if (newCommentId) { copiedComments++; + // Track the mapping + commentIdMap.set(comment.id, newCommentId); + // Copy replies if any const replies = comment.replies?.nodes || []; if (replies.length > 0) { @@ -638,6 +676,13 @@ async function copyDiscussionComments(octokit, discussionId, comments) { } log("✓ Finished copying comments"); + + // Return the new comment ID if this was the answer comment + if (answerCommentId && commentIdMap.has(answerCommentId)) { + return commentIdMap.get(answerCommentId); + } + + return null; } async function processDiscussionsPage(sourceOctokit, targetOctokit, owner, repo, targetRepoId, targetCategories, targetLabels, cursor 
= null) { @@ -720,7 +765,19 @@ async function processDiscussionsPage(sourceOctokit, targetOctokit, owner, repo, // Copy comments log("Processing comments for discussion..."); const comments = await fetchDiscussionComments(sourceOctokit, discussion.id); - await copyDiscussionComments(targetOctokit, newDiscussion.id, comments); + const answerCommentId = discussion.answer?.id || null; + const newAnswerCommentId = await copyDiscussionComments( + targetOctokit, + newDiscussion.id, + comments, + answerCommentId + ); + + // Mark answer if applicable + if (newAnswerCommentId) { + log("Source discussion has an answer comment, marking it in target..."); + await markCommentAsAnswer(targetOctokit, newAnswerCommentId); + } // Close discussion if it was closed in source if (discussion.closed) { From 6089e5d6c6ade10d48539524adce5322cfd2c6dc Mon Sep 17 00:00:00 2001 From: Josh Johanning Date: Wed, 1 Oct 2025 11:57:10 -0500 Subject: [PATCH 05/32] feat: add support for copying poll data and visual representation in discussions --- scripts/copy-discussions.js | 85 ++++++++++++++++++++++++++++++++++--- 1 file changed, 80 insertions(+), 5 deletions(-) diff --git a/scripts/copy-discussions.js b/scripts/copy-discussions.js index 3d67c61..f8d850d 100644 --- a/scripts/copy-discussions.js +++ b/scripts/copy-discussions.js @@ -20,8 +20,9 @@ // Note: This script copies discussion content, comments, replies, and basic metadata. // Reactions and other advanced interactions are not copied. // Attachments (images and files) will not copy over - they need manual handling. 
-// -// TODO: Polls don't copy options + +// Configuration +const INCLUDE_POLL_MERMAID_CHART = true; // Set to false to disable Mermaid pie chart for polls const { Octokit } = require("octokit"); @@ -89,6 +90,55 @@ async function rateLimitSleep(seconds = 2) { await sleep(seconds); } +function formatPollData(poll) { + if (!poll || !poll.options || poll.options.nodes.length === 0) { + return ''; + } + + const options = poll.options.nodes; + const totalVotes = poll.totalVoteCount || 0; + + let pollMarkdown = '\n\n---\n\n### 📊 Poll Results (from source discussion)\n\n'; + pollMarkdown += `**${poll.question}**\n\n`; + + // Create table + pollMarkdown += '| Option | Votes | Percentage |\n'; + pollMarkdown += '|--------|-------|------------|\n'; + + options.forEach(option => { + const votes = option.totalVoteCount || 0; + const percentage = totalVotes > 0 ? ((votes / totalVotes) * 100).toFixed(1) : '0.0'; + pollMarkdown += `| ${option.option} | ${votes} | ${percentage}% |\n`; + }); + + pollMarkdown += `\n**Total votes:** ${totalVotes}\n`; + + // Add Mermaid pie chart if enabled + if (INCLUDE_POLL_MERMAID_CHART && totalVotes > 0) { + pollMarkdown += '\n
\nVisual representation\n\n'; + pollMarkdown += '```mermaid\n'; + pollMarkdown += '%%{init: {"pie": {"textPosition": 0.5}, "themeVariables": {"pieOuterStrokeWidth": "5px"}}}%%\n'; + pollMarkdown += 'pie showData\n'; + pollMarkdown += ` title ${poll.question}\n`; + + options.forEach(option => { + const votes = option.totalVoteCount || 0; + if (votes > 0) { + // Escape quotes in option text for Mermaid + const escapedOption = option.option.replace(/"/g, '\\"'); + pollMarkdown += ` "${escapedOption}" : ${votes}\n`; + } + }); + + pollMarkdown += '```\n\n'; + pollMarkdown += '
\n'; + } + + pollMarkdown += '\n_Note: This is a static snapshot of poll results from the source discussion. Voting is not available in copied discussions._\n'; + + return pollMarkdown; +} + // GraphQL Queries and Mutations const CHECK_DISCUSSIONS_ENABLED_QUERY = ` query($owner: String!, $repo: String!) { @@ -169,6 +219,16 @@ const FETCH_DISCUSSIONS_QUERY = ` upvoteCount url number + poll { + question + totalVoteCount + options(first: 100) { + nodes { + option + totalVoteCount + } + } + } } } } @@ -492,8 +552,17 @@ async function addLabelsToDiscussion(octokit, discussionId, labelIds) { } } -async function createDiscussion(octokit, repositoryId, categoryId, title, body, sourceUrl, sourceAuthor, sourceCreated) { - const enhancedBody = `${body}\n\n---\n
\nOriginal discussion metadata\n\n_Original discussion by @${sourceAuthor} on ${sourceCreated}_\n_Source: ${sourceUrl}_\n
`; +async function createDiscussion(octokit, repositoryId, categoryId, title, body, sourceUrl, sourceAuthor, sourceCreated, poll = null) { + let enhancedBody = body; + + // Add poll data if present + if (poll) { + const pollMarkdown = formatPollData(poll); + enhancedBody += pollMarkdown; + } + + // Add metadata + enhancedBody += `\n\n---\n
\nOriginal discussion metadata\n\n_Original discussion by @${sourceAuthor} on ${sourceCreated}_\n_Source: ${sourceUrl}_\n
`; log(`Creating discussion: '${title}'`); @@ -730,12 +799,18 @@ async function processDiscussionsPage(sourceOctokit, targetOctokit, owner, repo, discussion.body || "", discussion.url, discussion.author?.login || "unknown", - discussion.createdAt + discussion.createdAt, + discussion.poll || null ); createdDiscussions++; log(`✓ Created discussion #${discussion.number}: '${discussion.title}'`); + // Log poll info if present + if (discussion.poll && discussion.poll.options?.nodes?.length > 0) { + log(` ℹ️ Poll included with ${discussion.poll.options.nodes.length} options (${discussion.poll.totalVoteCount} total votes)`); + } + // Process labels if (discussion.labels.nodes.length > 0) { const labelIds = []; From b280310cdc16f3d19738fcabfd3da4d18f126ff1 Mon Sep 17 00:00:00 2001 From: Josh Johanning Date: Wed, 1 Oct 2025 13:10:25 -0500 Subject: [PATCH 06/32] fix: incomplete string escaping Co-authored-by: Copilot Autofix powered by AI <62310815+github-advanced-security[bot]@users.noreply.github.com> --- scripts/copy-discussions.js | 4 ++-- 1 file changed, 2 insertions(+), 2 deletions(-) diff --git a/scripts/copy-discussions.js b/scripts/copy-discussions.js index f8d850d..9a7b8d5 100644 --- a/scripts/copy-discussions.js +++ b/scripts/copy-discussions.js @@ -124,8 +124,8 @@ function formatPollData(poll) { options.forEach(option => { const votes = option.totalVoteCount || 0; if (votes > 0) { - // Escape quotes in option text for Mermaid - const escapedOption = option.option.replace(/"/g, '\\"'); + // Escape backslashes and quotes in option text for Mermaid + const escapedOption = option.option.replace(/\\/g, '\\\\').replace(/"/g, '\\"'); pollMarkdown += ` "${escapedOption}" : ${votes}\n`; } }); From ee1e0cc1a3962e10a6e4281ee3a7f434471e567d Mon Sep 17 00:00:00 2001 From: Josh Johanning Date: Wed, 1 Oct 2025 13:11:48 -0500 Subject: [PATCH 07/32] docs: update README to include detailed usage and features for copy-discussions.js --- scripts/README.md | 40 
+++++++++++++++++++++++++++++++++++++++- 1 file changed, 39 insertions(+), 1 deletion(-) diff --git a/scripts/README.md b/scripts/README.md index e3c3673..72ef46e 100644 --- a/scripts/README.md +++ b/scripts/README.md @@ -32,7 +32,45 @@ Configuration values to change in the script: - `gitEmail` = If using a GitHub App, combine the App's user ID (⚠️ this is different than App ID!) and name to form an email like: `149130343+josh-issueops-bot[bot]@users.noreply.github.com`. You can find the App's user ID number by calling: `gh api '/users/josh-issueops-bot[bot]' --jq .id` - `overwrite`: use `false` or `true` on whether it should overwrite the existing `dependabot.yml` file -## ado-workitems-to-github-issues.ps1 +## copy-discussions.js + +Copy GitHub Discussions between repositories, including categories, labels, comments, and replies. This script can copy discussions across different GitHub instances and enterprises. + +The script is expecting: + +- environment variables `SOURCE_TOKEN` and `TARGET_TOKEN` with GitHub PATs that have `repo` and `read:discussion` scopes +- dependencies installed via `npm i octokit` + +Script usage: + +```bash +export SOURCE_TOKEN=ghp_abc +export TARGET_TOKEN=ghp_xyz +npm i octokit +node ./copy-discussions.js source-org source-repo target-org target-repo +``` + +Features: + +- Automatically creates missing discussion categories in the target repository +- Creates labels in the target repository if they don't exist +- Copies all comments and threaded replies with proper attribution +- Copies poll results as static snapshots (with table and optional Mermaid chart) +- Handles rate limiting with exponential backoff +- Provides colored console output for better visibility + +Configuration: + +- Set `INCLUDE_POLL_MERMAID_CHART = false` at the top of the script to disable Mermaid pie charts for polls + +Notes: + +- If a category doesn't exist in the target repository, discussions will be created in the "General" category +- The script preserves 
discussion metadata by adding attribution text to the body and comments +- Poll results are copied as static snapshots - voting is not available in copied discussions +- Both source and target repositories must have GitHub Discussions enabled + +## delete-branch-protection-rules.ps1 Migrate work items from Azure DevOps to GitHub issues - this just links out to a [separate repo](https://github.com/joshjohanning/ado_workitems_to_github_issues) From 2df58c94df1f6fea378e21f830376f326559a3a1 Mon Sep 17 00:00:00 2001 From: Josh Johanning Date: Wed, 1 Oct 2025 13:14:19 -0500 Subject: [PATCH 08/32] fix: ensure linting commands do not fail the workflow by adding '|| true' --- .github/workflows/lint-readme.yml | 4 ++-- 1 file changed, 2 insertions(+), 2 deletions(-) diff --git a/.github/workflows/lint-readme.yml b/.github/workflows/lint-readme.yml index e2f3f9f..0519521 100644 --- a/.github/workflows/lint-readme.yml +++ b/.github/workflows/lint-readme.yml @@ -23,7 +23,7 @@ jobs: id: lint run: | set -o pipefail - node ./.github/scripts/lint-readme.js | tee gh-cli-readme-lint-results.txt + node ./.github/scripts/lint-readme.js | tee gh-cli-readme-lint-results.txt || true - name: Upload lint results if: steps.lint.outcome == 'failure' || steps.lint.outcome == 'success' @@ -46,7 +46,7 @@ jobs: id: lint run: | set -o pipefail - node ./.github/scripts/lint-readme.js ./scripts '##' '# scripts' | tee scripts-readme-lint-results.txt + node ./.github/scripts/lint-readme.js ./scripts '##' '# scripts' | tee scripts-readme-lint-results.txt || true - name: Upload lint results if: steps.lint.outcome == 'failure' || steps.lint.outcome == 'success' From b3e800f2a99e3e9052fe62078b146f811975c669 Mon Sep 17 00:00:00 2001 From: Josh Johanning Date: Wed, 1 Oct 2025 13:18:53 -0500 Subject: [PATCH 09/32] Revert "docs: update README to include detailed usage and features for copy-discussions.js" This reverts commit ee1e0cc1a3962e10a6e4281ee3a7f434471e567d. 
--- scripts/README.md | 40 +--------------------------------------- 1 file changed, 1 insertion(+), 39 deletions(-) diff --git a/scripts/README.md b/scripts/README.md index 72ef46e..e3c3673 100644 --- a/scripts/README.md +++ b/scripts/README.md @@ -32,45 +32,7 @@ Configuration values to change in the script: - `gitEmail` = If using a GitHub App, combine the App's user ID (⚠️ this is different than App ID!) and name to form an email like: `149130343+josh-issueops-bot[bot]@users.noreply.github.com`. You can find the App's user ID number by calling: `gh api '/users/josh-issueops-bot[bot]' --jq .id` - `overwrite`: use `false` or `true` on whether it should overwrite the existing `dependabot.yml` file -## copy-discussions.js - -Copy GitHub Discussions between repositories, including categories, labels, comments, and replies. This script can copy discussions across different GitHub instances and enterprises. - -The script is expecting: - -- environment variables `SOURCE_TOKEN` and `TARGET_TOKEN` with GitHub PATs that have `repo` and `read:discussion` scopes -- dependencies installed via `npm i octokit` - -Script usage: - -```bash -export SOURCE_TOKEN=ghp_abc -export TARGET_TOKEN=ghp_xyz -npm i octokit -node ./copy-discussions.js source-org source-repo target-org target-repo -``` - -Features: - -- Automatically creates missing discussion categories in the target repository -- Creates labels in the target repository if they don't exist -- Copies all comments and threaded replies with proper attribution -- Copies poll results as static snapshots (with table and optional Mermaid chart) -- Handles rate limiting with exponential backoff -- Provides colored console output for better visibility - -Configuration: - -- Set `INCLUDE_POLL_MERMAID_CHART = false` at the top of the script to disable Mermaid pie charts for polls - -Notes: - -- If a category doesn't exist in the target repository, discussions will be created in the "General" category -- The script preserves discussion 
metadata by adding attribution text to the body and comments -- Poll results are copied as static snapshots - voting is not available in copied discussions -- Both source and target repositories must have GitHub Discussions enabled - -## delete-branch-protection-rules.ps1 +## ado-workitems-to-github-issues.ps1 Migrate work items from Azure DevOps to GitHub issues - this just links out to a [separate repo](https://github.com/joshjohanning/ado_workitems_to_github_issues) From eb3f21ac608e520f4a6e2009a1308c8e35e21df7 Mon Sep 17 00:00:00 2001 From: Josh Johanning Date: Wed, 1 Oct 2025 13:19:19 -0500 Subject: [PATCH 10/32] docs: enhance README for copy-discussions.js with usage instructions and features --- scripts/README.md | 38 ++++++++++++++++++++++++++++++++++++++ 1 file changed, 38 insertions(+) diff --git a/scripts/README.md b/scripts/README.md index e3c3673..1279dca 100644 --- a/scripts/README.md +++ b/scripts/README.md @@ -36,6 +36,44 @@ Configuration values to change in the script: Migrate work items from Azure DevOps to GitHub issues - this just links out to a [separate repo](https://github.com/joshjohanning/ado_workitems_to_github_issues) +## copy-discussions.js + +Copy GitHub Discussions between repositories, including categories, labels, comments, and replies. This script can copy discussions across different GitHub instances and enterprises. 
+ +The script is expecting: + +- environment variables `SOURCE_TOKEN` and `TARGET_TOKEN` with GitHub PATs that have `repo` and `read:discussion` scopes +- dependencies installed via `npm i octokit` + +Script usage: + +```bash +export SOURCE_TOKEN=ghp_abc +export TARGET_TOKEN=ghp_xyz +npm i octokit +node ./copy-discussions.js source-org source-repo target-org target-repo +``` + +Features: + +- Automatically creates missing discussion categories in the target repository +- Creates labels in the target repository if they don't exist +- Copies all comments and threaded replies with proper attribution +- Copies poll results as static snapshots (with table and optional Mermaid chart) +- Handles rate limiting with exponential backoff +- Provides colored console output for better visibility + +Configuration: + +- Set `INCLUDE_POLL_MERMAID_CHART = false` at the top of the script to disable Mermaid pie charts for polls + +Notes: + +- If a category doesn't exist in the target repository, discussions will be created in the "General" category +- The script preserves discussion metadata by adding attribution text to the body and comments +- Poll results are copied as static snapshots - voting is not available in copied discussions +- Both source and target repositories must have GitHub Discussions enabled + ## delete-branch-protection-rules.ps1 Delete branch protection rules programmatically based on a pattern. 
From ae3e30b2f3b7566b16513639279e134e4ee50dc9 Mon Sep 17 00:00:00 2001 From: Josh Johanning Date: Wed, 1 Oct 2025 13:25:09 -0500 Subject: [PATCH 11/32] feat: enhance discussion copying by preserving reactions, locked status, and pinned indicators --- scripts/README.md | 6 ++ scripts/copy-discussions.js | 150 +++++++++++++++++++++++++++++++++--- 2 files changed, 145 insertions(+), 11 deletions(-) diff --git a/scripts/README.md b/scripts/README.md index 1279dca..d6b0910 100644 --- a/scripts/README.md +++ b/scripts/README.md @@ -60,6 +60,9 @@ Features: - Creates labels in the target repository if they don't exist - Copies all comments and threaded replies with proper attribution - Copies poll results as static snapshots (with table and optional Mermaid chart) +- Preserves reaction counts on discussions, comments, and replies +- Maintains locked status of discussions +- Indicates pinned discussions with a visual indicator - Handles rate limiting with exponential backoff - Provides colored console output for better visibility @@ -72,6 +75,9 @@ Notes: - If a category doesn't exist in the target repository, discussions will be created in the "General" category - The script preserves discussion metadata by adding attribution text to the body and comments - Poll results are copied as static snapshots - voting is not available in copied discussions +- Reactions are copied as read-only summaries (users cannot add new reactions) +- Locked discussions will be locked in the target repository +- Pinned status is indicated in the discussion body (GitHub API doesn't allow pinning via GraphQL) - Both source and target repositories must have GitHub Discussions enabled ## delete-branch-protection-rules.ps1 diff --git a/scripts/copy-discussions.js b/scripts/copy-discussions.js index 9a7b8d5..423cae9 100644 --- a/scripts/copy-discussions.js +++ b/scripts/copy-discussions.js @@ -139,6 +139,33 @@ function formatPollData(poll) { return pollMarkdown; } +function 
formatReactions(reactionGroups) { + if (!reactionGroups || reactionGroups.length === 0) { + return ''; + } + + const reactionMap = { + 'THUMBS_UP': '👍', + 'THUMBS_DOWN': '👎', + 'LAUGH': '😄', + 'HOORAY': '🎉', + 'CONFUSED': '😕', + 'HEART': '❤️', + 'ROCKET': '🚀', + 'EYES': '👀' + }; + + const formattedReactions = reactionGroups + .filter(group => group.users.totalCount > 0) + .map(group => { + const emoji = reactionMap[group.content] || group.content; + return `${emoji} ${group.users.totalCount}`; + }) + .join(' | '); + + return formattedReactions ? `\n\n**Reactions:** ${formattedReactions}` : ''; +} + // GraphQL Queries and Mutations const CHECK_DISCUSSIONS_ENABLED_QUERY = ` query($owner: String!, $repo: String!) { @@ -229,6 +256,19 @@ const FETCH_DISCUSSIONS_QUERY = ` } } } + reactionGroups { + content + users { + totalCount + } + } + } + } + pinnedDiscussions(first: 100) { + nodes { + discussion { + id + } } } } @@ -248,6 +288,12 @@ const FETCH_DISCUSSION_COMMENTS_QUERY = ` } createdAt upvoteCount + reactionGroups { + content + users { + totalCount + } + } replies(first: 50) { nodes { id @@ -257,6 +303,12 @@ const FETCH_DISCUSSION_COMMENTS_QUERY = ` } createdAt upvoteCount + reactionGroups { + content + users { + totalCount + } + } } } } @@ -362,6 +414,18 @@ const CLOSE_DISCUSSION_MUTATION = ` } `; +const LOCK_DISCUSSION_MUTATION = ` + mutation($discussionId: ID!) { + lockLockable(input: { + lockableId: $discussionId + }) { + lockedRecord { + locked + } + } + } +`; + const MARK_DISCUSSION_COMMENT_AS_ANSWER_MUTATION = ` mutation($commentId: ID!) 
{ markDiscussionCommentAsAnswer(input: { @@ -552,9 +616,20 @@ async function addLabelsToDiscussion(octokit, discussionId, labelIds) { } } -async function createDiscussion(octokit, repositoryId, categoryId, title, body, sourceUrl, sourceAuthor, sourceCreated, poll = null) { +async function createDiscussion(octokit, repositoryId, categoryId, title, body, sourceUrl, sourceAuthor, sourceCreated, poll = null, locked = false, isPinned = false, reactionGroups = []) { let enhancedBody = body; + // Add pinned indicator if discussion was pinned + if (isPinned) { + enhancedBody = `📌 _This discussion was pinned in the source repository_\n\n${enhancedBody}`; + } + + // Add reactions if present + const reactionsMarkdown = formatReactions(reactionGroups); + if (reactionsMarkdown) { + enhancedBody += reactionsMarkdown; + } + // Add poll data if present if (poll) { const pollMarkdown = formatPollData(poll); @@ -562,7 +637,7 @@ async function createDiscussion(octokit, repositoryId, categoryId, title, body, } // Add metadata - enhancedBody += `\n\n---\n
\nOriginal discussion metadata\n\n_Original discussion by @${sourceAuthor} on ${sourceCreated}_\n_Source: ${sourceUrl}_\n
`; + enhancedBody += `\n\n---\n
\nOriginal discussion metadata\n\n_Original discussion by @${sourceAuthor} on ${sourceCreated}_\n_Source: ${sourceUrl}_\n${locked ? '\n_🔒 This discussion was locked in the source repository_' : ''}\n
`; log(`Creating discussion: '${title}'`); @@ -576,13 +651,35 @@ async function createDiscussion(octokit, repositoryId, categoryId, title, body, body: enhancedBody }); - return response.createDiscussion.discussion; + const newDiscussion = response.createDiscussion.discussion; + + // Lock the discussion if it was locked in the source + if (locked) { + await lockDiscussion(octokit, newDiscussion.id); + } + + return newDiscussion; } catch (err) { error(`Failed to create discussion: ${err.message}`); throw err; } } +async function lockDiscussion(octokit, discussionId) { + log(`Locking discussion ${discussionId}...`); + + await rateLimitSleep(2); + + try { + await octokit.graphql(LOCK_DISCUSSION_MUTATION, { + discussionId + }); + log(`Discussion locked successfully`); + } catch (err) { + error(`Failed to lock discussion: ${err.message}`); + } +} + async function fetchDiscussionComments(octokit, discussionId) { log(`Fetching comments for discussion ${discussionId}...`); @@ -600,8 +697,16 @@ async function fetchDiscussionComments(octokit, discussionId) { } } -async function addDiscussionComment(octokit, discussionId, body, originalAuthor, originalCreated) { - const enhancedBody = `${body}\n\n---\n
\nOriginal comment metadata\n\n_Original comment by @${originalAuthor} on ${originalCreated}_\n
`; +async function addDiscussionComment(octokit, discussionId, body, originalAuthor, originalCreated, reactionGroups = []) { + let enhancedBody = body; + + // Add reactions if present + const reactionsMarkdown = formatReactions(reactionGroups); + if (reactionsMarkdown) { + enhancedBody += reactionsMarkdown; + } + + enhancedBody += `\n\n---\n
\nOriginal comment metadata\n\n_Original comment by @${originalAuthor} on ${originalCreated}_\n
`; log("Adding comment to discussion"); @@ -622,8 +727,16 @@ async function addDiscussionComment(octokit, discussionId, body, originalAuthor, } } -async function addDiscussionCommentReply(octokit, discussionId, replyToId, body, originalAuthor, originalCreated) { - const enhancedBody = `${body}\n\n---\n_Original reply by @${originalAuthor} on ${originalCreated}_`; +async function addDiscussionCommentReply(octokit, discussionId, replyToId, body, originalAuthor, originalCreated, reactionGroups = []) { + let enhancedBody = body; + + // Add reactions if present + const reactionsMarkdown = formatReactions(reactionGroups); + if (reactionsMarkdown) { + enhancedBody += reactionsMarkdown; + } + + enhancedBody += `\n\n---\n_Original reply by @${originalAuthor} on ${originalCreated}_`; log(`Adding reply to comment ${replyToId}`); @@ -707,7 +820,8 @@ async function copyDiscussionComments(octokit, discussionId, comments, answerCom discussionId, comment.body, author, - createdAt + createdAt, + comment.reactionGroups || [] ); if (newCommentId) { @@ -735,7 +849,8 @@ async function copyDiscussionComments(octokit, discussionId, comments, answerCom newCommentId, reply.body, replyAuthor, - replyCreated + replyCreated, + reply.reactionGroups || [] ); } } @@ -800,16 +915,29 @@ async function processDiscussionsPage(sourceOctokit, targetOctokit, owner, repo, discussion.url, discussion.author?.login || "unknown", discussion.createdAt, - discussion.poll || null + discussion.poll || null, + discussion.locked || false, + discussion.isPinned || false, + discussion.reactionGroups || [] ); createdDiscussions++; log(`✓ Created discussion #${discussion.number}: '${discussion.title}'`); - // Log poll info if present + // Log additional metadata info if (discussion.poll && discussion.poll.options?.nodes?.length > 0) { log(` ℹ️ Poll included with ${discussion.poll.options.nodes.length} options (${discussion.poll.totalVoteCount} total votes)`); } + if (discussion.locked) { + log(` 🔒 Discussion was 
locked in source and has been locked in target`); + } + if (discussion.isPinned) { + log(` 📌 Discussion was pinned in source (indicator added to body)`); + } + const totalReactions = discussion.reactionGroups?.reduce((sum, group) => sum + (group.users.totalCount || 0), 0) || 0; + if (totalReactions > 0) { + log(` ❤️ ${totalReactions} reaction${totalReactions !== 1 ? 's' : ''} copied`); + } // Process labels if (discussion.labels.nodes.length > 0) { From 118b323e3ea9a7a37cb7149137c61d663b42b15e Mon Sep 17 00:00:00 2001 From: Josh Johanning Date: Wed, 1 Oct 2025 13:28:53 -0500 Subject: [PATCH 12/32] Revert "fix: ensure linting commands do not fail the workflow by adding '|| true'" This reverts commit 2df58c94df1f6fea378e21f830376f326559a3a1. --- .github/workflows/lint-readme.yml | 4 ++-- 1 file changed, 2 insertions(+), 2 deletions(-) diff --git a/.github/workflows/lint-readme.yml b/.github/workflows/lint-readme.yml index 0519521..e2f3f9f 100644 --- a/.github/workflows/lint-readme.yml +++ b/.github/workflows/lint-readme.yml @@ -23,7 +23,7 @@ jobs: id: lint run: | set -o pipefail - node ./.github/scripts/lint-readme.js | tee gh-cli-readme-lint-results.txt || true + node ./.github/scripts/lint-readme.js | tee gh-cli-readme-lint-results.txt - name: Upload lint results if: steps.lint.outcome == 'failure' || steps.lint.outcome == 'success' @@ -46,7 +46,7 @@ jobs: id: lint run: | set -o pipefail - node ./.github/scripts/lint-readme.js ./scripts '##' '# scripts' | tee scripts-readme-lint-results.txt || true + node ./.github/scripts/lint-readme.js ./scripts '##' '# scripts' | tee scripts-readme-lint-results.txt - name: Upload lint results if: steps.lint.outcome == 'failure' || steps.lint.outcome == 'success' From afa4f9c87e28c23b745efe9e35682c7b0836559e Mon Sep 17 00:00:00 2001 From: Josh Johanning Date: Wed, 1 Oct 2025 13:52:22 -0500 Subject: [PATCH 13/32] fix: correct formatting of metadata sections in discussions and comments also noting pinned discussions --- 
scripts/copy-discussions.js | 17 ++++++++++++----- 1 file changed, 12 insertions(+), 5 deletions(-) diff --git a/scripts/copy-discussions.js b/scripts/copy-discussions.js index 423cae9..6c827b1 100644 --- a/scripts/copy-discussions.js +++ b/scripts/copy-discussions.js @@ -637,7 +637,7 @@ async function createDiscussion(octokit, repositoryId, categoryId, title, body, } // Add metadata - enhancedBody += `\n\n---\n
\nOriginal discussion metadata\n\n_Original discussion by @${sourceAuthor} on ${sourceCreated}_\n_Source: ${sourceUrl}_\n${locked ? '\n_🔒 This discussion was locked in the source repository_' : ''}\n
`; + enhancedBody += `\n---\n
\nOriginal discussion metadata\n\n_Original discussion by @${sourceAuthor} on ${sourceCreated}_\n_Source: ${sourceUrl}_\n${locked ? '\n_🔒 This discussion was locked in the source repository_' : ''}\n
`; log(`Creating discussion: '${title}'`); @@ -706,7 +706,7 @@ async function addDiscussionComment(octokit, discussionId, body, originalAuthor, enhancedBody += reactionsMarkdown; } - enhancedBody += `\n\n---\n
\nOriginal comment metadata\n\n_Original comment by @${originalAuthor} on ${originalCreated}_\n
`; + enhancedBody += `\n---\n
\nOriginal comment metadata\n\n_Original comment by @${originalAuthor} on ${originalCreated}_\n
`; log("Adding comment to discussion"); @@ -736,7 +736,7 @@ async function addDiscussionCommentReply(octokit, discussionId, replyToId, body, enhancedBody += reactionsMarkdown; } - enhancedBody += `\n\n---\n_Original reply by @${originalAuthor} on ${originalCreated}_`; + enhancedBody += `\n---\n_Original reply by @${originalAuthor} on ${originalCreated}_`; log(`Adding reply to comment ${replyToId}`); @@ -883,6 +883,10 @@ async function processDiscussionsPage(sourceOctokit, targetOctokit, owner, repo, const discussions = response.repository.discussions.nodes; const pageInfo = response.repository.discussions.pageInfo; + const pinnedDiscussions = response.repository.pinnedDiscussions.nodes || []; + + // Create a set of pinned discussion IDs for quick lookup + const pinnedDiscussionIds = new Set(pinnedDiscussions.map(p => p.discussion.id)); log(`Found ${discussions.length} discussions to process on this page`); @@ -904,6 +908,9 @@ async function processDiscussionsPage(sourceOctokit, targetOctokit, owner, repo, continue; } + // Check if discussion is pinned + const isPinned = pinnedDiscussionIds.has(discussion.id); + // Create discussion try { const newDiscussion = await createDiscussion( @@ -917,7 +924,7 @@ async function processDiscussionsPage(sourceOctokit, targetOctokit, owner, repo, discussion.createdAt, discussion.poll || null, discussion.locked || false, - discussion.isPinned || false, + isPinned, discussion.reactionGroups || [] ); @@ -931,7 +938,7 @@ async function processDiscussionsPage(sourceOctokit, targetOctokit, owner, repo, if (discussion.locked) { log(` 🔒 Discussion was locked in source and has been locked in target`); } - if (discussion.isPinned) { + if (isPinned) { log(` 📌 Discussion was pinned in source (indicator added to body)`); } const totalReactions = discussion.reactionGroups?.reduce((sum, group) => sum + (group.users.totalCount || 0), 0) || 0; From e0662b6c2340babefbbfdc7808de2470488ceec3 Mon Sep 17 00:00:00 2001 From: Josh Johanning Date: Wed, 1 
Oct 2025 13:59:45 -0500 Subject: [PATCH 14/32] fix: update script documentation to clarify copied discussion elements --- scripts/copy-discussions.js | 4 ++-- 1 file changed, 2 insertions(+), 2 deletions(-) diff --git a/scripts/copy-discussions.js b/scripts/copy-discussions.js index 6c827b1..3d92e03 100644 --- a/scripts/copy-discussions.js +++ b/scripts/copy-discussions.js @@ -17,8 +17,8 @@ // - Both tokens must have the 'repo' scope // - Dependencies installed via `npm i octokit` // -// Note: This script copies discussion content, comments, replies, and basic metadata. -// Reactions and other advanced interactions are not copied. +// Note: This script copies discussion content, comments, replies, polls, reactions, locked status, +// and pinned status. Reactions are copied as read-only summaries. // Attachments (images and files) will not copy over - they need manual handling. // Configuration From 02cd02462af7c2f313e822d346ccd91a43ee8349 Mon Sep 17 00:00:00 2001 From: Josh Johanning Date: Wed, 1 Oct 2025 15:04:33 -0500 Subject: [PATCH 15/32] feat: add copy-discussions.sh script for transferring discussions between repositories --- gh-cli/README.md | 20 ++++++++++++++++++++ 1 file changed, 20 insertions(+) diff --git a/gh-cli/README.md b/gh-cli/README.md index 13b1e83..e99b85c 100644 --- a/gh-cli/README.md +++ b/gh-cli/README.md @@ -276,6 +276,26 @@ Usage: ./check-repository-admin.sh ``` +### copy-discussions.sh + +Copies discussions from one repository to another repository using GraphQL API. Supports cross-enterprise copying with different authentication tokens. 
+ +Usage: + +```shell +./copy-discussions.sh +``` + +Prerequisites: + +- `SOURCE_TOKEN` environment variable with read access to source repository discussions +- `TARGET_TOKEN` environment variable with write access to target repository discussions +- Both tokens must have the `public_repo` or `repo` scope +- GitHub CLI (gh) must be installed + +> [!NOTE] +> This script copies discussion content, comments, replies, and basic metadata. Reactions are not copied. The script will attempt to match discussion categories by name or slug between repositories. Attachments (images and files) will not copy over and require manual handling. + ### copy-organization-members.sh Copy organization members from one organization to the other, the member will **retain** the source role (owner or member), member cannot be demoted, if they already exist at the target with an owner role they cannot be demoted to member. From ac7e53caadb6c6dc8801241b1eb927219c6f036b Mon Sep 17 00:00:00 2001 From: Josh Johanning Date: Wed, 1 Oct 2025 15:04:48 -0500 Subject: [PATCH 16/32] Revert "feat: add copy-discussions.sh script for transferring discussions between repositories" This reverts commit 02cd02462af7c2f313e822d346ccd91a43ee8349. --- gh-cli/README.md | 20 -------------------- 1 file changed, 20 deletions(-) diff --git a/gh-cli/README.md b/gh-cli/README.md index e99b85c..13b1e83 100644 --- a/gh-cli/README.md +++ b/gh-cli/README.md @@ -276,26 +276,6 @@ Usage: ./check-repository-admin.sh ``` -### copy-discussions.sh - -Copies discussions from one repository to another repository using GraphQL API. Supports cross-enterprise copying with different authentication tokens. 
- -Usage: - -```shell -./copy-discussions.sh -``` - -Prerequisites: - -- `SOURCE_TOKEN` environment variable with read access to source repository discussions -- `TARGET_TOKEN` environment variable with write access to target repository discussions -- Both tokens must have the `public_repo` or `repo` scope -- GitHub CLI (gh) must be installed - -> [!NOTE] -> This script copies discussion content, comments, replies, and basic metadata. Reactions are not copied. The script will attempt to match discussion categories by name or slug between repositories. Attachments (images and files) will not copy over and require manual handling. - ### copy-organization-members.sh Copy organization members from one organization to the other, the member will **retain** the source role (owner or member), member cannot be demoted, if they already exist at the target with an owner role they cannot be demoted to member. From fbf843fd3ae1e9565efab2435d309da972576c0a Mon Sep 17 00:00:00 2001 From: Josh Johanning Date: Wed, 1 Oct 2025 15:05:19 -0500 Subject: [PATCH 17/32] refactor: remove copy-discussions.sh script for transferring discussions between repositories use the ./scripts/copy-discussions.js instead --- gh-cli/copy-discussions.sh | 1369 ------------------------------------ 1 file changed, 1369 deletions(-) delete mode 100755 gh-cli/copy-discussions.sh diff --git a/gh-cli/copy-discussions.sh b/gh-cli/copy-discussions.sh deleted file mode 100755 index bc3fd9b..0000000 --- a/gh-cli/copy-discussions.sh +++ /dev/null @@ -1,1369 +0,0 @@ -#!/bin/bash - -# Copy Discussions between repositories in different enterprises -# This script copies discussions from a source repository to a target repository -# using different GitHub tokens for authentication to support cross-enterprise copying -# -# Usage: ./copy-discussions.sh -# Example: ./copy-discussions.sh source-org repo1 target-org repo2 -# -# Prerequisites: -# - SOURCE_TOKEN environment variable with read access to source repository 
discussions -# - TARGET_TOKEN environment variable with write access to target repository discussions -# - Both tokens must have the 'public_repo' or 'repo' scope -# - GitHub CLI (gh) must be installed -# -# Note: This script copies discussion content, comments, replies, and basic metadata. -# Reactions and other advanced interactions are not copied. -# Attachments (images and files) will not copy over - they need manual handling. - -# TODO: Polls don't copy options -# TODO: mark as answers? -# TODO: copy closed discussions and mark as closed in target? - -set -e - -# Color codes for output -RED='\033[0;31m' -GREEN='\033[0;32m' -YELLOW='\033[1;33m' -NC='\033[0m' # No Color - -# Function to display usage -usage() { - echo "Usage: $0 " - echo "" - echo "Copy discussions from source repository to target repository" - echo "" - echo "Required environment variables:" - echo " SOURCE_TOKEN - GitHub token with read access to source repository" - echo " TARGET_TOKEN - GitHub token with write access to target repository" - echo "" - echo "Example:" - echo " $0 source-org repo1 target-org repo2" - exit 1 -} - -# Function to log messages -log() { - echo -e "${GREEN}[$(date +'%Y-%m-%d %H:%M:%S')]${NC} $1" >&2 -} - -# Function to log warnings -warn() { - echo -e "${YELLOW}[$(date +'%Y-%m-%d %H:%M:%S')] WARNING:${NC} $1" >&2 -} - -# Function to log errors -error() { - echo -e "${RED}[$(date +'%Y-%m-%d %H:%M:%S')] ERROR:${NC} $1" >&2 -} - -# Function to handle rate limiting -rate_limit_sleep() { - local seconds=${1:-2} - log "Waiting ${seconds}s to avoid rate limiting..." - sleep "$seconds" -} - -# Function to handle rate limit errors with exponential backoff -handle_rate_limit_error() { - local response="$1" - local attempt=${2:-1} - - if echo "$response" | grep -q "exceeded a secondary rate limit\|rate limit"; then - local wait_time=$((attempt * 60)) # Start with 1 minute, then 2, 3, etc. - warn "Hit rate limit! 
Waiting ${wait_time} seconds before retrying (attempt $attempt)" - sleep "$wait_time" - return 0 # Indicates we should retry - fi - - return 1 # Not a rate limit error -} - -# Function to check if a command exists -check_command() { - if ! command -v "$1" &> /dev/null; then - error "$1 is required but not installed. Please install $1 and try again." - exit 1 - fi -} - -# Check for required dependencies -log "Checking for required dependencies..." -check_command "gh" -check_command "jq" -log "✓ All required dependencies are installed" - -# Validate input parameters -if [ $# -ne 4 ]; then - usage -fi - -SOURCE_ORG="$1" -SOURCE_REPO="$2" -TARGET_ORG="$3" -TARGET_REPO="$4" - -# Initialize tracking variables -missing_categories=() - -# Validate required environment variables -if [ -z "$SOURCE_TOKEN" ]; then - error "SOURCE_TOKEN environment variable is required" - exit 1 -fi - -if [ -z "$TARGET_TOKEN" ]; then - error "TARGET_TOKEN environment variable is required" - exit 1 -fi - -log "Starting discussion copy process..." 
-log "Source: $SOURCE_ORG/$SOURCE_REPO" -log "Target: $TARGET_ORG/$TARGET_REPO" -log "" -log "⚡ This script uses conservative rate limiting to avoid GitHub API limits" -log " If you encounter rate limit errors, the script will automatically retry" -log "" - -# GraphQL query to fetch discussions from source repository -fetch_discussions_query=' -query($owner: String!, $name: String!, $cursor: String) { - repository(owner: $owner, name: $name) { - discussions(first: 100, after: $cursor, orderBy: {field: CREATED_AT, direction: ASC}) { - pageInfo { - hasNextPage - endCursor - } - nodes { - id - title - body - category { - id - name - slug - description - emoji - } - labels(first: 100) { - nodes { - id - name - color - description - } - } - author { - login - } - createdAt - closed - locked - upvoteCount - url - number - - } - } - } -}' - -# GraphQL query to fetch discussion categories from target repository -fetch_categories_query=' -query($owner: String!, $name: String!) { - repository(owner: $owner, name: $name) { - discussionCategories(first: 100) { - nodes { - id - name - slug - emoji - description - } - } - } -}' - -# GraphQL query to check if discussions are enabled -check_discussions_enabled_query=' -query($owner: String!, $name: String!) { - repository(owner: $owner, name: $name) { - hasDiscussionsEnabled - discussionCategories(first: 1) { - nodes { - id - } - } - } -}' - -# GraphQL query to fetch comments for a specific discussion -fetch_discussion_comments_query=' -query($discussionId: ID!) { - node(id: $discussionId) { - ... on Discussion { - comments(first: 100) { - nodes { - id - body - author { - login - } - createdAt - upvoteCount - replies(first: 50) { - nodes { - id - body - author { - login - } - createdAt - upvoteCount - } - } - } - } - } - } -}' - -# GraphQL query to fetch labels from target repository -fetch_labels_query=' -query($owner: String!, $name: String!) 
{ - repository(owner: $owner, name: $name) { - labels(first: 100) { - nodes { - id - name - color - description - } - } - } -}' - -# GraphQL mutation to create label in target repository -create_label_mutation=' -mutation($repositoryId: ID!, $name: String!, $color: String!, $description: String) { - createLabel(input: { - repositoryId: $repositoryId, - name: $name, - color: $color, - description: $description - }) { - label { - id - name - } - } -}' - -# GraphQL mutation to create discussion in target repository -create_discussion_mutation=' -mutation($repositoryId: ID!, $categoryId: ID!, $title: String!, $body: String!) { - createDiscussion(input: { - repositoryId: $repositoryId, - categoryId: $categoryId, - title: $title, - body: $body - }) { - clientMutationId - discussion { - id - title - url - number - } - } -}' - -# GraphQL mutation to add labels to discussion -add_labels_to_discussion_mutation=' -mutation($labelableId: ID!, $labelIds: [ID!]!) { - addLabelsToLabelable(input: { - labelableId: $labelableId, - labelIds: $labelIds - }) { - labelable { - labels(first: 100) { - nodes { - name - } - } - } - } -}' - -# GraphQL mutation to add comment to discussion -add_discussion_comment_mutation=' -mutation($discussionId: ID!, $body: String!) { - addDiscussionComment(input: { - discussionId: $discussionId, - body: $body - }) { - comment { - id - body - createdAt - } - } -}' - -# GraphQL mutation to add reply to discussion comment -add_discussion_comment_reply_mutation=' -mutation($discussionId: ID!, $replyToId: ID!, $body: String!) { - addDiscussionComment(input: { - discussionId: $discussionId, - replyToId: $replyToId, - body: $body - }) { - comment { - id - body - createdAt - } - } -}' - -# Function to get repository ID -get_repository_id() { - local org=$1 - local repo=$2 - local token=$3 - - local query=' - query($owner: String!, $name: String!) 
{ - repository(owner: $owner, name: $name) { - id - } - }' - - GH_TOKEN="$token" gh api graphql \ - -f query="$query" \ - -f owner="$org" \ - -f name="$repo" \ - --jq '.data.repository.id' -} - -# Function to fetch discussion categories from target repository -# Function to check if discussions are enabled in target repository -check_discussions_enabled() { - log "Checking if discussions are enabled in target repository..." - - rate_limit_sleep 4 - - local response - response=$(GH_TOKEN="$TARGET_TOKEN" gh api graphql \ - -f query="$check_discussions_enabled_query" \ - -f owner="$TARGET_ORG" \ - -f name="$TARGET_REPO" 2>&1) - - if [ $? -ne 0 ]; then - error "Failed to check discussions status: $response" - return 1 - fi - - local has_discussions_enabled - has_discussions_enabled=$(echo "$response" | jq -r '.data.repository.hasDiscussionsEnabled // false') - - if [ "$has_discussions_enabled" != "true" ]; then - error "Discussions are not enabled in the target repository: $TARGET_ORG/$TARGET_REPO" - error "Please enable discussions in the repository settings before running this script." - return 1 - fi - - log "✓ Discussions are enabled in target repository" - return 0 -} - -# Function to fetch available categories from target repository -fetch_target_categories() { - log "Fetching available categories from target repository..." - - rate_limit_sleep 4 - - local response - response=$(GH_TOKEN="$TARGET_TOKEN" gh api graphql \ - -f query="$fetch_categories_query" \ - -f owner="$TARGET_ORG" \ - -f name="$TARGET_REPO" 2>&1) - - if [ $? -ne 0 ]; then - error "Failed to fetch categories: $response" - return 1 - fi - - # Check for GraphQL errors - if echo "$response" | jq -e '.errors // empty' > /dev/null 2>&1; then - error "GraphQL error in fetch categories: $(echo "$response" | jq -r '.errors[0].message // "Unknown error"')" - return 1 - fi - - target_categories=$(echo "$response" | jq -c '.data.repository.discussionCategories.nodes[]?' 
2>/dev/null) - - if [ -z "$target_categories" ]; then - warn "No discussion categories found in target repository" - else - local category_count - category_count=$(echo "$target_categories" | wc -l | tr -d ' ') - log "Found $category_count categories in target repository" - fi -} - -# Function to find matching category ID by name or slug -find_category_id() { - local source_category_name="$1" - local source_category_slug="$2" - - echo "$target_categories" | jq -r --arg name "$source_category_name" --arg slug "$source_category_slug" ' - select(.name == $name or .slug == $slug) | .id - ' | head -1 -} - -# Function to create discussion category if it doesn't exist -create_or_get_category_id() { - local category_name="$1" - local category_slug="$2" - local category_description="$3" - local category_emoji="$4" - - # First try to find existing category - - # Validate target_categories JSON - if ! echo "$target_categories" | jq . > /dev/null 2>&1; then - error "target_categories contains invalid JSON:" - error "$target_categories" - return 1 - fi - - local existing_id - existing_id=$(echo "$target_categories" | jq -r --arg name "$category_name" --arg slug "$category_slug" ' - select(.name == $name or .slug == $slug) | .id - ' | head -1) - - if [ -n "$existing_id" ] && [ "$existing_id" != "null" ]; then - echo "$existing_id" - return 0 - fi - - # Category doesn't exist - GitHub doesn't support creating discussion categories via API - warn "Category '$category_name' ($category_slug) not found in target repository" - - # Track missing category for summary - local found=false - for existing_cat in "${missing_categories[@]}"; do - if [ "$existing_cat" = "$category_name" ]; then - found=true - break - fi - done - if [ "$found" = false ]; then - missing_categories+=("$category_name") - fi - - # Try to find "General" category as fallback - local general_id - general_id=$(echo "$target_categories" | jq -r ' - select(.name == "General" or .slug == "general") | .id - ' | head -1) - 
- if [ -n "$general_id" ] && [ "$general_id" != "null" ]; then - warn "Using 'General' category as fallback for '$category_name'" - echo "$general_id" - return 0 - fi - - # If no General category, use the first available category - local first_category_id - first_category_id=$(echo "$target_categories" | jq -r '.id' | head -1) - - if [ -n "$first_category_id" ] && [ "$first_category_id" != "null" ]; then - local first_category_name - first_category_name=$(echo "$target_categories" | jq -r '.name' | head -1) - warn "Using '$first_category_name' category as fallback for '$category_name'" - echo "$first_category_id" - return 0 - fi - - error "No available categories found in target repository to use as fallback" - return 1 -} - -# Function to fetch labels from target repository -fetch_target_labels() { - log "Fetching labels from target repository..." - - local max_retries=3 - local attempt=1 - - while [ $attempt -le $max_retries ]; do - rate_limit_sleep 3 # Increased default wait time - - local response - response=$(GH_TOKEN="$TARGET_TOKEN" gh api graphql \ - -f query="$fetch_labels_query" \ - -f owner="$TARGET_ORG" \ - -f name="$TARGET_REPO" 2>&1) - - local exit_code=$? - - if [ $exit_code -eq 0 ]; then - # Success, process the response - break - else - # Check if it's a rate limit error - if handle_rate_limit_error "$response" "$attempt"; then - attempt=$((attempt + 1)) - log "Retrying labels fetch (attempt $attempt/$max_retries)..." - continue - else - error "Failed to fetch labels: $response" - return 1 - fi - fi - done - - if [ $attempt -gt $max_retries ]; then - error "Failed to fetch labels after $max_retries attempts due to rate limiting" - return 1 - fi - - # Check if response is valid JSON - if ! echo "$response" | jq . 
> /dev/null 2>&1; then - error "Invalid JSON response from labels API: $response" - return 1 - fi - - # Check for GraphQL errors - if echo "$response" | jq -e '.errors // empty' > /dev/null 2>&1; then - error "GraphQL error in fetch labels: $(echo "$response" | jq -r '.errors[0].message // "Unknown error"')" - return 1 - fi - - echo "$response" | jq -c '.data.repository.labels.nodes[]?' 2>/dev/null -} - -# Function to fetch comments for a specific discussion -fetch_discussion_comments() { - local discussion_id="$1" - - log "Fetching comments for discussion $discussion_id..." - - rate_limit_sleep 2 - - local response - response=$(GH_TOKEN="$SOURCE_TOKEN" gh api graphql \ - -f query="$fetch_discussion_comments_query" \ - -f discussionId="$discussion_id" 2>&1) - - if [ $? -ne 0 ]; then - error "Failed to fetch comments for discussion $discussion_id: $response" - return 1 - fi - - # Check for GraphQL errors - if echo "$response" | jq -e '.errors // empty' > /dev/null 2>&1; then - error "GraphQL error in fetch comments: $(echo "$response" | jq -r '.errors[0].message // "Unknown error"')" - return 1 - fi - - # Extract comments - local comments - comments=$(echo "$response" | jq -c '.data.node.comments.nodes // []' 2>/dev/null) - - if [ -z "$comments" ]; then - log "No comments found for discussion" - echo "[]" - else - echo "$comments" - fi -} - -# Function to find matching label ID by name -find_label_id() { - local label_name="$1" - - echo "$target_labels" | jq -r --arg name "$label_name" ' - select(.name == $name) | .id - ' | head -1 -} - -# Function to create label if it doesn't exist -create_or_get_label_id() { - local label_name="$1" - local label_color="$2" - local label_description="$3" - - # First try to find existing label - local existing_id - existing_id=$(find_label_id "$label_name") - - if [ -n "$existing_id" ] && [ "$existing_id" != "null" ]; then - echo "$existing_id" - return 0 - fi - - # Label doesn't exist, create it - log "Creating new label: 
'$label_name'" - - rate_limit_sleep 3 - - local response - response=$(GH_TOKEN="$TARGET_TOKEN" gh api graphql \ - -f query="$create_label_mutation" \ - -f repositoryId="$target_repo_id" \ - -f name="$label_name" \ - -f color="$label_color" \ - -f description="$label_description" 2>&1) - - if [ $? -eq 0 ]; then - local new_label_id - new_label_id=$(echo "$response" | jq -r '.data.createLabel.label.id') - - if [ -n "$new_label_id" ] && [ "$new_label_id" != "null" ]; then - log "✓ Created label '$label_name' with ID: $new_label_id" - - # Update our local cache of target labels (if target_labels is an array) - if echo "$target_labels" | jq -e 'type == "array"' >/dev/null 2>&1; then - target_labels=$(echo "$target_labels" | jq --arg id "$new_label_id" --arg name "$label_name" --arg color "$label_color" --arg desc "$label_description" '. + [{id: $id, name: $name, color: $color, description: $desc}]') - else - # If target_labels is not an array, convert it - target_labels=$(jq -n --arg id "$new_label_id" --arg name "$label_name" --arg color "$label_color" --arg desc "$label_description" '[{id: $id, name: $name, color: $color, description: $desc}]') - fi - - echo "$new_label_id" - return 0 - fi - fi - - error "Failed to create label '$label_name': $response" - return 1 -} - -# Function to add labels to a discussion -add_labels_to_discussion() { - local discussion_id="$1" - shift - local label_ids=("$@") - - if [ ${#label_ids[@]} -eq 0 ]; then - return 0 - fi - - # Convert array to JSON array format for GraphQL - local label_ids_json - label_ids_json=$(printf '%s\n' "${label_ids[@]}" | jq -R . | jq -s . | jq -c .) 
- - log "Adding ${#label_ids[@]} labels to discussion" - log "Discussion ID: $discussion_id" - log "Label IDs (compact JSON): $label_ids_json" - - rate_limit_sleep 2 - - # Construct the full GraphQL request with variables - local graphql_request - graphql_request=$(jq -n \ - --arg query "$add_labels_to_discussion_mutation" \ - --arg labelableId "$discussion_id" \ - --argjson labelIds "$label_ids_json" \ - '{ - query: $query, - variables: { - labelableId: $labelableId, - labelIds: $labelIds - } - }') - - log "GraphQL request: $graphql_request" - - local response - response=$(GH_TOKEN="$TARGET_TOKEN" gh api graphql --input - <<< "$graphql_request" 2>&1) - - local api_exit_code=$? - - log "GraphQL API exit code: $api_exit_code" - log "GraphQL API response: $response" - - if [ $api_exit_code -eq 0 ]; then - # Check if there are any errors in the response - local errors - errors=$(echo "$response" | jq -r '.errors // empty | .[] | .message' 2>/dev/null) - if [ -n "$errors" ]; then - error "GraphQL errors in response: $errors" - return 1 - fi - - log "✓ Successfully added labels to discussion" - return 0 - else - error "Failed to add labels to discussion (exit code: $api_exit_code): $response" - return 1 - fi -} - -# Function to add comment to discussion -add_discussion_comment() { - local discussion_id="$1" - local comment_body="$2" - local original_author="$3" - local original_created="$4" - - # Add metadata to comment body with collapsible section - local enhanced_body="$comment_body"$'\n\n'"---"$'\n\n'"
"$'\n'"Original comment details"$'\n\n'"**Original author:** @$original_author"$'\n'"**Created:** $original_created"$'\n\n'"
" - - log "Adding comment to discussion" - - rate_limit_sleep 2 - - local response - response=$(GH_TOKEN="$TARGET_TOKEN" gh api graphql \ - -f query="$add_discussion_comment_mutation" \ - -f discussionId="$discussion_id" \ - -f body="$enhanced_body" 2>&1) - - local exit_code=$? - - if [ $exit_code -eq 0 ]; then - local comment_id - comment_id=$(echo "$response" | jq -r '.data.addDiscussionComment.comment.id // empty') - - if [ -n "$comment_id" ] && [ "$comment_id" != "null" ]; then - log "✓ Added comment with ID: $comment_id" - echo "$comment_id" - return 0 - else - error "Failed to extract comment ID from response: $response" - return 1 - fi - else - error "Failed to add comment: $response" - return 1 - fi -} - -# Function to add reply to discussion comment -add_discussion_comment_reply() { - local discussion_id="$1" - local parent_comment_id="$2" - local reply_body="$3" - local original_author="$4" - local original_created="$5" - - # Add metadata to reply body with collapsible section - local enhanced_body="$reply_body"$'\n\n'"---"$'\n\n'"
"$'\n'"Original reply details"$'\n\n'"**Original author:** @$original_author"$'\n'"**Created:** $original_created"$'\n\n'"
" - - log "Adding reply to comment $parent_comment_id" - - rate_limit_sleep 2 - - local response - response=$(GH_TOKEN="$TARGET_TOKEN" gh api graphql \ - -f query="$add_discussion_comment_reply_mutation" \ - -f discussionId="$discussion_id" \ - -f replyToId="$parent_comment_id" \ - -f body="$enhanced_body" 2>&1) - - local exit_code=$? - - if [ $exit_code -eq 0 ]; then - local reply_id - reply_id=$(echo "$response" | jq -r '.data.addDiscussionComment.comment.id // empty') - - if [ -n "$reply_id" ] && [ "$reply_id" != "null" ]; then - log "✓ Added reply with ID: $reply_id" - echo "$reply_id" - return 0 - else - error "Failed to extract reply ID from response: $response" - return 1 - fi - else - error "Failed to add reply: $response" - return 1 - fi -} - -# Function to copy discussion comments -copy_discussion_comments() { - local discussion_id="$1" - local comments_json="$2" - - if [ -z "$comments_json" ] || [ "$comments_json" = "null" ]; then - log "No comments to copy for this discussion" - return 0 - fi - - local comment_count - comment_count=$(echo "$comments_json" | jq -r 'length // 0') - - if [ "$comment_count" -eq 0 ]; then - log "No comments to copy for this discussion" - return 0 - fi - - log "Copying $comment_count comments..." 
- total_comments=$((total_comments + comment_count)) - - local comment_index=0 - while [ $comment_index -lt "$comment_count" ]; do - local comment - comment=$(echo "$comments_json" | jq -r ".[$comment_index]") - - if [ "$comment" != "null" ]; then - local comment_body author created_at replies - comment_body=$(echo "$comment" | jq -r '.body // ""') - author=$(echo "$comment" | jq -r '.author.login // "unknown"') - created_at=$(echo "$comment" | jq -r '.createdAt // ""') - replies=$(echo "$comment" | jq -c '.replies.nodes // []') - - if [ -n "$comment_body" ]; then - log "Copying comment by @$author" - - # Add the comment - set +e # Don't exit on error - local new_comment_id - new_comment_id=$(add_discussion_comment "$discussion_id" "$comment_body" "$author" "$created_at") - local comment_result=$? - set -e - - if [ $comment_result -eq 0 ] && [ -n "$new_comment_id" ]; then - copied_comments=$((copied_comments + 1)) - # Copy replies if any exist - local reply_count - reply_count=$(echo "$replies" | jq -r 'length // 0') - - if [ "$reply_count" -gt 0 ]; then - log "Copying $reply_count replies to comment..." 
- - local reply_index=0 - while [ $reply_index -lt "$reply_count" ]; do - local reply - reply=$(echo "$replies" | jq -r ".[$reply_index]") - - if [ "$reply" != "null" ]; then - local reply_body reply_author reply_created - reply_body=$(echo "$reply" | jq -r '.body // ""') - reply_author=$(echo "$reply" | jq -r '.author.login // "unknown"') - reply_created=$(echo "$reply" | jq -r '.createdAt // ""') - - if [ -n "$reply_body" ]; then - log "Copying reply by @$reply_author" - - set +e - add_discussion_comment_reply "$discussion_id" "$new_comment_id" "$reply_body" "$reply_author" "$reply_created" >/dev/null - set -e - fi - fi - - reply_index=$((reply_index + 1)) - done - fi - else - warn "Failed to copy comment by @$author, skipping replies" - fi - fi - fi - - comment_index=$((comment_index + 1)) - done - - log "✓ Finished copying comments" -} - -# Function to create discussion -create_discussion() { - local repo_id="$1" - local category_id="$2" - local title="$3" - local body="$4" - local source_url="$5" - local source_author="$6" - local source_created="$7" - - # Add metadata to body with collapsible section - local enhanced_body="$body"$'\n\n'"---"$'\n\n'"
"$'\n'"Original discussion details"$'\n\n'"**Original author:** @$source_author"$'\n'"**Created:** $source_created"$'\n'"**Source:** $source_url"$'\n\n'"
" - - log "Creating discussion: '$title'" - - rate_limit_sleep 3 - - local response - response=$(GH_TOKEN="$TARGET_TOKEN" gh api graphql \ - -f query="$create_discussion_mutation" \ - -f repositoryId="$repo_id" \ - -f categoryId="$category_id" \ - -f title="$title" \ - -f body="$enhanced_body" 2>&1) - - local exit_code=$? - - if [ $exit_code -eq 0 ]; then - echo "$response" - return 0 - else - error "Failed to create discussion: $response" - return $exit_code - fi -} - -# Get source repository ID to verify access -log "Verifying access to source repository..." -source_repo_id=$(get_repository_id "$SOURCE_ORG" "$SOURCE_REPO" "$SOURCE_TOKEN") -if [ -z "$source_repo_id" ]; then - error "Failed to get source repository ID. Check if repository exists and SOURCE_TOKEN has access." - exit 1 -fi -log "Source repository ID: $source_repo_id" - -# Check if discussions are enabled in source repository -log "Checking if discussions are enabled in source repository..." -rate_limit_sleep 2 - -source_discussions_check=$(GH_TOKEN="$SOURCE_TOKEN" gh api graphql \ - -f query="$check_discussions_enabled_query" \ - -f owner="$SOURCE_ORG" \ - -f name="$SOURCE_REPO" 2>&1) - -if [ $? -ne 0 ]; then - error "Failed to check discussions status in source repository: $source_discussions_check" - exit 1 -fi - -source_has_discussions=$(echo "$source_discussions_check" | jq -r '.data.repository.hasDiscussionsEnabled // false') -if [ "$source_has_discussions" != "true" ]; then - error "Discussions are not enabled in the source repository: $SOURCE_ORG/$SOURCE_REPO" - exit 1 -fi -log "✓ Discussions are enabled in source repository" - -# Get target repository ID -log "Getting target repository ID..." -target_repo_id=$(get_repository_id "$TARGET_ORG" "$TARGET_REPO" "$TARGET_TOKEN") -if [ -z "$target_repo_id" ]; then - error "Failed to get target repository ID. Check if repository exists and token has access." 
- exit 1 -fi -log "Target repository ID: $target_repo_id" - -# Check if discussions are enabled in target repository -if ! check_discussions_enabled; then - exit 1 -fi - -# Fetch target repository categories -if ! fetch_target_categories; then - exit 1 -fi - -if [ -z "$target_categories" ]; then - error "Failed to fetch discussion categories from target repository" - exit 1 -fi - -log "Available categories in target repository:" -echo "$target_categories" | jq -r '" " + .name + " (" + .slug + ")"' - -# Fetch target repository labels -target_labels=$(fetch_target_labels) -if [ $? -ne 0 ] || [ -z "$target_labels" ]; then - warn "Failed to fetch labels or no labels found in target repository" - target_labels="[]" - log "Available labels in target repository: 0 labels" -else - # Count labels properly - label_count=$(echo "$target_labels" | jq -s 'length' 2>/dev/null || echo "0") - log "Available labels in target repository: $label_count labels" -fi - -# Initialize counters -total_discussions=0 -created_discussions=0 -skipped_discussions=0 -total_comments=0 -copied_comments=0 - -# Function to process discussions page -process_discussions_page() { - local cursor="$1" - - # Build cursor parameter - local cursor_param="" - if [ -n "$cursor" ]; then - cursor_param="-f cursor=$cursor" - fi - - log "Fetching discussions page (cursor: ${cursor:-"null"})..." - - rate_limit_sleep 3 - - # Fetch discussions from source repository - log "Executing GraphQL query with parameters:" - log " owner: $SOURCE_ORG" - log " name: $SOURCE_REPO" - log " cursor: ${cursor:-"null"}" - - local response - response=$(GH_TOKEN="$SOURCE_TOKEN" gh api graphql \ - -f query="$fetch_discussions_query" \ - -f owner="$SOURCE_ORG" \ - -f name="$SOURCE_REPO" \ - $cursor_param 2>&1) - - local api_exit_code=$? - log "API call exit code: $api_exit_code" - log "Response length: ${#response} characters" - log "First 200 chars of response: ${response:0:200}" - - # Debug: Show what we got back - if ! 
echo "$response" | jq . > /dev/null 2>&1; then - error "Invalid JSON response from source discussions API!" - error "Full response:" - error "$response" - error "---" - error "API exit code was: $api_exit_code" - error "This could be:" - error " 1. Authentication issue with SOURCE_TOKEN" - error " 2. Repository access permissions" - error " 3. Repository doesn't exist or discussions disabled" - error " 4. Network/API connectivity issue" - error " 5. GraphQL query syntax error" - return 1 - fi - - # Check for GraphQL errors - if echo "$response" | jq -e '.errors // empty' > /dev/null 2>&1; then - error "GraphQL error in fetch discussions: $(echo "$response" | jq -r '.errors[0].message // "Unknown error"')" - return 1 - fi - - local discussions - discussions=$(echo "$response" | jq -c '.data.repository.discussions.nodes[]' 2>&1) - local jq_extract_exit_code=$? - - log "JQ extraction exit code: $jq_extract_exit_code" - log "Extracted discussions from response" - log "Discussions data length: ${#discussions} characters" - - if [ $jq_extract_exit_code -ne 0 ]; then - error "Failed to extract discussions with jq:" - error "$discussions" - return 1 - fi - - if [ -z "$discussions" ]; then - log "No discussions found on this page" - log "Checking response structure:" - echo "$response" | jq '.data.repository.discussions' 2>/dev/null || log "Failed to parse discussions structure" - return 1 - fi - - local discussion_count - discussion_count=$(echo "$discussions" | wc -l | tr -d ' ') - log "Found $discussion_count discussions to process on this page" - - # Process each discussion - local discussion_counter=0 - log "Starting to iterate through discussions..." 
- log "About to process discussions with while loop" - - while IFS= read -r discussion; do - discussion_counter=$((discussion_counter + 1)) - log "=== DISCUSSION $discussion_counter ===" - - if [ -z "$discussion" ]; then - log "Skipping empty discussion entry at position $discussion_counter" - continue - fi - - total_discussions=$((total_discussions + 1)) - - log "Processing discussion $discussion_counter of this page (total: $total_discussions)" - - # Show the COMPLETE JSON for debugging - log "=== COMPLETE DISCUSSION JSON ===" - printf '%s\n' "$discussion" - log "=== END COMPLETE JSON ===" - - # Debug: Show what we're trying to parse - log "Discussion data length: ${#discussion} characters" - log "Discussion data (first 200 chars): ${discussion:0:200}" - log "Discussion data (last 200 chars): ${discussion: -200}" - - # Try to identify the exact jq error - local jq_error - jq_error=$(echo "$discussion" | jq . 2>&1) - local jq_exit_code=$? - - if [ $jq_exit_code -ne 0 ]; then - error "JSON parsing failed with exit code: $jq_exit_code" - error "JQ error message: $jq_error" - error "Full discussion data:" - error "$discussion" - error "---" - error "Hexdump of first 50 bytes:" - echo "$discussion" | head -c 50 | hexdump -C - error "---" - skipped_discussions=$((skipped_discussions + 1)) - continue - fi - - log "✓ Discussion JSON is valid" - - # Extract discussion details with error handling - local title body category_name category_slug category_description category_emoji author created_at source_url number - - log "Extracting title..." - title=$(echo "$discussion" | jq -r '.title' 2>&1) - if [ $? -ne 0 ]; then - error "Failed to extract title: $title" - skipped_discussions=$((skipped_discussions + 1)) - continue - fi - log "Title: $title" - - log "Extracting body..." - body=$(echo "$discussion" | jq -r '.body // ""' 2>&1) - if [ $? 
-ne 0 ]; then - error "Failed to extract body: $body" - skipped_discussions=$((skipped_discussions + 1)) - continue - fi - - log "Extracting category details..." - category_name=$(echo "$discussion" | jq -r '.category.name' 2>&1) - category_slug=$(echo "$discussion" | jq -r '.category.slug' 2>&1) - category_description=$(echo "$discussion" | jq -r '.category.description // ""' 2>&1) - category_emoji=$(echo "$discussion" | jq -r '.category.emoji // ":speech_balloon:"' 2>&1) - - log "Extracting author..." - author=$(echo "$discussion" | jq -r '.author.login // "unknown"' 2>&1) - if [ $? -ne 0 ]; then - error "Failed to extract author: $author" - skipped_discussions=$((skipped_discussions + 1)) - continue - fi - - log "Extracting createdAt..." - created_at=$(echo "$discussion" | jq -r '.createdAt' 2>&1) - if [ $? -ne 0 ]; then - error "Failed to extract createdAt: $created_at" - skipped_discussions=$((skipped_discussions + 1)) - continue - fi - - log "Extracting url..." - source_url=$(echo "$discussion" | jq -r '.url' 2>&1) - if [ $? -ne 0 ]; then - error "Failed to extract url: $source_url" - skipped_discussions=$((skipped_discussions + 1)) - continue - fi - - log "Extracting number..." - number=$(echo "$discussion" | jq -r '.number' 2>&1) - if [ $? -ne 0 ]; then - error "Failed to extract number: $number" - skipped_discussions=$((skipped_discussions + 1)) - continue - fi - - # Get or create category in target repository - log "Getting/creating category: '$category_name' ($category_slug)" - local target_category_id - set +e # Temporarily disable exit on error - target_category_id=$(create_or_get_category_id "$category_name" "$category_slug" "$category_description" "$category_emoji") - local category_exit_code=$? 
- set -e # Re-enable exit on error - - if [ $category_exit_code -ne 0 ]; then - error "create_or_get_category_id failed with exit code: $category_exit_code" - error "Output was: $target_category_id" - skipped_discussions=$((skipped_discussions + 1)) - continue - fi - - if [ -z "$target_category_id" ] || [ "$target_category_id" == "null" ]; then - error "Failed to get or create category '$category_name' ($category_slug). Skipping discussion #$number: '$title'" - skipped_discussions=$((skipped_discussions + 1)) - continue - fi - - # Create the discussion - local new_discussion_id new_discussion_response - new_discussion_response=$(create_discussion "$target_repo_id" "$target_category_id" "$title" "$body" "$source_url" "$author" "$created_at") - - if [ $? -eq 0 ]; then - # Extract the discussion ID from the response - new_discussion_id=$(echo "$new_discussion_response" | jq -r '.data.createDiscussion.discussion.id // empty') - - if [ -n "$new_discussion_id" ]; then - created_discussions=$((created_discussions + 1)) - log "✓ Created discussion #$number: '$title'" - - # Process labels if any - local labels - labels=$(echo "$discussion" | jq -c '.labels.nodes[]?') - - if [ -n "$labels" ]; then - local label_ids=() - - # Process each label - while IFS= read -r label; do - if [ -n "$label" ]; then - local label_name label_color label_description label_id - label_name=$(echo "$label" | jq -r '.name') - label_color=$(echo "$label" | jq -r '.color') - label_description=$(echo "$label" | jq -r '.description // ""') - - # Get or create label - log "Processing label: '$label_name' (color: $label_color)" - set +e # Temporarily disable exit on error - label_id=$(create_or_get_label_id "$label_name" "$label_color" "$label_description") - local label_exit_code=$? 
- set -e # Re-enable exit on error - log "Label ID result: '$label_id' (exit code: $label_exit_code)" - - if [ $label_exit_code -eq 0 ] && [ -n "$label_id" ] && [ "$label_id" != "null" ]; then - label_ids+=("$label_id") - log "Added label ID to array: $label_id" - else - log "Skipping invalid label ID: '$label_id' (exit code: $label_exit_code)" - fi - fi - done <<< "$labels" - - log "Finished processing labels. Total label IDs collected: ${#label_ids[@]}" - - # Add labels to the discussion if we have any - if [ ${#label_ids[@]} -gt 0 ]; then - if add_labels_to_discussion "$new_discussion_id" "${label_ids[@]}"; then - log "Completed adding labels to discussion" - else - error "Failed to add labels to discussion, but continuing..." - fi - else - log "No valid labels to add to discussion" - fi - fi - - # Copy discussion comments (always run regardless of labels) - log "Processing comments for discussion..." - local source_discussion_id - source_discussion_id=$(echo "$discussion" | jq -r '.id') - - if [ -n "$source_discussion_id" ] && [ "$source_discussion_id" != "null" ]; then - set +e # Don't exit on error for comment fetching - local comments - comments=$(fetch_discussion_comments "$source_discussion_id") - local fetch_result=$? 
- set -e - - if [ $fetch_result -eq 0 ] && [ -n "$comments" ] && [ "$comments" != "null" ] && [ "$comments" != "[]" ]; then - copy_discussion_comments "$new_discussion_id" "$comments" - else - log "No comments to copy for this discussion" - fi - else - warn "Could not extract source discussion ID for comment fetching" - fi - else - warn "Discussion created but couldn't extract ID from response" - created_discussions=$((created_discussions + 1)) - fi - else - error "Failed to create discussion #$number: '$title'" - skipped_discussions=$((skipped_discussions + 1)) - fi - - log "✅ Finished processing discussion #$number: '$title'" - - # Delay between discussions to avoid rate limiting - sleep 5 - - done <<< "$discussions" - - # Check if there are more pages - local has_next_page next_cursor - has_next_page=$(echo "$response" | jq -r '.data.repository.discussions.pageInfo.hasNextPage') - next_cursor=$(echo "$response" | jq -r '.data.repository.discussions.pageInfo.endCursor') - - log "Pagination info:" - log " hasNextPage: $has_next_page" - log " endCursor: ${next_cursor:-"null"}" - - if [ "$has_next_page" = "true" ]; then - log "Processing next page with cursor: $next_cursor" - process_discussions_page "$next_cursor" - else - log "No more pages to process" - fi -} - -# Test discussions access first -log "Testing discussions access..." -rate_limit_sleep 2 - -test_discussions_query=' -query($owner: String!, $name: String!) { - repository(owner: $owner, name: $name) { - discussions(first: 1) { - totalCount - nodes { - title - } - } - } -}' - -test_response=$(GH_TOKEN="$SOURCE_TOKEN" gh api graphql \ - -f query="$test_discussions_query" \ - -f owner="$SOURCE_ORG" \ - -f name="$SOURCE_REPO" 2>&1) - -if ! echo "$test_response" | jq . 
> /dev/null 2>&1; then - error "Failed to test discussions access:" - error "Raw response: $test_response" - exit 1 -fi - -discussion_count=$(echo "$test_response" | jq -r '.data.repository.discussions.totalCount // 0') -log "Found $discussion_count total discussions in source repository" - -if [ "$discussion_count" -eq 0 ]; then - log "No discussions found in source repository. Nothing to copy." - exit 0 -fi - -# Start processing discussions -log "Starting to fetch and copy discussions..." -process_discussions_page "" - -# Summary -log "Discussion copy completed!" -log "Total discussions found: $total_discussions" -log "Discussions created: $created_discussions" -log "Discussions skipped: $skipped_discussions" -log "Total comments found: $total_comments" -log "Comments copied: $copied_comments" - -if [ ${#missing_categories[@]} -gt 0 ]; then - warn "The following categories were missing and need to be created manually:" - for missing_cat in "${missing_categories[@]}"; do - warn " - $missing_cat" - done - warn "" - warn "To create categories manually:" - warn "1. Go to https://github.com/$TARGET_ORG/$TARGET_REPO/discussions" - warn "2. Click 'New discussion'" - warn "3. Look for category management options" - warn "4. Create the missing categories with appropriate names and descriptions" -fi - -if [ $skipped_discussions -gt 0 ]; then - warn "Some discussions were skipped. Please check the categories in the target repository." -fi - -log "All done! 
✨" \ No newline at end of file From 8335272d20f3b9fb4495b9edeaca36b26e741f49 Mon Sep 17 00:00:00 2001 From: Josh Johanning Date: Wed, 1 Oct 2025 15:10:36 -0500 Subject: [PATCH 18/32] fix: enhance help message for copy-discussions.js script --- scripts/copy-discussions.js | 30 ++++++++++++++++++++++++++++++ 1 file changed, 30 insertions(+) diff --git a/scripts/copy-discussions.js b/scripts/copy-discussions.js index 3d92e03..2ca9c94 100644 --- a/scripts/copy-discussions.js +++ b/scripts/copy-discussions.js @@ -28,10 +28,40 @@ const { Octokit } = require("octokit"); // Parse command line arguments const args = process.argv.slice(2); + +// Check for help flag +if (args.includes('--help') || args.includes('-h')) { + console.log('Copy Discussions between GitHub repositories'); + console.log(''); + console.log('Usage:'); + console.log(' node copy-discussions.js '); + console.log(''); + console.log('Arguments:'); + console.log(' source_org Source organization name'); + console.log(' source_repo Source repository name'); + console.log(' target_org Target organization name'); + console.log(' target_repo Target repository name'); + console.log(''); + console.log('Environment Variables:'); + console.log(' SOURCE_TOKEN GitHub token with read access to source repository discussions'); + console.log(' TARGET_TOKEN GitHub token with write access to target repository discussions'); + console.log(''); + console.log('Example:'); + console.log(' node copy-discussions.js source-org repo1 target-org repo2'); + console.log(''); + console.log('Note:'); + console.log(' - Both tokens must have the "repo" scope'); + console.log(' - This script copies discussion content, comments, replies, polls, reactions,'); + console.log(' locked status, and pinned status'); + console.log(' - Attachments (images and files) will not copy over and require manual handling'); + process.exit(0); +} + if (args.length !== 4) { console.error("Usage: node copy-discussions.js "); console.error("\nExample:"); 
console.error(" node copy-discussions.js source-org repo1 target-org repo2"); + console.error("\nFor more information, use --help"); process.exit(1); } From 5610b6b30e66ec90d89ee48f03fb0c09d24cafe5 Mon Sep 17 00:00:00 2001 From: Josh Johanning Date: Wed, 1 Oct 2025 15:19:07 -0500 Subject: [PATCH 19/32] fix: improve help message and clarify environment variable usage in copy-discussions.js --- scripts/README.md | 16 ++++++++++++++++ scripts/copy-discussions.js | 31 ++++++++++++++++++++++++++----- 2 files changed, 42 insertions(+), 5 deletions(-) diff --git a/scripts/README.md b/scripts/README.md index d6b0910..ba62c13 100644 --- a/scripts/README.md +++ b/scripts/README.md @@ -54,6 +54,22 @@ npm i octokit node ./copy-discussions.js source-org source-repo target-org target-repo ``` +Optional environment variables: + +- `SOURCE_API_URL` - API endpoint for source (defaults to `https://api.github.com`) +- `TARGET_API_URL` - API endpoint for target (defaults to `https://api.github.com`) + +Example with GitHub Enterprise Server: + +```bash +export SOURCE_API_URL=https://github.mycompany.com/api/v3 +export TARGET_API_URL=https://api.github.com +export SOURCE_TOKEN=ghp_abc +export TARGET_TOKEN=ghp_xyz +npm i octokit +node ./copy-discussions.js source-org source-repo target-org target-repo +``` + Features: - Automatically creates missing discussion categories in the target repository diff --git a/scripts/copy-discussions.js b/scripts/copy-discussions.js index 2ca9c94..254ff62 100644 --- a/scripts/copy-discussions.js +++ b/scripts/copy-discussions.js @@ -17,6 +17,10 @@ // - Both tokens must have the 'repo' scope // - Dependencies installed via `npm i octokit` // +// Optional Environment Variables: +// - SOURCE_API_URL: API endpoint for source (defaults to https://api.github.com) +// - TARGET_API_URL: API endpoint for target (defaults to https://api.github.com) +// // Note: This script copies discussion content, comments, replies, polls, reactions, locked status, // and pinned 
status. Reactions are copied as read-only summaries. // Attachments (images and files) will not copy over - they need manual handling. @@ -42,13 +46,24 @@ if (args.includes('--help') || args.includes('-h')) { console.log(' target_org Target organization name'); console.log(' target_repo Target repository name'); console.log(''); - console.log('Environment Variables:'); - console.log(' SOURCE_TOKEN GitHub token with read access to source repository discussions'); - console.log(' TARGET_TOKEN GitHub token with write access to target repository discussions'); + console.log('Environment Variables (Required):'); + console.log(' SOURCE_TOKEN GitHub token with read access to source repository discussions'); + console.log(' TARGET_TOKEN GitHub token with write access to target repository discussions'); + console.log(''); + console.log('Environment Variables (Optional):'); + console.log(' SOURCE_API_URL API endpoint for source (defaults to https://api.github.com)'); + console.log(' TARGET_API_URL API endpoint for target (defaults to https://api.github.com)'); console.log(''); console.log('Example:'); console.log(' node copy-discussions.js source-org repo1 target-org repo2'); console.log(''); + console.log('Example with GHES:'); + console.log(' SOURCE_API_URL=https://github.mycompany.com/api/v3 \\'); + console.log(' TARGET_API_URL=https://api.github.com \\'); + console.log(' SOURCE_TOKEN=ghp_xxx \\'); + console.log(' TARGET_TOKEN=ghp_yyy \\'); + console.log(' node copy-discussions.js source-org repo1 target-org repo2'); + console.log(''); console.log('Note:'); console.log(' - Both tokens must have the "repo" scope'); console.log(' - This script copies discussion content, comments, replies, polls, reactions,'); @@ -78,13 +93,19 @@ if (!process.env.TARGET_TOKEN) { process.exit(1); } +// Get API endpoints from environment variables (optional) +const SOURCE_API_URL = process.env.SOURCE_API_URL || 'https://api.github.com'; +const TARGET_API_URL = process.env.TARGET_API_URL || 
'https://api.github.com'; + // Initialize Octokit instances const sourceOctokit = new Octokit({ - auth: process.env.SOURCE_TOKEN + auth: process.env.SOURCE_TOKEN, + baseUrl: SOURCE_API_URL }); const targetOctokit = new Octokit({ - auth: process.env.TARGET_TOKEN + auth: process.env.TARGET_TOKEN, + baseUrl: TARGET_API_URL }); // Tracking variables From e5b6475fc8d6d9630661eff70c6d93952e4b7c75 Mon Sep 17 00:00:00 2001 From: Josh Johanning Date: Wed, 1 Oct 2025 16:53:05 -0500 Subject: [PATCH 20/32] feat: configurable rate limit --- scripts/copy-discussions.js | 25 +++++++++++++------------ 1 file changed, 13 insertions(+), 12 deletions(-) diff --git a/scripts/copy-discussions.js b/scripts/copy-discussions.js index 254ff62..bf38eb6 100644 --- a/scripts/copy-discussions.js +++ b/scripts/copy-discussions.js @@ -27,6 +27,7 @@ // Configuration const INCLUDE_POLL_MERMAID_CHART = true; // Set to false to disable Mermaid pie chart for polls +const RATE_LIMIT_SLEEP_SECONDS = 2; // Default sleep duration between API calls to avoid rate limiting const { Octokit } = require("octokit"); @@ -136,7 +137,7 @@ async function sleep(seconds) { return new Promise(resolve => setTimeout(resolve, seconds * 1000)); } -async function rateLimitSleep(seconds = 2) { +async function rateLimitSleep(seconds = RATE_LIMIT_SLEEP_SECONDS) { log(`Waiting ${seconds}s to avoid rate limiting...`); await sleep(seconds); } @@ -493,7 +494,7 @@ const MARK_DISCUSSION_COMMENT_AS_ANSWER_MUTATION = ` async function checkDiscussionsEnabled(octokit, owner, repo) { log(`Checking if discussions are enabled in ${owner}/${repo}...`); - await rateLimitSleep(2); + await rateLimitSleep(); try { const response = await octokit.graphql(CHECK_DISCUSSIONS_ENABLED_QUERY, { @@ -517,7 +518,7 @@ async function checkDiscussionsEnabled(octokit, owner, repo) { async function fetchCategories(octokit, owner, repo) { log(`Fetching categories from ${owner}/${repo}...`); - await rateLimitSleep(2); + await rateLimitSleep(); try { const 
response = await octokit.graphql(FETCH_CATEGORIES_QUERY, { @@ -538,7 +539,7 @@ async function fetchCategories(octokit, owner, repo) { async function fetchLabels(octokit, owner, repo) { log(`Fetching labels from ${owner}/${repo}...`); - await rateLimitSleep(2); + await rateLimitSleep(); try { const response = await octokit.graphql(FETCH_LABELS_QUERY, { @@ -606,7 +607,7 @@ function findLabelId(labels, labelName) { async function createLabel(octokit, repositoryId, name, color, description, targetLabels) { log(`Creating new label: '${name}'`); - await rateLimitSleep(2); + await rateLimitSleep(); try { const response = await octokit.graphql(CREATE_LABEL_MUTATION, { @@ -651,7 +652,7 @@ async function addLabelsToDiscussion(octokit, discussionId, labelIds) { log(`Adding ${labelIds.length} labels to discussion`); - await rateLimitSleep(2); + await rateLimitSleep(); try { await octokit.graphql(ADD_LABELS_MUTATION, { @@ -719,7 +720,7 @@ async function createDiscussion(octokit, repositoryId, categoryId, title, body, async function lockDiscussion(octokit, discussionId) { log(`Locking discussion ${discussionId}...`); - await rateLimitSleep(2); + await rateLimitSleep(); try { await octokit.graphql(LOCK_DISCUSSION_MUTATION, { @@ -734,7 +735,7 @@ async function lockDiscussion(octokit, discussionId) { async function fetchDiscussionComments(octokit, discussionId) { log(`Fetching comments for discussion ${discussionId}...`); - await rateLimitSleep(2); + await rateLimitSleep(); try { const response = await octokit.graphql(FETCH_DISCUSSION_COMMENTS_QUERY, { @@ -761,7 +762,7 @@ async function addDiscussionComment(octokit, discussionId, body, originalAuthor, log("Adding comment to discussion"); - await rateLimitSleep(2); + await rateLimitSleep(); try { const response = await octokit.graphql(ADD_DISCUSSION_COMMENT_MUTATION, { @@ -791,7 +792,7 @@ async function addDiscussionCommentReply(octokit, discussionId, replyToId, body, log(`Adding reply to comment ${replyToId}`); - await 
rateLimitSleep(2); + await rateLimitSleep(); try { const response = await octokit.graphql(ADD_DISCUSSION_COMMENT_REPLY_MUTATION, { @@ -812,7 +813,7 @@ async function addDiscussionCommentReply(octokit, discussionId, replyToId, body, async function closeDiscussion(octokit, discussionId) { log("Closing discussion..."); - await rateLimitSleep(2); + await rateLimitSleep(); try { await octokit.graphql(CLOSE_DISCUSSION_MUTATION, { @@ -831,7 +832,7 @@ async function closeDiscussion(octokit, discussionId) { async function markCommentAsAnswer(octokit, commentId) { log("Marking comment as answer..."); - await rateLimitSleep(2); + await rateLimitSleep(); try { await octokit.graphql(MARK_DISCUSSION_COMMENT_AS_ANSWER_MUTATION, { From 02b7fa743f31c60f8533f408127abbb7c2839a34 Mon Sep 17 00:00:00 2001 From: Josh Johanning Date: Wed, 1 Oct 2025 16:57:22 -0500 Subject: [PATCH 21/32] fix: reduce rate limit sleep duration and improve discussion processing efficiency --- scripts/copy-discussions.js | 9 +++++---- 1 file changed, 5 insertions(+), 4 deletions(-) diff --git a/scripts/copy-discussions.js b/scripts/copy-discussions.js index bf38eb6..a4b18fa 100644 --- a/scripts/copy-discussions.js +++ b/scripts/copy-discussions.js @@ -27,7 +27,8 @@ // Configuration const INCLUDE_POLL_MERMAID_CHART = true; // Set to false to disable Mermaid pie chart for polls -const RATE_LIMIT_SLEEP_SECONDS = 2; // Default sleep duration between API calls to avoid rate limiting +const RATE_LIMIT_SLEEP_SECONDS = 0.5; // Default sleep duration between API calls to avoid rate limiting +const DISCUSSION_PROCESSING_DELAY_SECONDS = 5; // Delay between processing discussions const { Octokit } = require("octokit"); @@ -693,7 +694,7 @@ async function createDiscussion(octokit, repositoryId, categoryId, title, body, log(`Creating discussion: '${title}'`); - await rateLimitSleep(3); + await rateLimitSleep(); try { const response = await octokit.graphql(CREATE_DISCUSSION_MUTATION, { @@ -924,7 +925,7 @@ async function 
copyDiscussionComments(octokit, discussionId, comments, answerCom async function processDiscussionsPage(sourceOctokit, targetOctokit, owner, repo, targetRepoId, targetCategories, targetLabels, cursor = null) { log(`Fetching discussions page (cursor: ${cursor || "null"})...`); - await rateLimitSleep(3); + await rateLimitSleep(); try { const response = await sourceOctokit.graphql(FETCH_DISCUSSIONS_QUERY, { @@ -1050,7 +1051,7 @@ async function processDiscussionsPage(sourceOctokit, targetOctokit, owner, repo, log(`✅ Finished processing discussion #${discussion.number}: '${discussion.title}'`); // Delay between discussions - await sleep(5); + await sleep(DISCUSSION_PROCESSING_DELAY_SECONDS); } catch (err) { error(`Failed to create discussion #${discussion.number}: '${discussion.title}' - ${err.message}`); From 95187df801c9a87c42d3b6996ad554bcbb833d43 Mon Sep 17 00:00:00 2001 From: Josh Johanning Date: Thu, 2 Oct 2025 12:31:10 -0500 Subject: [PATCH 22/32] feat: add support for resuming discussion copying with --start-from option --- scripts/copy-discussions.js | 55 +++++++++++++++++++++++++++++++------ 1 file changed, 47 insertions(+), 8 deletions(-) diff --git a/scripts/copy-discussions.js b/scripts/copy-discussions.js index a4b18fa..290df59 100644 --- a/scripts/copy-discussions.js +++ b/scripts/copy-discussions.js @@ -40,7 +40,7 @@ if (args.includes('--help') || args.includes('-h')) { console.log('Copy Discussions between GitHub repositories'); console.log(''); console.log('Usage:'); - console.log(' node copy-discussions.js '); + console.log(' node copy-discussions.js [options]'); console.log(''); console.log('Arguments:'); console.log(' source_org Source organization name'); @@ -48,6 +48,9 @@ if (args.includes('--help') || args.includes('-h')) { console.log(' target_org Target organization name'); console.log(' target_repo Target repository name'); console.log(''); + console.log('Options:'); + console.log(' --start-from Start copying from a specific discussion number 
(useful for resuming)'); + console.log(''); console.log('Environment Variables (Required):'); console.log(' SOURCE_TOKEN GitHub token with read access to source repository discussions'); console.log(' TARGET_TOKEN GitHub token with write access to target repository discussions'); @@ -59,6 +62,9 @@ if (args.includes('--help') || args.includes('-h')) { console.log('Example:'); console.log(' node copy-discussions.js source-org repo1 target-org repo2'); console.log(''); + console.log('Example with resume from discussion #50:'); + console.log(' node copy-discussions.js source-org repo1 target-org repo2 --start-from 50'); + console.log(''); console.log('Example with GHES:'); console.log(' SOURCE_API_URL=https://github.mycompany.com/api/v3 \\'); console.log(' TARGET_API_URL=https://api.github.com \\'); @@ -71,13 +77,33 @@ if (args.includes('--help') || args.includes('-h')) { console.log(' - This script copies discussion content, comments, replies, polls, reactions,'); console.log(' locked status, and pinned status'); console.log(' - Attachments (images and files) will not copy over and require manual handling'); + console.log(' - Use --start-from to resume from a specific discussion in case of interruption'); process.exit(0); } +// Parse --start-from option +let startFromNumber = null; +const startFromIndex = args.indexOf('--start-from'); +if (startFromIndex !== -1) { + if (startFromIndex + 1 >= args.length) { + console.error("ERROR: --start-from requires a discussion number"); + process.exit(1); + } + startFromNumber = parseInt(args[startFromIndex + 1], 10); + if (isNaN(startFromNumber) || startFromNumber < 1) { + console.error("ERROR: --start-from must be a positive integer"); + process.exit(1); + } + // Remove the option and its value from args + args.splice(startFromIndex, 2); +} + if (args.length !== 4) { - console.error("Usage: node copy-discussions.js "); + console.error("Usage: node copy-discussions.js [--start-from ]"); console.error("\nExample:"); 
console.error(" node copy-discussions.js source-org repo1 target-org repo2"); + console.error("\nExample with resume:"); + console.error(" node copy-discussions.js source-org repo1 target-org repo2 --start-from 50"); console.error("\nFor more information, use --help"); process.exit(1); } @@ -690,7 +716,7 @@ async function createDiscussion(octokit, repositoryId, categoryId, title, body, } // Add metadata - enhancedBody += `\n---\n
\nOriginal discussion metadata\n\n_Original discussion by @${sourceAuthor} on ${sourceCreated}_\n_Source: ${sourceUrl}_\n${locked ? '\n_🔒 This discussion was locked in the source repository_' : ''}\n
`; + enhancedBody += `\n\n
\nOriginal discussion metadata\n\n_Original discussion by @${sourceAuthor} on ${sourceCreated}_\n_Source: ${sourceUrl}_\n${locked ? '\n_🔒 This discussion was locked in the source repository_' : ''}\n
`; log(`Creating discussion: '${title}'`); @@ -759,7 +785,7 @@ async function addDiscussionComment(octokit, discussionId, body, originalAuthor, enhancedBody += reactionsMarkdown; } - enhancedBody += `\n---\n
\nOriginal comment metadata\n\n_Original comment by @${originalAuthor} on ${originalCreated}_\n
`; + enhancedBody += `\n\n
\nOriginal comment metadata\n\n_Original comment by @${originalAuthor} on ${originalCreated}_\n
`; log("Adding comment to discussion"); @@ -789,7 +815,7 @@ async function addDiscussionCommentReply(octokit, discussionId, replyToId, body, enhancedBody += reactionsMarkdown; } - enhancedBody += `\n---\n_Original reply by @${originalAuthor} on ${originalCreated}_`; + enhancedBody += `\n\n
\nOriginal reply metadata\n\n_Original reply by @${originalAuthor} on ${originalCreated}_\n
`; log(`Adding reply to comment ${replyToId}`); @@ -922,7 +948,7 @@ async function copyDiscussionComments(octokit, discussionId, comments, answerCom return null; } -async function processDiscussionsPage(sourceOctokit, targetOctokit, owner, repo, targetRepoId, targetCategories, targetLabels, cursor = null) { +async function processDiscussionsPage(sourceOctokit, targetOctokit, owner, repo, targetRepoId, targetCategories, targetLabels, cursor = null, startFromNumber = null) { log(`Fetching discussions page (cursor: ${cursor || "null"})...`); await rateLimitSleep(); @@ -946,6 +972,13 @@ async function processDiscussionsPage(sourceOctokit, targetOctokit, owner, repo, for (const discussion of discussions) { totalDiscussions++; + // Skip discussions before the start-from number + if (startFromNumber !== null && discussion.number < startFromNumber) { + log(`Skipping discussion #${discussion.number}: '${discussion.title}' (before start-from #${startFromNumber})`); + skippedDiscussions++; + continue; + } + log(`\n=== Processing discussion #${discussion.number}: '${discussion.title}' ===`); // Get or fallback category @@ -1070,7 +1103,8 @@ async function processDiscussionsPage(sourceOctokit, targetOctokit, owner, repo, targetRepoId, targetCategories, targetLabels, - pageInfo.endCursor + pageInfo.endCursor, + startFromNumber ); } else { log("No more pages to process"); @@ -1088,6 +1122,9 @@ async function main() { log("Starting discussion copy process..."); log(`Source: ${SOURCE_ORG}/${SOURCE_REPO}`); log(`Target: ${TARGET_ORG}/${TARGET_REPO}`); + if (startFromNumber !== null) { + log(`Resume mode: Starting from discussion #${startFromNumber}`); + } log(""); log("⚡ This script uses conservative rate limiting to avoid GitHub API limits"); log(""); @@ -1133,7 +1170,9 @@ async function main() { SOURCE_REPO, targetRepoId, targetCategories, - targetLabels + targetLabels, + null, + startFromNumber ); // Summary From 8bbdf32c0c09f8d11f53f732d112774c7add4b5d Mon Sep 17 00:00:00 2001 
From: Josh Johanning Date: Thu, 2 Oct 2025 12:31:44 -0500 Subject: [PATCH 23/32] fix: implement retry logic for discussion creation to handle rate limits --- scripts/copy-discussions.js | 228 +++++++++++++++++++++++------------- 1 file changed, 144 insertions(+), 84 deletions(-) diff --git a/scripts/copy-discussions.js b/scripts/copy-discussions.js index 290df59..7ab8864 100644 --- a/scripts/copy-discussions.js +++ b/scripts/copy-discussions.js @@ -29,6 +29,8 @@ const INCLUDE_POLL_MERMAID_CHART = true; // Set to false to disable Mermaid pie chart for polls const RATE_LIMIT_SLEEP_SECONDS = 0.5; // Default sleep duration between API calls to avoid rate limiting const DISCUSSION_PROCESSING_DELAY_SECONDS = 5; // Delay between processing discussions +const RATE_LIMIT_RETRY_DELAY_SECONDS = 60; // Delay when hitting rate limits before retrying +const MAX_RETRIES = 3; // Maximum number of retries for failed operations const { Octokit } = require("octokit"); @@ -169,6 +171,39 @@ async function rateLimitSleep(seconds = RATE_LIMIT_SLEEP_SECONDS) { await sleep(seconds); } +function isRateLimitError(err) { + const message = err.message?.toLowerCase() || ''; + const status = err.status || 0; + + // Check for primary rate limit (403 with rate limit message) + if (status === 403 && (message.includes('rate limit') || message.includes('api rate limit'))) { + return true; + } + + // Check for secondary rate limit (403 with abuse/secondary message) + if (status === 403 && (message.includes('secondary') || message.includes('abuse'))) { + return true; + } + + // Check for retry-after header indication + if (message.includes('retry after') || message.includes('try again later')) { + return true; + } + + return false; +} + +async function handleRateLimitError(err, attemptNumber) { + if (isRateLimitError(err)) { + const waitTime = RATE_LIMIT_RETRY_DELAY_SECONDS * attemptNumber; // Exponential-ish backoff + warn(`Rate limit detected (attempt ${attemptNumber}). 
Waiting ${waitTime}s before retry...`); + warn(`Error details: ${err.message}`); + await sleep(waitTime); + return true; + } + return false; +} + function formatPollData(poll) { if (!poll || !poll.options || poll.options.nodes.length === 0) { return ''; @@ -997,99 +1032,124 @@ async function processDiscussionsPage(sourceOctokit, targetOctokit, owner, repo, // Check if discussion is pinned const isPinned = pinnedDiscussionIds.has(discussion.id); - // Create discussion - try { - const newDiscussion = await createDiscussion( - targetOctokit, - targetRepoId, - targetCategoryId, - discussion.title, - discussion.body || "", - discussion.url, - discussion.author?.login || "unknown", - discussion.createdAt, - discussion.poll || null, - discussion.locked || false, - isPinned, - discussion.reactionGroups || [] - ); - - createdDiscussions++; - log(`✓ Created discussion #${discussion.number}: '${discussion.title}'`); - - // Log additional metadata info - if (discussion.poll && discussion.poll.options?.nodes?.length > 0) { - log(` ℹ️ Poll included with ${discussion.poll.options.nodes.length} options (${discussion.poll.totalVoteCount} total votes)`); - } - if (discussion.locked) { - log(` 🔒 Discussion was locked in source and has been locked in target`); - } - if (isPinned) { - log(` 📌 Discussion was pinned in source (indicator added to body)`); - } - const totalReactions = discussion.reactionGroups?.reduce((sum, group) => sum + (group.users.totalCount || 0), 0) || 0; - if (totalReactions > 0) { - log(` ❤️ ${totalReactions} reaction${totalReactions !== 1 ? 
's' : ''} copied`); - } - - // Process labels - if (discussion.labels.nodes.length > 0) { - const labelIds = []; + // Create discussion with retry logic + let newDiscussion = null; + let createSuccess = false; + + for (let attempt = 1; attempt <= MAX_RETRIES && !createSuccess; attempt++) { + try { + newDiscussion = await createDiscussion( + targetOctokit, + targetRepoId, + targetCategoryId, + discussion.title, + discussion.body || "", + discussion.url, + discussion.author?.login || "unknown", + discussion.createdAt, + discussion.poll || null, + discussion.locked || false, + isPinned, + discussion.reactionGroups || [] + ); - for (const label of discussion.labels.nodes) { - log(`Processing label: '${label.name}' (color: ${label.color})`); - - const labelId = await getOrCreateLabelId( - targetOctokit, - targetRepoId, - label.name, - label.color, - label.description || "", - targetLabels - ); - - if (labelId) { - labelIds.push(labelId); - } - } + createSuccess = true; + createdDiscussions++; + log(`✓ Created discussion #${discussion.number}: '${discussion.title}'`); + + } catch (err) { + // Handle rate limit errors with retry + const shouldRetry = await handleRateLimitError(err, attempt); - if (labelIds.length > 0) { - await addLabelsToDiscussion(targetOctokit, newDiscussion.id, labelIds); + if (!shouldRetry) { + // Not a rate limit error, or unrecoverable error + error(`Failed to create discussion #${discussion.number}: '${discussion.title}' - ${err.message}`); + if (attempt < MAX_RETRIES) { + warn(`Retrying (attempt ${attempt + 1}/${MAX_RETRIES})...`); + await sleep(5); // Brief pause before retry + } else { + error(`Max retries (${MAX_RETRIES}) reached. 
Skipping discussion #${discussion.number}.`); + skippedDiscussions++; + break; + } } + // If shouldRetry is true, loop will continue to next attempt } + } + + // If we exhausted retries without success, skip this discussion + if (!createSuccess) { + continue; + } + + // Log additional metadata info + if (discussion.poll && discussion.poll.options?.nodes?.length > 0) { + log(` ℹ️ Poll included with ${discussion.poll.options.nodes.length} options (${discussion.poll.totalVoteCount} total votes)`); + } + if (discussion.locked) { + log(` 🔒 Discussion was locked in source and has been locked in target`); + } + if (isPinned) { + log(` 📌 Discussion was pinned in source (indicator added to body)`); + } + const totalReactions = discussion.reactionGroups?.reduce((sum, group) => sum + (group.users.totalCount || 0), 0) || 0; + if (totalReactions > 0) { + log(` ❤️ ${totalReactions} reaction${totalReactions !== 1 ? 's' : ''} copied`); + } + + // Process labels + if (discussion.labels.nodes.length > 0) { + const labelIds = []; - // Copy comments - log("Processing comments for discussion..."); - const comments = await fetchDiscussionComments(sourceOctokit, discussion.id); - const answerCommentId = discussion.answer?.id || null; - const newAnswerCommentId = await copyDiscussionComments( - targetOctokit, - newDiscussion.id, - comments, - answerCommentId - ); - - // Mark answer if applicable - if (newAnswerCommentId) { - log("Source discussion has an answer comment, marking it in target..."); - await markCommentAsAnswer(targetOctokit, newAnswerCommentId); + for (const label of discussion.labels.nodes) { + log(`Processing label: '${label.name}' (color: ${label.color})`); + + const labelId = await getOrCreateLabelId( + targetOctokit, + targetRepoId, + label.name, + label.color, + label.description || "", + targetLabels + ); + + if (labelId) { + labelIds.push(labelId); + } } - // Close discussion if it was closed in source - if (discussion.closed) { - log("Source discussion is closed, 
closing target discussion..."); - await closeDiscussion(targetOctokit, newDiscussion.id); + if (labelIds.length > 0) { + await addLabelsToDiscussion(targetOctokit, newDiscussion.id, labelIds); } - - log(`✅ Finished processing discussion #${discussion.number}: '${discussion.title}'`); - - // Delay between discussions - await sleep(DISCUSSION_PROCESSING_DELAY_SECONDS); - - } catch (err) { - error(`Failed to create discussion #${discussion.number}: '${discussion.title}' - ${err.message}`); - skippedDiscussions++; } + + // Copy comments + log("Processing comments for discussion..."); + const comments = await fetchDiscussionComments(sourceOctokit, discussion.id); + const answerCommentId = discussion.answer?.id || null; + const newAnswerCommentId = await copyDiscussionComments( + targetOctokit, + newDiscussion.id, + comments, + answerCommentId + ); + + // Mark answer if applicable + if (newAnswerCommentId) { + log("Source discussion has an answer comment, marking it in target..."); + await markCommentAsAnswer(targetOctokit, newAnswerCommentId); + } + + // Close discussion if it was closed in source + if (discussion.closed) { + log("Source discussion is closed, closing target discussion..."); + await closeDiscussion(targetOctokit, newDiscussion.id); + } + + log(`✅ Finished processing discussion #${discussion.number}: '${discussion.title}'`); + + // Delay between discussions + await sleep(DISCUSSION_PROCESSING_DELAY_SECONDS); } // Process next page if exists From 72c220b0f68d9521e8989627cb9b7842e9d48064 Mon Sep 17 00:00:00 2001 From: Josh Johanning Date: Thu, 2 Oct 2025 12:41:02 -0500 Subject: [PATCH 24/32] docs: update documentation to clarify secondary rate limit guidelines and request limits --- scripts/copy-discussions.js | 7 +++++++ 1 file changed, 7 insertions(+) diff --git a/scripts/copy-discussions.js b/scripts/copy-discussions.js index 7ab8864..e46aacd 100644 --- a/scripts/copy-discussions.js +++ b/scripts/copy-discussions.js @@ -24,6 +24,13 @@ // Note: This 
script copies discussion content, comments, replies, polls, reactions, locked status, // and pinned status. Reactions are copied as read-only summaries. // Attachments (images and files) will not copy over - they need manual handling. +// +// Secondary Rate Limit Guidelines: +// GitHub limits content-generating requests to avoid abuse: +// - No more than 80 content-generating requests per minute +// - No more than 500 content-generating requests per hour +// This script includes automatic retry logic and rate limit handling to stay within these limits. +// See: https://docs.github.com/en/rest/using-the-rest-api/rate-limits-for-the-rest-api?apiVersion=2022-11-28#about-secondary-rate-limits // Configuration const INCLUDE_POLL_MERMAID_CHART = true; // Set to false to disable Mermaid pie chart for polls From 6ea6480e6cd39d668368647c54d252ee2ce09d81 Mon Sep 17 00:00:00 2001 From: Josh Johanning Date: Thu, 2 Oct 2025 12:49:09 -0500 Subject: [PATCH 25/32] feat: enhance rate limit handling with Octokit throttling and remove manual retry logic --- scripts/copy-discussions.js | 93 ++++++++++++++++--------------------- 1 file changed, 40 insertions(+), 53 deletions(-) diff --git a/scripts/copy-discussions.js b/scripts/copy-discussions.js index e46aacd..2f92025 100644 --- a/scripts/copy-discussions.js +++ b/scripts/copy-discussions.js @@ -36,8 +36,7 @@ const INCLUDE_POLL_MERMAID_CHART = true; // Set to false to disable Mermaid pie chart for polls const RATE_LIMIT_SLEEP_SECONDS = 0.5; // Default sleep duration between API calls to avoid rate limiting const DISCUSSION_PROCESSING_DELAY_SECONDS = 5; // Delay between processing discussions -const RATE_LIMIT_RETRY_DELAY_SECONDS = 60; // Delay when hitting rate limits before retrying -const MAX_RETRIES = 3; // Maximum number of retries for failed operations +const MAX_RETRIES = 3; // Maximum number of retries for failed operations (rate limits handled automatically by Octokit) const { Octokit } = require("octokit"); @@ -134,15 
+133,41 @@ if (!process.env.TARGET_TOKEN) { const SOURCE_API_URL = process.env.SOURCE_API_URL || 'https://api.github.com'; const TARGET_API_URL = process.env.TARGET_API_URL || 'https://api.github.com'; -// Initialize Octokit instances +// Configure throttling for rate limit handling +// Octokit's throttling plugin automatically handles both REST and GraphQL rate limits +// by intercepting HTTP 403 responses and retry-after headers +const throttleOptions = { + onRateLimit: (retryAfter, options, octokit) => { + warn(`Primary rate limit exhausted for request ${options.method} ${options.url}`); + if (options.request.retryCount <= 2) { + warn(`Retrying after ${retryAfter} seconds (retry ${options.request.retryCount + 1}/3)`); + return true; + } + error(`Max retries reached for rate limit`); + return false; + }, + onSecondaryRateLimit: (retryAfter, options, octokit) => { + warn(`Secondary rate limit detected for request ${options.method} ${options.url}`); + if (options.request.retryCount <= 2) { + warn(`Retrying after ${retryAfter} seconds (retry ${options.request.retryCount + 1}/3)`); + return true; + } + error(`Max retries reached for secondary rate limit`); + return false; + } +}; + +// Initialize Octokit instances with throttling enabled const sourceOctokit = new Octokit({ auth: process.env.SOURCE_TOKEN, - baseUrl: SOURCE_API_URL + baseUrl: SOURCE_API_URL, + throttle: throttleOptions }); const targetOctokit = new Octokit({ auth: process.env.TARGET_TOKEN, - baseUrl: TARGET_API_URL + baseUrl: TARGET_API_URL, + throttle: throttleOptions }); // Tracking variables @@ -178,39 +203,6 @@ async function rateLimitSleep(seconds = RATE_LIMIT_SLEEP_SECONDS) { await sleep(seconds); } -function isRateLimitError(err) { - const message = err.message?.toLowerCase() || ''; - const status = err.status || 0; - - // Check for primary rate limit (403 with rate limit message) - if (status === 403 && (message.includes('rate limit') || message.includes('api rate limit'))) { - return true; - } 
- - // Check for secondary rate limit (403 with abuse/secondary message) - if (status === 403 && (message.includes('secondary') || message.includes('abuse'))) { - return true; - } - - // Check for retry-after header indication - if (message.includes('retry after') || message.includes('try again later')) { - return true; - } - - return false; -} - -async function handleRateLimitError(err, attemptNumber) { - if (isRateLimitError(err)) { - const waitTime = RATE_LIMIT_RETRY_DELAY_SECONDS * attemptNumber; // Exponential-ish backoff - warn(`Rate limit detected (attempt ${attemptNumber}). Waiting ${waitTime}s before retry...`); - warn(`Error details: ${err.message}`); - await sleep(waitTime); - return true; - } - return false; -} - function formatPollData(poll) { if (!poll || !poll.options || poll.options.nodes.length === 0) { return ''; @@ -1039,7 +1031,7 @@ async function processDiscussionsPage(sourceOctokit, targetOctokit, owner, repo, // Check if discussion is pinned const isPinned = pinnedDiscussionIds.has(discussion.id); - // Create discussion with retry logic + // Create discussion (Octokit throttling plugin handles rate limits automatically) let newDiscussion = null; let createSuccess = false; @@ -1065,22 +1057,17 @@ async function processDiscussionsPage(sourceOctokit, targetOctokit, owner, repo, log(`✓ Created discussion #${discussion.number}: '${discussion.title}'`); } catch (err) { - // Handle rate limit errors with retry - const shouldRetry = await handleRateLimitError(err, attempt); + // Octokit throttling handles rate limits; this catches other errors + error(`Failed to create discussion #${discussion.number}: '${discussion.title}' - ${err.message}`); - if (!shouldRetry) { - // Not a rate limit error, or unrecoverable error - error(`Failed to create discussion #${discussion.number}: '${discussion.title}' - ${err.message}`); - if (attempt < MAX_RETRIES) { - warn(`Retrying (attempt ${attempt + 1}/${MAX_RETRIES})...`); - await sleep(5); // Brief pause before 
retry - } else { - error(`Max retries (${MAX_RETRIES}) reached. Skipping discussion #${discussion.number}.`); - skippedDiscussions++; - break; - } + if (attempt < MAX_RETRIES) { + warn(`Retrying (attempt ${attempt + 1}/${MAX_RETRIES}) in 5 seconds...`); + await sleep(5); + } else { + error(`Max retries (${MAX_RETRIES}) reached. Skipping discussion #${discussion.number}.`); + skippedDiscussions++; + break; } - // If shouldRetry is true, loop will continue to next attempt } } From 84e340ca6469849d9730fc654acbd9035c6ed311 Mon Sep 17 00:00:00 2001 From: Josh Johanning Date: Thu, 2 Oct 2025 13:00:13 -0500 Subject: [PATCH 26/32] fix: adjust discussion processing delay to comply with GitHub's rate limit recommendations --- scripts/copy-discussions.js | 9 ++++++++- 1 file changed, 8 insertions(+), 1 deletion(-) diff --git a/scripts/copy-discussions.js b/scripts/copy-discussions.js index 2f92025..b092037 100644 --- a/scripts/copy-discussions.js +++ b/scripts/copy-discussions.js @@ -29,13 +29,14 @@ // GitHub limits content-generating requests to avoid abuse: // - No more than 80 content-generating requests per minute // - No more than 500 content-generating requests per hour +// - Try to stay under 1 discussion or comment created every 3 seconds // This script includes automatic retry logic and rate limit handling to stay within these limits. 
// See: https://docs.github.com/en/rest/using-the-rest-api/rate-limits-for-the-rest-api?apiVersion=2022-11-28#about-secondary-rate-limits // Configuration const INCLUDE_POLL_MERMAID_CHART = true; // Set to false to disable Mermaid pie chart for polls const RATE_LIMIT_SLEEP_SECONDS = 0.5; // Default sleep duration between API calls to avoid rate limiting -const DISCUSSION_PROCESSING_DELAY_SECONDS = 5; // Delay between processing discussions +const DISCUSSION_PROCESSING_DELAY_SECONDS = 3; // Delay between processing discussions (GitHub recommends 1 discussion per 3 seconds) const MAX_RETRIES = 3; // Maximum number of retries for failed operations (rate limits handled automatically by Octokit) const { Octokit } = require("octokit"); @@ -965,8 +966,14 @@ async function copyDiscussionComments(octokit, discussionId, comments, answerCom replyCreated, reply.reactionGroups || [] ); + + // Delay between replies to avoid rate limits (1 comment per 3 seconds) + await sleep(DISCUSSION_PROCESSING_DELAY_SECONDS); } } + + // Delay between comments to avoid rate limits (1 comment per 3 seconds) + await sleep(DISCUSSION_PROCESSING_DELAY_SECONDS); } else { warn(`Failed to copy comment by @${author}, skipping replies`); } From 6689cf30c5a034b0fddc4e5dffcf6eeeecb064a5 Mon Sep 17 00:00:00 2001 From: Josh Johanning Date: Thu, 2 Oct 2025 13:01:39 -0500 Subject: [PATCH 27/32] feat: track primary and secondary rate limit hits for improved monitoring --- scripts/copy-discussions.js | 6 ++++++ 1 file changed, 6 insertions(+) diff --git a/scripts/copy-discussions.js b/scripts/copy-discussions.js index b092037..5ccc82f 100644 --- a/scripts/copy-discussions.js +++ b/scripts/copy-discussions.js @@ -139,6 +139,7 @@ const TARGET_API_URL = process.env.TARGET_API_URL || 'https://api.github.com'; // by intercepting HTTP 403 responses and retry-after headers const throttleOptions = { onRateLimit: (retryAfter, options, octokit) => { + primaryRateLimitHits++; warn(`Primary rate limit exhausted for 
request ${options.method} ${options.url}`); if (options.request.retryCount <= 2) { warn(`Retrying after ${retryAfter} seconds (retry ${options.request.retryCount + 1}/3)`); @@ -148,6 +149,7 @@ const throttleOptions = { return false; }, onSecondaryRateLimit: (retryAfter, options, octokit) => { + secondaryRateLimitHits++; warn(`Secondary rate limit detected for request ${options.method} ${options.url}`); if (options.request.retryCount <= 2) { warn(`Retrying after ${retryAfter} seconds (retry ${options.request.retryCount + 1}/3)`); @@ -178,6 +180,8 @@ let createdDiscussions = 0; let skippedDiscussions = 0; let totalComments = 0; let copiedComments = 0; +let primaryRateLimitHits = 0; +let secondaryRateLimitHits = 0; // Helper functions function log(message) { @@ -1245,6 +1249,8 @@ async function main() { log(`Discussions skipped: ${skippedDiscussions}`); log(`Total comments found: ${totalComments}`); log(`Comments copied: ${copiedComments}`); + log(`Primary rate limits hit: ${primaryRateLimitHits}`); + log(`Secondary rate limits hit: ${secondaryRateLimitHits}`); if (missingCategories.length > 0) { warn("\nThe following categories were missing and need to be created manually:"); From 39d34ea852c22bf031c48e5076dca2b57f21a627 Mon Sep 17 00:00:00 2001 From: Josh Johanning Date: Thu, 2 Oct 2025 13:40:00 -0500 Subject: [PATCH 28/32] refactor(scripts)!: reorganize and rename copy-discussions to migrate-discussions - Move script to migrate-discussions/ subdirectory structure - Rename from copy-discussions.js to migrate-discussions.js - Update documentation to reflect migration terminology - Add package.json with octokit dependency - Enhance README with rate limiting and resume features - Update .gitignore to allow nested package.json files --- .gitignore | 1 + scripts/README.md | 64 +- scripts/migrate-discussions/README.md | 126 +++ .../migrate-discussions.js} | 0 scripts/migrate-discussions/package-lock.json | 726 ++++++++++++++++++ scripts/migrate-discussions/package.json | 
37 + 6 files changed, 894 insertions(+), 60 deletions(-) create mode 100644 scripts/migrate-discussions/README.md rename scripts/{copy-discussions.js => migrate-discussions/migrate-discussions.js} (100%) create mode 100644 scripts/migrate-discussions/package-lock.json create mode 100644 scripts/migrate-discussions/package.json diff --git a/.gitignore b/.gitignore index a465620..54f5823 100644 --- a/.gitignore +++ b/.gitignore @@ -3,6 +3,7 @@ **/.DS_Store *.pem *.json +!/scripts/*/package*.json node_modules* test*.js test*.sh diff --git a/scripts/README.md b/scripts/README.md index ba62c13..a0ed54b 100644 --- a/scripts/README.md +++ b/scripts/README.md @@ -36,66 +36,6 @@ Configuration values to change in the script: Migrate work items from Azure DevOps to GitHub issues - this just links out to a [separate repo](https://github.com/joshjohanning/ado_workitems_to_github_issues) -## copy-discussions.js - -Copy GitHub Discussions between repositories, including categories, labels, comments, and replies. This script can copy discussions across different GitHub instances and enterprises. 
- -The script is expecting: - -- environment variables `SOURCE_TOKEN` and `TARGET_TOKEN` with GitHub PATs that have `repo` and `read:discussion` scopes -- dependencies installed via `npm i octokit` - -Script usage: - -```bash -export SOURCE_TOKEN=ghp_abc -export TARGET_TOKEN=ghp_xyz -npm i octokit -node ./copy-discussions.js source-org source-repo target-org target-repo -``` - -Optional environment variables: - -- `SOURCE_API_URL` - API endpoint for source (defaults to `https://api.github.com`) -- `TARGET_API_URL` - API endpoint for target (defaults to `https://api.github.com`) - -Example with GitHub Enterprise Server: - -```bash -export SOURCE_API_URL=https://github.mycompany.com/api/v3 -export TARGET_API_URL=https://api.github.com -export SOURCE_TOKEN=ghp_abc -export TARGET_TOKEN=ghp_xyz -npm i octokit -node ./copy-discussions.js source-org source-repo target-org target-repo -``` - -Features: - -- Automatically creates missing discussion categories in the target repository -- Creates labels in the target repository if they don't exist -- Copies all comments and threaded replies with proper attribution -- Copies poll results as static snapshots (with table and optional Mermaid chart) -- Preserves reaction counts on discussions, comments, and replies -- Maintains locked status of discussions -- Indicates pinned discussions with a visual indicator -- Handles rate limiting with exponential backoff -- Provides colored console output for better visibility - -Configuration: - -- Set `INCLUDE_POLL_MERMAID_CHART = false` at the top of the script to disable Mermaid pie charts for polls - -Notes: - -- If a category doesn't exist in the target repository, discussions will be created in the "General" category -- The script preserves discussion metadata by adding attribution text to the body and comments -- Poll results are copied as static snapshots - voting is not available in copied discussions -- Reactions are copied as read-only summaries (users cannot add new reactions) 
-- Locked discussions will be locked in the target repository -- Pinned status is indicated in the discussion body (GitHub API doesn't allow pinning via GraphQL) -- Both source and target repositories must have GitHub Discussions enabled - ## delete-branch-protection-rules.ps1 Delete branch protection rules programmatically based on a pattern. @@ -151,6 +91,10 @@ My use case is to use this list to determine who needs to be added to a organiza 1. Run: `./new-users-to-add-to-project.sh ` 2. Don't delete the `` as it functions as your user database +## migrate-discussions + +See: [migrate-discussions](./migrate-discussions/README.md) + ## migrate-docker-containers-between-github-instances.sh Migrate Docker Containers in GitHub Packages (GitHub Container Registry) from one GitHub organization to another. diff --git a/scripts/migrate-discussions/README.md b/scripts/migrate-discussions/README.md new file mode 100644 index 0000000..6681f96 --- /dev/null +++ b/scripts/migrate-discussions/README.md @@ -0,0 +1,126 @@ +# migrate-discussions.js + +Migrate GitHub Discussions between repositories, including categories, labels, comments, and replies. This script can migrate discussions across different GitHub instances and enterprises with comprehensive rate limit handling and resume capabilities. 
+ +## Prerequisites + +- `SOURCE_TOKEN` environment variable with GitHub PAT that has `repo` scope and read access to source repository discussions +- `TARGET_TOKEN` environment variable with GitHub PAT that has `repo` scope and write access to target repository discussions +- Dependencies installed via `npm i octokit` +- Both source and target repositories must have GitHub Discussions enabled + +## Script usage + +Basic usage: + +```bash +export SOURCE_TOKEN=ghp_abc +export TARGET_TOKEN=ghp_xyz +npm i octokit +node ./migrate-discussions.js source-org source-repo target-org target-repo +``` + +Resume from a specific discussion number (useful if interrupted): + +```bash +node ./migrate-discussions.js source-org source-repo target-org target-repo --start-from 50 +``` + +## Optional environment variables + +- `SOURCE_API_URL` - API endpoint for source (defaults to `https://api.github.com`) +- `TARGET_API_URL` - API endpoint for target (defaults to `https://api.github.com`) + +Example with GitHub Enterprise Server: + +```bash +export SOURCE_API_URL=https://github.mycompany.com/api/v3 +export TARGET_API_URL=https://api.github.com +export SOURCE_TOKEN=ghp_abc +export TARGET_TOKEN=ghp_xyz +npm i octokit +node ./migrate-discussions.js source-org source-repo target-org target-repo +``` + +## Features + +### Content Migration + +- Automatically creates missing discussion categories in the target repository +- Creates labels in the target repository if they don't exist +- Copies all comments and threaded replies with proper attribution +- Copies poll results as static snapshots (with table and optional Mermaid chart) +- Preserves reaction counts on discussions, comments, and replies +- Maintains locked status of discussions +- Indicates pinned discussions with a visual indicator +- Marks answered discussions and preserves the accepted answer + +### Rate Limiting & Reliability + +- **Automatic rate limit handling** with Octokit's built-in throttling plugin +- **Intelligent 
retry logic** for both primary and secondary rate limits (up to 3 retries) +- **GitHub-recommended delays** - 3 seconds between discussions/comments to stay under secondary rate limits +- **Resume capability** - Use `--start-from ` to resume from a specific discussion if interrupted +- **Rate limit tracking** - Summary shows how many times primary and secondary rate limits were hit + +### User Experience + +- Colored console output with timestamps for better visibility +- Comprehensive summary statistics at completion +- Detailed progress logging for each discussion, comment, and reply + +## Configuration options + +Edit these constants at the top of the script: + +- `INCLUDE_POLL_MERMAID_CHART` - Set to `false` to disable Mermaid pie charts for polls (default: `true`) +- `RATE_LIMIT_SLEEP_SECONDS` - Sleep duration between API calls (default: `0.5` seconds) +- `DISCUSSION_PROCESSING_DELAY_SECONDS` - Delay between processing discussions/comments (default: `3` seconds) +- `MAX_RETRIES` - Maximum retries for non-rate-limit errors (default: `3`) + +## Summary output + +After completion, the script displays comprehensive statistics: + +- Total discussions found and created +- Discussions skipped (when using `--start-from`) +- Total comments found and copied +- **Primary rate limits hit** - How many times the script hit GitHub's primary rate limit +- **Secondary rate limits hit** - How many times the script hit GitHub's secondary rate limit +- List of missing categories that need manual creation + +## Notes + +### Category handling + +- If a category doesn't exist in the target repository, discussions will be created in the "General" category as a fallback +- Missing categories are tracked and reported at the end of the script + +### Content preservation + +- The script preserves discussion metadata by adding attribution text to the body and comments +- Poll results are copied as static snapshots - voting is not available in copied discussions +- Reactions are copied as 
read-only summaries (users cannot add new reactions to the migrated content) +- Attachments (images and files) will not copy over and require manual handling + +### Discussion states + +- Locked discussions will be locked in the target repository +- Closed discussions will be closed in the target repository +- Answered discussions will have the same comment marked as the answer +- Pinned status is indicated in the discussion body (GitHub API doesn't allow pinning via GraphQL) + +### Rate limiting + +- GitHub limits content-generating requests to avoid abuse + - No more than 80 content-generating requests per minute + - No more than 500 content-generating requests per hour +- The script stays under 1 discussion or comment created every 3 seconds (GitHub's recommendation) +- Automatic retry with wait times from GitHub's `retry-after` headers +- If rate limits are consistently hit, the script will retry up to 3 times before failing + +### Resume capability + +- Use `--start-from ` to skip discussions before a specific discussion number +- Useful for resuming after interruptions or failures +- Discussion numbers are the user-friendly numbers (e.g., #50), not GraphQL IDs diff --git a/scripts/copy-discussions.js b/scripts/migrate-discussions/migrate-discussions.js similarity index 100% rename from scripts/copy-discussions.js rename to scripts/migrate-discussions/migrate-discussions.js diff --git a/scripts/migrate-discussions/package-lock.json b/scripts/migrate-discussions/package-lock.json new file mode 100644 index 0000000..9a9381b --- /dev/null +++ b/scripts/migrate-discussions/package-lock.json @@ -0,0 +1,726 @@ +{ + "name": "migrate-discussions", + "version": "1.0.0", + "lockfileVersion": 3, + "requires": true, + "packages": { + "": { + "name": "migrate-discussions", + "version": "1.0.0", + "license": "MIT", + "dependencies": { + "octokit": "^3.1.2" + }, + "engines": { + "node": ">=14.0.0" + } + }, + "node_modules/@octokit/app": { + "version": "14.1.0", + "resolved": 
"https://registry.npmjs.org/@octokit/app/-/app-14.1.0.tgz", + "integrity": "sha512-g3uEsGOQCBl1+W1rgfwoRFUIR6PtvB2T1E4RpygeUU5LrLvlOqcxrt5lfykIeRpUPpupreGJUYl70fqMDXdTpw==", + "license": "MIT", + "dependencies": { + "@octokit/auth-app": "^6.0.0", + "@octokit/auth-unauthenticated": "^5.0.0", + "@octokit/core": "^5.0.0", + "@octokit/oauth-app": "^6.0.0", + "@octokit/plugin-paginate-rest": "^9.0.0", + "@octokit/types": "^12.0.0", + "@octokit/webhooks": "^12.0.4" + }, + "engines": { + "node": ">= 18" + } + }, + "node_modules/@octokit/app/node_modules/@octokit/openapi-types": { + "version": "20.0.0", + "resolved": "https://registry.npmjs.org/@octokit/openapi-types/-/openapi-types-20.0.0.tgz", + "integrity": "sha512-EtqRBEjp1dL/15V7WiX5LJMIxxkdiGJnabzYx5Apx4FkQIFgAfKumXeYAqqJCj1s+BMX4cPFIFC4OLCR6stlnA==", + "license": "MIT" + }, + "node_modules/@octokit/app/node_modules/@octokit/plugin-paginate-rest": { + "version": "9.2.2", + "resolved": "https://registry.npmjs.org/@octokit/plugin-paginate-rest/-/plugin-paginate-rest-9.2.2.tgz", + "integrity": "sha512-u3KYkGF7GcZnSD/3UP0S7K5XUFT2FkOQdcfXZGZQPGv3lm4F2Xbf71lvjldr8c1H3nNbF+33cLEkWYbokGWqiQ==", + "license": "MIT", + "dependencies": { + "@octokit/types": "^12.6.0" + }, + "engines": { + "node": ">= 18" + }, + "peerDependencies": { + "@octokit/core": "5" + } + }, + "node_modules/@octokit/app/node_modules/@octokit/types": { + "version": "12.6.0", + "resolved": "https://registry.npmjs.org/@octokit/types/-/types-12.6.0.tgz", + "integrity": "sha512-1rhSOfRa6H9w4YwK0yrf5faDaDTb+yLyBUKOCV4xtCDB5VmIPqd/v9yr9o6SAzOAlRxMiRiCic6JVM1/kunVkw==", + "license": "MIT", + "dependencies": { + "@octokit/openapi-types": "^20.0.0" + } + }, + "node_modules/@octokit/auth-app": { + "version": "6.1.4", + "resolved": "https://registry.npmjs.org/@octokit/auth-app/-/auth-app-6.1.4.tgz", + "integrity": "sha512-QkXkSOHZK4dA5oUqY5Dk3S+5pN2s1igPjEASNQV8/vgJgW034fQWR16u7VsNOK/EljA00eyjYF5mWNxWKWhHRQ==", + "license": "MIT", + "dependencies": { + 
"@octokit/auth-oauth-app": "^7.1.0", + "@octokit/auth-oauth-user": "^4.1.0", + "@octokit/request": "^8.3.1", + "@octokit/request-error": "^5.1.0", + "@octokit/types": "^13.1.0", + "deprecation": "^2.3.1", + "lru-cache": "npm:@wolfy1339/lru-cache@^11.0.2-patch.1", + "universal-github-app-jwt": "^1.1.2", + "universal-user-agent": "^6.0.0" + }, + "engines": { + "node": ">= 18" + } + }, + "node_modules/@octokit/auth-oauth-app": { + "version": "7.1.0", + "resolved": "https://registry.npmjs.org/@octokit/auth-oauth-app/-/auth-oauth-app-7.1.0.tgz", + "integrity": "sha512-w+SyJN/b0l/HEb4EOPRudo7uUOSW51jcK1jwLa+4r7PA8FPFpoxEnHBHMITqCsc/3Vo2qqFjgQfz/xUUvsSQnA==", + "license": "MIT", + "dependencies": { + "@octokit/auth-oauth-device": "^6.1.0", + "@octokit/auth-oauth-user": "^4.1.0", + "@octokit/request": "^8.3.1", + "@octokit/types": "^13.0.0", + "@types/btoa-lite": "^1.0.0", + "btoa-lite": "^1.0.0", + "universal-user-agent": "^6.0.0" + }, + "engines": { + "node": ">= 18" + } + }, + "node_modules/@octokit/auth-oauth-device": { + "version": "6.1.0", + "resolved": "https://registry.npmjs.org/@octokit/auth-oauth-device/-/auth-oauth-device-6.1.0.tgz", + "integrity": "sha512-FNQ7cb8kASufd6Ej4gnJ3f1QB5vJitkoV1O0/g6e6lUsQ7+VsSNRHRmFScN2tV4IgKA12frrr/cegUs0t+0/Lw==", + "license": "MIT", + "dependencies": { + "@octokit/oauth-methods": "^4.1.0", + "@octokit/request": "^8.3.1", + "@octokit/types": "^13.0.0", + "universal-user-agent": "^6.0.0" + }, + "engines": { + "node": ">= 18" + } + }, + "node_modules/@octokit/auth-oauth-user": { + "version": "4.1.0", + "resolved": "https://registry.npmjs.org/@octokit/auth-oauth-user/-/auth-oauth-user-4.1.0.tgz", + "integrity": "sha512-FrEp8mtFuS/BrJyjpur+4GARteUCrPeR/tZJzD8YourzoVhRics7u7we/aDcKv+yywRNwNi/P4fRi631rG/OyQ==", + "license": "MIT", + "dependencies": { + "@octokit/auth-oauth-device": "^6.1.0", + "@octokit/oauth-methods": "^4.1.0", + "@octokit/request": "^8.3.1", + "@octokit/types": "^13.0.0", + "btoa-lite": "^1.0.0", + 
"universal-user-agent": "^6.0.0" + }, + "engines": { + "node": ">= 18" + } + }, + "node_modules/@octokit/auth-token": { + "version": "4.0.0", + "resolved": "https://registry.npmjs.org/@octokit/auth-token/-/auth-token-4.0.0.tgz", + "integrity": "sha512-tY/msAuJo6ARbK6SPIxZrPBms3xPbfwBrulZe0Wtr/DIY9lje2HeV1uoebShn6mx7SjCHif6EjMvoREj+gZ+SA==", + "license": "MIT", + "engines": { + "node": ">= 18" + } + }, + "node_modules/@octokit/auth-unauthenticated": { + "version": "5.0.1", + "resolved": "https://registry.npmjs.org/@octokit/auth-unauthenticated/-/auth-unauthenticated-5.0.1.tgz", + "integrity": "sha512-oxeWzmBFxWd+XolxKTc4zr+h3mt+yofn4r7OfoIkR/Cj/o70eEGmPsFbueyJE2iBAGpjgTnEOKM3pnuEGVmiqg==", + "license": "MIT", + "dependencies": { + "@octokit/request-error": "^5.0.0", + "@octokit/types": "^12.0.0" + }, + "engines": { + "node": ">= 18" + } + }, + "node_modules/@octokit/auth-unauthenticated/node_modules/@octokit/openapi-types": { + "version": "20.0.0", + "resolved": "https://registry.npmjs.org/@octokit/openapi-types/-/openapi-types-20.0.0.tgz", + "integrity": "sha512-EtqRBEjp1dL/15V7WiX5LJMIxxkdiGJnabzYx5Apx4FkQIFgAfKumXeYAqqJCj1s+BMX4cPFIFC4OLCR6stlnA==", + "license": "MIT" + }, + "node_modules/@octokit/auth-unauthenticated/node_modules/@octokit/types": { + "version": "12.6.0", + "resolved": "https://registry.npmjs.org/@octokit/types/-/types-12.6.0.tgz", + "integrity": "sha512-1rhSOfRa6H9w4YwK0yrf5faDaDTb+yLyBUKOCV4xtCDB5VmIPqd/v9yr9o6SAzOAlRxMiRiCic6JVM1/kunVkw==", + "license": "MIT", + "dependencies": { + "@octokit/openapi-types": "^20.0.0" + } + }, + "node_modules/@octokit/core": { + "version": "5.2.2", + "resolved": "https://registry.npmjs.org/@octokit/core/-/core-5.2.2.tgz", + "integrity": "sha512-/g2d4sW9nUDJOMz3mabVQvOGhVa4e/BN/Um7yca9Bb2XTzPPnfTWHWQg+IsEYO7M3Vx+EXvaM/I2pJWIMun1bg==", + "license": "MIT", + "dependencies": { + "@octokit/auth-token": "^4.0.0", + "@octokit/graphql": "^7.1.0", + "@octokit/request": "^8.4.1", + "@octokit/request-error": "^5.1.1", + 
"@octokit/types": "^13.0.0", + "before-after-hook": "^2.2.0", + "universal-user-agent": "^6.0.0" + }, + "engines": { + "node": ">= 18" + } + }, + "node_modules/@octokit/endpoint": { + "version": "9.0.6", + "resolved": "https://registry.npmjs.org/@octokit/endpoint/-/endpoint-9.0.6.tgz", + "integrity": "sha512-H1fNTMA57HbkFESSt3Y9+FBICv+0jFceJFPWDePYlR/iMGrwM5ph+Dd4XRQs+8X+PUFURLQgX9ChPfhJ/1uNQw==", + "license": "MIT", + "dependencies": { + "@octokit/types": "^13.1.0", + "universal-user-agent": "^6.0.0" + }, + "engines": { + "node": ">= 18" + } + }, + "node_modules/@octokit/graphql": { + "version": "7.1.1", + "resolved": "https://registry.npmjs.org/@octokit/graphql/-/graphql-7.1.1.tgz", + "integrity": "sha512-3mkDltSfcDUoa176nlGoA32RGjeWjl3K7F/BwHwRMJUW/IteSa4bnSV8p2ThNkcIcZU2umkZWxwETSSCJf2Q7g==", + "license": "MIT", + "dependencies": { + "@octokit/request": "^8.4.1", + "@octokit/types": "^13.0.0", + "universal-user-agent": "^6.0.0" + }, + "engines": { + "node": ">= 18" + } + }, + "node_modules/@octokit/oauth-app": { + "version": "6.1.0", + "resolved": "https://registry.npmjs.org/@octokit/oauth-app/-/oauth-app-6.1.0.tgz", + "integrity": "sha512-nIn/8eUJ/BKUVzxUXd5vpzl1rwaVxMyYbQkNZjHrF7Vk/yu98/YDF/N2KeWO7uZ0g3b5EyiFXFkZI8rJ+DH1/g==", + "license": "MIT", + "dependencies": { + "@octokit/auth-oauth-app": "^7.0.0", + "@octokit/auth-oauth-user": "^4.0.0", + "@octokit/auth-unauthenticated": "^5.0.0", + "@octokit/core": "^5.0.0", + "@octokit/oauth-authorization-url": "^6.0.2", + "@octokit/oauth-methods": "^4.0.0", + "@types/aws-lambda": "^8.10.83", + "universal-user-agent": "^6.0.0" + }, + "engines": { + "node": ">= 18" + } + }, + "node_modules/@octokit/oauth-authorization-url": { + "version": "6.0.2", + "resolved": "https://registry.npmjs.org/@octokit/oauth-authorization-url/-/oauth-authorization-url-6.0.2.tgz", + "integrity": "sha512-CdoJukjXXxqLNK4y/VOiVzQVjibqoj/xHgInekviUJV73y/BSIcwvJ/4aNHPBPKcPWFnd4/lO9uqRV65jXhcLA==", + "license": "MIT", + "engines": { + "node": ">= 
18" + } + }, + "node_modules/@octokit/oauth-methods": { + "version": "4.1.0", + "resolved": "https://registry.npmjs.org/@octokit/oauth-methods/-/oauth-methods-4.1.0.tgz", + "integrity": "sha512-4tuKnCRecJ6CG6gr0XcEXdZtkTDbfbnD5oaHBmLERTjTMZNi2CbfEHZxPU41xXLDG4DfKf+sonu00zvKI9NSbw==", + "license": "MIT", + "dependencies": { + "@octokit/oauth-authorization-url": "^6.0.2", + "@octokit/request": "^8.3.1", + "@octokit/request-error": "^5.1.0", + "@octokit/types": "^13.0.0", + "btoa-lite": "^1.0.0" + }, + "engines": { + "node": ">= 18" + } + }, + "node_modules/@octokit/openapi-types": { + "version": "24.2.0", + "resolved": "https://registry.npmjs.org/@octokit/openapi-types/-/openapi-types-24.2.0.tgz", + "integrity": "sha512-9sIH3nSUttelJSXUrmGzl7QUBFul0/mB8HRYl3fOlgHbIWG+WnYDXU3v/2zMtAvuzZ/ed00Ei6on975FhBfzrg==", + "license": "MIT" + }, + "node_modules/@octokit/plugin-paginate-graphql": { + "version": "4.0.1", + "resolved": "https://registry.npmjs.org/@octokit/plugin-paginate-graphql/-/plugin-paginate-graphql-4.0.1.tgz", + "integrity": "sha512-R8ZQNmrIKKpHWC6V2gum4x9LG2qF1RxRjo27gjQcG3j+vf2tLsEfE7I/wRWEPzYMaenr1M+qDAtNcwZve1ce1A==", + "license": "MIT", + "engines": { + "node": ">= 18" + }, + "peerDependencies": { + "@octokit/core": ">=5" + } + }, + "node_modules/@octokit/plugin-paginate-rest": { + "version": "11.4.4-cjs.2", + "resolved": "https://registry.npmjs.org/@octokit/plugin-paginate-rest/-/plugin-paginate-rest-11.4.4-cjs.2.tgz", + "integrity": "sha512-2dK6z8fhs8lla5PaOTgqfCGBxgAv/le+EhPs27KklPhm1bKObpu6lXzwfUEQ16ajXzqNrKMujsFyo9K2eaoISw==", + "license": "MIT", + "dependencies": { + "@octokit/types": "^13.7.0" + }, + "engines": { + "node": ">= 18" + }, + "peerDependencies": { + "@octokit/core": "5" + } + }, + "node_modules/@octokit/plugin-rest-endpoint-methods": { + "version": "13.3.2-cjs.1", + "resolved": "https://registry.npmjs.org/@octokit/plugin-rest-endpoint-methods/-/plugin-rest-endpoint-methods-13.3.2-cjs.1.tgz", + "integrity": 
"sha512-VUjIjOOvF2oELQmiFpWA1aOPdawpyaCUqcEBc/UOUnj3Xp6DJGrJ1+bjUIIDzdHjnFNO6q57ODMfdEZnoBkCwQ==", + "license": "MIT", + "dependencies": { + "@octokit/types": "^13.8.0" + }, + "engines": { + "node": ">= 18" + }, + "peerDependencies": { + "@octokit/core": "^5" + } + }, + "node_modules/@octokit/plugin-retry": { + "version": "6.1.0", + "resolved": "https://registry.npmjs.org/@octokit/plugin-retry/-/plugin-retry-6.1.0.tgz", + "integrity": "sha512-WrO3bvq4E1Xh1r2mT9w6SDFg01gFmP81nIG77+p/MqW1JeXXgL++6umim3t6x0Zj5pZm3rXAN+0HEjmmdhIRig==", + "license": "MIT", + "dependencies": { + "@octokit/request-error": "^5.0.0", + "@octokit/types": "^13.0.0", + "bottleneck": "^2.15.3" + }, + "engines": { + "node": ">= 18" + }, + "peerDependencies": { + "@octokit/core": "5" + } + }, + "node_modules/@octokit/plugin-throttling": { + "version": "8.2.0", + "resolved": "https://registry.npmjs.org/@octokit/plugin-throttling/-/plugin-throttling-8.2.0.tgz", + "integrity": "sha512-nOpWtLayKFpgqmgD0y3GqXafMFuKcA4tRPZIfu7BArd2lEZeb1988nhWhwx4aZWmjDmUfdgVf7W+Tt4AmvRmMQ==", + "license": "MIT", + "dependencies": { + "@octokit/types": "^12.2.0", + "bottleneck": "^2.15.3" + }, + "engines": { + "node": ">= 18" + }, + "peerDependencies": { + "@octokit/core": "^5.0.0" + } + }, + "node_modules/@octokit/plugin-throttling/node_modules/@octokit/openapi-types": { + "version": "20.0.0", + "resolved": "https://registry.npmjs.org/@octokit/openapi-types/-/openapi-types-20.0.0.tgz", + "integrity": "sha512-EtqRBEjp1dL/15V7WiX5LJMIxxkdiGJnabzYx5Apx4FkQIFgAfKumXeYAqqJCj1s+BMX4cPFIFC4OLCR6stlnA==", + "license": "MIT" + }, + "node_modules/@octokit/plugin-throttling/node_modules/@octokit/types": { + "version": "12.6.0", + "resolved": "https://registry.npmjs.org/@octokit/types/-/types-12.6.0.tgz", + "integrity": "sha512-1rhSOfRa6H9w4YwK0yrf5faDaDTb+yLyBUKOCV4xtCDB5VmIPqd/v9yr9o6SAzOAlRxMiRiCic6JVM1/kunVkw==", + "license": "MIT", + "dependencies": { + "@octokit/openapi-types": "^20.0.0" + } + }, + 
"node_modules/@octokit/request": { + "version": "8.4.1", + "resolved": "https://registry.npmjs.org/@octokit/request/-/request-8.4.1.tgz", + "integrity": "sha512-qnB2+SY3hkCmBxZsR/MPCybNmbJe4KAlfWErXq+rBKkQJlbjdJeS85VI9r8UqeLYLvnAenU8Q1okM/0MBsAGXw==", + "license": "MIT", + "dependencies": { + "@octokit/endpoint": "^9.0.6", + "@octokit/request-error": "^5.1.1", + "@octokit/types": "^13.1.0", + "universal-user-agent": "^6.0.0" + }, + "engines": { + "node": ">= 18" + } + }, + "node_modules/@octokit/request-error": { + "version": "5.1.1", + "resolved": "https://registry.npmjs.org/@octokit/request-error/-/request-error-5.1.1.tgz", + "integrity": "sha512-v9iyEQJH6ZntoENr9/yXxjuezh4My67CBSu9r6Ve/05Iu5gNgnisNWOsoJHTP6k0Rr0+HQIpnH+kyammu90q/g==", + "license": "MIT", + "dependencies": { + "@octokit/types": "^13.1.0", + "deprecation": "^2.0.0", + "once": "^1.4.0" + }, + "engines": { + "node": ">= 18" + } + }, + "node_modules/@octokit/types": { + "version": "13.10.0", + "resolved": "https://registry.npmjs.org/@octokit/types/-/types-13.10.0.tgz", + "integrity": "sha512-ifLaO34EbbPj0Xgro4G5lP5asESjwHracYJvVaPIyXMuiuXLlhic3S47cBdTb+jfODkTE5YtGCLt3Ay3+J97sA==", + "license": "MIT", + "dependencies": { + "@octokit/openapi-types": "^24.2.0" + } + }, + "node_modules/@octokit/webhooks": { + "version": "12.3.2", + "resolved": "https://registry.npmjs.org/@octokit/webhooks/-/webhooks-12.3.2.tgz", + "integrity": "sha512-exj1MzVXoP7xnAcAB3jZ97pTvVPkQF9y6GA/dvYC47HV7vLv+24XRS6b/v/XnyikpEuvMhugEXdGtAlU086WkQ==", + "license": "MIT", + "dependencies": { + "@octokit/request-error": "^5.0.0", + "@octokit/webhooks-methods": "^4.1.0", + "@octokit/webhooks-types": "7.6.1", + "aggregate-error": "^3.1.0" + }, + "engines": { + "node": ">= 18" + } + }, + "node_modules/@octokit/webhooks-methods": { + "version": "4.1.0", + "resolved": "https://registry.npmjs.org/@octokit/webhooks-methods/-/webhooks-methods-4.1.0.tgz", + "integrity": 
"sha512-zoQyKw8h9STNPqtm28UGOYFE7O6D4Il8VJwhAtMHFt2C4L0VQT1qGKLeefUOqHNs1mNRYSadVv7x0z8U2yyeWQ==", + "license": "MIT", + "engines": { + "node": ">= 18" + } + }, + "node_modules/@octokit/webhooks-types": { + "version": "7.6.1", + "resolved": "https://registry.npmjs.org/@octokit/webhooks-types/-/webhooks-types-7.6.1.tgz", + "integrity": "sha512-S8u2cJzklBC0FgTwWVLaM8tMrDuDMVE4xiTK4EYXM9GntyvrdbSoxqDQa+Fh57CCNApyIpyeqPhhFEmHPfrXgw==", + "license": "MIT" + }, + "node_modules/@types/aws-lambda": { + "version": "8.10.153", + "resolved": "https://registry.npmjs.org/@types/aws-lambda/-/aws-lambda-8.10.153.tgz", + "integrity": "sha512-j5zuETAQtPKuU8ZeqtcLdqLxQeNffX1Dd1Sr3tP56rYZD21Ph49iIqWbiHHqwLXugsMPSsgX/bAZI29Patlbbw==", + "license": "MIT" + }, + "node_modules/@types/btoa-lite": { + "version": "1.0.2", + "resolved": "https://registry.npmjs.org/@types/btoa-lite/-/btoa-lite-1.0.2.tgz", + "integrity": "sha512-ZYbcE2x7yrvNFJiU7xJGrpF/ihpkM7zKgw8bha3LNJSesvTtUNxbpzaT7WXBIryf6jovisrxTBvymxMeLLj1Mg==", + "license": "MIT" + }, + "node_modules/@types/jsonwebtoken": { + "version": "9.0.10", + "resolved": "https://registry.npmjs.org/@types/jsonwebtoken/-/jsonwebtoken-9.0.10.tgz", + "integrity": "sha512-asx5hIG9Qmf/1oStypjanR7iKTv0gXQ1Ov/jfrX6kS/EO0OFni8orbmGCn0672NHR3kXHwpAwR+B368ZGN/2rA==", + "license": "MIT", + "dependencies": { + "@types/ms": "*", + "@types/node": "*" + } + }, + "node_modules/@types/ms": { + "version": "2.1.0", + "resolved": "https://registry.npmjs.org/@types/ms/-/ms-2.1.0.tgz", + "integrity": "sha512-GsCCIZDE/p3i96vtEqx+7dBUGXrc7zeSK3wwPHIaRThS+9OhWIXRqzs4d6k1SVU8g91DrNRWxWUGhp5KXQb2VA==", + "license": "MIT" + }, + "node_modules/@types/node": { + "version": "24.6.2", + "resolved": "https://registry.npmjs.org/@types/node/-/node-24.6.2.tgz", + "integrity": "sha512-d2L25Y4j+W3ZlNAeMKcy7yDsK425ibcAOO2t7aPTz6gNMH0z2GThtwENCDc0d/Pw9wgyRqE5Px1wkV7naz8ang==", + "license": "MIT", + "dependencies": { + "undici-types": "~7.13.0" + } + }, + "node_modules/aggregate-error": 
{ + "version": "3.1.0", + "resolved": "https://registry.npmjs.org/aggregate-error/-/aggregate-error-3.1.0.tgz", + "integrity": "sha512-4I7Td01quW/RpocfNayFdFVk1qSuoh0E7JrbRJ16nH01HhKFQ88INq9Sd+nd72zqRySlr9BmDA8xlEJ6vJMrYA==", + "license": "MIT", + "dependencies": { + "clean-stack": "^2.0.0", + "indent-string": "^4.0.0" + }, + "engines": { + "node": ">=8" + } + }, + "node_modules/before-after-hook": { + "version": "2.2.3", + "resolved": "https://registry.npmjs.org/before-after-hook/-/before-after-hook-2.2.3.tgz", + "integrity": "sha512-NzUnlZexiaH/46WDhANlyR2bXRopNg4F/zuSA3OpZnllCUgRaOF2znDioDWrmbNVsuZk6l9pMquQB38cfBZwkQ==", + "license": "Apache-2.0" + }, + "node_modules/bottleneck": { + "version": "2.19.5", + "resolved": "https://registry.npmjs.org/bottleneck/-/bottleneck-2.19.5.tgz", + "integrity": "sha512-VHiNCbI1lKdl44tGrhNfU3lup0Tj/ZBMJB5/2ZbNXRCPuRCO7ed2mgcK4r17y+KB2EfuYuRaVlwNbAeaWGSpbw==", + "license": "MIT" + }, + "node_modules/btoa-lite": { + "version": "1.0.0", + "resolved": "https://registry.npmjs.org/btoa-lite/-/btoa-lite-1.0.0.tgz", + "integrity": "sha512-gvW7InbIyF8AicrqWoptdW08pUxuhq8BEgowNajy9RhiE86fmGAGl+bLKo6oB8QP0CkqHLowfN0oJdKC/J6LbA==", + "license": "MIT" + }, + "node_modules/buffer-equal-constant-time": { + "version": "1.0.1", + "resolved": "https://registry.npmjs.org/buffer-equal-constant-time/-/buffer-equal-constant-time-1.0.1.tgz", + "integrity": "sha512-zRpUiDwd/xk6ADqPMATG8vc9VPrkck7T07OIx0gnjmJAnHnTVXNQG3vfvWNuiZIkwu9KrKdA1iJKfsfTVxE6NA==", + "license": "BSD-3-Clause" + }, + "node_modules/clean-stack": { + "version": "2.2.0", + "resolved": "https://registry.npmjs.org/clean-stack/-/clean-stack-2.2.0.tgz", + "integrity": "sha512-4diC9HaTE+KRAMWhDhrGOECgWZxoevMc5TlkObMqNSsVU62PYzXZ/SMTjzyGAFF1YusgxGcSWTEXBhp0CPwQ1A==", + "license": "MIT", + "engines": { + "node": ">=6" + } + }, + "node_modules/deprecation": { + "version": "2.3.1", + "resolved": "https://registry.npmjs.org/deprecation/-/deprecation-2.3.1.tgz", + "integrity": 
"sha512-xmHIy4F3scKVwMsQ4WnVaS8bHOx0DmVwRywosKhaILI0ywMDWPtBSku2HNxRvF7jtwDRsoEwYQSfbxj8b7RlJQ==", + "license": "ISC" + }, + "node_modules/ecdsa-sig-formatter": { + "version": "1.0.11", + "resolved": "https://registry.npmjs.org/ecdsa-sig-formatter/-/ecdsa-sig-formatter-1.0.11.tgz", + "integrity": "sha512-nagl3RYrbNv6kQkeJIpt6NJZy8twLB/2vtz6yN9Z4vRKHN4/QZJIEbqohALSgwKdnksuY3k5Addp5lg8sVoVcQ==", + "license": "Apache-2.0", + "dependencies": { + "safe-buffer": "^5.0.1" + } + }, + "node_modules/indent-string": { + "version": "4.0.0", + "resolved": "https://registry.npmjs.org/indent-string/-/indent-string-4.0.0.tgz", + "integrity": "sha512-EdDDZu4A2OyIK7Lr/2zG+w5jmbuk1DVBnEwREQvBzspBJkCEbRa8GxU1lghYcaGJCnRWibjDXlq779X1/y5xwg==", + "license": "MIT", + "engines": { + "node": ">=8" + } + }, + "node_modules/jsonwebtoken": { + "version": "9.0.2", + "resolved": "https://registry.npmjs.org/jsonwebtoken/-/jsonwebtoken-9.0.2.tgz", + "integrity": "sha512-PRp66vJ865SSqOlgqS8hujT5U4AOgMfhrwYIuIhfKaoSCZcirrmASQr8CX7cUg+RMih+hgznrjp99o+W4pJLHQ==", + "license": "MIT", + "dependencies": { + "jws": "^3.2.2", + "lodash.includes": "^4.3.0", + "lodash.isboolean": "^3.0.3", + "lodash.isinteger": "^4.0.4", + "lodash.isnumber": "^3.0.3", + "lodash.isplainobject": "^4.0.6", + "lodash.isstring": "^4.0.1", + "lodash.once": "^4.0.0", + "ms": "^2.1.1", + "semver": "^7.5.4" + }, + "engines": { + "node": ">=12", + "npm": ">=6" + } + }, + "node_modules/jwa": { + "version": "1.4.2", + "resolved": "https://registry.npmjs.org/jwa/-/jwa-1.4.2.tgz", + "integrity": "sha512-eeH5JO+21J78qMvTIDdBXidBd6nG2kZjg5Ohz/1fpa28Z4CcsWUzJ1ZZyFq/3z3N17aZy+ZuBoHljASbL1WfOw==", + "license": "MIT", + "dependencies": { + "buffer-equal-constant-time": "^1.0.1", + "ecdsa-sig-formatter": "1.0.11", + "safe-buffer": "^5.0.1" + } + }, + "node_modules/jws": { + "version": "3.2.2", + "resolved": "https://registry.npmjs.org/jws/-/jws-3.2.2.tgz", + "integrity": 
"sha512-YHlZCB6lMTllWDtSPHz/ZXTsi8S00usEV6v1tjq8tOUZzw7DpSDWVXjXDre6ed1w/pd495ODpHZYSdkRTsa0HA==", + "license": "MIT", + "dependencies": { + "jwa": "^1.4.1", + "safe-buffer": "^5.0.1" + } + }, + "node_modules/lodash.includes": { + "version": "4.3.0", + "resolved": "https://registry.npmjs.org/lodash.includes/-/lodash.includes-4.3.0.tgz", + "integrity": "sha512-W3Bx6mdkRTGtlJISOvVD/lbqjTlPPUDTMnlXZFnVwi9NKJ6tiAk6LVdlhZMm17VZisqhKcgzpO5Wz91PCt5b0w==", + "license": "MIT" + }, + "node_modules/lodash.isboolean": { + "version": "3.0.3", + "resolved": "https://registry.npmjs.org/lodash.isboolean/-/lodash.isboolean-3.0.3.tgz", + "integrity": "sha512-Bz5mupy2SVbPHURB98VAcw+aHh4vRV5IPNhILUCsOzRmsTmSQ17jIuqopAentWoehktxGd9e/hbIXq980/1QJg==", + "license": "MIT" + }, + "node_modules/lodash.isinteger": { + "version": "4.0.4", + "resolved": "https://registry.npmjs.org/lodash.isinteger/-/lodash.isinteger-4.0.4.tgz", + "integrity": "sha512-DBwtEWN2caHQ9/imiNeEA5ys1JoRtRfY3d7V9wkqtbycnAmTvRRmbHKDV4a0EYc678/dia0jrte4tjYwVBaZUA==", + "license": "MIT" + }, + "node_modules/lodash.isnumber": { + "version": "3.0.3", + "resolved": "https://registry.npmjs.org/lodash.isnumber/-/lodash.isnumber-3.0.3.tgz", + "integrity": "sha512-QYqzpfwO3/CWf3XP+Z+tkQsfaLL/EnUlXWVkIk5FUPc4sBdTehEqZONuyRt2P67PXAk+NXmTBcc97zw9t1FQrw==", + "license": "MIT" + }, + "node_modules/lodash.isplainobject": { + "version": "4.0.6", + "resolved": "https://registry.npmjs.org/lodash.isplainobject/-/lodash.isplainobject-4.0.6.tgz", + "integrity": "sha512-oSXzaWypCMHkPC3NvBEaPHf0KsA5mvPrOPgQWDsbg8n7orZ290M0BmC/jgRZ4vcJ6DTAhjrsSYgdsW/F+MFOBA==", + "license": "MIT" + }, + "node_modules/lodash.isstring": { + "version": "4.0.1", + "resolved": "https://registry.npmjs.org/lodash.isstring/-/lodash.isstring-4.0.1.tgz", + "integrity": "sha512-0wJxfxH1wgO3GrbuP+dTTk7op+6L41QCXbGINEmD+ny/G/eCqGzxyCsh7159S+mgDDcoarnBw6PC1PS5+wUGgw==", + "license": "MIT" + }, + "node_modules/lodash.once": { + "version": "4.1.1", + "resolved": 
"https://registry.npmjs.org/lodash.once/-/lodash.once-4.1.1.tgz", + "integrity": "sha512-Sb487aTOCr9drQVL8pIxOzVhafOjZN9UU54hiN8PU3uAiSV7lx1yYNpbNmex2PK6dSJoNTSJUUswT651yww3Mg==", + "license": "MIT" + }, + "node_modules/lru-cache": { + "name": "@wolfy1339/lru-cache", + "version": "11.0.2-patch.1", + "resolved": "https://registry.npmjs.org/@wolfy1339/lru-cache/-/lru-cache-11.0.2-patch.1.tgz", + "integrity": "sha512-BgYZfL2ADCXKOw2wJtkM3slhHotawWkgIRRxq4wEybnZQPjvAp71SPX35xepMykTw8gXlzWcWPTY31hlbnRsDA==", + "license": "ISC", + "engines": { + "node": "18 >=18.20 || 20 || >=22" + } + }, + "node_modules/ms": { + "version": "2.1.3", + "resolved": "https://registry.npmjs.org/ms/-/ms-2.1.3.tgz", + "integrity": "sha512-6FlzubTLZG3J2a/NVCAleEhjzq5oxgHyaCU9yYXvcLsvoVaHJq/s5xXI6/XXP6tz7R9xAOtHnSO/tXtF3WRTlA==", + "license": "MIT" + }, + "node_modules/octokit": { + "version": "3.2.2", + "resolved": "https://registry.npmjs.org/octokit/-/octokit-3.2.2.tgz", + "integrity": "sha512-7Abo3nADdja8l/aglU6Y3lpnHSfv0tw7gFPiqzry/yCU+2gTAX7R1roJ8hJrxIK+S1j+7iqRJXtmuHJ/UDsBhQ==", + "license": "MIT", + "dependencies": { + "@octokit/app": "^14.0.2", + "@octokit/core": "^5.0.0", + "@octokit/oauth-app": "^6.0.0", + "@octokit/plugin-paginate-graphql": "^4.0.0", + "@octokit/plugin-paginate-rest": "11.4.4-cjs.2", + "@octokit/plugin-rest-endpoint-methods": "13.3.2-cjs.1", + "@octokit/plugin-retry": "^6.0.0", + "@octokit/plugin-throttling": "^8.0.0", + "@octokit/request-error": "^5.0.0", + "@octokit/types": "^13.0.0", + "@octokit/webhooks": "^12.3.1" + }, + "engines": { + "node": ">= 18" + } + }, + "node_modules/once": { + "version": "1.4.0", + "resolved": "https://registry.npmjs.org/once/-/once-1.4.0.tgz", + "integrity": "sha512-lNaJgI+2Q5URQBkccEKHTQOPaXdUxnZZElQTZY0MFUAuaEqe1E+Nyvgdz/aIyNi6Z9MzO5dv1H8n58/GELp3+w==", + "license": "ISC", + "dependencies": { + "wrappy": "1" + } + }, + "node_modules/safe-buffer": { + "version": "5.2.1", + "resolved": 
"https://registry.npmjs.org/safe-buffer/-/safe-buffer-5.2.1.tgz", + "integrity": "sha512-rp3So07KcdmmKbGvgaNxQSJr7bGVSVk5S9Eq1F+ppbRo70+YeaDxkw5Dd8NPN+GD6bjnYm2VuPuCXmpuYvmCXQ==", + "funding": [ + { + "type": "github", + "url": "https://github.com/sponsors/feross" + }, + { + "type": "patreon", + "url": "https://www.patreon.com/feross" + }, + { + "type": "consulting", + "url": "https://feross.org/support" + } + ], + "license": "MIT" + }, + "node_modules/semver": { + "version": "7.7.2", + "resolved": "https://registry.npmjs.org/semver/-/semver-7.7.2.tgz", + "integrity": "sha512-RF0Fw+rO5AMf9MAyaRXI4AV0Ulj5lMHqVxxdSgiVbixSCXoEmmX/jk0CuJw4+3SqroYO9VoUh+HcuJivvtJemA==", + "license": "ISC", + "bin": { + "semver": "bin/semver.js" + }, + "engines": { + "node": ">=10" + } + }, + "node_modules/undici-types": { + "version": "7.13.0", + "resolved": "https://registry.npmjs.org/undici-types/-/undici-types-7.13.0.tgz", + "integrity": "sha512-Ov2Rr9Sx+fRgagJ5AX0qvItZG/JKKoBRAVITs1zk7IqZGTJUwgUr7qoYBpWwakpWilTZFM98rG/AFRocu10iIQ==", + "license": "MIT" + }, + "node_modules/universal-github-app-jwt": { + "version": "1.2.0", + "resolved": "https://registry.npmjs.org/universal-github-app-jwt/-/universal-github-app-jwt-1.2.0.tgz", + "integrity": "sha512-dncpMpnsKBk0eetwfN8D8OUHGfiDhhJ+mtsbMl+7PfW7mYjiH8LIcqRmYMtzYLgSh47HjfdBtrBwIQ/gizKR3g==", + "license": "MIT", + "dependencies": { + "@types/jsonwebtoken": "^9.0.0", + "jsonwebtoken": "^9.0.2" + } + }, + "node_modules/universal-user-agent": { + "version": "6.0.1", + "resolved": "https://registry.npmjs.org/universal-user-agent/-/universal-user-agent-6.0.1.tgz", + "integrity": "sha512-yCzhz6FN2wU1NiiQRogkTQszlQSlpWaw8SvVegAc+bDxbzHgh1vX8uIe8OYyMH6DwH+sdTJsgMl36+mSMdRJIQ==", + "license": "ISC" + }, + "node_modules/wrappy": { + "version": "1.0.2", + "resolved": "https://registry.npmjs.org/wrappy/-/wrappy-1.0.2.tgz", + "integrity": "sha512-l4Sp/DRseor9wL6EvV2+TuQn63dMkPjZ/sp9XkghTEbV9KlPS1xUsZ3u7/IQO4wxtcFB4bgpQPRcR3QCvezPcQ==", + "license": 
"ISC" + } + } +} diff --git a/scripts/migrate-discussions/package.json b/scripts/migrate-discussions/package.json new file mode 100644 index 0000000..68f8b6d --- /dev/null +++ b/scripts/migrate-discussions/package.json @@ -0,0 +1,37 @@ +{ + "name": "migrate-discussions", + "version": "1.0.0", + "description": "Migrate GitHub Discussions between repositories, including categories, labels, comments, and replies. Supports cross-enterprise and cross-instance migrations with automatic rate limit handling.", + "main": "migrate-discussions.js", + "scripts": { + "start": "node migrate-discussions.js", + "help": "node migrate-discussions.js --help" + }, + "keywords": [ + "github", + "discussions", + "migrate", + "copy", + "github-api", + "graphql", + "octokit", + "github-enterprise" + ], + "author": "Josh Johanning", + "license": "MIT", + "engines": { + "node": ">=14.0.0" + }, + "dependencies": { + "octokit": "^3.1.2" + }, + "repository": { + "type": "git", + "url": "https://github.com/joshjohanning/github-misc-scripts.git", + "directory": "scripts/migrate-discussions" + }, + "bugs": { + "url": "https://github.com/joshjohanning/github-misc-scripts/issues" + }, + "homepage": "https://github.com/joshjohanning/github-misc-scripts/tree/main/scripts/migrate-discussions" +} From 8ec4d523a6cd9c5d363a20baa09808ecb805d833 Mon Sep 17 00:00:00 2001 From: Josh Johanning Date: Thu, 2 Oct 2025 14:20:07 -0500 Subject: [PATCH 29/32] docs: update README to clarify npm installation steps and environment variable usage --- scripts/migrate-discussions/README.md | 6 ++++-- 1 file changed, 4 insertions(+), 2 deletions(-) diff --git a/scripts/migrate-discussions/README.md b/scripts/migrate-discussions/README.md index 6681f96..d9972e6 100644 --- a/scripts/migrate-discussions/README.md +++ b/scripts/migrate-discussions/README.md @@ -16,7 +16,10 @@ Basic usage: ```bash export SOURCE_TOKEN=ghp_abc export TARGET_TOKEN=ghp_xyz -npm i octokit +# export SOURCE_API_URL= # if GHES +# export TARGET_API_URL= 
# if GHES/ghe.com +cd ./scripts/migrate-discussions +npm i node ./migrate-discussions.js source-org source-repo target-org target-repo ``` @@ -38,7 +41,6 @@ export SOURCE_API_URL=https://github.mycompany.com/api/v3 export TARGET_API_URL=https://api.github.com export SOURCE_TOKEN=ghp_abc export TARGET_TOKEN=ghp_xyz -npm i octokit node ./migrate-discussions.js source-org source-repo target-org target-repo ``` From 09b29280c0937bfb707e7e3eee5b3d75a50199e7 Mon Sep 17 00:00:00 2001 From: Josh Johanning Date: Thu, 2 Oct 2025 14:25:52 -0500 Subject: [PATCH 30/32] docs: update README to enhance GitHub App usage instructions and clarify token recommendations --- scripts/migrate-discussions/README.md | 19 ++++++++++++++++++- 1 file changed, 18 insertions(+), 1 deletion(-) diff --git a/scripts/migrate-discussions/README.md b/scripts/migrate-discussions/README.md index d9972e6..c43e0ab 100644 --- a/scripts/migrate-discussions/README.md +++ b/scripts/migrate-discussions/README.md @@ -5,10 +5,25 @@ Migrate GitHub Discussions between repositories, including categories, labels, c ## Prerequisites - `SOURCE_TOKEN` environment variable with GitHub PAT that has `repo` scope and read access to source repository discussions + - Alternatively, use a GitHub App token (recommended for better rate limits and security) - `TARGET_TOKEN` environment variable with GitHub PAT that has `repo` scope and write access to target repository discussions -- Dependencies installed via `npm i octokit` + - Alternatively, use a GitHub App token (recommended for better rate limits, security, and authorship) (✨ **recommended for target token!**) +- Dependencies installed via `npm i` - Both source and target repositories must have GitHub Discussions enabled +### Using a GitHub App (Recommended) + +GitHub Apps provide better rate limits and security compared to personal access tokens. To use a GitHub App: + +1. Create or use an existing GitHub App with `repo` permissions +2. 
Install the app on the source and/or target repositories +3. Generate a token using the GitHub CLI and [`gh-token`](https://github.com/Link-/gh-token) extension: + +```bash +export SOURCE_TOKEN=$(gh token generate --app-id YOUR_SOURCE_APP_ID --installation-id YOUR_SOURCE_INSTALLATION_ID --key /path/to/source/private-key.pem --token-only) +export TARGET_TOKEN=$(gh token generate --app-id YOUR_TARGET_APP_ID --installation-id YOUR_TARGET_INSTALLATION_ID --key /path/to/target/private-key.pem --token-only) +``` + ## Script usage Basic usage: @@ -16,6 +31,8 @@ Basic usage: ```bash export SOURCE_TOKEN=ghp_abc export TARGET_TOKEN=ghp_xyz +# export SOURCE_TOKEN=$(gh token generate --app-id YOUR_SOURCE_APP_ID --installation-id YOUR_SOURCE_INSTALLATION_ID --key /path/to/source/private-key.pem --token-only) +# export TARGET_TOKEN=$(gh token generate --app-id YOUR_TARGET_APP_ID --installation-id YOUR_TARGET_INSTALLATION_ID --key /path/to/target/private-key.pem --token-only) # export SOURCE_API_URL= # if GHES # export TARGET_API_URL= # if GHES/ghe.com cd ./scripts/migrate-discussions From 69633847650e7fbc594bcd21deec43796eecafd8 Mon Sep 17 00:00:00 2001 From: Josh Johanning Date: Thu, 2 Oct 2025 16:18:21 -0500 Subject: [PATCH 31/32] docs: update README and script to enhance rate limit handling and retry logic --- scripts/migrate-discussions/README.md | 35 ++++++++++++++++--- .../migrate-discussions.js | 10 +++--- 2 files changed, 35 insertions(+), 10 deletions(-) diff --git a/scripts/migrate-discussions/README.md b/scripts/migrate-discussions/README.md index c43e0ab..79a52ce 100644 --- a/scripts/migrate-discussions/README.md +++ b/scripts/migrate-discussions/README.md @@ -74,15 +74,15 @@ node ./migrate-discussions.js source-org source-repo target-org target-repo - Indicates pinned discussions with a visual indicator - Marks answered discussions and preserves the accepted answer -### Rate Limiting & Reliability +### Rate limiting and reliability - **Automatic rate limit 
handling** with Octokit's built-in throttling plugin -- **Intelligent retry logic** for both primary and secondary rate limits (up to 3 retries) +- **Intelligent retry logic** with configurable retries for both rate-limit and non-rate-limit errors - **GitHub-recommended delays** - 3 seconds between discussions/comments to stay under secondary rate limits - **Resume capability** - Use `--start-from ` to resume from a specific discussion if interrupted - **Rate limit tracking** - Summary shows how many times primary and secondary rate limits were hit -### User Experience +### User experience - Colored console output with timestamps for better visibility - Comprehensive summary statistics at completion @@ -95,7 +95,7 @@ Edit these constants at the top of the script: - `INCLUDE_POLL_MERMAID_CHART` - Set to `false` to disable Mermaid pie charts for polls (default: `true`) - `RATE_LIMIT_SLEEP_SECONDS` - Sleep duration between API calls (default: `0.5` seconds) - `DISCUSSION_PROCESSING_DELAY_SECONDS` - Delay between processing discussions/comments (default: `3` seconds) -- `MAX_RETRIES` - Maximum retries for non-rate-limit errors (default: `3`) +- `MAX_RETRIES` - Maximum retries for both rate-limit and non-rate-limit errors (default: `15`) ## Summary output @@ -108,6 +108,31 @@ After completion, the script displays comprehensive statistics: - **Secondary rate limits hit** - How many times the script hit GitHub's secondary rate limit - List of missing categories that need manual creation +### Example summary output + +```text +[2025-10-02 19:38:44] ============================================================ +[2025-10-02 19:38:44] Discussion copy completed! 
+[2025-10-02 19:38:44] Total discussions found: 10 +[2025-10-02 19:38:44] Discussions created: 10 +[2025-10-02 19:38:44] Discussions skipped: 0 +[2025-10-02 19:38:44] Total comments found: 9 +[2025-10-02 19:38:44] Comments copied: 9 +[2025-10-02 19:38:44] Primary rate limits hit: 0 +[2025-10-02 19:38:44] Secondary rate limits hit: 0 +[2025-10-02 19:38:44] WARNING: +The following categories were missing and need to be created manually: +[2025-10-02 19:38:44] WARNING: - Blog posts! +[2025-10-02 19:38:44] WARNING: +[2025-10-02 19:38:44] WARNING: To create categories manually: +[2025-10-02 19:38:44] WARNING: 1. Go to https://github.com/joshjohanning-emu/discussions-test/discussions +[2025-10-02 19:38:44] WARNING: 2. Click 'New discussion' +[2025-10-02 19:38:44] WARNING: 3. Look for category management options +[2025-10-02 19:38:44] WARNING: 4. Create the missing categories with appropriate names and descriptions +[2025-10-02 19:38:44] +All done! ✨ +``` + ## Notes ### Category handling @@ -136,7 +161,7 @@ After completion, the script displays comprehensive statistics: - No more than 500 content-generating requests per hour - The script stays under 1 discussion or comment created every 3 seconds (GitHub's recommendation) - Automatic retry with wait times from GitHub's `retry-after` headers -- If rate limits are consistently hit, the script will retry up to 3 times before failing +- If rate limits are consistently hit, the script will retry up to 15 times before failing ### Resume capability diff --git a/scripts/migrate-discussions/migrate-discussions.js b/scripts/migrate-discussions/migrate-discussions.js index 5ccc82f..000fb49 100644 --- a/scripts/migrate-discussions/migrate-discussions.js +++ b/scripts/migrate-discussions/migrate-discussions.js @@ -37,7 +37,7 @@ const INCLUDE_POLL_MERMAID_CHART = true; // Set to false to disable Mermaid pie chart for polls const RATE_LIMIT_SLEEP_SECONDS = 0.5; // Default sleep duration between API calls to avoid rate limiting const 
DISCUSSION_PROCESSING_DELAY_SECONDS = 3; // Delay between processing discussions (GitHub recommends 1 discussion per 3 seconds) -const MAX_RETRIES = 3; // Maximum number of retries for failed operations (rate limits handled automatically by Octokit) +const MAX_RETRIES = 15; // Maximum number of retries for both rate-limit and non-rate-limit errors const { Octokit } = require("octokit"); @@ -141,8 +141,8 @@ const throttleOptions = { onRateLimit: (retryAfter, options, octokit) => { primaryRateLimitHits++; warn(`Primary rate limit exhausted for request ${options.method} ${options.url}`); - if (options.request.retryCount <= 2) { - warn(`Retrying after ${retryAfter} seconds (retry ${options.request.retryCount + 1}/3)`); + if (options.request.retryCount < MAX_RETRIES) { + warn(`Retrying after ${retryAfter} seconds (retry ${options.request.retryCount + 1}/${MAX_RETRIES})`); return true; } error(`Max retries reached for rate limit`); @@ -151,8 +151,8 @@ const throttleOptions = { onSecondaryRateLimit: (retryAfter, options, octokit) => { secondaryRateLimitHits++; warn(`Secondary rate limit detected for request ${options.method} ${options.url}`); - if (options.request.retryCount <= 2) { - warn(`Retrying after ${retryAfter} seconds (retry ${options.request.retryCount + 1}/3)`); + if (options.request.retryCount < MAX_RETRIES) { + warn(`Retrying after ${retryAfter} seconds (retry ${options.request.retryCount + 1}/${MAX_RETRIES})`); return true; } error(`Max retries reached for secondary rate limit`); From 726d14cab1f2f47e7bb5126d5b3d7c274d59400c Mon Sep 17 00:00:00 2001 From: Josh Johanning Date: Mon, 6 Oct 2025 13:32:27 -0500 Subject: [PATCH 32/32] fix: update script usage examples to reflect correct script name --- .../migrate-discussions/migrate-discussions.js | 18 +++++++++--------- 1 file changed, 9 insertions(+), 9 deletions(-) diff --git a/scripts/migrate-discussions/migrate-discussions.js b/scripts/migrate-discussions/migrate-discussions.js index 000fb49..1cb2481 100644 
--- a/scripts/migrate-discussions/migrate-discussions.js +++ b/scripts/migrate-discussions/migrate-discussions.js @@ -6,10 +6,10 @@ // using different GitHub tokens for authentication to support cross-enterprise copying // // Usage: -// node copy-discussions.js +// node migrate-discussions.js // // Example: -// node copy-discussions.js source-org repo1 target-org repo2 +// node migrate-discussions.js source-org repo1 target-org repo2 // // Prerequisites: // - SOURCE_TOKEN environment variable with read access to source repository discussions @@ -49,7 +49,7 @@ if (args.includes('--help') || args.includes('-h')) { console.log('Copy Discussions between GitHub repositories'); console.log(''); console.log('Usage:'); - console.log(' node copy-discussions.js [options]'); + console.log(' node migrate-discussions.js [options]'); console.log(''); console.log('Arguments:'); console.log(' source_org Source organization name'); @@ -69,17 +69,17 @@ if (args.includes('--help') || args.includes('-h')) { console.log(' TARGET_API_URL API endpoint for target (defaults to https://api.github.com)'); console.log(''); console.log('Example:'); - console.log(' node copy-discussions.js source-org repo1 target-org repo2'); + console.log(' node migrate-discussions.js source-org repo1 target-org repo2'); console.log(''); console.log('Example with resume from discussion #50:'); - console.log(' node copy-discussions.js source-org repo1 target-org repo2 --start-from 50'); + console.log(' node migrate-discussions.js source-org repo1 target-org repo2 --start-from 50'); console.log(''); console.log('Example with GHES:'); console.log(' SOURCE_API_URL=https://github.mycompany.com/api/v3 \\'); console.log(' TARGET_API_URL=https://api.github.com \\'); console.log(' SOURCE_TOKEN=ghp_xxx \\'); console.log(' TARGET_TOKEN=ghp_yyy \\'); - console.log(' node copy-discussions.js source-org repo1 target-org repo2'); + console.log(' node migrate-discussions.js source-org repo1 target-org repo2'); 
console.log(''); console.log('Note:'); console.log(' - Both tokens must have the "repo" scope'); @@ -108,11 +108,11 @@ if (startFromIndex !== -1) { } if (args.length !== 4) { - console.error("Usage: node copy-discussions.js [--start-from ]"); + console.error("Usage: node migrate-discussions.js [--start-from ]"); console.error("\nExample:"); - console.error(" node copy-discussions.js source-org repo1 target-org repo2"); + console.error(" node migrate-discussions.js source-org repo1 target-org repo2"); console.error("\nExample with resume:"); - console.error(" node copy-discussions.js source-org repo1 target-org repo2 --start-from 50"); + console.error(" node migrate-discussions.js source-org repo1 target-org repo2 --start-from 50"); console.error("\nFor more information, use --help"); process.exit(1); }