1 change: 1 addition & 0 deletions Dockerfile_ui
@@ -38,6 +38,7 @@ RUN --mount=type=cache,target=/root/.npm \
# Bring in built assets and server entry from builder
COPY --from=builder /app/dist ./dist
COPY --from=builder /app/server-start.js ./
COPY --from=builder /app/request-logging.js ./

EXPOSE 3030

26 changes: 13 additions & 13 deletions backend/controllers/github.go
@@ -66,20 +66,20 @@ func (d DiggerController) GithubAppWebHook(c *gin.Context) {
"installationId", *event.Installation.ID,
)

if *event.Action == "deleted" {
err := handleInstallationDeletedEvent(event, appId64)
if err != nil {
slog.Error("Failed to handle installation deleted event", "error", err)
c.String(http.StatusAccepted, "Failed to handle webhook event.")
return
}
} else if *event.Action == "created" || *event.Action == "unsuspended" || *event.Action == "new_permissions_accepted" {
if err := handleInstallationUpsertEvent(c.Request.Context(), gh, event, appId64); err != nil {
slog.Error("Failed to handle installation upsert event", "error", err)
c.String(http.StatusAccepted, "Failed to handle webhook event.")
return
// Run in goroutine to avoid webhook timeouts for large installations
go func(ctx context.Context) {
defer logging.InheritRequestLogger(ctx)()
if *event.Action == "deleted" {
if err := handleInstallationDeletedEvent(event, appId64); err != nil {
slog.Error("Failed to handle installation deleted event", "error", err)
}
} else if *event.Action == "created" || *event.Action == "unsuspended" || *event.Action == "new_permissions_accepted" {
// Use background context so work continues after HTTP response
if err := handleInstallationUpsertEvent(context.Background(), gh, event, appId64); err != nil {
slog.Error("Failed to handle installation upsert event", "error", err)
}
}
}
}(c.Request.Context())
case *github.InstallationRepositoriesEvent:
slog.Info("Processing InstallationRepositoriesEvent",
"action", event.GetAction(),
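The installation event is now handled in a goroutine so the handler can acknowledge the webhook before the potentially slow installation work finishes. The `defer logging.InheritRequestLogger(ctx)()` call suggests the request-scoped logger is carried across the goroutine boundary; the sketch below shows one way such a context-based logger helper could be built. It is an assumption, not the repo's actual `logging` package: the names (`loggerKey`, `WithRequestLogger`, `FromContext`) are illustrative only.

```go
// Hypothetical sketch of a request-scoped logger carried via context.Context.
// Names and behavior are assumptions for illustration; the real backend
// logging package (and InheritRequestLogger) may be implemented differently.
package logging

import (
	"context"
	"log/slog"
)

type loggerKey struct{}

// WithRequestLogger attaches a logger (e.g. pre-seeded with a request ID) to ctx.
// Middleware would call this once per incoming request.
func WithRequestLogger(ctx context.Context, l *slog.Logger) context.Context {
	return context.WithValue(ctx, loggerKey{}, l)
}

// FromContext returns the request-scoped logger, or the process default if none
// was attached. Safe to call from goroutines that received the request context.
func FromContext(ctx context.Context) *slog.Logger {
	if l, ok := ctx.Value(loggerKey{}).(*slog.Logger); ok && l != nil {
		return l
	}
	return slog.Default()
}
```

Note the deliberate split in the diff: the goroutine captures `c.Request.Context()` only for logging, while the upsert work receives `context.Background()` so it is not cancelled once the HTTP response is written.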
2 changes: 1 addition & 1 deletion backend/controllers/github_comment.go
@@ -162,7 +162,7 @@ func handleIssueCommentEvent(gh utils.GithubClientProvider, payload *github.Issu
}

diggerYmlStr, ghService, config, projectsGraph, prSourceBranch, commitSha, changedFiles, err := getDiggerConfigForPR(gh, orgId, prLabelsStr, installationId, repoFullName, repoOwner, repoName, cloneURL, issueNumber)
if err != nil {
if err != nil {
slog.Error("Error getting Digger config for PR",
"issueNumber", issueNumber,
"repoFullName", repoFullName,
4 changes: 2 additions & 2 deletions backend/controllers/github_helpers.go
@@ -830,7 +830,7 @@ func getDiggerConfigForPR(gh utils.GithubClientProvider, orgId uint, prLabels []
"branch", prBranch,
"error", err,
)
return "", nil, nil, nil, nil, nil, nil, fmt.Errorf("error loading digger.yml: %v", err)
return "", nil, nil, nil, nil, nil, nil, fmt.Errorf("error loading digger.yml: %w", err)
}

return diggerYmlStr, ghService, config, dependencyGraph, &prBranch, &prCommitSha, changedFiles, nil
@@ -893,7 +893,7 @@ func GetDiggerConfigForBranchOrSha(gh utils.GithubClientProvider, installationId
"branch", branch,
"error", err,
)
return "", nil, nil, nil, fmt.Errorf("error cloning and loading config %v", err)
return "", nil, nil, nil, fmt.Errorf("error cloning and loading config: %w", err)
}

projectCount := 0
53 changes: 53 additions & 0 deletions backend/utils/github.go
@@ -408,6 +408,59 @@ func GetGithubHostname() string {
return githubHostname
}

// IsAllReposInstallation checks if the GitHub App installation is configured to access all repositories
// (as opposed to a selected subset). Returns true if installation is for "all" repos.
// Note: This requires app-level JWT authentication, not installation token authentication.
func IsAllReposInstallation(appId int64, installationId int64) (bool, error) {
githubAppPrivateKey := ""
githubAppPrivateKeyB64 := os.Getenv("GITHUB_APP_PRIVATE_KEY_BASE64")
if githubAppPrivateKeyB64 != "" {
decodedBytes, err := base64.StdEncoding.DecodeString(githubAppPrivateKeyB64)
if err != nil {
slog.Error("Failed to decode GITHUB_APP_PRIVATE_KEY_BASE64", "error", err)
return false, fmt.Errorf("error decoding private key: %v", err)
}
githubAppPrivateKey = string(decodedBytes)
} else {
githubAppPrivateKey = os.Getenv("GITHUB_APP_PRIVATE_KEY")
if githubAppPrivateKey == "" {
return false, fmt.Errorf("missing GitHub app private key")
}
}

// Use app-level transport (JWT) instead of installation token
atr, err := ghinstallation.NewAppsTransport(net.DefaultTransport, appId, []byte(githubAppPrivateKey))
if err != nil {
slog.Error("Failed to create GitHub app transport",
"appId", appId,
"error", err,
)
return false, fmt.Errorf("error creating app transport: %v", err)
}

client := github.NewClient(&net.Client{Transport: atr})

installation, _, err := client.Apps.GetInstallation(context.Background(), installationId)
if err != nil {
slog.Error("Failed to get GitHub installation details",
"installationId", installationId,
"error", err,
)
return false, fmt.Errorf("error getting installation details: %v", err)
}

repositorySelection := installation.GetRepositorySelection()
isAllRepos := repositorySelection == "all"

slog.Debug("Checked installation repository selection",
"installationId", installationId,
"repositorySelection", repositorySelection,
"isAllRepos", isAllRepos,
)

return isAllRepos, nil
}

func GetWorkflowIdAndUrlFromDiggerJobId(client *github.Client, repoOwner string, repoName string, diggerJobID string) (int64, string, error) {
slog.Debug("Looking for workflow for job",
"diggerJobId", diggerJobID,
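`IsAllReposInstallation` authenticates with an app-level JWT (via `ghinstallation.NewAppsTransport`) because the installation-details endpoint is an app endpoint, not an installation one. A hedged usage sketch follows; only the helper's signature comes from this PR, while the surrounding caller and import path are assumptions.

```go
// Illustrative caller: branch on whether an installation grants access to all
// repositories or only a selected list. Only utils.IsAllReposInstallation's
// signature is taken from this PR; everything else here is an assumption.
package example

import (
	"fmt"
	"log/slog"

	"github.com/diggerhq/digger/backend/utils" // assumed import path
)

func syncInstallationRepos(appID, installationID int64) error {
	allRepos, err := utils.IsAllReposInstallation(appID, installationID)
	if err != nil {
		return fmt.Errorf("checking repository selection: %w", err)
	}
	if allRepos {
		// "all" installations can skip enumerating repositories up front.
		slog.Info("installation grants access to all repositories", "installationId", installationID)
		return nil
	}
	// "selected" installations: list and persist the chosen repositories (not shown).
	slog.Info("installation grants access to selected repositories", "installationId", installationID)
	return nil
}
```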
5 changes: 4 additions & 1 deletion libs/digger_config/digger_config.go
@@ -18,6 +18,9 @@ import (
"gopkg.in/yaml.v3"
)

// ErrDiggerConfigNotFound is returned when neither digger.yml nor digger.yaml exists in the repository
var ErrDiggerConfigNotFound = errors.New("digger config file not found")

type DirWalker interface {
GetDirs(workingDir string, config DiggerConfigYaml) ([]string, error)
}
@@ -39,7 +42,7 @@ func ReadDiggerYmlFileContents(dir string) (string, error) {
slog.Error("could not read digger config file",
"error", err,
"dir", dir)
return "", fmt.Errorf("could not read the file both digger.yml and digger.yaml are missing: %v", err)
return "", fmt.Errorf("%w: both digger.yml and digger.yaml are missing: %v", ErrDiggerConfigNotFound, err)
}
}
diggerYmlStr := string(diggerYmlBytes)
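Switching these wraps from `%v` to `%w` keeps the error chain intact, so the new `ErrDiggerConfigNotFound` sentinel stays detectable with `errors.Is` even after `getDiggerConfigForPR` and related helpers re-wrap the error. A small caller-side sketch (the surrounding function and import path are illustrative; the sentinel and `ReadDiggerYmlFileContents` come from this PR):

```go
// Illustrative caller: treat a missing digger.yml/digger.yaml as a benign,
// recognizable condition rather than a generic failure. The import path is an
// assumption; ErrDiggerConfigNotFound and ReadDiggerYmlFileContents are from the PR.
package example

import (
	"errors"
	"fmt"

	"github.com/diggerhq/digger/libs/digger_config" // assumed import path
)

func loadRepoConfig(dir string) (string, error) {
	contents, err := digger_config.ReadDiggerYmlFileContents(dir)
	if errors.Is(err, digger_config.ErrDiggerConfigNotFound) {
		// Repo not onboarded yet: report it distinctly instead of failing hard.
		return "", fmt.Errorf("repository %s has no digger config: %w", dir, err)
	}
	return contents, err
}
```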
1 change: 1 addition & 0 deletions taco/Dockerfile_token_service
@@ -7,6 +7,7 @@ WORKDIR /go/src/github.com/diggerhq/digger/taco

# Copy go.mod/go.sum first for better layer caching
COPY cmd/token_service/go.mod cmd/token_service/go.sum ./cmd/token_service/
COPY internal/go.mod internal/go.sum ./internal/
RUN cd cmd/token_service && go mod download

# Copy source code
74 changes: 74 additions & 0 deletions ui/request-logging.js
@@ -0,0 +1,74 @@
import { unsealData } from 'iron-session';
import { decodeJwt } from 'jose';

// Request logging utilities
export async function extractUserInfoFromRequest(req) {
try {
const cookieName = process.env.WORKOS_COOKIE_NAME || 'wos-session';
const cookiePassword = process.env.WORKOS_COOKIE_PASSWORD;

if (!cookiePassword) {
return { userId: 'anonymous', orgId: 'anonymous' };
}

const cookieHeader = req.headers?.cookie || req.getHeader?.('cookie');
if (!cookieHeader) {
return { userId: 'anonymous', orgId: 'anonymous' };
}

const cookies = cookieHeader.split(';').reduce((acc, cookie) => {
const [key, value] = cookie.trim().split('=');
acc[key] = decodeURIComponent(value);
return acc;
}, {});

const sessionCookie = cookies[cookieName];
if (!sessionCookie) {
return { userId: 'anonymous', orgId: 'anonymous' };
}

const session = await unsealData(sessionCookie, {
password: cookiePassword,
});

if (!session?.user?.id || !session?.accessToken) {
return { userId: 'anonymous', orgId: 'anonymous' };
}

// Decode JWT to get organization ID
let orgId = 'anonymous';
try {
const decoded = decodeJwt(session.accessToken);
orgId = decoded.org_id || 'anonymous';
} catch (error) {
// If JWT decode fails, just use anonymous
}

return { userId: session.user.id, orgId };
} catch (error) {
return { userId: 'anonymous', orgId: 'anonymous' };
}
}

export function logRequestInit(method, path, requestId, userId, orgId) {
console.log(JSON.stringify({
event: 'request_initialized',
method,
path,
requestId,
userId,
orgId,
}));
}

export function logResponse(method, path, requestId, latency, statusCode) {
console.log(JSON.stringify({
event: 'response_sent',
method,
path,
requestId,
latency,
statusCode,
}));
}

68 changes: 64 additions & 4 deletions ui/server-start.js
@@ -6,6 +6,10 @@ import { fileURLToPath } from 'node:url';
import { Readable } from 'node:stream';
import { createGzip } from 'node:zlib';
import serverHandler from './dist/server/server.js';
import { extractUserInfoFromRequest, logRequestInit, logResponse } from './request-logging.js';

// Verify logging functions are loaded
console.log('✅ Request logging module loaded');

const __dirname = fileURLToPath(new URL('.', import.meta.url));
const PORT = process.env.PORT || 3030;
@@ -68,6 +72,42 @@ const server = createServer(async (req, res) => {
const requestId = req.headers['x-request-id'] || `ssr-${Math.random().toString(36).slice(2, 10)}`;
const requestStart = Date.now();

// Parse URL early for logging
const url = new URL(req.url, `http://${req.headers.host}`);
const pathname = url.pathname;
const method = req.method;

// Debug: Log that request handler is being called
console.log(`[DEBUG] Request received: ${method} ${pathname} [${requestId}]`);

// Extract user ID and org ID and log request initialization
// Always log, even if extraction fails
let userId = 'anonymous';
let orgId = 'anonymous';
try {
const userInfo = await extractUserInfoFromRequest(req);
userId = userInfo.userId;
orgId = userInfo.orgId;
} catch (error) {
console.error(`User info extraction error [${requestId}]:`, error);
}

// Always log request initialization
try {
logRequestInit(method, pathname, requestId, userId, orgId);
} catch (error) {
console.error(`Request logging error [${requestId}]:`, error);
// Fallback to direct console.log if logging function fails
console.log(JSON.stringify({
event: 'request_initialized',
method,
path: pathname,
requestId,
userId,
orgId,
}));
}

// Set request timeout
req.setTimeout(REQUEST_TIMEOUT, () => {
console.error(`⏱️ Request timeout (${REQUEST_TIMEOUT}ms): ${req.method} ${req.url} [${requestId}]`);
@@ -78,8 +118,6 @@
});

try {
const url = new URL(req.url, `http://${req.headers.host}`);
const pathname = url.pathname;

// Try to serve static files from dist/client first
// Serve: /assets/*, *.js, *.css, *.json, images, fonts, favicons
@@ -99,6 +137,13 @@
'Cache-Control': 'public, max-age=31536000, immutable',
});
res.end(content);
// Log response for static files
try {
const latency = Date.now() - requestStart;
logResponse(method, pathname, requestId, latency, 200);
} catch (err) {
console.error(`Response logging error [${requestId}]:`, err);
}
return;
} catch (err) {
// File not found, fall through to SSR handler
@@ -132,9 +177,9 @@

// Log slow SSR requests
if (ssrTime > 2000) {
console.log(`🔥 VERY SLOW SSR: ${req.method} ${pathname} took ${ssrTime}ms [${requestId}]`);
console.debug(`🔥 VERY SLOW SSR: ${req.method} ${pathname} took ${ssrTime}ms [${requestId}]`);
} else if (ssrTime > 1000) {
console.log(`⚠️ SLOW SSR: ${req.method} ${pathname} took ${ssrTime}ms [${requestId}]`);
console.debug(`⚠️ SLOW SSR: ${req.method} ${pathname} took ${ssrTime}ms [${requestId}]`);
}

// Convert Web Standard Response to Node.js response
@@ -221,13 +266,28 @@
} else {
res.end();
}

// Log response after sending
try {
const latency = Date.now() - requestStart;
logResponse(method, pathname, requestId, latency, res.statusCode);
} catch (err) {
console.error(`Response logging error [${requestId}]:`, err);
}
} catch (error) {
console.error(`Server error [${requestId}]:`, error);
if (!res.headersSent) {
res.statusCode = 500;
res.setHeader('Content-Type', 'text/plain');
res.end('Internal Server Error');
}
// Log error response
try {
const latency = Date.now() - requestStart;
logResponse(method, pathname, requestId, latency, res.statusCode || 500);
} catch (err) {
console.error(`Error response logging error [${requestId}]:`, err);
}
}
});

4 changes: 2 additions & 2 deletions ui/src/authkit/serverFunctions.ts
@@ -137,12 +137,12 @@ export const getWidgetsAuthToken = createServerFn({method: 'GET'})
// Check cache first
const cached = serverCache.getWidgetToken(userId, organizationId);
if (cached) {
console.log(`✅ Widget token cache hit for ${userId}:${organizationId}`);
console.debug(`✅ Widget token cache hit for ${userId}:${organizationId}`);
return cached;
}

// Cache miss - generate new token
console.log(`❌ Widget token cache miss, generating new token for ${userId}:${organizationId}`);
console.debug(`❌ Widget token cache miss, generating new token for ${userId}:${organizationId}`);
const token = await getWorkOS().widgets.getToken({
userId: userId,
organizationId: organizationId,