diff --git a/.github/workflows/azure-acr-deploy-frontend-dev.yml b/.github/workflows/azure-acr-deploy-frontend-dev.yml new file mode 100644 index 0000000..e65d175 --- /dev/null +++ b/.github/workflows/azure-acr-deploy-frontend-dev.yml @@ -0,0 +1,71 @@ +name: Frontend - Build and Deploy on Azure (Dev) + +on: + push: + branches: [development] + paths: + - "frontend/**" + - ".github/workflows/azure-deploy-frontend-dev.yml" + workflow_dispatch: + +env: + REGISTRY: mploycontainerregistry-hncsekeah2gagbgb.azurecr.io + FRONTEND_IMAGE_NAME: nextjs-frontend + +jobs: + build-and-push: + runs-on: ubuntu-latest + steps: + - name: Checkout repository + uses: actions/checkout@v4 + + - name: Set up Docker Buildx + uses: docker/setup-buildx-action@v3 + with: + driver-opts: | + image=moby/buildkit:latest + network=host + + - name: Log in to Azure Container Registry + uses: docker/login-action@v3 + with: + registry: ${{ env.REGISTRY }} + username: ${{ secrets.ACR_USERNAME }} + password: ${{ secrets.ACR_PASSWORD }} + + - name: Build and push image (with ACR cache) + uses: docker/build-push-action@v6 + with: + context: ./frontend + file: ./frontend/Dockerfile + push: true + tags: | + ${{ env.REGISTRY }}/${{ env.FRONTEND_IMAGE_NAME }}:development + cache-from: type=registry,ref=${{ env.REGISTRY }}/${{ env.FRONTEND_IMAGE_NAME }}:buildcache + cache-to: type=registry,ref=${{ env.REGISTRY }}/${{ env.FRONTEND_IMAGE_NAME }}:buildcache,mode=max + + deploy: + needs: build-and-push + runs-on: ubuntu-latest + environment: development + steps: + - name: Azure login + uses: azure/login@v2 + with: + creds: ${{ secrets.AZURE_CREDENTIALS }} + + - name: Deploy to Azure Container Apps (Dev) + uses: azure/container-apps-deploy-action@v2 + with: + registryUrl: ${{ env.REGISTRY }} + registryUsername: ${{ secrets.ACR_USERNAME }} + registryPassword: ${{ secrets.ACR_PASSWORD }} + containerAppName: mploy-frontend-dev + resourceGroup: ${{ secrets.AZURE_RESOURCE_GROUP_DEV }} + imageToDeploy: ${{ env.REGISTRY }}/${{ env.FRONTEND_IMAGE_NAME }}:development + targetPort: 3000 + environmentVariables: > + NOTION_API_KEY=${{ secrets.NOTION_API_KEY }} + NOTION_DATABASE_ID=${{ secrets.NOTION_DATABASE_ID }} + MONGODB_URI=${{ secrets.MONGODB_URI }} + NODE_ENV=development diff --git a/.github/workflows/azure-acr-deploy-frontend-prod.yml b/.github/workflows/azure-acr-deploy-frontend-prod.yml new file mode 100644 index 0000000..13cd5f5 --- /dev/null +++ b/.github/workflows/azure-acr-deploy-frontend-prod.yml @@ -0,0 +1,78 @@ +name: Frontend - Build and Deploy on Azure (Prod) + +on: + push: + branches: [production] + paths: + - "frontend/**" + - ".github/workflows/azure-deploy-frontend-prod.yml" + workflow_dispatch: + +env: + REGISTRY: mploycontainerregistry-hncsekeah2gagbgb.azurecr.io + FRONTEND_IMAGE_NAME: nextjs-frontend + +jobs: + build-and-push: + runs-on: ubuntu-latest + steps: + - name: Checkout repository + uses: actions/checkout@v4 + + - name: Set up Docker Buildx + uses: docker/setup-buildx-action@v3 + with: + driver-opts: | + image=moby/buildkit:latest + network=host + + - name: Log in to Azure Container Registry + uses: docker/login-action@v3 + with: + registry: ${{ env.REGISTRY }} + username: ${{ secrets.ACR_USERNAME }} + password: ${{ secrets.ACR_PASSWORD }} + + - name: Set version tag + id: vars + run: | + VERSION="v$(date +'%Y.%m.%d')-${GITHUB_RUN_NUMBER}" + echo "version=$VERSION" >> $GITHUB_OUTPUT + + - name: Build and push :production and versioned tags (with ACR cache) + uses: docker/build-push-action@v6 + with: + context: 
./frontend + file: ./frontend/Dockerfile + push: true + tags: | + ${{ env.REGISTRY }}/${{ env.FRONTEND_IMAGE_NAME }}:production + ${{ env.REGISTRY }}/${{ env.FRONTEND_IMAGE_NAME }}:${{ steps.vars.outputs.version }} + cache-from: type=registry,ref=${{ env.REGISTRY }}/${{ env.FRONTEND_IMAGE_NAME }}:buildcache + cache-to: type=registry,ref=${{ env.REGISTRY }}/${{ env.FRONTEND_IMAGE_NAME }}:buildcache,mode=max + + deploy: + needs: build-and-push + runs-on: ubuntu-latest + environment: production + steps: + - name: Azure login + uses: azure/login@v2 + with: + creds: ${{ secrets.AZURE_CREDENTIALS }} + + - name: Deploy to Azure Container Apps (Prod) + uses: azure/container-apps-deploy-action@v2 + with: + registryUrl: ${{ env.REGISTRY }} + registryUsername: ${{ secrets.ACR_USERNAME }} + registryPassword: ${{ secrets.ACR_PASSWORD }} + containerAppName: mploy-frontend + resourceGroup: ${{ secrets.AZURE_RESOURCE_GROUP_PROD }} + imageToDeploy: ${{ env.REGISTRY }}/${{ env.FRONTEND_IMAGE_NAME }}:production + targetPort: 3000 + environmentVariables: > + NOTION_API_KEY=${{ secrets.NOTION_API_KEY }} + NOTION_DATABASE_ID=${{ secrets.NOTION_DATABASE_ID }} + MONGODB_URI=${{ secrets.MONGODB_URI }} + NODE_ENV=production diff --git a/.github/workflows/azure-static-web-apps-lively-desert-03e284d00.yml b/.github/workflows/azure-static-web-apps-lively-desert-03e284d00.yml deleted file mode 100644 index dd310a4..0000000 --- a/.github/workflows/azure-static-web-apps-lively-desert-03e284d00.yml +++ /dev/null @@ -1,46 +0,0 @@ -name: Azure Static Web Apps CI/CD - -on: - push: - branches: - - production - pull_request: - types: [opened, synchronize, reopened, closed] - branches: - - production - -jobs: - build_and_deploy_job: - if: github.event_name == 'push' || (github.event_name == 'pull_request' && github.event.action != 'closed') - runs-on: ubuntu-latest - name: Build and Deploy Job - steps: - - uses: actions/checkout@v3 - with: - submodules: true - lfs: false - - name: Build And Deploy - id: builddeploy - uses: Azure/static-web-apps-deploy@v1 - with: - azure_static_web_apps_api_token: ${{ secrets.AZURE_STATIC_WEB_APPS_API_TOKEN_LIVELY_DESERT_03E284D00 }} - repo_token: ${{ secrets.GITHUB_TOKEN }} # Used for Github integrations (i.e. PR comments) - action: "upload" - ###### Repository/Build Configurations - These values can be configured to match your app requirements. 
###### - # For more information regarding Static Web App workflow configurations, please visit: https://aka.ms/swaworkflowconfig - app_location: "./frontend" # App source code path - api_location: "" # Api source code path - optional - output_location: "" # Built app content directory - optional - ###### End of Repository/Build Configurations ###### - - close_pull_request_job: - if: github.event_name == 'pull_request' && github.event.action == 'closed' - runs-on: ubuntu-latest - name: Close Pull Request Job - steps: - - name: Close Pull Request - id: closepullrequest - uses: Azure/static-web-apps-deploy@v1 - with: - azure_static_web_apps_api_token: ${{ secrets.AZURE_STATIC_WEB_APPS_API_TOKEN_LIVELY_DESERT_03E284D00 }} - action: "close" diff --git a/Makefile b/Makefile deleted file mode 100644 index bf71c6a..0000000 --- a/Makefile +++ /dev/null @@ -1,8 +0,0 @@ -.PHONY: dev dev-docker - -dev: - docker compose -f docker-compose.dev.yml up - -dev-clean: - docker compose -f docker-compose.dev.yml down - docker compose -f docker-compose.dev.yml up --build \ No newline at end of file diff --git a/README.md b/README.md index 89d5379..cc445ef 100644 --- a/README.md +++ b/README.md @@ -1,14 +1,22 @@ -# MAC Job Board +# MPLOY - MAC Job Board Hey there! πŸ‘‹ This is a modern, intelligent job board platform that automatically aggregates job listings, providing users with a streamlined experience to search, filter, and discover relevant opportunities. The platform updates daily with fresh listings through our smart AI robots. ## Features πŸš€ + - Jobs update automatically every single day from various sources (automatically deduplicated) - We use AI to help fix, sort and summarise the listings -- You can filter for exactly what you want (e.g. Big Tech Intern Roles for Interational students) +- You can filter for exactly what you want (e.g. Big Tech Intern Roles for International students) - Works perfectly on phone or laptop +- Server-side rendering (where possible) with Next.js 15 App Router +- Multiple filters can be applied at once, including text search +- Desktop/mobile responsive UI: list/details on desktop, modal on mobile +- State persists in URL: search, filters, pagination (`/jobs?q=dev&location=sydney&page=2`) +- Direct job links supported (`/jobs/[id]`) +- Parallel data fetching for faster loads +- Real-time job search with debouncing ## Frontend @@ -18,6 +26,31 @@ This is a modern, intelligent job board platform that automatically aggregates j - Mantine UI: For consistent, accessible UI components - Tailwind CSS: For utility-first styling and responsive design +### Key Patterns + +- **State Management**: Start with props; use custom hooks for reusable logic; Context for global state (e.g., job filters). +- **Data Flow**: URL as source of truth for filters/search; debounced API calls; prefetching for pagination. +- **Components**: Keep thin (50-150 lines); use layouts for shared UI; mark client components with "use client". +- **Features Used**: Intercepting routes for modals; Suspense for loading; Error boundaries; useMemo/useRef for optimization. 
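+
+As a rough sketch of how these patterns fit together (the hook and parameter names below are illustrative rather than the exact ones in `src/hooks/`), a debounced, URL-synced search can be wired up like this:
+
+```ts
+"use client";
+
+import { useEffect, useState } from "react";
+import { usePathname, useRouter, useSearchParams } from "next/navigation";
+
+// Keep the search term in the URL (?q=...) so it survives reloads and sharing,
+// and debounce updates so we don't push a new URL on every keystroke.
+export function useDebouncedUrlSearch(delayMs = 300) {
+  const router = useRouter();
+  const pathname = usePathname();
+  const searchParams = useSearchParams();
+
+  const [term, setTerm] = useState(searchParams.get("q") ?? "");
+
+  useEffect(() => {
+    const handle = setTimeout(() => {
+      const params = new URLSearchParams(searchParams.toString());
+      if (term) params.set("q", term);
+      else params.delete("q");
+      params.delete("page"); // reset pagination when the query changes
+      router.replace(`${pathname}?${params.toString()}`);
+    }, delayMs);
+    return () => clearTimeout(handle);
+  }, [term, delayMs, pathname, router, searchParams]);
+
+  return { term, setTerm };
+}
+```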
+ +### Structure + +``` +src/ +β”œβ”€β”€ app/ +β”‚ β”œβ”€β”€ jobs/ # Main jobs route with filters, listing, details +β”‚ β”‚ β”œβ”€β”€ [id]/ # Dynamic job page +β”‚ β”‚ └── error.tsx # Job-specific error handling +β”‚ β”œβ”€β”€ layout.tsx # Root layout with providers +β”‚ └── page.tsx # Home (redirects to /jobs) +β”œβ”€β”€ components/ +β”‚ β”œβ”€β”€ jobs/ # Job cards, lists, details, filters +β”‚ └── layout/ # Nav, logo, search bar +β”œβ”€β”€ context/ +β”‚ └── filter/ # Filter state provider +└── lib/ # Utils, theme +``` + ## Backend - Server Actions: Answers search and feedback requests @@ -27,6 +60,7 @@ This is a modern, intelligent job board platform that automatically aggregates j ## Getting Started ### Prerequisites + - Node.js 20+ - Java 17 - Go 1.21+ @@ -34,20 +68,21 @@ This is a modern, intelligent job board platform that automatically aggregates j - Redis ### Local Development -```bash -# Start all services -docker compose -f docker-compose.dev.yml up -# Alternative if Make is installed -make dev -# Frontend only +```bash cd frontend npm install npm run dev +``` -# Backend only -cd backend -./gradlew bootRun +## Environment Variables + +``` +MONGODB_URI= +MONGODB_DATABASE=default +MONGODB_COLLECTION=listings + +NODE_ENV=development ``` ## Development Guidelines @@ -55,7 +90,8 @@ cd backend ### Git Workflow #### Branch Structure -- `main` - Production branch + +- `production` - Production branch - `dev` - Development branch - Feature branches follow the pattern: ``` @@ -64,6 +100,19 @@ cd backend - backend/edwn/redis-caching - frontend/sarah/job-filters ``` - + +### Coding Conventions + +- Use kebab-case for files (e.g., `product-card.ts`); camelCase for hooks (e.g., `useCustomHook`). +- Group related components in feature directories (e.g., `components/jobs/filters/`). +- Prioritize server components; use "use client" only when needed. +- Build with scalability and observability in mind (e.g., structured logging with Pino). + +## Deployment (Development and Production) + +- We are deployed on Azure Container Apps via Github Actions. +- GitHub Actions -> builds containers -> Azure Containers Registry -> Azure Container Apps. + ## License -This project is licensed under the MIT License. \ No newline at end of file + +This project is licensed under the MIT License. diff --git a/backend/.gitattributes b/backend/.gitattributes deleted file mode 100644 index 8af972c..0000000 --- a/backend/.gitattributes +++ /dev/null @@ -1,3 +0,0 @@ -/gradlew text eol=lf -*.bat text eol=crlf -*.jar binary diff --git a/backend/.gitignore b/backend/.gitignore deleted file mode 100644 index 1daab95..0000000 --- a/backend/.gitignore +++ /dev/null @@ -1,42 +0,0 @@ -HELP.md -.gradle -build/ -!gradle/wrapper/gradle-wrapper.jar -!**/src/main/**/build/ -!**/src/test/**/build/ - -### STS ### -.apt_generated -.classpath -.factorypath -.project -.settings -.springBeans -.sts4-cache -bin/ -!**/src/main/**/bin/ -!**/src/test/**/bin/ - -### IntelliJ IDEA ### -.idea -*.iws -*.iml -*.ipr -out/ -!**/src/main/**/out/ -!**/src/test/**/out/ - -### NetBeans ### -/nbproject/private/ -/nbbuild/ -/dist/ -/nbdist/ -/.nb-gradle/ - -### VS Code ### -.vscode/ - -### Kotlin ### -.kotlin - -.idea \ No newline at end of file diff --git a/backend/Dockerfile.dev b/backend/Dockerfile.dev deleted file mode 100644 index 5ad26ad..0000000 --- a/backend/Dockerfile.dev +++ /dev/null @@ -1,7 +0,0 @@ -FROM gradle:8.12-jdk21 AS build -WORKDIR /app -ENV HOME /root -COPY . . 
-RUN chmod +x ./gradlew -RUN chmod +x start.sh && ./gradlew getDependencies -CMD ["sh", "start.sh"] diff --git a/backend/build.gradle.kts b/backend/build.gradle.kts deleted file mode 100644 index b17dc34..0000000 --- a/backend/build.gradle.kts +++ /dev/null @@ -1,49 +0,0 @@ -plugins { - kotlin("jvm") version "1.9.25" - kotlin("plugin.spring") version "1.9.25" - id("org.springframework.boot") version "3.4.1" - id("io.spring.dependency-management") version "1.1.7" -} - -group = "com.mac" -version = "0.0.1-SNAPSHOT" - -java { - toolchain { - languageVersion = JavaLanguageVersion.of(21) - } -} - -repositories { - mavenCentral() -} - -dependencies { - developmentOnly("org.springframework.boot:spring-boot-devtools") - implementation("org.springframework.boot:spring-boot-starter") - implementation("org.jetbrains.kotlin:kotlin-reflect") - implementation("org.springframework.boot:spring-boot-starter-web") - implementation("org.springframework.boot:spring-boot-starter-data-redis") -} - -kotlin { - compilerOptions { - freeCompilerArgs.addAll("-Xjsr305=strict") - } -} - -tasks.register("getDependencies") { - from(sourceSets.main.get().runtimeClasspath) - into("runtime/") - - doFirst { - val runtimeDir = File("runtime") - runtimeDir.deleteRecursively() - runtimeDir.mkdir() - } - - doLast { - File("runtime").deleteRecursively() - } -} - diff --git a/backend/gradle/wrapper/gradle-wrapper.jar b/backend/gradle/wrapper/gradle-wrapper.jar deleted file mode 100644 index a4b76b9..0000000 Binary files a/backend/gradle/wrapper/gradle-wrapper.jar and /dev/null differ diff --git a/backend/gradle/wrapper/gradle-wrapper.properties b/backend/gradle/wrapper/gradle-wrapper.properties deleted file mode 100644 index cea7a79..0000000 --- a/backend/gradle/wrapper/gradle-wrapper.properties +++ /dev/null @@ -1,7 +0,0 @@ -distributionBase=GRADLE_USER_HOME -distributionPath=wrapper/dists -distributionUrl=https\://services.gradle.org/distributions/gradle-8.12-bin.zip -networkTimeout=10000 -validateDistributionUrl=true -zipStoreBase=GRADLE_USER_HOME -zipStorePath=wrapper/dists diff --git a/backend/gradlew b/backend/gradlew deleted file mode 100755 index f5feea6..0000000 --- a/backend/gradlew +++ /dev/null @@ -1,252 +0,0 @@ -#!/bin/sh - -# -# Copyright Β© 2015-2021 the original authors. -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# https://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. -# -# SPDX-License-Identifier: Apache-2.0 -# - -############################################################################## -# -# Gradle start up script for POSIX generated by Gradle. -# -# Important for running: -# -# (1) You need a POSIX-compliant shell to run this script. 
If your /bin/sh is -# noncompliant, but you have some other compliant shell such as ksh or -# bash, then to run this script, type that shell name before the whole -# command line, like: -# -# ksh Gradle -# -# Busybox and similar reduced shells will NOT work, because this script -# requires all of these POSIX shell features: -# * functions; -# * expansions Β«$varΒ», Β«${var}Β», Β«${var:-default}Β», Β«${var+SET}Β», -# Β«${var#prefix}Β», Β«${var%suffix}Β», and Β«$( cmd )Β»; -# * compound commands having a testable exit status, especially Β«caseΒ»; -# * various built-in commands including Β«commandΒ», Β«setΒ», and Β«ulimitΒ». -# -# Important for patching: -# -# (2) This script targets any POSIX shell, so it avoids extensions provided -# by Bash, Ksh, etc; in particular arrays are avoided. -# -# The "traditional" practice of packing multiple parameters into a -# space-separated string is a well documented source of bugs and security -# problems, so this is (mostly) avoided, by progressively accumulating -# options in "$@", and eventually passing that to Java. -# -# Where the inherited environment variables (DEFAULT_JVM_OPTS, JAVA_OPTS, -# and GRADLE_OPTS) rely on word-splitting, this is performed explicitly; -# see the in-line comments for details. -# -# There are tweaks for specific operating systems such as AIX, CygWin, -# Darwin, MinGW, and NonStop. -# -# (3) This script is generated from the Groovy template -# https://github.com/gradle/gradle/blob/HEAD/platforms/jvm/plugins-application/src/main/resources/org/gradle/api/internal/plugins/unixStartScript.txt -# within the Gradle project. -# -# You can find Gradle at https://github.com/gradle/gradle/. -# -############################################################################## - -# Attempt to set APP_HOME - -# Resolve links: $0 may be a link -app_path=$0 - -# Need this for daisy-chained symlinks. -while - APP_HOME=${app_path%"${app_path##*/}"} # leaves a trailing /; empty if no leading path - [ -h "$app_path" ] -do - ls=$( ls -ld "$app_path" ) - link=${ls#*' -> '} - case $link in #( - /*) app_path=$link ;; #( - *) app_path=$APP_HOME$link ;; - esac -done - -# This is normally unused -# shellcheck disable=SC2034 -APP_BASE_NAME=${0##*/} -# Discard cd standard output in case $CDPATH is set (https://github.com/gradle/gradle/issues/25036) -APP_HOME=$( cd -P "${APP_HOME:-./}" > /dev/null && printf '%s -' "$PWD" ) || exit - -# Use the maximum available, or set MAX_FD != -1 to use that value. -MAX_FD=maximum - -warn () { - echo "$*" -} >&2 - -die () { - echo - echo "$*" - echo - exit 1 -} >&2 - -# OS specific support (must be 'true' or 'false'). -cygwin=false -msys=false -darwin=false -nonstop=false -case "$( uname )" in #( - CYGWIN* ) cygwin=true ;; #( - Darwin* ) darwin=true ;; #( - MSYS* | MINGW* ) msys=true ;; #( - NONSTOP* ) nonstop=true ;; -esac - -CLASSPATH=$APP_HOME/gradle/wrapper/gradle-wrapper.jar - - -# Determine the Java command to use to start the JVM. -if [ -n "$JAVA_HOME" ] ; then - if [ -x "$JAVA_HOME/jre/sh/java" ] ; then - # IBM's JDK on AIX uses strange locations for the executables - JAVACMD=$JAVA_HOME/jre/sh/java - else - JAVACMD=$JAVA_HOME/bin/java - fi - if [ ! -x "$JAVACMD" ] ; then - die "ERROR: JAVA_HOME is set to an invalid directory: $JAVA_HOME - -Please set the JAVA_HOME variable in your environment to match the -location of your Java installation." - fi -else - JAVACMD=java - if ! command -v java >/dev/null 2>&1 - then - die "ERROR: JAVA_HOME is not set and no 'java' command could be found in your PATH. 
- -Please set the JAVA_HOME variable in your environment to match the -location of your Java installation." - fi -fi - -# Increase the maximum file descriptors if we can. -if ! "$cygwin" && ! "$darwin" && ! "$nonstop" ; then - case $MAX_FD in #( - max*) - # In POSIX sh, ulimit -H is undefined. That's why the result is checked to see if it worked. - # shellcheck disable=SC2039,SC3045 - MAX_FD=$( ulimit -H -n ) || - warn "Could not query maximum file descriptor limit" - esac - case $MAX_FD in #( - '' | soft) :;; #( - *) - # In POSIX sh, ulimit -n is undefined. That's why the result is checked to see if it worked. - # shellcheck disable=SC2039,SC3045 - ulimit -n "$MAX_FD" || - warn "Could not set maximum file descriptor limit to $MAX_FD" - esac -fi - -# Collect all arguments for the java command, stacking in reverse order: -# * args from the command line -# * the main class name -# * -classpath -# * -D...appname settings -# * --module-path (only if needed) -# * DEFAULT_JVM_OPTS, JAVA_OPTS, and GRADLE_OPTS environment variables. - -# For Cygwin or MSYS, switch paths to Windows format before running java -if "$cygwin" || "$msys" ; then - APP_HOME=$( cygpath --path --mixed "$APP_HOME" ) - CLASSPATH=$( cygpath --path --mixed "$CLASSPATH" ) - - JAVACMD=$( cygpath --unix "$JAVACMD" ) - - # Now convert the arguments - kludge to limit ourselves to /bin/sh - for arg do - if - case $arg in #( - -*) false ;; # don't mess with options #( - /?*) t=${arg#/} t=/${t%%/*} # looks like a POSIX filepath - [ -e "$t" ] ;; #( - *) false ;; - esac - then - arg=$( cygpath --path --ignore --mixed "$arg" ) - fi - # Roll the args list around exactly as many times as the number of - # args, so each arg winds up back in the position where it started, but - # possibly modified. - # - # NB: a `for` loop captures its iteration list before it begins, so - # changing the positional parameters here affects neither the number of - # iterations, nor the values presented in `arg`. - shift # remove old arg - set -- "$@" "$arg" # push replacement arg - done -fi - - -# Add default JVM options here. You can also use JAVA_OPTS and GRADLE_OPTS to pass JVM options to this script. -DEFAULT_JVM_OPTS='"-Xmx64m" "-Xms64m"' - -# Collect all arguments for the java command: -# * DEFAULT_JVM_OPTS, JAVA_OPTS, JAVA_OPTS, and optsEnvironmentVar are not allowed to contain shell fragments, -# and any embedded shellness will be escaped. -# * For example: A user cannot expect ${Hostname} to be expanded, as it is an environment variable and will be -# treated as '${Hostname}' itself on the command line. - -set -- \ - "-Dorg.gradle.appname=$APP_BASE_NAME" \ - -classpath "$CLASSPATH" \ - org.gradle.wrapper.GradleWrapperMain \ - "$@" - -# Stop when "xargs" is not available. -if ! command -v xargs >/dev/null 2>&1 -then - die "xargs is not available" -fi - -# Use "xargs" to parse quoted args. -# -# With -n1 it outputs one arg per line, with the quotes and backslashes removed. -# -# In Bash we could simply go: -# -# readarray ARGS < <( xargs -n1 <<<"$var" ) && -# set -- "${ARGS[@]}" "$@" -# -# but POSIX shell has neither arrays nor command substitution, so instead we -# post-process each arg (as a line of input to sed) to backslash-escape any -# character that might be a shell metacharacter, then use eval to reverse -# that process (while maintaining the separation between arguments), and wrap -# the whole thing up as a single "set" statement. -# -# This will of course break if any of these variables contains a newline or -# an unmatched quote. 
-# - -eval "set -- $( - printf '%s\n' "$DEFAULT_JVM_OPTS $JAVA_OPTS $GRADLE_OPTS" | - xargs -n1 | - sed ' s~[^-[:alnum:]+,./:=@_]~\\&~g; ' | - tr '\n' ' ' - )" '"$@"' - -exec "$JAVACMD" "$@" diff --git a/backend/settings.gradle.kts b/backend/settings.gradle.kts deleted file mode 100644 index 25d39f4..0000000 --- a/backend/settings.gradle.kts +++ /dev/null @@ -1 +0,0 @@ -rootProject.name = "mploy" diff --git a/backend/src/main/kotlin/com/mac/mploy/MployApplication.kt b/backend/src/main/kotlin/com/mac/mploy/MployApplication.kt deleted file mode 100644 index d38fac5..0000000 --- a/backend/src/main/kotlin/com/mac/mploy/MployApplication.kt +++ /dev/null @@ -1,11 +0,0 @@ -package com.mac.mploy - -import org.springframework.boot.autoconfigure.SpringBootApplication -import org.springframework.boot.runApplication - -@SpringBootApplication -class MployApplication - -fun main(args: Array) { - runApplication(*args) -} diff --git a/backend/src/main/kotlin/com/mac/mploy/config/RedisConfig.kt b/backend/src/main/kotlin/com/mac/mploy/config/RedisConfig.kt deleted file mode 100644 index 40359c6..0000000 --- a/backend/src/main/kotlin/com/mac/mploy/config/RedisConfig.kt +++ /dev/null @@ -1,30 +0,0 @@ -package com.mac.mploy.config - -import org.springframework.beans.factory.annotation.Value -import org.springframework.context.annotation.Bean -import org.springframework.context.annotation.Configuration -import org.springframework.data.redis.connection.RedisStandaloneConfiguration -import org.springframework.data.redis.connection.lettuce.LettuceConnectionFactory -import org.springframework.data.redis.core.StringRedisTemplate - -@Configuration -class RedisConfig { - @Value("\${spring.redis.host}") - private lateinit var redisHost: String - - @Value("\${spring.redis.port}") - private var redisPort: Int = 0 - - @Bean - fun lettuceConnectionFactory(): LettuceConnectionFactory { - val configuration = RedisStandaloneConfiguration(redisHost, redisPort) - return LettuceConnectionFactory(configuration) - } - - @Bean - fun stringRedisTemplate(connectionFactory: LettuceConnectionFactory): StringRedisTemplate { - val template = StringRedisTemplate() - template.connectionFactory = connectionFactory - return template - } -} \ No newline at end of file diff --git a/backend/src/main/kotlin/com/mac/mploy/controller/HelloController.kt b/backend/src/main/kotlin/com/mac/mploy/controller/HelloController.kt deleted file mode 100644 index 56c6fb3..0000000 --- a/backend/src/main/kotlin/com/mac/mploy/controller/HelloController.kt +++ /dev/null @@ -1,30 +0,0 @@ -package com.mac.mploy.controller - -import org.slf4j.LoggerFactory -import org.springframework.beans.factory.annotation.Value -import org.springframework.data.redis.core.StringRedisTemplate -import org.springframework.web.bind.annotation.* - -@RestController -@CrossOrigin(origins = ["http://localhost:3000"]) -@RequestMapping("/api") -class HelloController( - private val redis: StringRedisTemplate, - @Value("\${spring.redis.host}") private val redisHost: String, - @Value("\${spring.redis.port}") private val redisPort: Int -) { - private val logger = LoggerFactory.getLogger(HelloController::class.java) - - @GetMapping("/hello") - fun hello(): String { - logger.info("Attempting to connect to Redis at {}:{}", redisHost, redisPort) - try { - val value = redis.opsForValue().get("greeting") - logger.info("Successfully connected to Redis. Value: {}", value) - return value ?: "Hello from Redis!" 
- } catch (e: Exception) { - logger.error("Failed to connect to Redis", e) - throw e - } - } -} diff --git a/backend/src/main/resources/application.properties b/backend/src/main/resources/application.properties deleted file mode 100644 index 91ff81b..0000000 --- a/backend/src/main/resources/application.properties +++ /dev/null @@ -1 +0,0 @@ -spring.application.name=mploy diff --git a/backend/src/main/resources/application.yml b/backend/src/main/resources/application.yml deleted file mode 100644 index 6df13db..0000000 --- a/backend/src/main/resources/application.yml +++ /dev/null @@ -1,29 +0,0 @@ -spring: - redis: - host: ${REDIS_HOST:redis} - port: ${REDIS_PORT:6379} - database: 0 - timeout: 60000 - lettuce: - pool: - max-active: 8 - max-wait: -1 - max-idle: 8 - min-idle: 0 - -logging: - level: - org.springframework.data.redis: DEBUG - io.lettuce.core: DEBUG - -server: - port: 8080 - -devtools: - restart: - enabled: true - -# Enable LiveReload if needed -spring.devtools.livereload.enabled: true - -spring.devtools.reload.trigger-file: .reloadtrigger diff --git a/docker-compose.dev.yml b/docker-compose.dev.yml deleted file mode 100644 index 3e4d169..0000000 --- a/docker-compose.dev.yml +++ /dev/null @@ -1,48 +0,0 @@ -services: - # redis: - # image: redis:alpine - # ports: - # - "6379:6379" - # networks: - # - mploy-network - - # backend: - # build: - # context: ./backend - # dockerfile: Dockerfile.dev - # develop: - # watch: - # - action: rebuild - # path: ./backend/src # Watch the entire src directory - # volumes: - # - ./backend:/app - # ports: - # - "35729:35729" - # - "8080:8080" - # environment: - # - SPRING_PROFILES_ACTIVE=dev - # - SPRING_DEVTOOLS_RESTART_ENABLED=true - # - REDIS_HOST=redis - # - REDIS_PORT=6379 - # networks: - # - mploy-network - # depends_on: - # - redis - - frontend: - build: - context: ./frontend - dockerfile: Dockerfile.dev - ports: - - "3000:3000" - volumes: - - ./frontend:/app - - /app/node_modules - environment: - - NEXT_PUBLIC_API_URL=http://localhost:8080 - networks: - - mploy-network - -networks: - mploy-network: - driver: bridge diff --git a/frontend/Dockerfile b/frontend/Dockerfile new file mode 100644 index 0000000..890ef68 --- /dev/null +++ b/frontend/Dockerfile @@ -0,0 +1,52 @@ +# ------------------------- +# Base +# ------------------------- +FROM node:20-alpine AS base +WORKDIR /app + +# ------------------------- +# Dependencies +# ------------------------- +FROM base AS deps +COPY package.json package-lock.json* ./ +RUN npm ci + +# ------------------------- +# Builder +# ------------------------- +FROM base AS builder +COPY --from=deps /app/node_modules ./node_modules +COPY package.json package-lock.json* ./ +COPY tsconfig.json next.config.ts ./ +COPY postcss.config.mjs tailwind.config.ts ./ +COPY public ./public +COPY src ./src +RUN npm run build + +# ------------------------- +# Runner +# ------------------------- +FROM base AS runner +WORKDIR /app + +# Default: prod, can override with `-e NODE_ENV=development` +ENV NODE_ENV=production + +# Copy runtime deps +COPY --from=deps /app/node_modules ./node_modules + +# Copy built assets +COPY --from=builder --chown=1001:1001 /app/.next/standalone ./ +COPY --from=builder --chown=1001:1001 /app/.next/static ./.next/static +COPY --from=builder --chown=1001:1001 /app/public ./public + +# Create non-root user +RUN addgroup --system --gid 1001 nodejs \ + && adduser --system --uid 1001 nextjs +USER nextjs + +EXPOSE 3000 +ENV PORT=3000 +ENV HOSTNAME=0.0.0.0 + +CMD ["npm", "start"] diff --git 
a/frontend/Dockerfile.dev b/frontend/Dockerfile.dev deleted file mode 100644 index 2e24735..0000000 --- a/frontend/Dockerfile.dev +++ /dev/null @@ -1,7 +0,0 @@ -FROM node:22-alpine -WORKDIR /app -COPY package*.json ./ -RUN npm ci -COPY . . -RUN npm list mongodb -CMD ["npm", "run", "dev"] \ No newline at end of file diff --git a/frontend/README.md b/frontend/README.md deleted file mode 100644 index d0ceae8..0000000 --- a/frontend/README.md +++ /dev/null @@ -1,236 +0,0 @@ -# MPloy Job Board - -MPloy is a very simple job board with search, filter and just a website that displays all the jobs. There are 3 components in this: - -- Next.js is used for our frontend -- Kotlin (Springboot) REST backend -- GoLang scraper service that runs once a day to update our collection of jobs. - -## Core Features - -- Server-side rendering (where possible) with Next.js 15 App Router -- We want to build this app with modern principles like scalability, observability in mind. -- The job website will display 10 jobs at a time. users may filter these jobs based on whatever property. -- Multiple filters can be applied at once, including a text search filter. -- Desktop/mobile responsive UI: list/details on desktop, modal on mobile -- State persists in URL: search, filters, pagination (`/jobs?q=dev&location=sydney&page=2`) -- Direct job links supported (`/jobs/[id]`) -- Parallel data fetching for faster loads -- Real-time job search with debouncing -- Data refreshed daily via Go scraper -- There is around 1k jobs, each one with a size of around 4kBs. - -### Naming & File Structuring Conventions - -- Everything uses kebab-case `product-card.ts` unless theres an agreed standard e.g. `useCustomHook` for hook -- Group related components in feature directories (e.g. `components/layout/search/filter/` contains all components used for search filtering -- Most UI components are 50-150 lines of code. Keep pages thin, move complex logic to components -- Use layouts for shared UI across routes - -## State Management & Data Passing Patterns - -- Begin with simple props passing - max 2 levels of components -- When stateful logic needs to be reused, move it to custom hooks. -- When props drilling becomes cumbersome or state needs to be widely available, use Context. (e.g. the global fliter state of jobs should be context) -- Pre-fetch the data in the next job page -- Load the essential data first and display the page, while other job listings and details are being loaded. -- Implement parallel data fetching when possible - -## Next / React Features - -- Intercepting Routes: Use intercepting routes for modal-like experiences. -- Lazy Loading: use when we can defer the loading of heavy components -- Error Boundaries: define error.tsx files to catch errors to prevent the entire site from breaking -- Add boundaries for loading state -- Use useMemo for expensive calculations (e.g. 
filtered results) -- Use useRef to maintain filter input values - -## Mantine Usage Guidelines - -### Core Principles - -- Use Mantine components only when they provide significant value beyond basic HTML/CSS -- Prefer simple HTML/CSS for basic layout and text elements -- Consider bundle size and complexity impact - -## Frontend Structure - -``` -β”œβ”€β”€ next.config.ts # Next.js configuration, API routes, environment -β”œβ”€β”€ src -β”‚ β”œβ”€β”€ app -β”‚ β”‚ β”œβ”€β”€ error.tsx # Global error boundary UI -β”‚ β”‚ β”œβ”€β”€ jobs -β”‚ β”‚ β”‚ β”œβ”€β”€ [id] # Dynamic route for individual job pages -β”‚ β”‚ β”‚ β”‚ β”œβ”€β”€ @modal # Intercepted route - shows job details as modal on mobile -β”‚ β”‚ β”‚ β”‚ β”œβ”€β”€ page.tsx # Individual job page UI -β”‚ β”‚ β”‚ β”œβ”€β”€ error.tsx # Job section error boundary -β”‚ β”‚ β”‚ β”œβ”€β”€ layout.tsx # Job section layout wrapper (includes JobsProvider) -β”‚ β”‚ β”‚ β”œβ”€β”€ loading.tsx # Job section loading state -β”‚ β”‚ β”‚ β”œβ”€β”€ page.tsx # Main jobs listing page -β”‚ β”‚ β”œβ”€β”€ layout.tsx # Root layout with nav and theme providers -β”‚ β”‚ β”œβ”€β”€ page.tsx # Home page (redirects to /jobs) -β”‚ β”œβ”€β”€ components -β”‚ β”‚ β”œβ”€β”€ jobs -β”‚ β”‚ β”‚ β”œβ”€β”€ details -β”‚ β”‚ β”‚ β”‚ β”œβ”€β”€ job-card.tsx # Individual job preview card -β”‚ β”‚ β”‚ β”‚ β”œβ”€β”€ job-details.tsx # Full job details view -β”‚ β”‚ β”‚ β”‚ β”œβ”€β”€ job-list.tsx # Container for job cards with virtualization -β”‚ β”‚ β”‚ β”œβ”€β”€ filters -β”‚ β”‚ β”‚ β”‚ β”œβ”€β”€ dropdown-filter.tsx # Reusable filter dropdown -β”‚ β”‚ β”‚ β”‚ β”œβ”€β”€ dropdown-sort.tsx # Sort options dropdown -β”‚ β”‚ β”‚ β”‚ β”œβ”€β”€ filter-section.tsx # Container for all filters -β”‚ β”‚ β”‚ β”œβ”€β”€ search -β”‚ β”‚ β”‚ β”‚ └── search-bar.tsx # Search input with suggestions -β”‚ β”‚ β”œβ”€β”€ layout -β”‚ β”‚ β”‚ └── logo.tsx # Site logo component -β”‚ β”‚ β”‚ └── nav-bar.tsx # Navigation bar -β”‚ β”œβ”€β”€ context -β”‚ β”‚ β”œβ”€β”€ jobs -β”‚ β”‚ β”‚ └── filter-context.tsx # Job state and actions context -β”‚ β”‚ β”‚ └── jobs-provider.tsx # Provider wrapper with initial state -β”‚ β”œβ”€β”€ hooks -β”‚ β”‚ β”œβ”€β”€ use-job-filters.ts # Filter logic and state management -β”‚ β”‚ β”œβ”€β”€ use-job-search.ts # Search functionality and API calls -β”‚ β”‚ β”œβ”€β”€ use-pagination.ts # Pagination state and navigation -β”‚ β”‚ β”œβ”€β”€ use-url-state.ts # URL parameters sync with app state -β”‚ β”œβ”€β”€ lib -β”‚ β”‚ β”œβ”€β”€ theme.ts # Mantine theme configuration -β”‚ β”œβ”€β”€ types -β”‚ β”‚ └── api.ts # API response/request types -β”‚ β”‚ └── filters.ts # Filter option types -β”‚ β”‚ └── job.ts # Job data types -β”œβ”€β”€ tailwind.config.ts # Tailwind CSS configuration -``` - -## Custom Hooks and Context - -### `useUrlState` - -```ts -// URL structure: /jobs?q=developer&location=sydney&page=1 -const { updateUrlState, getStateFromUrl } = useUrlState(); - -// All filter changes update URL automatically -updateUrlState({ search: "developer", location: "sydney" }); - -// URL state is initial source of truth on page load -const initialState = getStateFromUrl(); -``` - -### `useJobsContext` - -```ts -// Jobs context provides centralized state management -const { state, updateFilters, setSelectedJob, clearFilters } = useJobsContext(); - -// Access jobs data and loading state -const { jobs, isLoading, selectedJobId, totalJobs } = state; - -// Update filters (automatically syncs with URL) -updateFilters({ search: "developer" }); -``` - -### `useJobSearch()` and `useJobFilters()` - -```ts -// 
Debounced search with automatic API calls -const { searchJobs, debouncedSearch } = useJobSearch(); - -// Filter management with URL sync -const { handleFilterChange, handleClearFilters } = useJobFilters(); - -// Update multiple filters -handleFilterChange({ - locations: ["sydney"], - jobTypes: ["full-time"], -}); -``` - -### Pagination and Cache - -```ts -// Managed by usePagination hook -const { currentPage, nextPage, prevPage } = usePagination({ - totalItems: totalJobs, - itemsPerPage: 10, -}); - -// useJobSearch handles prefetching next page -useEffect(() => { - if (hasNextPage) prefetchNextPage(); -}, [currentPage]); -``` - -### Data Flow Example - -When user searches: - -1. SearchBar component calls useJobSearch().updateSearch() -2. useJobSearch updates FilterContext filters -3. useUrlState syncs new state to URL -4. useJobSearch triggers API call with new filters -5. Results update in FilterContext -6. JobList component re-renders with new data - -When user opens job details: - -1. JobCard calls selectJob from context -2. URL updates to include selectedJobId -3. JobDetails component renders selected job -4. On mobile, modal route is intercepted - -```ts -// 1. User searches for jobs -function SearchBar() { - const { handleFilterChange } = useJobFilters(); - - return ( - handleFilterChange({ search: e.target.value })} - /> - ); -} - -// 2. URL updates and jobs are fetched -function JobList() { - const { state: { jobs, isLoading } } = useJobsContext(); - - if (isLoading) return ; - - return jobs.map(job => ); -} - -// 3. User selects a job -function JobCard({ job }) { - const { setSelectedJob } = useJobsContext(); - - return ( -
setSelectedJob(job.id)}> - {job.title} -
- ); -} - -// 4. Job details display with URL sync -function JobDetails() { - const { state: { selectedJobId, jobs } } = useJobsContext(); - const job = jobs.find(j => j.id === selectedJobId); - - return job ? : null; -} -``` - -### Suggested AI Prompt - -``` -MPloy is a very simple job board with search, filter and just a website that displays all the jobs. More details about the project including coding guidelines and suggestions are in the README.md file. This should be strictly followed. - -Attached is our project code. All code prefixed with frontend contains the code for the next.js codebase. The README contains coding guidelines and they should be followed at all times. - -Help answer questions regarding this codebase. Unless otherwise specified, show only the changes the user needs to apply and include the directory of where the changes need to be applied as a comment. - -Consider whether components should be client or server sided, and ensure client sided components are marked with "use client" - -Always explain your thought process and await a go ahead before starting to solve a problem. If any prompt is unclear to you ask the user to clarify. -``` diff --git a/frontend/next.config.ts b/frontend/next.config.ts index e9ffa30..68a6c64 100644 --- a/frontend/next.config.ts +++ b/frontend/next.config.ts @@ -1,7 +1,7 @@ import type { NextConfig } from "next"; const nextConfig: NextConfig = { - /* config options here */ + output: "standalone", }; export default nextConfig; diff --git a/frontend/package-lock.json b/frontend/package-lock.json index e2eab00..d0e29b8 100644 --- a/frontend/package-lock.json +++ b/frontend/package-lock.json @@ -21,6 +21,8 @@ "jsdom": "^26.0.0", "mongodb": "^6.14.2", "next": "15.1.7", + "pino": "^9.11.0", + "pino-pretty": "^13.1.1", "react": "^19.0.0", "react-dom": "^19.0.0" }, @@ -1918,6 +1920,15 @@ "integrity": "sha512-Oei9OH4tRh0YqU3GxhX79dM/mwVgvbZJaSNaRk+bshkj0S5cfHcgYakreBjrHwatXKbz+IoIdYLxrKim2MjW0Q==", "license": "MIT" }, + "node_modules/atomic-sleep": { + "version": "1.0.0", + "resolved": "https://registry.npmjs.org/atomic-sleep/-/atomic-sleep-1.0.0.tgz", + "integrity": "sha512-kNOjDqAh7px0XWNI+4QbzoiR/nTkHAWNud2uvnJquD1/x5a7EQZMJT0AczqK0Qn67oY/TTQ1LbUKajZpp3I9tQ==", + "license": "MIT", + "engines": { + "node": ">=8.0.0" + } + }, "node_modules/available-typed-arrays": { "version": "1.0.7", "resolved": "https://registry.npmjs.org/available-typed-arrays/-/available-typed-arrays-1.0.7.tgz", @@ -2215,6 +2226,12 @@ "simple-swizzle": "^0.2.2" } }, + "node_modules/colorette": { + "version": "2.0.20", + "resolved": "https://registry.npmjs.org/colorette/-/colorette-2.0.20.tgz", + "integrity": "sha512-IfEDxwoWIjkeXL1eXcDiow4UbKjhLdq6/EuSVR9GMN7KVH3r9gQ83e73hsz1Nd1T3ijd5xv1wcWRYO+D6kCI2w==", + "license": "MIT" + }, "node_modules/combined-stream": { "version": "1.0.8", "resolved": "https://registry.npmjs.org/combined-stream/-/combined-stream-1.0.8.tgz", @@ -2362,6 +2379,15 @@ "url": "https://github.com/sponsors/ljharb" } }, + "node_modules/dateformat": { + "version": "4.6.3", + "resolved": "https://registry.npmjs.org/dateformat/-/dateformat-4.6.3.tgz", + "integrity": "sha512-2P0p0pFGzHS5EMnhdxQi7aJN+iMheud0UhG4dlE1DLAlvL8JHjJJTX/CSm4JXwV0Ka5nGk3zC5mcb5bUQUxxMA==", + "license": "MIT", + "engines": { + "node": "*" + } + }, "node_modules/debug": { "version": "4.4.0", "resolved": "https://registry.npmjs.org/debug/-/debug-4.4.0.tgz", @@ -2524,6 +2550,15 @@ "integrity": "sha512-L18DaJsXSUk2+42pv8mLs5jJT2hqFkFE4j21wOmgbUqsZ2hL72NsUU785g9RXgo3s0ZNgVl42TiHp3ZtOv/Vyg==", 
"license": "MIT" }, + "node_modules/end-of-stream": { + "version": "1.4.5", + "resolved": "https://registry.npmjs.org/end-of-stream/-/end-of-stream-1.4.5.tgz", + "integrity": "sha512-ooEGc6HP26xXq/N+GCGOT0JKCLDGrq2bQUZrQ7gyrJiZANJ/8YDTxTpQBXGMn+WbIQXNVpyWymm7KYVICQnyOg==", + "license": "MIT", + "dependencies": { + "once": "^1.4.0" + } + }, "node_modules/enhanced-resolve": { "version": "5.18.0", "resolved": "https://registry.npmjs.org/enhanced-resolve/-/enhanced-resolve-5.18.0.tgz", @@ -3191,6 +3226,12 @@ "node": ">=0.10.0" } }, + "node_modules/fast-copy": { + "version": "3.0.2", + "resolved": "https://registry.npmjs.org/fast-copy/-/fast-copy-3.0.2.tgz", + "integrity": "sha512-dl0O9Vhju8IrcLndv2eU4ldt1ftXMqqfgN4H1cpmGV7P6jeB9FwpN9a2c8DPGE1Ys88rNUJVYDHq73CGAGOPfQ==", + "license": "MIT" + }, "node_modules/fast-deep-equal": { "version": "3.1.3", "resolved": "https://registry.npmjs.org/fast-deep-equal/-/fast-deep-equal-3.1.3.tgz", @@ -3242,6 +3283,21 @@ "dev": true, "license": "MIT" }, + "node_modules/fast-redact": { + "version": "3.5.0", + "resolved": "https://registry.npmjs.org/fast-redact/-/fast-redact-3.5.0.tgz", + "integrity": "sha512-dwsoQlS7h9hMeYUq1W++23NDcBLV4KqONnITDV9DjfS3q1SgDGVrBdvvTLUotWtPSD7asWDV9/CmsZPy8Hf70A==", + "license": "MIT", + "engines": { + "node": ">=6" + } + }, + "node_modules/fast-safe-stringify": { + "version": "2.1.1", + "resolved": "https://registry.npmjs.org/fast-safe-stringify/-/fast-safe-stringify-2.1.1.tgz", + "integrity": "sha512-W+KJc2dmILlPplD/H4K9l9LcAHAfPtP6BY84uVLXQ6Evcz9Lcg33Y2z1IVblT6xdY54PXYVHEv+0Wpq8Io6zkA==", + "license": "MIT" + }, "node_modules/fastq": { "version": "1.18.0", "resolved": "https://registry.npmjs.org/fastq/-/fastq-1.18.0.tgz", @@ -3699,6 +3755,12 @@ "node": ">= 0.4" } }, + "node_modules/help-me": { + "version": "5.0.0", + "resolved": "https://registry.npmjs.org/help-me/-/help-me-5.0.0.tgz", + "integrity": "sha512-7xgomUX6ADmcYzFik0HzAxh/73YlKR9bmFzf51CZwR+b6YtzU2m0u49hQCqV6SvlqIqsaxovfwdvbnsw3b/zpg==", + "license": "MIT" + }, "node_modules/html-encoding-sniffer": { "version": "4.0.0", "resolved": "https://registry.npmjs.org/html-encoding-sniffer/-/html-encoding-sniffer-4.0.0.tgz", @@ -4287,6 +4349,15 @@ "jiti": "bin/jiti.js" } }, + "node_modules/joycon": { + "version": "3.1.1", + "resolved": "https://registry.npmjs.org/joycon/-/joycon-3.1.1.tgz", + "integrity": "sha512-34wB/Y7MW7bzjKRjUKTa46I2Z7eV62Rkhva+KkopW7Qvv/OSWBqvkSY7vusOPrNuZcUG3tApvdVgNB8POj3SPw==", + "license": "MIT", + "engines": { + "node": ">=10" + } + }, "node_modules/js-tokens": { "version": "4.0.0", "resolved": "https://registry.npmjs.org/js-tokens/-/js-tokens-4.0.0.tgz", @@ -4586,7 +4657,6 @@ "version": "1.2.8", "resolved": "https://registry.npmjs.org/minimist/-/minimist-1.2.8.tgz", "integrity": "sha512-2yyAR8qBkN3YuheJanUpWC5U3bb5osDywNB8RzDVlDwDHbocAJveqqj1u8+SVD7jkWT4yvsHCpWqqWqAxb0zCA==", - "dev": true, "license": "MIT", "funding": { "url": "https://github.com/sponsors/ljharb" @@ -4926,6 +4996,24 @@ "url": "https://github.com/sponsors/ljharb" } }, + "node_modules/on-exit-leak-free": { + "version": "2.1.2", + "resolved": "https://registry.npmjs.org/on-exit-leak-free/-/on-exit-leak-free-2.1.2.tgz", + "integrity": "sha512-0eJJY6hXLGf1udHwfNftBqH+g73EU4B504nZeKpz1sYRKafAghwxEJunB2O7rDZkL4PGfsMVnTXZ2EjibbqcsA==", + "license": "MIT", + "engines": { + "node": ">=14.0.0" + } + }, + "node_modules/once": { + "version": "1.4.0", + "resolved": "https://registry.npmjs.org/once/-/once-1.4.0.tgz", + "integrity": 
"sha512-lNaJgI+2Q5URQBkccEKHTQOPaXdUxnZZElQTZY0MFUAuaEqe1E+Nyvgdz/aIyNi6Z9MzO5dv1H8n58/GELp3+w==", + "license": "ISC", + "dependencies": { + "wrappy": "1" + } + }, "node_modules/optionator": { "version": "0.9.4", "resolved": "https://registry.npmjs.org/optionator/-/optionator-0.9.4.tgz", @@ -5093,6 +5181,79 @@ "node": ">=0.10.0" } }, + "node_modules/pino": { + "version": "9.11.0", + "resolved": "https://registry.npmjs.org/pino/-/pino-9.11.0.tgz", + "integrity": "sha512-+YIodBB9sxcWeR8PrXC2K3gEDyfkUuVEITOcbqrfcj+z5QW4ioIcqZfYFbrLTYLsmAwunbS7nfU/dpBB6PZc1g==", + "license": "MIT", + "dependencies": { + "atomic-sleep": "^1.0.0", + "fast-redact": "^3.1.1", + "on-exit-leak-free": "^2.1.0", + "pino-abstract-transport": "^2.0.0", + "pino-std-serializers": "^7.0.0", + "process-warning": "^5.0.0", + "quick-format-unescaped": "^4.0.3", + "real-require": "^0.2.0", + "safe-stable-stringify": "^2.3.1", + "sonic-boom": "^4.0.1", + "thread-stream": "^3.0.0" + }, + "bin": { + "pino": "bin.js" + } + }, + "node_modules/pino-abstract-transport": { + "version": "2.0.0", + "resolved": "https://registry.npmjs.org/pino-abstract-transport/-/pino-abstract-transport-2.0.0.tgz", + "integrity": "sha512-F63x5tizV6WCh4R6RHyi2Ml+M70DNRXt/+HANowMflpgGFMAym/VKm6G7ZOQRjqN7XbGxK1Lg9t6ZrtzOaivMw==", + "license": "MIT", + "dependencies": { + "split2": "^4.0.0" + } + }, + "node_modules/pino-pretty": { + "version": "13.1.1", + "resolved": "https://registry.npmjs.org/pino-pretty/-/pino-pretty-13.1.1.tgz", + "integrity": "sha512-TNNEOg0eA0u+/WuqH0MH0Xui7uqVk9D74ESOpjtebSQYbNWJk/dIxCXIxFsNfeN53JmtWqYHP2OrIZjT/CBEnA==", + "license": "MIT", + "dependencies": { + "colorette": "^2.0.7", + "dateformat": "^4.6.3", + "fast-copy": "^3.0.2", + "fast-safe-stringify": "^2.1.1", + "help-me": "^5.0.0", + "joycon": "^3.1.1", + "minimist": "^1.2.6", + "on-exit-leak-free": "^2.1.0", + "pino-abstract-transport": "^2.0.0", + "pump": "^3.0.0", + "secure-json-parse": "^4.0.0", + "sonic-boom": "^4.0.1", + "strip-json-comments": "^5.0.2" + }, + "bin": { + "pino-pretty": "bin.js" + } + }, + "node_modules/pino-pretty/node_modules/strip-json-comments": { + "version": "5.0.3", + "resolved": "https://registry.npmjs.org/strip-json-comments/-/strip-json-comments-5.0.3.tgz", + "integrity": "sha512-1tB5mhVo7U+ETBKNf92xT4hrQa3pm0MZ0PQvuDnWgAAGHDsfp4lPSpiS6psrSiet87wyGPh9ft6wmhOMQ0hDiw==", + "license": "MIT", + "engines": { + "node": ">=14.16" + }, + "funding": { + "url": "https://github.com/sponsors/sindresorhus" + } + }, + "node_modules/pino-std-serializers": { + "version": "7.0.0", + "resolved": "https://registry.npmjs.org/pino-std-serializers/-/pino-std-serializers-7.0.0.tgz", + "integrity": "sha512-e906FRY0+tV27iq4juKzSYPbUj2do2X2JX4EzSca1631EB2QJQUqGbDuERal7LCtOpxl6x3+nvo9NPZcmjkiFA==", + "license": "MIT" + }, "node_modules/pirates": { "version": "4.0.6", "resolved": "https://registry.npmjs.org/pirates/-/pirates-4.0.6.tgz", @@ -5335,6 +5496,22 @@ "url": "https://github.com/prettier/prettier?sponsor=1" } }, + "node_modules/process-warning": { + "version": "5.0.0", + "resolved": "https://registry.npmjs.org/process-warning/-/process-warning-5.0.0.tgz", + "integrity": "sha512-a39t9ApHNx2L4+HBnQKqxxHNs1r7KF+Intd8Q/g1bUh6q0WIp9voPXJ/x0j+ZL45KF1pJd9+q2jLIRMfvEshkA==", + "funding": [ + { + "type": "github", + "url": "https://github.com/sponsors/fastify" + }, + { + "type": "opencollective", + "url": "https://opencollective.com/fastify" + } + ], + "license": "MIT" + }, "node_modules/prop-types": { "version": "15.8.1", "resolved": 
"https://registry.npmjs.org/prop-types/-/prop-types-15.8.1.tgz", @@ -5346,6 +5523,16 @@ "react-is": "^16.13.1" } }, + "node_modules/pump": { + "version": "3.0.3", + "resolved": "https://registry.npmjs.org/pump/-/pump-3.0.3.tgz", + "integrity": "sha512-todwxLMY7/heScKmntwQG8CXVkWUOdYxIvY2s0VWAAMh/nd8SoYiRaKjlr7+iCs984f2P8zvrfWcDDYVb73NfA==", + "license": "MIT", + "dependencies": { + "end-of-stream": "^1.1.0", + "once": "^1.3.1" + } + }, "node_modules/punycode": { "version": "2.3.1", "resolved": "https://registry.npmjs.org/punycode/-/punycode-2.3.1.tgz", @@ -5375,6 +5562,12 @@ ], "license": "MIT" }, + "node_modules/quick-format-unescaped": { + "version": "4.0.4", + "resolved": "https://registry.npmjs.org/quick-format-unescaped/-/quick-format-unescaped-4.0.4.tgz", + "integrity": "sha512-tYC1Q1hgyRuHgloV/YXs2w15unPVh8qfu/qCTfhTYamaw7fyhumKa2yGpdSo87vY32rIclj+4fWYQXUMs9EHvg==", + "license": "MIT" + }, "node_modules/react": { "version": "19.0.0", "resolved": "https://registry.npmjs.org/react/-/react-19.0.0.tgz", @@ -5535,6 +5728,15 @@ "node": ">=8.10.0" } }, + "node_modules/real-require": { + "version": "0.2.0", + "resolved": "https://registry.npmjs.org/real-require/-/real-require-0.2.0.tgz", + "integrity": "sha512-57frrGM/OCTLqLOAh0mhVA9VBMHd+9U7Zb2THMGdBUoZVOtGbJzjxsYGDJ3A9AYYCP4hn6y1TVbaOfzWtm5GFg==", + "license": "MIT", + "engines": { + "node": ">= 12.13.0" + } + }, "node_modules/reflect.getprototypeof": { "version": "1.0.10", "resolved": "https://registry.npmjs.org/reflect.getprototypeof/-/reflect.getprototypeof-1.0.10.tgz", @@ -5719,6 +5921,15 @@ "url": "https://github.com/sponsors/ljharb" } }, + "node_modules/safe-stable-stringify": { + "version": "2.5.0", + "resolved": "https://registry.npmjs.org/safe-stable-stringify/-/safe-stable-stringify-2.5.0.tgz", + "integrity": "sha512-b3rppTKm9T+PsVCBEOUR46GWI7fdOs00VKZ1+9c1EWDaDMvjQc6tUwuFyIprgGgTcWoVHSKrU8H31ZHA2e0RHA==", + "license": "MIT", + "engines": { + "node": ">=10" + } + }, "node_modules/safer-buffer": { "version": "2.1.2", "resolved": "https://registry.npmjs.org/safer-buffer/-/safer-buffer-2.1.2.tgz", @@ -5743,6 +5954,22 @@ "integrity": "sha512-xFVuu11jh+xcO7JOAGJNOXld8/TcEHK/4CituBUeUb5hqxJLj9YuemAEuvm9gQ/+pgXYfbQuqAkiYu+u7YEsNA==", "license": "MIT" }, + "node_modules/secure-json-parse": { + "version": "4.0.0", + "resolved": "https://registry.npmjs.org/secure-json-parse/-/secure-json-parse-4.0.0.tgz", + "integrity": "sha512-dxtLJO6sc35jWidmLxo7ij+Eg48PM/kleBsxpC8QJE0qJICe+KawkDQmvCMZUr9u7WKVHgMW6vy3fQ7zMiFZMA==", + "funding": [ + { + "type": "github", + "url": "https://github.com/sponsors/fastify" + }, + { + "type": "opencollective", + "url": "https://opencollective.com/fastify" + } + ], + "license": "BSD-3-Clause" + }, "node_modules/semver": { "version": "7.6.3", "resolved": "https://registry.npmjs.org/semver/-/semver-7.6.3.tgz", @@ -5964,6 +6191,15 @@ "is-arrayish": "^0.3.1" } }, + "node_modules/sonic-boom": { + "version": "4.2.0", + "resolved": "https://registry.npmjs.org/sonic-boom/-/sonic-boom-4.2.0.tgz", + "integrity": "sha512-INb7TM37/mAcsGmc9hyyI6+QR3rR1zVRu36B0NeGXKnOOLiZOfER5SA+N7X7k3yUYRzLWafduTDvJAfDswwEww==", + "license": "MIT", + "dependencies": { + "atomic-sleep": "^1.0.0" + } + }, "node_modules/source-map-js": { "version": "1.2.1", "resolved": "https://registry.npmjs.org/source-map-js/-/source-map-js-1.2.1.tgz", @@ -5982,6 +6218,15 @@ "memory-pager": "^1.0.2" } }, + "node_modules/split2": { + "version": "4.2.0", + "resolved": "https://registry.npmjs.org/split2/-/split2-4.2.0.tgz", + "integrity": 
"sha512-UcjcJOWknrNkF6PLX83qcHM6KHgVKNkV62Y8a5uYDVv9ydGQVwAHMKqHdJje1VTWpljG0WYpCDhrCdAOYH4TWg==", + "license": "ISC", + "engines": { + "node": ">= 10.x" + } + }, "node_modules/stable-hash": { "version": "0.0.4", "resolved": "https://registry.npmjs.org/stable-hash/-/stable-hash-0.0.4.tgz", @@ -6430,6 +6675,15 @@ "integrity": "sha512-oB7yIimd8SuGptespDAZnNkzIz+NWaJCu2RMsbs4Wmp9zSDUM8Nhi3s2OOcqYuv3mN4hitXc8DVx+LyUmbUDiA==", "license": "ISC" }, + "node_modules/thread-stream": { + "version": "3.1.0", + "resolved": "https://registry.npmjs.org/thread-stream/-/thread-stream-3.1.0.tgz", + "integrity": "sha512-OqyPZ9u96VohAyMfJykzmivOrY2wfMSf3C5TtFJVgN+Hm6aj+voFhlK+kZEIv2FBh1X6Xp3DlnCOfEQ3B2J86A==", + "license": "MIT", + "dependencies": { + "real-require": "^0.2.0" + } + }, "node_modules/tldts": { "version": "6.1.75", "resolved": "https://registry.npmjs.org/tldts/-/tldts-6.1.75.tgz", @@ -7025,6 +7279,12 @@ "url": "https://github.com/chalk/ansi-styles?sponsor=1" } }, + "node_modules/wrappy": { + "version": "1.0.2", + "resolved": "https://registry.npmjs.org/wrappy/-/wrappy-1.0.2.tgz", + "integrity": "sha512-l4Sp/DRseor9wL6EvV2+TuQn63dMkPjZ/sp9XkghTEbV9KlPS1xUsZ3u7/IQO4wxtcFB4bgpQPRcR3QCvezPcQ==", + "license": "ISC" + }, "node_modules/ws": { "version": "8.18.0", "resolved": "https://registry.npmjs.org/ws/-/ws-8.18.0.tgz", diff --git a/frontend/package.json b/frontend/package.json index 50c3516..8f22912 100644 --- a/frontend/package.json +++ b/frontend/package.json @@ -23,6 +23,8 @@ "jsdom": "^26.0.0", "mongodb": "^6.14.2", "next": "15.1.7", + "pino": "^9.11.0", + "pino-pretty": "^13.1.1", "react": "^19.0.0", "react-dom": "^19.0.0" }, diff --git a/frontend/public/OgImage.png b/frontend/public/OgImage.png new file mode 100644 index 0000000..fc6cbd8 Binary files /dev/null and b/frontend/public/OgImage.png differ diff --git a/frontend/public/mac.svg b/frontend/public/mac.svg new file mode 100644 index 0000000..a5e9ec5 --- /dev/null +++ b/frontend/public/mac.svg @@ -0,0 +1 @@ + \ No newline at end of file diff --git a/frontend/src/app/actions.ts b/frontend/src/app/actions.ts index 59efea9..3052f0e 100644 --- a/frontend/src/app/actions.ts +++ b/frontend/src/app/actions.ts @@ -1,6 +1,8 @@ // frontend/src/app/actions/feedback.ts "use server"; +import logger from "@/lib/logger"; + // Type for our form data export interface FeedbackFormData { email?: string; @@ -12,10 +14,12 @@ export async function submitFeedback(data: FeedbackFormData) { const notionApiKey = process.env.NOTION_API_KEY; if (!databaseId) { + logger.error("NOTION_DATABASE_ID environment variable is not set"); throw new Error("NOTION_DATABASE_ID environment variable is not set"); } if (!notionApiKey) { + logger.error("NOTION_API_KEY environment variable is not set"); throw new Error("NOTION_API_KEY environment variable is not set"); } @@ -61,17 +65,29 @@ export async function submitFeedback(data: FeedbackFormData) { if (!response.ok) { const errorData = await response.json(); + logger.error( + { error: errorData, status: response.status }, + "Notion API error during feedback submission", + ); return { success: false, - message: `Notion returned an API error: ${errorData.message || response.statusText}`, + message: `We're sorry, but there was an issue submitting your feedback. Please try again later.`, }; } - return { success: true, message: "Feedback submitted successfully." }; + logger.info( + { email: data.email || "Anonymous" }, + "Feedback submitted successfully", + ); + return { + success: true, + message: "Thank you! 
Your feedback has been submitted successfully.", + }; } catch (error) { + logger.error(error, "Unexpected error during feedback submission"); return { success: false, - message: `Encountered an internal error: ${error}`, + message: `An unexpected error occurred while submitting your feedback. Please try again.`, }; } } diff --git a/frontend/src/app/error.tsx b/frontend/src/app/error.tsx index 63a13bc..86dec9f 100644 --- a/frontend/src/app/error.tsx +++ b/frontend/src/app/error.tsx @@ -1,4 +1,30 @@ "use client"; -export default function Error() { - return
<div>Error</div>
; + +import { useEffect } from "react"; +import { Text, Button } from "@mantine/core"; + +export default function Error({ + error, + reset, +}: { + error: Error & { digest?: string }; + reset: () => void; +}) { + useEffect(() => { + console.error("Global error:", { + message: error.message, + stack: error.stack, + digest: error.digest, + }); + }, [error]); + + return ( +
+ + Oops! Something went wrong. Please try refreshing the page or contact + support if the issue persists. + + +
+ ); } diff --git a/frontend/src/app/jobs/[id]/page.tsx b/frontend/src/app/jobs/[id]/page.tsx index a632245..187a5a6 100644 --- a/frontend/src/app/jobs/[id]/page.tsx +++ b/frontend/src/app/jobs/[id]/page.tsx @@ -5,7 +5,6 @@ import { notFound } from "next/navigation"; import { Job } from "@/types/job"; import JobDetailsWrapper from "@/components/jobs/job-details-wrapper"; import { Metadata } from "next"; -import OgImage from "@/assets/OgImage.png"; type Props = { params: Promise<{ id: string }>; @@ -38,7 +37,7 @@ export async function generateMetadata({ params }: Props): Promise { description, images: [ { - url: OgImage.src, + url: "/OgImage.png", alt: title, }, ], diff --git a/frontend/src/app/jobs/actions.ts b/frontend/src/app/jobs/actions.ts index 4752c60..2f183cf 100644 --- a/frontend/src/app/jobs/actions.ts +++ b/frontend/src/app/jobs/actions.ts @@ -5,6 +5,7 @@ import { MongoClient, ObjectId } from "mongodb"; import { JobFilters } from "@/types/filters"; import { Job } from "@/types/job"; import serializeJob from "@/lib/utils"; +import logger from "@/lib/logger"; const PAGE_SIZE = 20; @@ -79,6 +80,7 @@ async function withDbConnection( callback: (client: MongoClient) => Promise, ): Promise { if (!process.env.MONGODB_URI) { + logger.error("MONGODB_URI environment variable is not set"); throw new Error( "MongoDB URI is not configured. Please check environment variables.", ); @@ -86,7 +88,11 @@ async function withDbConnection( const client = new MongoClient(process.env.MONGODB_URI); try { await client.connect(); + logger.debug("MongoDB connected successfully"); return await callback(client); + } catch (error) { + logger.error(error, "Failed to connect to MongoDB or execute callback"); + throw error; } finally { await client.close(); } @@ -100,6 +106,10 @@ export async function getJobs( minSponsors: number = -1, prioritySponsors: Array = ["IMC", "Atlassian"], ): Promise<{ jobs: Job[]; total: number }> { + logger.info( + { filters, minSponsors, prioritySponsors }, + "Fetching jobs with filters", + ); return await withDbConnection(async (client) => { const collection = client.db("default").collection("active_jobs"); const query = buildJobQuery(filters); @@ -107,71 +117,77 @@ export async function getJobs( const skip = (page - 1) * PAGE_SIZE; minSponsors = minSponsors === -1 ? (page == 1 ? 
3 : 0) : minSponsors; - if (minSponsors == 0) { - const [jobs, total] = await Promise.all([ - collection - .find(query) - .sort({ created_at: -1 }) - .skip(skip) - .limit(PAGE_SIZE) - .toArray(), - collection.countDocuments(query), - ]); - return { - // Serialize Job and set highlight to false - jobs: (jobs as MongoJob[]) - .map(serializeJob) - .map((job) => ({ ...job, highlight: false })), - total, - }; - } else { - // Modify query to include sponsored job filtering - const sponsoredQuery = { ...query, is_sponsored: true }; + try { + if (minSponsors == 0) { + const [jobs, total] = await Promise.all([ + collection + .find(query) + .sort({ created_at: -1 }) + .skip(skip) + .limit(PAGE_SIZE) + .toArray(), + collection.countDocuments(query), + ]); + logger.debug({ total }, "Fetched non-sponsored jobs"); + return { + jobs: (jobs as MongoJob[]) + .map(serializeJob) + .map((job) => ({ ...job, highlight: false })), + total, + }; + } else { + const sponsoredQuery = { ...query, is_sponsored: true }; - // Fetch sponsored jobs (without priority filtering) - let sponsoredJobs = await collection - .aggregate([ - { $match: sponsoredQuery }, - { $sample: { size: minSponsors * 8 } }, - ]) - .toArray(); + let sponsoredJobs = await collection + .aggregate([ + { $match: sponsoredQuery }, + { $sample: { size: minSponsors * 8 } }, + ]) + .toArray(); - // Apply 65% chance selection for priority sponsors - sponsoredJobs = sponsoredJobs - .filter((job) => { - const isPriority = prioritySponsors.includes(job.company.name); - return isPriority ? Math.random() < 0.65 : Math.random() >= 0.35; // 65% chance for priority, 35% for others - }) - .slice(0, minSponsors) // Ensure we only take the required number + sponsoredJobs = sponsoredJobs + .filter((job) => { + const isPriority = prioritySponsors.includes(job.company.name); + return isPriority ? 
Math.random() < 0.65 : Math.random() >= 0.35; + }) + .slice(0, minSponsors) + .map((job) => ({ ...job, highlight: true })); - .map((job) => ({ ...job, highlight: true })); // Add highlight property + const sponsoredJobIds = sponsoredJobs.map((job) => job._id); - // Get IDs of selected sponsored jobs to exclude them from regular jobs - const sponsoredJobIds = sponsoredJobs.map((job) => job._id); + const filteredQuery = { ...query, _id: { $nin: sponsoredJobIds } }; - // Modify the main query to exclude sponsored jobs we already fetched - const filteredQuery = { ...query, _id: { $nin: sponsoredJobIds } }; + const [otherJobs, total] = await Promise.all([ + collection + .find(filteredQuery) + .sort({ created_at: -1 }) + .skip(skip) + .limit(PAGE_SIZE - sponsoredJobs.length) + .toArray(), + collection.countDocuments(query), + ]); - // Fetch remaining jobs with pagination - const [otherJobs, total] = await Promise.all([ - collection - .find(filteredQuery) - .sort({ created_at: -1 }) - .skip(skip) - .limit(PAGE_SIZE - sponsoredJobs.length) - .toArray(), - collection.countDocuments(query), // Total should still include all jobs matching the original query - ]); - // Merge jobs and make sure we don't exceed PAGE_SIZE also add highlight property - const mergedJobs = [ - ...sponsoredJobs.map((job) => ({ ...job, highlight: true })), - ...otherJobs.map((job) => ({ ...job, highlight: false })), - ].slice(0, PAGE_SIZE); + const mergedJobs = [ + ...sponsoredJobs.map((job) => ({ ...job, highlight: true })), + ...otherJobs.map((job) => ({ ...job, highlight: false })), + ].slice(0, PAGE_SIZE); - return { - jobs: (mergedJobs as MongoJob[]).map(serializeJob), - total, - }; + logger.debug( + { + sponsoredCount: sponsoredJobs.length, + otherCount: otherJobs.length, + total, + }, + "Fetched sponsored and other jobs", + ); + return { + jobs: (mergedJobs as MongoJob[]).map(serializeJob), + total, + }; + } + } catch (error) { + logger.error({ query, filters }, "Error fetching jobs"); + throw error; } }); } @@ -180,6 +196,7 @@ export async function getJobs( * Fetches a single job by its id. */ export async function getJobById(id: string): Promise { + logger.info({ id }, "Fetching job by ID"); return await withDbConnection(async (client) => { const collection = client.db("default").collection("active_jobs"); const job = await collection.findOne({ @@ -187,8 +204,10 @@ export async function getJobById(id: string): Promise { outdated: false, }); if (!job) { + logger.warn({ id }, "Job not found"); return null; } + logger.debug({ id }, "Job fetched successfully"); return serializeJob(job as MongoJob); }); } diff --git a/frontend/src/app/jobs/error.tsx b/frontend/src/app/jobs/error.tsx index a50ff1b..ee5432c 100644 --- a/frontend/src/app/jobs/error.tsx +++ b/frontend/src/app/jobs/error.tsx @@ -12,13 +12,18 @@ export default function JobError({ reset: () => void; }) { useEffect(() => { - console.error(error); + console.error("Jobs page error:", { + message: error.message, + stack: error.stack, + digest: error.digest, + }); }, [error]); return (
- Failed to render jobs page. Check the console for more details. + Oops! Something went wrong while loading the jobs. Please try refreshing + the page.
diff --git a/frontend/src/app/layout.tsx b/frontend/src/app/layout.tsx index 81e9e39..dbaf664 100644 --- a/frontend/src/app/layout.tsx +++ b/frontend/src/app/layout.tsx @@ -21,7 +21,6 @@ import { Metadata } from "next"; import FeedbackButton from "@/components/ui/feedback-button"; import { Notifications } from "@mantine/notifications"; -import OgImage from "../assets/OgImage.png"; import FirstVisitNotification from "@/components/ui/first-visit-notification"; export const metadata: Metadata = { @@ -34,7 +33,7 @@ export const metadata: Metadata = { description: "Stay ahead with the job board that never sleeps.", images: [ { - url: OgImage.src, + url: "/OgImage.png", alt: "MAC Jobs Board", }, ], diff --git a/frontend/src/components/layout/logo.tsx b/frontend/src/components/layout/logo.tsx index ae8fb97..55a58c3 100644 --- a/frontend/src/components/layout/logo.tsx +++ b/frontend/src/components/layout/logo.tsx @@ -1,11 +1,16 @@ -import MacLogo from "@/assets/mac.svg"; import Image from "next/image"; import Link from "next/link"; export default function Logo() { return ( - MAC Logo + MAC Logo Jobs ); diff --git a/frontend/src/components/search/search-bar.tsx b/frontend/src/components/search/search-bar.tsx index 7ca68c6..a69a07f 100644 --- a/frontend/src/components/search/search-bar.tsx +++ b/frontend/src/components/search/search-bar.tsx @@ -11,7 +11,6 @@ export default function SearchBar() { const [searchValue, setSearchValue] = useState(filters.filters.search || ""); useEffect(() => { - console.log("search filter updated"); setSearchValue(filters.filters.search || ""); }, [filters.filters.search]); diff --git a/frontend/src/components/ui/feedback-button.tsx b/frontend/src/components/ui/feedback-button.tsx index ee04c96..eac1b9b 100644 --- a/frontend/src/components/ui/feedback-button.tsx +++ b/frontend/src/components/ui/feedback-button.tsx @@ -76,10 +76,11 @@ export default function FeedbackButton() { }); } } catch (error) { + console.error("Feedback submission error:", error); notifications.show({ position: "top-center", title: "Error", - message: "Failed to submit feedback: " + error, + message: `Failed to submit feedback: ${error instanceof Error ? error.message : "An unknown error occurred"}. Please try again.`, color: "red", }); } finally { diff --git a/frontend/src/lib/logger.ts b/frontend/src/lib/logger.ts new file mode 100644 index 0000000..6b4bb26 --- /dev/null +++ b/frontend/src/lib/logger.ts @@ -0,0 +1,9 @@ +import pino from "pino"; + +const isDev = process.env.NODE_ENV !== "production"; + +const logger = pino({ + level: isDev ? 
"debug" : "info", +}); + +export default logger; diff --git a/frontend/src/lib/utils.ts b/frontend/src/lib/utils.ts index e8e9085..0dfcf6b 100644 --- a/frontend/src/lib/utils.ts +++ b/frontend/src/lib/utils.ts @@ -145,33 +145,48 @@ export function formatISODate(isoDate: string): string { } export const formatWorkingRights = (rights: WorkingRight[]): string => { - // If all rights are present, return "Any" - if (rights.length === WORKING_RIGHTS.length) { + // If all rights except OTHER_RIGHTS are present, return "Any Working Rights" + const essentialRights = WORKING_RIGHTS.filter( + (right) => right !== "OTHER_RIGHTS", + ); + const hasAllEssentialRights = essentialRights.every((right) => + rights.includes(right), + ); + + if (hasAllEssentialRights) { return "Any Working Rights"; } // Check for AUS and NZ Citizens/PR combination const hasAus = rights.includes("AUS_CITIZEN_PR"); const hasNz = rights.includes("NZ_CITIZEN_PR"); + const hasInt = rights.includes("INTERNATIONAL"); + const formattedRights: string[] = []; + if (hasAus && hasNz) { - return "AUS & NZ Citizen/PR"; + formattedRights.push("AUS & NZ Citizen/PR"); + } else { + if (hasAus) formattedRights.push("AUS Citizen/PR"); + if (hasNz) formattedRights.push("NZ Citizen/PR"); + } + + if (hasInt) { + formattedRights.push("International"); } // Format remaining cases - return rights - .map((right) => { + rights.forEach((right) => { + if (!["AUS_CITIZEN_PR", "NZ_CITIZEN_PR", "INTERNATIONAL"].includes(right)) { switch (right) { - case "AUS_CITIZEN_PR": - return "AUS Citizen/PR"; - case "NZ_CITIZEN_PR": - return "NZ Citizen/PR"; - case "INTERNATIONAL": - return "International"; case "OTHER_RIGHTS": - return "Other"; + formattedRights.push("Other"); + break; default: - return formatCapString(right); + formattedRights.push(formatCapString(right)); + break; } - }) - .join(", "); + } + }); + + return formattedRights.join(", "); }; diff --git a/repo-to-text.js b/repo-to-text.js deleted file mode 100644 index 1b65536..0000000 --- a/repo-to-text.js +++ /dev/null @@ -1,230 +0,0 @@ -const fs = require("fs"); -const path = require("path"); - -// Configuration -const MAX_DEPTH = 3; -const EXCLUDED_DIRS = [ - "node_modules", - "dist", - "build", - ".git", - "__tests__", - "__mocks__", - ".github", - ".vscode", - ".next", -]; -const EXCLUDED_FILES = [ - ".test.", - ".spec.", - "test.", - "spec.", - ".d.ts", - "pnpm-lock.yaml", - "export-script.js", - "package-lock.json", - "README.md", - ".env" -]; -const ALLOWED_EXTENSIONS = new Set([ - ".ts", // TypeScript - ".tsx", // TypeScript React - ".js", // JavaScript - ".jsx", // JavaScript React - ".proto", // Protocol Buffers - ".yaml", // YAML configs - ".yml", // YAML configs - ".json", // JSON configs - ".md", // Documentation -]); - -// Create output directory if it doesn't exist -const outputDir = "repo-to-text"; -if (!fs.existsSync(outputDir)) { - fs.mkdirSync(outputDir); -} - -// Helper function to check if file should be included -function shouldIncludeFile(filePath) { - if (EXCLUDED_FILES.some((exclude) => filePath.includes(exclude))) { - return false; - } - - const ext = path.extname(filePath); - return ALLOWED_EXTENSIONS.has(ext); -} - -// Helper function to get normalized directory path parts -function getDirectoryParts(filePath, baseDir) { - const relativePath = path.relative(baseDir, filePath); - const parts = relativePath.split(path.sep); - return parts.slice(0, MAX_DEPTH); -} - -// Helper function to create file group key -function createGroupKey(parts) { - return parts.join("-"); -} - -// 
Helper function to count lines in content -function countLines(content) { - return content.split("\n").length; -} - -// Process directory and group files -function processDirectory(dir, baseDir = dir, depth = 0, fileGroups = {}) { - const files = fs.readdirSync(dir); - const dirParts = getDirectoryParts(dir, baseDir); - const currentGroupKey = createGroupKey(dirParts); - - files.forEach((file) => { - const filePath = path.join(dir, file); - const stat = fs.statSync(filePath); - - if (stat.isDirectory()) { - // Skip excluded directories - if (!EXCLUDED_DIRS.includes(file)) { - // If we're at max depth, don't create new groups but still process files - processDirectory(filePath, baseDir, depth + 1, fileGroups); - } - } else { - // Check if file should be included - if (!shouldIncludeFile(filePath)) return; - - // Use the current directory's group key - // If we're beyond max depth, files will be included in the parent's group - const groupKey = currentGroupKey; - - // Initialize group if it doesn't exist - if (!fileGroups[groupKey]) { - fileGroups[groupKey] = []; - } - - try { - const content = fs.readFileSync(filePath, "utf8"); - fileGroups[groupKey].push({ - path: path.relative(baseDir, filePath), - content: content, - lineCount: countLines(content), - }); - } catch (error) { - console.error(`Error reading file ${filePath}:`, error); - } - } - }); - - return fileGroups; -} - -// Create output content with proper headers and formatting -function createOutputContent(files) { - // Sort files by path for consistent output - const sortedFiles = [...files].sort((a, b) => a.path.localeCompare(b.path)); - - return sortedFiles - .map((file) => { - const extension = path.extname(file.path).slice(1) || "txt"; - return `### ${file.path}\n\n\`\`\`${extension}\n${file.content}\n\`\`\`\n`; - }) - .join("\n\n"); -} - -// Calculate total lines in a group -function calculateGroupStats(files) { - const totalLines = files.reduce((sum, file) => sum + file.lineCount, 0); - const fileDetails = files - .sort((a, b) => a.path.localeCompare(b.path)) - .map((file) => ` ${file.path}: ${file.lineCount} lines`) - .join("\n"); - return { totalLines, fileDetails }; -} - -// Generate tree structure -function generateTree(dir, prefix = "", isLast = true, baseDir = dir) { - const files = fs.readdirSync(dir); - let treeOutput = ""; - - // Filter and sort files/directories - const items = files - .filter((file) => { - const filePath = path.join(dir, file); - const isDirectory = fs.statSync(filePath).isDirectory(); - if (isDirectory) { - return !EXCLUDED_DIRS.includes(file); - } - return shouldIncludeFile(filePath); - }) - .sort(); - - items.forEach((file, index) => { - const filePath = path.join(dir, file); - const isDirectory = fs.statSync(filePath).isDirectory(); - const isLastItem = index === items.length - 1; - - // Create the branch symbol - const branch = isLast ? "└── " : "β”œβ”€β”€ "; - const subBranch = isLast ? 
" " : "β”‚ "; - - // Add the current file/directory to the tree - treeOutput += prefix + branch + file + "\n"; - - // If it's a directory, recursively process its contents - if (isDirectory) { - treeOutput += generateTree( - filePath, - prefix + subBranch, - isLastItem, - baseDir, - ); - } - }); - - return treeOutput; -} - -// Main execution -console.log("Starting repository export..."); -const startTime = Date.now(); - -try { - // Process the repository - const fileGroups = processDirectory("."); - - // Write output files - Object.entries(fileGroups) - .sort(([a], [b]) => a.localeCompare(b)) - .forEach(([group, files]) => { - if (files.length === 0) return; - - // Calculate group statistics - const { totalLines, fileDetails } = calculateGroupStats(files); - - // Create a descriptive title for the file content - const content = `# ${group} Files\n\n${createOutputContent(files)}`; - - // Create sanitized filename - const filename = `${group.toLowerCase() || "root"}.txt`; - const outputPath = path.join(outputDir, filename); - - fs.writeFileSync(outputPath, content); - console.log(`Created ${outputPath}:`); - console.log(`Total: ${totalLines} lines in ${files.length} files`); - console.log("Files:"); - console.log(fileDetails); - console.log("---"); - }); - - // Generate and write tree structure - const treeStructure = generateTree("."); - const treeOutputPath = path.join(outputDir, "tree-structure.txt"); - fs.writeFileSync(treeOutputPath, treeStructure); - console.log(`Created ${treeOutputPath}`); - console.log("---"); - - const endTime = Date.now(); - console.log( - `Repository export completed successfully in ${(endTime - startTime) / 1000}s!`, - ); -} catch (error) { - console.error("Error during repository export:", error); -}