refactor: migrate to pnpm monorepo with Payload CMS backend and Astro frontend to support scalable website development and AI-assisted workflows
73
.claude/commands/constitution.md
Normal file
@@ -0,0 +1,73 @@
|
||||
---
|
||||
description: Create or update the project constitution from interactive or provided principle inputs, ensuring all dependent templates stay in sync.
|
||||
---
|
||||
|
||||
The user input to you can be provided directly by the agent or as a command argument - you **MUST** consider it before proceeding with the prompt (if not empty).
|
||||
|
||||
User input:
|
||||
|
||||
$ARGUMENTS
|
||||
|
||||
You are updating the project constitution at `.specify/memory/constitution.md`. This file is a TEMPLATE containing placeholder tokens in square brackets (e.g. `[PROJECT_NAME]`, `[PRINCIPLE_1_NAME]`). Your job is to (a) collect/derive concrete values, (b) fill the template precisely, and (c) propagate any amendments across dependent artifacts.
|
||||
|
||||
Follow this execution flow:
|
||||
|
||||
1. Load the existing constitution template at `.specify/memory/constitution.md`.
|
||||
- Identify every placeholder token of the form `[ALL_CAPS_IDENTIFIER]`.
|
||||
**IMPORTANT**: The user might require fewer or more principles than the ones used in the template. If a number is specified, respect it - follow the general template. You will update the doc accordingly.
|
||||
|
||||
2. Collect/derive values for placeholders:
|
||||
- If user input (conversation) supplies a value, use it.
|
||||
- Otherwise infer from existing repo context (README, docs, prior constitution versions if embedded).
|
||||
- For governance dates: `RATIFICATION_DATE` is the original adoption date (if unknown ask or mark TODO), `LAST_AMENDED_DATE` is today if changes are made, otherwise keep previous.
|
||||
- `CONSTITUTION_VERSION` must increment according to semantic versioning rules:
|
||||
* MAJOR: Backward incompatible governance/principle removals or redefinitions.
|
||||
* MINOR: New principle/section added or materially expanded guidance.
|
||||
* PATCH: Clarifications, wording, typo fixes, non-semantic refinements.
|
||||
- If the version bump type is ambiguous, propose your reasoning before finalizing (e.g., 1.2.0 → 1.3.0 for a new principle vs 1.2.1 for a wording fix).
|
||||
|
||||
3. Draft the updated constitution content:
|
||||
- Replace every placeholder with concrete text (no bracketed tokens left except intentionally retained template slots that the project has chosen not to define yet—explicitly justify any left).
|
||||
- Preserve the heading hierarchy; comments can be removed once their placeholders are replaced, unless they still add clarifying guidance.
|
||||
- Ensure each Principle section has a succinct name line, a paragraph (or bullet list) capturing its non-negotiable rules, and an explicit rationale if the rules are not self-evident.
|
||||
- Ensure Governance section lists amendment procedure, versioning policy, and compliance review expectations.
|
||||
|
||||
4. Consistency propagation checklist (convert prior checklist into active validations):
|
||||
- Read `.specify/templates/plan-template.md` and ensure any "Constitution Check" or rules align with updated principles.
|
||||
- Read `.specify/templates/spec-template.md` for scope/requirements alignment—update if constitution adds/removes mandatory sections or constraints.
|
||||
- Read `.specify/templates/tasks-template.md` and ensure task categorization reflects new or removed principle-driven task types (e.g., observability, versioning, testing discipline).
|
||||
- Read each command file in `.specify/templates/commands/*.md` (including this one) to verify no outdated references (agent-specific names like CLAUDE only) remain when generic guidance is required.
|
||||
- Read any runtime guidance docs (e.g., `README.md`, `docs/quickstart.md`, or agent-specific guidance files if present). Update references to principles changed.
|
||||
|
||||
5. Produce a Sync Impact Report and prepend it as an HTML comment at the top of the constitution file after the update (a sketch appears after these numbered steps):
|
||||
- Version change: old → new
|
||||
- List of modified principles (old title → new title if renamed)
|
||||
- Added sections
|
||||
- Removed sections
|
||||
- Templates requiring updates (✅ updated / ⚠ pending) with file paths
|
||||
- Follow-up TODOs if any placeholders intentionally deferred.
|
||||
|
||||
6. Validation before final output:
|
||||
- No remaining unexplained bracket tokens.
|
||||
- Version line matches report.
|
||||
- Dates ISO format YYYY-MM-DD.
|
||||
- Principles are declarative, testable, and free of vague language ("should" → replace with MUST/SHOULD rationale where appropriate).
|
||||
|
||||
7. Write the completed constitution back to `.specify/memory/constitution.md` (overwrite).
|
||||
|
||||
8. Output a final summary to the user with:
|
||||
- New version and bump rationale.
|
||||
- Any files flagged for manual follow-up.
|
||||
- Suggested commit message (e.g., `docs: amend constitution to vX.Y.Z (principle additions + governance update)`).
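For illustration only, the report prepended in step 5 might look like the following sketch (all names, versions, and statuses below are hypothetical):

```markdown
<!--
Sync Impact Report
- Version change: 1.1.0 → 1.2.0
- Modified principles: "Testing" → "Test-First Development"
- Added sections: Observability
- Removed sections: none
- Templates: ✅ .specify/templates/plan-template.md · ⚠ .specify/templates/tasks-template.md (pending)
- Follow-up TODOs: TODO(RATIFICATION_DATE): original adoption date unknown
-->
```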
|
||||
|
||||
Formatting & Style Requirements:
|
||||
- Use Markdown headings exactly as in the template (do not demote/promote levels).
|
||||
- Wrap long rationale lines to keep readability (<100 chars ideally) but do not hard enforce with awkward breaks.
|
||||
- Keep a single blank line between sections.
|
||||
- Avoid trailing whitespace.
|
||||
|
||||
If the user supplies partial updates (e.g., only one principle revision), still perform validation and version decision steps.
|
||||
|
||||
If critical info is missing (e.g., the ratification date is truly unknown), insert `TODO(<FIELD_NAME>): explanation` and list it in the Sync Impact Report under deferred items.
|
||||
|
||||
Do not create a new template; always operate on the existing `.specify/memory/constitution.md` file.
|
||||
57
.claude/commands/git-commit.md
Normal file
@@ -0,0 +1,57 @@
|
||||
---
|
||||
description: Execute a git commit following repository guidelines, analyzing changes and drafting a meaningful commit message.
|
||||
---
|
||||
|
||||
The user input to you can be provided directly by the agent or as a command argument - you **MUST** consider it before proceeding with the prompt (if not empty).
|
||||
|
||||
User input:
|
||||
|
||||
$ARGUMENTS
|
||||
|
||||
When the user invokes /git-commit, follow these steps carefully to create a new git commit:
|
||||
|
||||
1. You have the capability to call multiple tools in a single response. ALWAYS run the following bash commands in parallel, each using the Bash tool:
|
||||
- Run `git status` to see all untracked files.
|
||||
- Run `git diff` to see both staged and unstaged changes that will be committed.
|
||||
- Run `git log --oneline -10` to see recent commit messages, so that you can follow this repository's commit message style.
|
||||
|
||||
Provide descriptions for each:
|
||||
- git status: Shows working tree status
|
||||
- git diff: Displays unstaged changes
|
||||
- git log --oneline -10: Lists last 10 commit messages
|
||||
|
||||
2. Analyze all staged changes (both previously staged and newly added) and draft a commit message. Wrap your analysis process in <commit_analysis> tags:
|
||||
|
||||
<commit_analysis>
|
||||
- List the files that have been changed or added
|
||||
- Summarize the nature of the changes (e.g., new feature, enhancement to an existing feature, bug fix, refactoring, test, docs, etc.)
|
||||
- Brainstorm the purpose or motivation behind these changes
|
||||
- Assess the impact of these changes on the overall project
|
||||
- Check for any sensitive information that shouldn't be committed
|
||||
- Draft a concise (1-2 sentences) commit message that focuses on the "why" rather than the "what"
|
||||
- Ensure your language is clear, concise, and to the point
|
||||
- Ensure the message accurately reflects the changes and their purpose (i.e. "add" means a wholly new feature, "update" means an enhancement to an existing feature, "fix" means a bug fix, etc.)
|
||||
- Ensure the message is not generic (avoid words like "Update" or "Fix" without context)
|
||||
- Review the draft message to ensure it accurately reflects the changes and their purpose
|
||||
</commit_analysis>
|
||||
|
||||
Use the git context at the start of this conversation to determine which files are relevant to your commit. Be careful not to stage and commit files (e.g. with `git add .`) that aren't relevant to your commit.
|
||||
|
||||
3. You have the capability to call multiple tools in a single response. ALWAYS run the following commands in parallel:
|
||||
- Add relevant untracked files to the staging area using `git add <files>`. Stage only relevant changes based on the analysis.
|
||||
- Run `git status` to make sure the staging succeeded.
|
||||
|
||||
4. Commit the changes using `git commit -m "<drafted message>"`. If the user provided arguments, incorporate them into the message if appropriate.
|
||||
|
||||
5. If the commit fails due to pre-commit hook changes, retry the commit ONCE to include these automated changes; use `git commit -m "<message>" --no-verify` or similar only if needed. If it fails again, report the error. If the commit succeeds but files were modified by the pre-commit hook, amend the commit using `git commit --amend -m "<updated message>"` to include them.
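A minimal sketch of the amend path in step 5, assuming the hook reformatted already-staged files (message and commands are illustrative, not prescriptive):

```bash
# Commit succeeded, but the pre-commit hook reformatted some tracked files
git add -u                                                        # restage the hook's modifications
git commit --amend -m "refactor: migrate web frontend to Astro"   # fold them into the same commit
```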
|
||||
|
||||
Important notes:
|
||||
- NEVER update the git config
|
||||
- DO NOT run additional commands to read or explore code, beyond what is available in the git context
|
||||
- DO NOT push to the remote repository
|
||||
- IMPORTANT: Never use git commands with the -i flag (like git rebase -i or git add -i) since they require interactive input which is not supported.
|
||||
- If there are no changes to commit (i.e., no untracked files and no modifications), do not create an empty commit. Instead, output: "No changes to commit."
|
||||
- Ensure your commit message is meaningful and concise. It should explain the purpose of the changes, not just describe them.
|
||||
- Return an empty response - the user will see the git output directly. If no commit was made, briefly explain why.
|
||||
|
||||
Use absolute paths and avoid changing directories unnecessarily.
|
||||
56
.claude/commands/implement.md
Normal file
@@ -0,0 +1,56 @@
|
||||
---
|
||||
description: Execute the implementation plan by processing and executing all tasks defined in tasks.md
|
||||
---
|
||||
|
||||
The user input can be provided directly by the agent or as a command argument - you **MUST** consider it before proceeding with the prompt (if not empty).
|
||||
|
||||
User input:
|
||||
|
||||
$ARGUMENTS
|
||||
|
||||
1. Run `.specify/scripts/bash/check-prerequisites.sh --json --require-tasks --include-tasks` from repo root and parse FEATURE_DIR and AVAILABLE_DOCS list. All paths must be absolute.
|
||||
|
||||
2. Load and analyze the implementation context:
|
||||
- **REQUIRED**: Read tasks.md for the complete task list and execution plan
|
||||
- **REQUIRED**: Read plan.md for tech stack, architecture, and file structure
|
||||
- **IF EXISTS**: Read data-model.md for entities and relationships
|
||||
- **IF EXISTS**: Read contracts/ for API specifications and test requirements
|
||||
- **IF EXISTS**: Read research.md for technical decisions and constraints
|
||||
- **IF EXISTS**: Read quickstart.md for integration scenarios
|
||||
|
||||
3. Parse tasks.md structure and extract (an illustrative tasks.md snippet appears after these steps):
|
||||
- **Task phases**: Setup, Tests, Core, Integration, Polish
|
||||
- **Task dependencies**: Sequential vs parallel execution rules
|
||||
- **Task details**: ID, description, file paths, parallel markers [P]
|
||||
- **Execution flow**: Order and dependency requirements
|
||||
|
||||
4. Execute implementation following the task plan:
|
||||
- **Phase-by-phase execution**: Complete each phase before moving to the next
|
||||
- **Respect dependencies**: Run sequential tasks in order, parallel tasks [P] can run together
|
||||
- **Follow TDD approach**: Execute test tasks before their corresponding implementation tasks
|
||||
- **File-based coordination**: Tasks affecting the same files must run sequentially
|
||||
- **Validation checkpoints**: Verify each phase completion before proceeding
|
||||
|
||||
5. Implementation execution rules:
|
||||
- **Setup first**: Initialize project structure, dependencies, configuration
|
||||
- **Tests before code**: If tests are required, write them for contracts, entities, and integration scenarios before the corresponding implementation
|
||||
- **Core development**: Implement models, services, CLI commands, endpoints
|
||||
- **Integration work**: Database connections, middleware, logging, external services
|
||||
- **Polish and validation**: Unit tests, performance optimization, documentation
|
||||
|
||||
6. Progress tracking and error handling:
|
||||
- Report progress after each completed task
|
||||
- Halt execution if any non-parallel task fails
|
||||
- For parallel tasks [P], continue with successful tasks, report failed ones
|
||||
- Provide clear error messages with context for debugging
|
||||
- Suggest next steps if implementation cannot proceed
|
||||
- **IMPORTANT** For completed tasks, make sure to mark the task off as [X] in the tasks file.
|
||||
|
||||
7. Completion validation:
|
||||
- Verify all required tasks are completed
|
||||
- Check that implemented features match the original specification
|
||||
- Validate that tests pass and coverage meets requirements
|
||||
- Confirm the implementation follows the technical plan
|
||||
- Report final status with summary of completed work
|
||||
|
||||
Note: This command assumes a complete task breakdown exists in tasks.md. If tasks are incomplete or missing, suggest running `/tasks` first to regenerate the task list.
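For reference, step 3 above assumes tasks.md follows roughly this shape; the task IDs, phases, and file paths below are purely illustrative:

```markdown
## Phase 1: Setup
- [X] T001 Initialize pnpm workspace and shared tsconfig
## Phase 2: Tests
- [ ] T002 [P] Contract test GET /api/pages in apps/cms/tests/contract/pages-get.test.ts
- [ ] T003 [P] Integration test for page rendering in apps/web/tests/integration/pages.test.ts
## Phase 3: Core
- [ ] T004 Pages collection in apps/cms/src/collections/Pages.ts (depends on T002)
```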
|
||||
@@ -2,6 +2,12 @@
|
||||
description: Execute the implementation planning workflow using the plan template to generate design artifacts.
|
||||
---
|
||||
|
||||
The user input to you can be provided directly by the agent or as a command argument - you **MUST** consider it before proceeding with the prompt (if not empty).
|
||||
|
||||
User input:
|
||||
|
||||
$ARGUMENTS
|
||||
|
||||
Given the implementation details provided as an argument, do this:
|
||||
|
||||
1. Run `.specify/scripts/bash/setup-plan.sh --json` from the repo root and parse JSON for FEATURE_SPEC, IMPL_PLAN, SPECS_DIR, BRANCH. All future file paths must be absolute.
|
||||
|
||||
@@ -2,9 +2,18 @@
|
||||
description: Create or update the feature specification from a natural language feature description.
|
||||
---
|
||||
|
||||
Given the feature description provided as an argument, do this:
|
||||
The user input to you can be provided directly by the agent or as a command argument - you **MUST** consider it before proceeding with the prompt (if not empty).
|
||||
|
||||
User input:
|
||||
|
||||
$ARGUMENTS
|
||||
|
||||
The text the user typed after `/specify` in the triggering message **is** the feature description. Assume you always have it available in this conversation even if `$ARGUMENTS` appears literally below. Do not ask the user to repeat it unless they provided an empty command.
|
||||
|
||||
Given that feature description, do this:
|
||||
|
||||
1. Run the script `.specify/scripts/bash/create-new-feature.sh --json "$ARGUMENTS"` from repo root and parse its JSON output for BRANCH_NAME and SPEC_FILE. All file paths must be absolute.
|
||||
**IMPORTANT** You must only ever run this script once. The JSON is provided in the terminal as output - always refer to it to get the actual content you're looking for.
|
||||
2. Load `.specify/templates/spec-template.md` to understand required sections.
|
||||
3. Write the specification to SPEC_FILE using the template structure, replacing placeholders with concrete details derived from the feature description (arguments) while preserving section order and headings.
|
||||
4. Report completion with branch name, spec file path, and readiness for the next phase.
|
||||
|
||||
@@ -2,9 +2,13 @@
|
||||
description: Generate an actionable, dependency-ordered tasks.md for the feature based on available design artifacts.
|
||||
---
|
||||
|
||||
Given the context provided as an argument, do this:
|
||||
The user input to you can be provided directly by the agent or as a command argument - you **MUST** consider it before proceeding with the prompt (if not empty).
|
||||
|
||||
1. Run `.specify/scripts/bash/check-task-prerequisites.sh --json` from repo root and parse FEATURE_DIR and AVAILABLE_DOCS list. All paths must be absolute.
|
||||
User input:
|
||||
|
||||
$ARGUMENTS
|
||||
|
||||
1. Run `.specify/scripts/bash/check-prerequisites.sh --json` from repo root and parse FEATURE_DIR and AVAILABLE_DOCS list. All paths must be absolute.
|
||||
2. Load and analyze available design documents:
|
||||
- Always read plan.md for tech stack and libraries
|
||||
- IF EXISTS: Read data-model.md for entities
|
||||
|
||||
3
.codex/config.toml
Normal file
@@ -0,0 +1,3 @@
|
||||
model = "gpt-5-codex"
|
||||
[projects."/Users/pukpuk/Dev/website-enchuntw-migr"]
|
||||
trust_level = "trusted"
|
||||
73
.codex/prompts/constitution.md
Normal file
@@ -0,0 +1,73 @@
|
||||
---
|
||||
description: Create or update the project constitution from interactive or provided principle inputs, ensuring all dependent templates stay in sync.
|
||||
---
|
||||
|
||||
The user input to you can be provided directly by the agent or as a command argument - you **MUST** consider it before proceeding with the prompt (if not empty).
|
||||
|
||||
User input:
|
||||
|
||||
$ARGUMENTS
|
||||
|
||||
You are updating the project constitution at `.specify/memory/constitution.md`. This file is a TEMPLATE containing placeholder tokens in square brackets (e.g. `[PROJECT_NAME]`, `[PRINCIPLE_1_NAME]`). Your job is to (a) collect/derive concrete values, (b) fill the template precisely, and (c) propagate any amendments across dependent artifacts.
|
||||
|
||||
Follow this execution flow:
|
||||
|
||||
1. Load the existing constitution template at `.specify/memory/constitution.md`.
|
||||
- Identify every placeholder token of the form `[ALL_CAPS_IDENTIFIER]`.
|
||||
**IMPORTANT**: The user might require fewer or more principles than the ones used in the template. If a number is specified, respect it - follow the general template. You will update the doc accordingly.
|
||||
|
||||
2. Collect/derive values for placeholders:
|
||||
- If user input (conversation) supplies a value, use it.
|
||||
- Otherwise infer from existing repo context (README, docs, prior constitution versions if embedded).
|
||||
- For governance dates: `RATIFICATION_DATE` is the original adoption date (if unknown ask or mark TODO), `LAST_AMENDED_DATE` is today if changes are made, otherwise keep previous.
|
||||
- `CONSTITUTION_VERSION` must increment according to semantic versioning rules:
|
||||
* MAJOR: Backward incompatible governance/principle removals or redefinitions.
|
||||
* MINOR: New principle/section added or materially expanded guidance.
|
||||
* PATCH: Clarifications, wording, typo fixes, non-semantic refinements.
|
||||
- If the version bump type is ambiguous, propose your reasoning before finalizing.
|
||||
|
||||
3. Draft the updated constitution content:
|
||||
- Replace every placeholder with concrete text (no bracketed tokens left except intentionally retained template slots that the project has chosen not to define yet—explicitly justify any left).
|
||||
- Preserve the heading hierarchy; comments can be removed once their placeholders are replaced, unless they still add clarifying guidance.
|
||||
- Ensure each Principle section has a succinct name line, a paragraph (or bullet list) capturing its non-negotiable rules, and an explicit rationale if the rules are not self-evident.
|
||||
- Ensure Governance section lists amendment procedure, versioning policy, and compliance review expectations.
|
||||
|
||||
4. Consistency propagation checklist (convert prior checklist into active validations):
|
||||
- Read `.specify/templates/plan-template.md` and ensure any "Constitution Check" or rules align with updated principles.
|
||||
- Read `.specify/templates/spec-template.md` for scope/requirements alignment—update if constitution adds/removes mandatory sections or constraints.
|
||||
- Read `.specify/templates/tasks-template.md` and ensure task categorization reflects new or removed principle-driven task types (e.g., observability, versioning, testing discipline).
|
||||
- Read each command file in `.specify/templates/commands/*.md` (including this one) to verify no outdated references (agent-specific names like CLAUDE only) remain when generic guidance is required.
|
||||
- Read any runtime guidance docs (e.g., `README.md`, `docs/quickstart.md`, or agent-specific guidance files if present). Update references to principles changed.
|
||||
|
||||
5. Produce a Sync Impact Report (prepend as an HTML comment at top of the constitution file after update):
|
||||
- Version change: old → new
|
||||
- List of modified principles (old title → new title if renamed)
|
||||
- Added sections
|
||||
- Removed sections
|
||||
- Templates requiring updates (✅ updated / ⚠ pending) with file paths
|
||||
- Follow-up TODOs if any placeholders intentionally deferred.
|
||||
|
||||
6. Validation before final output:
|
||||
- No remaining unexplained bracket tokens.
|
||||
- Version line matches report.
|
||||
- Dates ISO format YYYY-MM-DD.
|
||||
- Principles are declarative, testable, and free of vague language ("should" → replace with MUST/SHOULD rationale where appropriate).
|
||||
|
||||
7. Write the completed constitution back to `.specify/memory/constitution.md` (overwrite).
|
||||
|
||||
8. Output a final summary to the user with:
|
||||
- New version and bump rationale.
|
||||
- Any files flagged for manual follow-up.
|
||||
- Suggested commit message (e.g., `docs: amend constitution to vX.Y.Z (principle additions + governance update)`).
|
||||
|
||||
Formatting & Style Requirements:
|
||||
- Use Markdown headings exactly as in the template (do not demote/promote levels).
|
||||
- Wrap long rationale lines to keep readability (<100 chars ideally) but do not hard enforce with awkward breaks.
|
||||
- Keep a single blank line between sections.
|
||||
- Avoid trailing whitespace.
|
||||
|
||||
If the user supplies partial updates (e.g., only one principle revision), still perform validation and version decision steps.
|
||||
|
||||
If critical info is missing (e.g., the ratification date is truly unknown), insert `TODO(<FIELD_NAME>): explanation` and list it in the Sync Impact Report under deferred items.
|
||||
|
||||
Do not create a new template; always operate on the existing `.specify/memory/constitution.md` file.
|
||||
56
.codex/prompts/implement.md
Normal file
@@ -0,0 +1,56 @@
|
||||
---
|
||||
description: Execute the implementation plan by processing and executing all tasks defined in tasks.md
|
||||
---
|
||||
|
||||
The user input can be provided directly by the agent or as a command argument - you **MUST** consider it before proceeding with the prompt (if not empty).
|
||||
|
||||
User input:
|
||||
|
||||
$ARGUMENTS
|
||||
|
||||
1. Run `.specify/scripts/bash/check-prerequisites.sh --json --require-tasks --include-tasks` from repo root and parse FEATURE_DIR and AVAILABLE_DOCS list. All paths must be absolute.
|
||||
|
||||
2. Load and analyze the implementation context:
|
||||
- **REQUIRED**: Read tasks.md for the complete task list and execution plan
|
||||
- **REQUIRED**: Read plan.md for tech stack, architecture, and file structure
|
||||
- **IF EXISTS**: Read data-model.md for entities and relationships
|
||||
- **IF EXISTS**: Read contracts/ for API specifications and test requirements
|
||||
- **IF EXISTS**: Read research.md for technical decisions and constraints
|
||||
- **IF EXISTS**: Read quickstart.md for integration scenarios
|
||||
|
||||
3. Parse tasks.md structure and extract:
|
||||
- **Task phases**: Setup, Tests, Core, Integration, Polish
|
||||
- **Task dependencies**: Sequential vs parallel execution rules
|
||||
- **Task details**: ID, description, file paths, parallel markers [P]
|
||||
- **Execution flow**: Order and dependency requirements
|
||||
|
||||
4. Execute implementation following the task plan:
|
||||
- **Phase-by-phase execution**: Complete each phase before moving to the next
|
||||
- **Respect dependencies**: Run sequential tasks in order, parallel tasks [P] can run together
|
||||
- **Follow TDD approach**: Execute test tasks before their corresponding implementation tasks
|
||||
- **File-based coordination**: Tasks affecting the same files must run sequentially
|
||||
- **Validation checkpoints**: Verify each phase completion before proceeding
|
||||
|
||||
5. Implementation execution rules:
|
||||
- **Setup first**: Initialize project structure, dependencies, configuration
|
||||
- **Tests before code**: If tests are required, write them for contracts, entities, and integration scenarios before the corresponding implementation
|
||||
- **Core development**: Implement models, services, CLI commands, endpoints
|
||||
- **Integration work**: Database connections, middleware, logging, external services
|
||||
- **Polish and validation**: Unit tests, performance optimization, documentation
|
||||
|
||||
6. Progress tracking and error handling:
|
||||
- Report progress after each completed task
|
||||
- Halt execution if any non-parallel task fails
|
||||
- For parallel tasks [P], continue with successful tasks, report failed ones
|
||||
- Provide clear error messages with context for debugging
|
||||
- Suggest next steps if implementation cannot proceed
|
||||
- **IMPORTANT** For completed tasks, make sure to mark the task off as [X] in the tasks file.
|
||||
|
||||
7. Completion validation:
|
||||
- Verify all required tasks are completed
|
||||
- Check that implemented features match the original specification
|
||||
- Validate that tests pass and coverage meets requirements
|
||||
- Confirm the implementation follows the technical plan
|
||||
- Report final status with summary of completed work
|
||||
|
||||
Note: This command assumes a complete task breakdown exists in tasks.md. If tasks are incomplete or missing, suggest running `/tasks` first to regenerate the task list.
|
||||
42
.codex/prompts/plan.md
Normal file
@@ -0,0 +1,42 @@
|
||||
---
|
||||
description: Execute the implementation planning workflow using the plan template to generate design artifacts.
|
||||
---
|
||||
|
||||
The user input to you can be provided directly by the agent or as a command argument - you **MUST** consider it before proceeding with the prompt (if not empty).
|
||||
|
||||
User input:
|
||||
|
||||
$ARGUMENTS
|
||||
|
||||
Given the implementation details provided as an argument, do this:
|
||||
|
||||
1. Run `.specify/scripts/bash/setup-plan.sh --json` from the repo root and parse JSON for FEATURE_SPEC, IMPL_PLAN, SPECS_DIR, BRANCH. All future file paths must be absolute.
|
||||
2. Read and analyze the feature specification to understand:
|
||||
- The feature requirements and user stories
|
||||
- Functional and non-functional requirements
|
||||
- Success criteria and acceptance criteria
|
||||
- Any technical constraints or dependencies mentioned
|
||||
|
||||
3. Read the constitution at `.specify/memory/constitution.md` to understand constitutional requirements.
|
||||
|
||||
4. Execute the implementation plan template:
|
||||
- Load `.specify/templates/plan-template.md` (already copied to IMPL_PLAN path)
|
||||
- Set Input path to FEATURE_SPEC
|
||||
- Run the Execution Flow (main) function steps 1-9
|
||||
- The template is self-contained and executable
|
||||
- Follow error handling and gate checks as specified
|
||||
- Let the template guide artifact generation in $SPECS_DIR:
|
||||
* Phase 0 generates research.md
|
||||
* Phase 1 generates data-model.md, contracts/, quickstart.md
|
||||
* Phase 2 generates tasks.md
|
||||
- Incorporate user-provided details from arguments into Technical Context: $ARGUMENTS
|
||||
- Update Progress Tracking as you complete each phase
|
||||
|
||||
5. Verify execution completed:
|
||||
- Check Progress Tracking shows all phases complete
|
||||
- Ensure all required artifacts were generated
|
||||
- Confirm no ERROR states in execution
|
||||
|
||||
6. Report results with branch name, file paths, and generated artifacts.
|
||||
|
||||
Use absolute paths with the repository root for all file operations to avoid path issues.
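For reference, the script in step 1 emits a single JSON line with the keys named above; the paths and branch shown here are illustrative:

```bash
.specify/scripts/bash/setup-plan.sh --json
# → {"FEATURE_SPEC":"/repo/specs/001-landing-page-redesign/spec.md","IMPL_PLAN":"/repo/specs/001-landing-page-redesign/plan.md","SPECS_DIR":"/repo/specs/001-landing-page-redesign","BRANCH":"001-landing-page-redesign","HAS_GIT":"true"}
```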
|
||||
21
.codex/prompts/specify.md
Normal file
@@ -0,0 +1,21 @@
|
||||
---
|
||||
description: Create or update the feature specification from a natural language feature description.
|
||||
---
|
||||
|
||||
The user input to you can be provided directly by the agent or as a command argument - you **MUST** consider it before proceeding with the prompt (if not empty).
|
||||
|
||||
User input:
|
||||
|
||||
$ARGUMENTS
|
||||
|
||||
The text the user typed after `/specify` in the triggering message **is** the feature description. Assume you always have it available in this conversation even if `$ARGUMENTS` appears literally below. Do not ask the user to repeat it unless they provided an empty command.
|
||||
|
||||
Given that feature description, do this:
|
||||
|
||||
1. Run the script `.specify/scripts/bash/create-new-feature.sh --json "$ARGUMENTS"` from repo root and parse its JSON output for BRANCH_NAME and SPEC_FILE. All file paths must be absolute.
|
||||
**IMPORTANT** You must only ever run this script once. The JSON is printed in the terminal as output (an example appears after these steps) - always refer to it to get the actual content you're looking for.
|
||||
2. Load `.specify/templates/spec-template.md` to understand required sections.
|
||||
3. Write the specification to SPEC_FILE using the template structure, replacing placeholders with concrete details derived from the feature description (arguments) while preserving section order and headings.
|
||||
4. Report completion with branch name, spec file path, and readiness for the next phase.
|
||||
|
||||
Note: The script creates and checks out the new branch and initializes the spec file before writing.
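For illustration, the single run in step 1 prints JSON along these lines (the description, branch, and paths are hypothetical):

```bash
.specify/scripts/bash/create-new-feature.sh --json "landing page redesign"
# → {"BRANCH_NAME":"001-landing-page-redesign","SPEC_FILE":"/repo/specs/001-landing-page-redesign/spec.md","FEATURE_NUM":"001"}
```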
|
||||
62
.codex/prompts/tasks.md
Normal file
@@ -0,0 +1,62 @@
|
||||
---
|
||||
description: Generate an actionable, dependency-ordered tasks.md for the feature based on available design artifacts.
|
||||
---
|
||||
|
||||
The user input to you can be provided directly by the agent or as a command argument - you **MUST** consider it before proceeding with the prompt (if not empty).
|
||||
|
||||
User input:
|
||||
|
||||
$ARGUMENTS
|
||||
|
||||
1. Run `.specify/scripts/bash/check-prerequisites.sh --json` from repo root and parse FEATURE_DIR and AVAILABLE_DOCS list. All paths must be absolute.
|
||||
2. Load and analyze available design documents:
|
||||
- Always read plan.md for tech stack and libraries
|
||||
- IF EXISTS: Read data-model.md for entities
|
||||
- IF EXISTS: Read contracts/ for API endpoints
|
||||
- IF EXISTS: Read research.md for technical decisions
|
||||
- IF EXISTS: Read quickstart.md for test scenarios
|
||||
|
||||
Note: Not all projects have all documents. For example:
|
||||
- CLI tools might not have contracts/
|
||||
- Simple libraries might not need data-model.md
|
||||
- Generate tasks based on what's available
|
||||
|
||||
3. Generate tasks following the template:
|
||||
- Use `.specify/templates/tasks-template.md` as the base
|
||||
- Replace example tasks with actual tasks based on:
|
||||
* **Setup tasks**: Project init, dependencies, linting
|
||||
* **Test tasks [P]**: One per contract, one per integration scenario
|
||||
* **Core tasks**: One per entity, service, CLI command, endpoint
|
||||
* **Integration tasks**: DB connections, middleware, logging
|
||||
* **Polish tasks [P]**: Unit tests, performance, docs
|
||||
|
||||
4. Task generation rules:
|
||||
- Each contract file → contract test task marked [P]
|
||||
- Each entity in data-model → model creation task marked [P]
|
||||
- Each endpoint → implementation task (not parallel if shared files)
|
||||
- Each user story → integration test marked [P]
|
||||
- Different files = can be parallel [P]
|
||||
- Same file = sequential (no [P])
|
||||
|
||||
5. Order tasks by dependencies:
|
||||
- Setup before everything
|
||||
- Tests before implementation (TDD)
|
||||
- Models before services
|
||||
- Services before endpoints
|
||||
- Core before integration
|
||||
- Everything before polish
|
||||
|
||||
6. Include parallel execution examples:
|
||||
- Group [P] tasks that can run together
|
||||
- Show actual Task agent commands
|
||||
|
||||
7. Create FEATURE_DIR/tasks.md with:
|
||||
- Correct feature name from implementation plan
|
||||
- Numbered tasks (T001, T002, etc.)
|
||||
- Clear file paths for each task
|
||||
- Dependency notes
|
||||
- Parallel execution guidance
|
||||
|
||||
Context for task generation: $ARGUMENTS
|
||||
|
||||
The tasks.md should be immediately executable - each task must be specific enough that an LLM can complete it without additional context.
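As one possible rendering of the parallel grouping asked for in step 6, a group of [P] tasks touching different files might be listed like this (IDs and paths hypothetical):

```markdown
### Parallel group A (may run together: different files, no shared state)
- T002 [P] Contract test GET /api/pages → apps/cms/tests/contract/pages-get.test.ts
- T003 [P] Contract test POST /api/pages → apps/cms/tests/contract/pages-post.test.ts
```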
|
||||
1
.codex/version.json
Normal file
@@ -0,0 +1 @@
|
||||
{"latest_version":"0.39.0","last_checked_at":"2025-09-22T09:14:04.035532Z"}
|
||||
43
.gitignore
vendored
Normal file
@@ -0,0 +1,43 @@
|
||||
# Dependencies
|
||||
/node_modules
|
||||
**/node_modules
|
||||
|
||||
# Build output
|
||||
/dist
|
||||
/build
|
||||
|
||||
# Logs
|
||||
*.log
|
||||
npm-debug.log*
|
||||
yarn-debug.log*
|
||||
yarn-error.log*
|
||||
pnpm-debug.log*
|
||||
|
||||
# Environment variables
|
||||
.env
|
||||
.env.local
|
||||
.env.development.local
|
||||
.env.test.local
|
||||
.env.production.local
|
||||
.dev.vars
|
||||
|
||||
# IDE and editor directories
|
||||
.idea/
|
||||
.vscode/
|
||||
|
||||
# OS-generated files
|
||||
.DS_Store
|
||||
.DS_Store?
|
||||
Thumbs.db
|
||||
|
||||
# Temporary files
|
||||
*.tmp
|
||||
|
||||
# AI and tool directories
|
||||
.codex/
|
||||
.gemini/
|
||||
.turbo/
|
||||
.wrangler/
|
||||
|
||||
# OS-generated files in subdirs
|
||||
**/.DS_Store
|
||||
160
.specify/scripts/bash/check-prerequisites.sh
Executable file
@@ -0,0 +1,160 @@
|
||||
#!/usr/bin/env bash
|
||||
|
||||
# Consolidated prerequisite checking script
|
||||
#
|
||||
# This script provides unified prerequisite checking for Spec-Driven Development workflow.
|
||||
# It replaces the functionality previously spread across multiple scripts.
|
||||
#
|
||||
# Usage: ./check-prerequisites.sh [OPTIONS]
|
||||
#
|
||||
# OPTIONS:
|
||||
# --json Output in JSON format
|
||||
# --require-tasks Require tasks.md to exist (for implementation phase)
|
||||
# --include-tasks Include tasks.md in AVAILABLE_DOCS list
|
||||
# --paths-only Only output path variables (no validation)
|
||||
# --help, -h Show help message
|
||||
#
|
||||
# OUTPUTS:
|
||||
# JSON mode: {"FEATURE_DIR":"...", "AVAILABLE_DOCS":["..."]}
|
||||
# Text mode: FEATURE_DIR:... \n AVAILABLE_DOCS: \n ✓/✗ file.md
|
||||
# Paths only: REPO_ROOT: ... \n BRANCH: ... \n FEATURE_DIR: ... etc.
|
||||
|
||||
set -e
|
||||
|
||||
# Parse command line arguments
|
||||
JSON_MODE=false
|
||||
REQUIRE_TASKS=false
|
||||
INCLUDE_TASKS=false
|
||||
PATHS_ONLY=false
|
||||
|
||||
for arg in "$@"; do
|
||||
case "$arg" in
|
||||
--json)
|
||||
JSON_MODE=true
|
||||
;;
|
||||
--require-tasks)
|
||||
REQUIRE_TASKS=true
|
||||
;;
|
||||
--include-tasks)
|
||||
INCLUDE_TASKS=true
|
||||
;;
|
||||
--paths-only)
|
||||
PATHS_ONLY=true
|
||||
;;
|
||||
--help|-h)
|
||||
cat << 'EOF'
|
||||
Usage: check-prerequisites.sh [OPTIONS]
|
||||
|
||||
Consolidated prerequisite checking for Spec-Driven Development workflow.
|
||||
|
||||
OPTIONS:
|
||||
--json Output in JSON format
|
||||
--require-tasks Require tasks.md to exist (for implementation phase)
|
||||
--include-tasks Include tasks.md in AVAILABLE_DOCS list
|
||||
--paths-only Only output path variables (no prerequisite validation)
|
||||
--help, -h Show this help message
|
||||
|
||||
EXAMPLES:
|
||||
# Check task prerequisites (plan.md required)
|
||||
./check-prerequisites.sh --json
|
||||
|
||||
# Check implementation prerequisites (plan.md + tasks.md required)
|
||||
./check-prerequisites.sh --json --require-tasks --include-tasks
|
||||
|
||||
# Get feature paths only (no validation)
|
||||
./check-prerequisites.sh --paths-only
|
||||
|
||||
EOF
|
||||
exit 0
|
||||
;;
|
||||
*)
|
||||
echo "ERROR: Unknown option '$arg'. Use --help for usage information." >&2
|
||||
exit 1
|
||||
;;
|
||||
esac
|
||||
done
|
||||
|
||||
# Source common functions
|
||||
SCRIPT_DIR="$(cd "$(dirname "${BASH_SOURCE[0]}")" && pwd)"
|
||||
source "$SCRIPT_DIR/common.sh"
|
||||
|
||||
# Get feature paths and validate branch
|
||||
eval $(get_feature_paths)
|
||||
check_feature_branch "$CURRENT_BRANCH" "$HAS_GIT" || exit 1
|
||||
|
||||
# If paths-only mode, output paths and exit
|
||||
if $PATHS_ONLY; then
|
||||
echo "REPO_ROOT: $REPO_ROOT"
|
||||
echo "BRANCH: $CURRENT_BRANCH"
|
||||
echo "FEATURE_DIR: $FEATURE_DIR"
|
||||
echo "FEATURE_SPEC: $FEATURE_SPEC"
|
||||
echo "IMPL_PLAN: $IMPL_PLAN"
|
||||
echo "TASKS: $TASKS"
|
||||
exit 0
|
||||
fi
|
||||
|
||||
# Validate required directories and files
|
||||
if [[ ! -d "$FEATURE_DIR" ]]; then
|
||||
echo "ERROR: Feature directory not found: $FEATURE_DIR" >&2
|
||||
echo "Run /specify first to create the feature structure." >&2
|
||||
exit 1
|
||||
fi
|
||||
|
||||
if [[ ! -f "$IMPL_PLAN" ]]; then
|
||||
echo "ERROR: plan.md not found in $FEATURE_DIR" >&2
|
||||
echo "Run /plan first to create the implementation plan." >&2
|
||||
exit 1
|
||||
fi
|
||||
|
||||
# Check for tasks.md if required
|
||||
if $REQUIRE_TASKS && [[ ! -f "$TASKS" ]]; then
|
||||
echo "ERROR: tasks.md not found in $FEATURE_DIR" >&2
|
||||
echo "Run /tasks first to create the task list." >&2
|
||||
exit 1
|
||||
fi
|
||||
|
||||
# Build list of available documents
|
||||
docs=()
|
||||
|
||||
# Always check these optional docs
|
||||
[[ -f "$RESEARCH" ]] && docs+=("research.md")
|
||||
[[ -f "$DATA_MODEL" ]] && docs+=("data-model.md")
|
||||
|
||||
# Check contracts directory (only if it exists and has files)
|
||||
if [[ -d "$CONTRACTS_DIR" ]] && [[ -n "$(ls -A "$CONTRACTS_DIR" 2>/dev/null)" ]]; then
|
||||
docs+=("contracts/")
|
||||
fi
|
||||
|
||||
[[ -f "$QUICKSTART" ]] && docs+=("quickstart.md")
|
||||
|
||||
# Include tasks.md if requested and it exists
|
||||
if $INCLUDE_TASKS && [[ -f "$TASKS" ]]; then
|
||||
docs+=("tasks.md")
|
||||
fi
|
||||
|
||||
# Output results
|
||||
if $JSON_MODE; then
|
||||
# Build JSON array of documents
|
||||
if [[ ${#docs[@]} -eq 0 ]]; then
|
||||
json_docs="[]"
|
||||
else
|
||||
json_docs=$(printf '"%s",' "${docs[@]}")
|
||||
json_docs="[${json_docs%,}]"
|
||||
fi
|
||||
|
||||
printf '{"FEATURE_DIR":"%s","AVAILABLE_DOCS":%s}\n' "$FEATURE_DIR" "$json_docs"
|
||||
else
|
||||
# Text output
|
||||
echo "FEATURE_DIR:$FEATURE_DIR"
|
||||
echo "AVAILABLE_DOCS:"
|
||||
|
||||
# Show status of each potential document
|
||||
check_file "$RESEARCH" "research.md"
|
||||
check_file "$DATA_MODEL" "data-model.md"
|
||||
check_dir "$CONTRACTS_DIR" "contracts/"
|
||||
check_file "$QUICKSTART" "quickstart.md"
|
||||
|
||||
if $INCLUDE_TASKS; then
|
||||
check_file "$TASKS" "tasks.md"
|
||||
fi
|
||||
fi
|
||||
@@ -1,16 +1,84 @@
|
||||
#!/usr/bin/env bash
|
||||
# (Moved to scripts/bash/) Common functions and variables for all scripts
|
||||
# Common functions and variables for all scripts
|
||||
|
||||
get_repo_root() { git rev-parse --show-toplevel; }
|
||||
get_current_branch() { git rev-parse --abbrev-ref HEAD; }
|
||||
# Get repository root, with fallback for non-git repositories
|
||||
get_repo_root() {
|
||||
if git rev-parse --show-toplevel >/dev/null 2>&1; then
|
||||
git rev-parse --show-toplevel
|
||||
else
|
||||
# Fall back to script location for non-git repos
|
||||
local script_dir="$(cd "$(dirname "${BASH_SOURCE[0]}")" && pwd)"
|
||||
(cd "$script_dir/../../.." && pwd)
|
||||
fi
|
||||
}
|
||||
|
||||
# Get current branch, with fallback for non-git repositories
|
||||
get_current_branch() {
|
||||
# First check if SPECIFY_FEATURE environment variable is set
|
||||
if [[ -n "${SPECIFY_FEATURE:-}" ]]; then
|
||||
echo "$SPECIFY_FEATURE"
|
||||
return
|
||||
fi
|
||||
|
||||
# Then check git if available
|
||||
if git rev-parse --abbrev-ref HEAD >/dev/null 2>&1; then
|
||||
git rev-parse --abbrev-ref HEAD
|
||||
return
|
||||
fi
|
||||
|
||||
# For non-git repos, try to find the latest feature directory
|
||||
local repo_root=$(get_repo_root)
|
||||
local specs_dir="$repo_root/specs"
|
||||
|
||||
if [[ -d "$specs_dir" ]]; then
|
||||
local latest_feature=""
|
||||
local highest=0
|
||||
|
||||
for dir in "$specs_dir"/*; do
|
||||
if [[ -d "$dir" ]]; then
|
||||
local dirname=$(basename "$dir")
|
||||
if [[ "$dirname" =~ ^([0-9]{3})- ]]; then
|
||||
local number=${BASH_REMATCH[1]}
|
||||
number=$((10#$number))
|
||||
if [[ "$number" -gt "$highest" ]]; then
|
||||
highest=$number
|
||||
latest_feature=$dirname
|
||||
fi
|
||||
fi
|
||||
fi
|
||||
done
|
||||
|
||||
if [[ -n "$latest_feature" ]]; then
|
||||
echo "$latest_feature"
|
||||
return
|
||||
fi
|
||||
fi
|
||||
|
||||
echo "main" # Final fallback
|
||||
}
|
||||
|
||||
# Check if we have git available
|
||||
has_git() {
|
||||
git rev-parse --show-toplevel >/dev/null 2>&1
|
||||
}
|
||||
|
||||
check_feature_branch() {
|
||||
local branch="$1"
|
||||
local has_git_repo="$2"
|
||||
|
||||
# For non-git repos, we can't enforce branch naming but still provide output
|
||||
if [[ "$has_git_repo" != "true" ]]; then
|
||||
echo "[specify] Warning: Git repository not detected; skipped branch validation" >&2
|
||||
return 0
|
||||
fi
|
||||
|
||||
if [[ ! "$branch" =~ ^[0-9]{3}- ]]; then
|
||||
echo "ERROR: Not on a feature branch. Current branch: $branch" >&2
|
||||
echo "Feature branches should be named like: 001-feature-name" >&2
|
||||
return 1
|
||||
fi; return 0
|
||||
fi
|
||||
|
||||
return 0
|
||||
}
|
||||
|
||||
get_feature_dir() { echo "$1/specs/$2"; }
|
||||
@@ -18,10 +86,18 @@ get_feature_dir() { echo "$1/specs/$2"; }
|
||||
get_feature_paths() {
|
||||
local repo_root=$(get_repo_root)
|
||||
local current_branch=$(get_current_branch)
|
||||
local has_git_repo="false"
|
||||
|
||||
if has_git; then
|
||||
has_git_repo="true"
|
||||
fi
|
||||
|
||||
local feature_dir=$(get_feature_dir "$repo_root" "$current_branch")
|
||||
|
||||
cat <<EOF
|
||||
REPO_ROOT='$repo_root'
|
||||
CURRENT_BRANCH='$current_branch'
|
||||
HAS_GIT='$has_git_repo'
|
||||
FEATURE_DIR='$feature_dir'
|
||||
FEATURE_SPEC='$feature_dir/spec.md'
|
||||
IMPL_PLAN='$feature_dir/plan.md'
|
||||
|
||||
@@ -1,5 +1,5 @@
|
||||
#!/usr/bin/env bash
|
||||
# (Moved to scripts/bash/) Create a new feature with branch, directory structure, and template
|
||||
|
||||
set -e
|
||||
|
||||
JSON_MODE=false
|
||||
@@ -18,7 +18,38 @@ if [ -z "$FEATURE_DESCRIPTION" ]; then
|
||||
exit 1
|
||||
fi
|
||||
|
||||
REPO_ROOT=$(git rev-parse --show-toplevel)
|
||||
# Function to find the repository root by searching for existing project markers
|
||||
find_repo_root() {
|
||||
local dir="$1"
|
||||
while [ "$dir" != "/" ]; do
|
||||
if [ -d "$dir/.git" ] || [ -d "$dir/.specify" ]; then
|
||||
echo "$dir"
|
||||
return 0
|
||||
fi
|
||||
dir="$(dirname "$dir")"
|
||||
done
|
||||
return 1
|
||||
}
|
||||
|
||||
# Resolve repository root. Prefer git information when available, but fall back
|
||||
# to searching for repository markers so the workflow still functions in repositories that
|
||||
# were initialised with --no-git.
|
||||
SCRIPT_DIR="$(cd "$(dirname "${BASH_SOURCE[0]}")" && pwd)"
|
||||
|
||||
if git rev-parse --show-toplevel >/dev/null 2>&1; then
|
||||
REPO_ROOT=$(git rev-parse --show-toplevel)
|
||||
HAS_GIT=true
|
||||
else
|
||||
REPO_ROOT="$(find_repo_root "$SCRIPT_DIR")"
|
||||
if [ -z "$REPO_ROOT" ]; then
|
||||
echo "Error: Could not determine repository root. Please run this script from within the repository." >&2
|
||||
exit 1
|
||||
fi
|
||||
HAS_GIT=false
|
||||
fi
|
||||
|
||||
cd "$REPO_ROOT"
|
||||
|
||||
SPECS_DIR="$REPO_ROOT/specs"
|
||||
mkdir -p "$SPECS_DIR"
|
||||
|
||||
@@ -40,7 +71,11 @@ BRANCH_NAME=$(echo "$FEATURE_DESCRIPTION" | tr '[:upper:]' '[:lower:]' | sed 's/
|
||||
WORDS=$(echo "$BRANCH_NAME" | tr '-' '\n' | grep -v '^$' | head -3 | tr '\n' '-' | sed 's/-$//')
|
||||
BRANCH_NAME="${FEATURE_NUM}-${WORDS}"
|
||||
|
||||
git checkout -b "$BRANCH_NAME"
|
||||
if [ "$HAS_GIT" = true ]; then
|
||||
git checkout -b "$BRANCH_NAME"
|
||||
else
|
||||
>&2 echo "[specify] Warning: Git repository not detected; skipped branch creation for $BRANCH_NAME"
|
||||
fi
|
||||
|
||||
FEATURE_DIR="$SPECS_DIR/$BRANCH_NAME"
|
||||
mkdir -p "$FEATURE_DIR"
|
||||
@@ -49,10 +84,14 @@ TEMPLATE="$REPO_ROOT/templates/spec-template.md"
|
||||
SPEC_FILE="$FEATURE_DIR/spec.md"
|
||||
if [ -f "$TEMPLATE" ]; then cp "$TEMPLATE" "$SPEC_FILE"; else touch "$SPEC_FILE"; fi
|
||||
|
||||
# Set the SPECIFY_FEATURE environment variable for the current session
|
||||
export SPECIFY_FEATURE="$BRANCH_NAME"
|
||||
|
||||
if $JSON_MODE; then
|
||||
printf '{"BRANCH_NAME":"%s","SPEC_FILE":"%s","FEATURE_NUM":"%s"}\n' "$BRANCH_NAME" "$SPEC_FILE" "$FEATURE_NUM"
|
||||
else
|
||||
echo "BRANCH_NAME: $BRANCH_NAME"
|
||||
echo "SPEC_FILE: $SPEC_FILE"
|
||||
echo "FEATURE_NUM: $FEATURE_NUM"
|
||||
echo "SPECIFY_FEATURE environment variable set to: $BRANCH_NAME"
|
||||
fi
|
||||
|
||||
@@ -1,17 +1,60 @@
|
||||
#!/usr/bin/env bash
|
||||
|
||||
set -e
|
||||
|
||||
# Parse command line arguments
|
||||
JSON_MODE=false
|
||||
for arg in "$@"; do case "$arg" in --json) JSON_MODE=true ;; --help|-h) echo "Usage: $0 [--json]"; exit 0 ;; esac; done
|
||||
ARGS=()
|
||||
|
||||
for arg in "$@"; do
|
||||
case "$arg" in
|
||||
--json)
|
||||
JSON_MODE=true
|
||||
;;
|
||||
--help|-h)
|
||||
echo "Usage: $0 [--json]"
|
||||
echo " --json Output results in JSON format"
|
||||
echo " --help Show this help message"
|
||||
exit 0
|
||||
;;
|
||||
*)
|
||||
ARGS+=("$arg")
|
||||
;;
|
||||
esac
|
||||
done
|
||||
|
||||
# Get script directory and load common functions
|
||||
SCRIPT_DIR="$(cd "$(dirname "${BASH_SOURCE[0]}")" && pwd)"
|
||||
source "$SCRIPT_DIR/common.sh"
|
||||
|
||||
# Get all paths and variables from common functions
|
||||
eval $(get_feature_paths)
|
||||
check_feature_branch "$CURRENT_BRANCH" || exit 1
|
||||
|
||||
# Check if we're on a proper feature branch (only for git repos)
|
||||
check_feature_branch "$CURRENT_BRANCH" "$HAS_GIT" || exit 1
|
||||
|
||||
# Ensure the feature directory exists
|
||||
mkdir -p "$FEATURE_DIR"
|
||||
|
||||
# Copy plan template if it exists
|
||||
TEMPLATE="$REPO_ROOT/.specify/templates/plan-template.md"
|
||||
[[ -f "$TEMPLATE" ]] && cp "$TEMPLATE" "$IMPL_PLAN"
|
||||
if $JSON_MODE; then
|
||||
printf '{"FEATURE_SPEC":"%s","IMPL_PLAN":"%s","SPECS_DIR":"%s","BRANCH":"%s"}\n' \
|
||||
"$FEATURE_SPEC" "$IMPL_PLAN" "$FEATURE_DIR" "$CURRENT_BRANCH"
|
||||
if [[ -f "$TEMPLATE" ]]; then
|
||||
cp "$TEMPLATE" "$IMPL_PLAN"
|
||||
echo "Copied plan template to $IMPL_PLAN"
|
||||
else
|
||||
echo "FEATURE_SPEC: $FEATURE_SPEC"; echo "IMPL_PLAN: $IMPL_PLAN"; echo "SPECS_DIR: $FEATURE_DIR"; echo "BRANCH: $CURRENT_BRANCH"
|
||||
echo "Warning: Plan template not found at $TEMPLATE"
|
||||
# Create a basic plan file if template doesn't exist
|
||||
touch "$IMPL_PLAN"
|
||||
fi
|
||||
|
||||
# Output results
|
||||
if $JSON_MODE; then
|
||||
printf '{"FEATURE_SPEC":"%s","IMPL_PLAN":"%s","SPECS_DIR":"%s","BRANCH":"%s","HAS_GIT":"%s"}\n' \
|
||||
"$FEATURE_SPEC" "$IMPL_PLAN" "$FEATURE_DIR" "$CURRENT_BRANCH" "$HAS_GIT"
|
||||
else
|
||||
echo "FEATURE_SPEC: $FEATURE_SPEC"
|
||||
echo "IMPL_PLAN: $IMPL_PLAN"
|
||||
echo "SPECS_DIR: $FEATURE_DIR"
|
||||
echo "BRANCH: $CURRENT_BRANCH"
|
||||
echo "HAS_GIT: $HAS_GIT"
|
||||
fi
|
||||
|
||||
@@ -1,66 +1,719 @@
|
||||
#!/usr/bin/env bash
|
||||
|
||||
# Update agent context files with information from plan.md
|
||||
#
|
||||
# This script maintains AI agent context files by parsing feature specifications
|
||||
# and updating agent-specific configuration files with project information.
|
||||
#
|
||||
# MAIN FUNCTIONS:
|
||||
# 1. Environment Validation
|
||||
# - Verifies git repository structure and branch information
|
||||
# - Checks for required plan.md files and templates
|
||||
# - Validates file permissions and accessibility
|
||||
#
|
||||
# 2. Plan Data Extraction
|
||||
# - Parses plan.md files to extract project metadata
|
||||
# - Identifies language/version, frameworks, databases, and project types
|
||||
# - Handles missing or incomplete specification data gracefully
|
||||
#
|
||||
# 3. Agent File Management
|
||||
# - Creates new agent context files from templates when needed
|
||||
# - Updates existing agent files with new project information
|
||||
# - Preserves manual additions and custom configurations
|
||||
# - Supports multiple AI agent formats and directory structures
|
||||
#
|
||||
# 4. Content Generation
|
||||
# - Generates language-specific build/test commands
|
||||
# - Creates appropriate project directory structures
|
||||
# - Updates technology stacks and recent changes sections
|
||||
# - Maintains consistent formatting and timestamps
|
||||
#
|
||||
# 5. Multi-Agent Support
|
||||
# - Handles agent-specific file paths and naming conventions
|
||||
# - Supports: Claude, Gemini, Copilot, Cursor, Qwen, opencode, Codex, Windsurf
|
||||
# - Can update single agents or all existing agent files
|
||||
# - Creates default Claude file if no agent files exist
|
||||
#
|
||||
# Usage: ./update-agent-context.sh [agent_type]
|
||||
# Agent types: claude|gemini|copilot|cursor|qwen|opencode|codex|windsurf
|
||||
# Leave empty to update all existing agent files
|
||||
|
||||
set -e
|
||||
REPO_ROOT=$(git rev-parse --show-toplevel)
|
||||
CURRENT_BRANCH=$(git rev-parse --abbrev-ref HEAD)
|
||||
FEATURE_DIR="$REPO_ROOT/specs/$CURRENT_BRANCH"
|
||||
NEW_PLAN="$FEATURE_DIR/plan.md"
|
||||
CLAUDE_FILE="$REPO_ROOT/CLAUDE.md"; GEMINI_FILE="$REPO_ROOT/GEMINI.md"; COPILOT_FILE="$REPO_ROOT/.github/copilot-instructions.md"; CURSOR_FILE="$REPO_ROOT/.cursor/rules/specify-rules.mdc"; QWEN_FILE="$REPO_ROOT/QWEN.md"; AGENTS_FILE="$REPO_ROOT/AGENTS.md"
|
||||
AGENT_TYPE="$1"
|
||||
[ -f "$NEW_PLAN" ] || { echo "ERROR: No plan.md found at $NEW_PLAN"; exit 1; }
|
||||
echo "=== Updating agent context files for feature $CURRENT_BRANCH ==="
|
||||
NEW_LANG=$(grep "^**Language/Version**: " "$NEW_PLAN" 2>/dev/null | head -1 | sed 's/^**Language\/Version**: //' | grep -v "NEEDS CLARIFICATION" || echo "")
|
||||
NEW_FRAMEWORK=$(grep "^**Primary Dependencies**: " "$NEW_PLAN" 2>/dev/null | head -1 | sed 's/^**Primary Dependencies**: //' | grep -v "NEEDS CLARIFICATION" || echo "")
|
||||
NEW_DB=$(grep "^**Storage**: " "$NEW_PLAN" 2>/dev/null | head -1 | sed 's/^**Storage**: //' | grep -v "N/A" | grep -v "NEEDS CLARIFICATION" || echo "")
|
||||
NEW_PROJECT_TYPE=$(grep "^**Project Type**: " "$NEW_PLAN" 2>/dev/null | head -1 | sed 's/^**Project Type**: //' || echo "")
|
||||
update_agent_file() { local target_file="$1" agent_name="$2"; echo "Updating $agent_name context file: $target_file"; local temp_file=$(mktemp); if [ ! -f "$target_file" ]; then
|
||||
echo "Creating new $agent_name context file..."; if [ -f "$REPO_ROOT/.specify/templates/agent-file-template.md" ]; then cp "$REPO_ROOT/.specify/templates/agent-file-template.md" "$temp_file"; else echo "ERROR: Template not found"; return 1; fi;
|
||||
sed -i.bak "s/\[PROJECT NAME\]/$(basename $REPO_ROOT)/" "$temp_file"; sed -i.bak "s/\[DATE\]/$(date +%Y-%m-%d)/" "$temp_file"; sed -i.bak "s/\[EXTRACTED FROM ALL PLAN.MD FILES\]/- $NEW_LANG + $NEW_FRAMEWORK ($CURRENT_BRANCH)/" "$temp_file";
|
||||
if [[ "$NEW_PROJECT_TYPE" == *"web"* ]]; then sed -i.bak "s|\[ACTUAL STRUCTURE FROM PLANS\]|backend/\nfrontend/\ntests/|" "$temp_file"; else sed -i.bak "s|\[ACTUAL STRUCTURE FROM PLANS\]|src/\ntests/|" "$temp_file"; fi;
|
||||
if [[ "$NEW_LANG" == *"Python"* ]]; then COMMANDS="cd src && pytest && ruff check ."; elif [[ "$NEW_LANG" == *"Rust"* ]]; then COMMANDS="cargo test && cargo clippy"; elif [[ "$NEW_LANG" == *"JavaScript"* ]] || [[ "$NEW_LANG" == *"TypeScript"* ]]; then COMMANDS="npm test && npm run lint"; else COMMANDS="# Add commands for $NEW_LANG"; fi; sed -i.bak "s|\[ONLY COMMANDS FOR ACTIVE TECHNOLOGIES\]|$COMMANDS|" "$temp_file";
|
||||
sed -i.bak "s|\[LANGUAGE-SPECIFIC, ONLY FOR LANGUAGES IN USE\]|$NEW_LANG: Follow standard conventions|" "$temp_file"; sed -i.bak "s|\[LAST 3 FEATURES AND WHAT THEY ADDED\]|- $CURRENT_BRANCH: Added $NEW_LANG + $NEW_FRAMEWORK|" "$temp_file"; rm "$temp_file.bak";
|
||||
else
|
||||
echo "Updating existing $agent_name context file..."; manual_start=$(grep -n "<!-- MANUAL ADDITIONS START -->" "$target_file" | cut -d: -f1); manual_end=$(grep -n "<!-- MANUAL ADDITIONS END -->" "$target_file" | cut -d: -f1); if [ -n "$manual_start" ] && [ -n "$manual_end" ]; then sed -n "${manual_start},${manual_end}p" "$target_file" > /tmp/manual_additions.txt; fi;
|
||||
python3 - "$target_file" <<'EOF'
|
||||
import re,sys,datetime
|
||||
target=sys.argv[1]
|
||||
with open(target) as f: content=f.read()
|
||||
NEW_LANG="'$NEW_LANG'";NEW_FRAMEWORK="'$NEW_FRAMEWORK'";CURRENT_BRANCH="'$CURRENT_BRANCH'";NEW_DB="'$NEW_DB'";NEW_PROJECT_TYPE="'$NEW_PROJECT_TYPE'"
|
||||
# Tech section
|
||||
m=re.search(r'## Active Technologies\n(.*?)\n\n',content, re.DOTALL)
|
||||
if m:
|
||||
existing=m.group(1)
|
||||
additions=[]
|
||||
if '$NEW_LANG' and '$NEW_LANG' not in existing: additions.append(f"- $NEW_LANG + $NEW_FRAMEWORK ($CURRENT_BRANCH)")
|
||||
if '$NEW_DB' and '$NEW_DB' not in existing and '$NEW_DB'!='N/A': additions.append(f"- $NEW_DB ($CURRENT_BRANCH)")
|
||||
if additions:
|
||||
new_block=existing+"\n"+"\n".join(additions)
|
||||
content=content.replace(m.group(0),f"## Active Technologies\n{new_block}\n\n")
|
||||
# Recent changes
|
||||
m2=re.search(r'## Recent Changes\n(.*?)(\n\n|$)',content, re.DOTALL)
|
||||
if m2:
|
||||
lines=[l for l in m2.group(1).strip().split('\n') if l]
|
||||
lines.insert(0,f"- $CURRENT_BRANCH: Added $NEW_LANG + $NEW_FRAMEWORK")
|
||||
lines=lines[:3]
|
||||
content=re.sub(r'## Recent Changes\n.*?(\n\n|$)', '## Recent Changes\n'+"\n".join(lines)+'\n\n', content, flags=re.DOTALL)
|
||||
content=re.sub(r'Last updated: \d{4}-\d{2}-\d{2}', 'Last updated: '+datetime.datetime.now().strftime('%Y-%m-%d'), content)
|
||||
open(target+'.tmp','w').write(content)
|
||||
EOF
|
||||
mv "$target_file.tmp" "$target_file"; if [ -f /tmp/manual_additions.txt ]; then sed -i.bak '/<!-- MANUAL ADDITIONS START -->/,/<!-- MANUAL ADDITIONS END -->/d' "$target_file"; cat /tmp/manual_additions.txt >> "$target_file"; rm /tmp/manual_additions.txt "$target_file.bak"; fi;
|
||||
fi; mv "$temp_file" "$target_file" 2>/dev/null || true; echo "✅ $agent_name context file updated successfully"; }
|
||||
case "$AGENT_TYPE" in
|
||||
claude) update_agent_file "$CLAUDE_FILE" "Claude Code" ;;
|
||||
gemini) update_agent_file "$GEMINI_FILE" "Gemini CLI" ;;
|
||||
copilot) update_agent_file "$COPILOT_FILE" "GitHub Copilot" ;;
|
||||
cursor) update_agent_file "$CURSOR_FILE" "Cursor IDE" ;;
|
||||
qwen) update_agent_file "$QWEN_FILE" "Qwen Code" ;;
|
||||
opencode) update_agent_file "$AGENTS_FILE" "opencode" ;;
|
||||
"") [ -f "$CLAUDE_FILE" ] && update_agent_file "$CLAUDE_FILE" "Claude Code"; \
|
||||
[ -f "$GEMINI_FILE" ] && update_agent_file "$GEMINI_FILE" "Gemini CLI"; \
|
||||
[ -f "$COPILOT_FILE" ] && update_agent_file "$COPILOT_FILE" "GitHub Copilot"; \
|
||||
[ -f "$CURSOR_FILE" ] && update_agent_file "$CURSOR_FILE" "Cursor IDE"; \
|
||||
[ -f "$QWEN_FILE" ] && update_agent_file "$QWEN_FILE" "Qwen Code"; \
|
||||
[ -f "$AGENTS_FILE" ] && update_agent_file "$AGENTS_FILE" "opencode"; \
|
||||
if [ ! -f "$CLAUDE_FILE" ] && [ ! -f "$GEMINI_FILE" ] && [ ! -f "$COPILOT_FILE" ] && [ ! -f "$CURSOR_FILE" ] && [ ! -f "$QWEN_FILE" ] && [ ! -f "$AGENTS_FILE" ]; then update_agent_file "$CLAUDE_FILE" "Claude Code"; fi ;;
|
||||
*) echo "ERROR: Unknown agent type '$AGENT_TYPE' (expected claude|gemini|copilot|cursor|qwen|opencode)"; exit 1 ;;
|
||||
esac
|
||||
echo; echo "Summary of changes:"; [ -n "$NEW_LANG" ] && echo "- Added language: $NEW_LANG"; [ -n "$NEW_FRAMEWORK" ] && echo "- Added framework: $NEW_FRAMEWORK"; [ -n "$NEW_DB" ] && [ "$NEW_DB" != "N/A" ] && echo "- Added database: $NEW_DB"; echo; echo "Usage: $0 [claude|gemini|copilot|cursor|qwen|opencode]"
|
||||
|
||||
# Enable strict error handling
|
||||
set -u
|
||||
set -o pipefail
|
||||
|
||||
#==============================================================================
|
||||
# Configuration and Global Variables
|
||||
#==============================================================================
|
||||
|
||||
# Get script directory and load common functions
|
||||
SCRIPT_DIR="$(cd "$(dirname "${BASH_SOURCE[0]}")" && pwd)"
|
||||
source "$SCRIPT_DIR/common.sh"
|
||||
|
||||
# Get all paths and variables from common functions
|
||||
eval $(get_feature_paths)
|
||||
|
||||
NEW_PLAN="$IMPL_PLAN" # Alias for compatibility with existing code
|
||||
AGENT_TYPE="${1:-}"
|
||||
|
||||
# Agent-specific file paths
|
||||
CLAUDE_FILE="$REPO_ROOT/CLAUDE.md"
|
||||
GEMINI_FILE="$REPO_ROOT/GEMINI.md"
|
||||
COPILOT_FILE="$REPO_ROOT/.github/copilot-instructions.md"
|
||||
CURSOR_FILE="$REPO_ROOT/.cursor/rules/specify-rules.mdc"
|
||||
QWEN_FILE="$REPO_ROOT/QWEN.md"
|
||||
AGENTS_FILE="$REPO_ROOT/AGENTS.md"
|
||||
WINDSURF_FILE="$REPO_ROOT/.windsurf/rules/specify-rules.md"
|
||||
KILOCODE_FILE="$REPO_ROOT/.kilocode/rules/specify-rules.md"
|
||||
AUGGIE_FILE="$REPO_ROOT/.augment/rules/specify-rules.md"
|
||||
ROO_FILE="$REPO_ROOT/.roo/rules/specify-rules.md"
|
||||
|
||||
# Template file
|
||||
TEMPLATE_FILE="$REPO_ROOT/.specify/templates/agent-file-template.md"
|
||||
|
||||
# Global variables for parsed plan data
|
||||
NEW_LANG=""
|
||||
NEW_FRAMEWORK=""
|
||||
NEW_DB=""
|
||||
NEW_PROJECT_TYPE=""
|
||||
|
||||
#==============================================================================
|
||||
# Utility Functions
|
||||
#==============================================================================
|
||||
|
||||
log_info() {
|
||||
echo "INFO: $1"
|
||||
}
|
||||
|
||||
log_success() {
|
||||
echo "✓ $1"
|
||||
}
|
||||
|
||||
log_error() {
|
||||
echo "ERROR: $1" >&2
|
||||
}
|
||||
|
||||
log_warning() {
|
||||
echo "WARNING: $1" >&2
|
||||
}
|
||||
|
||||
# Cleanup function for temporary files
|
||||
cleanup() {
|
||||
local exit_code=$?
|
||||
rm -f /tmp/agent_update_*_$$
|
||||
rm -f /tmp/manual_additions_$$
|
||||
exit $exit_code
|
||||
}
|
||||
|
||||
# Set up cleanup trap
|
||||
trap cleanup EXIT INT TERM
|
||||
|
||||
#==============================================================================
|
||||
# Validation Functions
|
||||
#==============================================================================
|
||||
|
||||
validate_environment() {
|
||||
# Check if we have a current branch/feature (git or non-git)
|
||||
if [[ -z "$CURRENT_BRANCH" ]]; then
|
||||
log_error "Unable to determine current feature"
|
||||
if [[ "$HAS_GIT" == "true" ]]; then
|
||||
log_info "Make sure you're on a feature branch"
|
||||
else
|
||||
log_info "Set SPECIFY_FEATURE environment variable or create a feature first"
|
||||
fi
|
||||
exit 1
|
||||
fi
|
||||
|
||||
# Check if plan.md exists
|
||||
if [[ ! -f "$NEW_PLAN" ]]; then
|
||||
log_error "No plan.md found at $NEW_PLAN"
|
||||
log_info "Make sure you're working on a feature with a corresponding spec directory"
|
||||
if [[ "$HAS_GIT" != "true" ]]; then
|
||||
log_info "Use: export SPECIFY_FEATURE=your-feature-name or create a new feature first"
|
||||
fi
|
||||
exit 1
|
||||
fi
|
||||
|
||||
# Check if template exists (needed for new files)
|
||||
if [[ ! -f "$TEMPLATE_FILE" ]]; then
|
||||
log_warning "Template file not found at $TEMPLATE_FILE"
|
||||
log_warning "Creating new agent files will fail"
|
||||
fi
|
||||
}
|
||||
|
||||
#==============================================================================
|
||||
# Plan Parsing Functions
|
||||
#==============================================================================
|
||||
|
||||
extract_plan_field() {
|
||||
local field_pattern="$1"
|
||||
local plan_file="$2"
|
||||
|
||||
grep "^\*\*${field_pattern}\*\*: " "$plan_file" 2>/dev/null | \
|
||||
head -1 | \
|
||||
sed "s|^\*\*${field_pattern}\*\*: ||" | \
|
||||
sed 's/^[ \t]*//;s/[ \t]*$//' | \
|
||||
grep -v "NEEDS CLARIFICATION" | \
|
||||
grep -v "^N/A$" || echo ""
|
||||
}
|
||||
|
||||
parse_plan_data() {
|
||||
local plan_file="$1"
|
||||
|
||||
if [[ ! -f "$plan_file" ]]; then
|
||||
log_error "Plan file not found: $plan_file"
|
||||
return 1
|
||||
fi
|
||||
|
||||
if [[ ! -r "$plan_file" ]]; then
|
||||
log_error "Plan file is not readable: $plan_file"
|
||||
return 1
|
||||
fi
|
||||
|
||||
log_info "Parsing plan data from $plan_file"
|
||||
|
||||
NEW_LANG=$(extract_plan_field "Language/Version" "$plan_file")
|
||||
NEW_FRAMEWORK=$(extract_plan_field "Primary Dependencies" "$plan_file")
|
||||
NEW_DB=$(extract_plan_field "Storage" "$plan_file")
|
||||
NEW_PROJECT_TYPE=$(extract_plan_field "Project Type" "$plan_file")
|
||||
|
||||
# Log what we found
|
||||
if [[ -n "$NEW_LANG" ]]; then
|
||||
log_info "Found language: $NEW_LANG"
|
||||
else
|
||||
log_warning "No language information found in plan"
|
||||
fi
|
||||
|
||||
if [[ -n "$NEW_FRAMEWORK" ]]; then
|
||||
log_info "Found framework: $NEW_FRAMEWORK"
|
||||
fi
|
||||
|
||||
if [[ -n "$NEW_DB" ]] && [[ "$NEW_DB" != "N/A" ]]; then
|
||||
log_info "Found database: $NEW_DB"
|
||||
fi
|
||||
|
||||
if [[ -n "$NEW_PROJECT_TYPE" ]]; then
|
||||
log_info "Found project type: $NEW_PROJECT_TYPE"
|
||||
fi
|
||||
}
|
||||
|
||||
format_technology_stack() {
|
||||
local lang="$1"
|
||||
local framework="$2"
|
||||
local parts=()
|
||||
|
||||
# Add non-empty parts
|
||||
[[ -n "$lang" && "$lang" != "NEEDS CLARIFICATION" ]] && parts+=("$lang")
|
||||
[[ -n "$framework" && "$framework" != "NEEDS CLARIFICATION" && "$framework" != "N/A" ]] && parts+=("$framework")
|
||||
|
||||
# Join with proper formatting
|
||||
if [[ ${#parts[@]} -eq 0 ]]; then
|
||||
echo ""
|
||||
elif [[ ${#parts[@]} -eq 1 ]]; then
|
||||
echo "${parts[0]}"
|
||||
else
|
||||
# Join multiple parts with " + "
|
||||
local result="${parts[0]}"
|
||||
for ((i=1; i<${#parts[@]}; i++)); do
|
||||
result="$result + ${parts[i]}"
|
||||
done
|
||||
echo "$result"
|
||||
fi
|
||||
}
|
||||
|
||||
#==============================================================================
|
||||
# Template and Content Generation Functions
|
||||
#==============================================================================
|
||||
|
||||
get_project_structure() {
|
||||
local project_type="$1"
|
||||
|
||||
if [[ "$project_type" == *"web"* ]]; then
|
||||
echo "backend/\\nfrontend/\\ntests/"
|
||||
else
|
||||
echo "src/\\ntests/"
|
||||
fi
|
||||
}
|
||||
|
||||
get_commands_for_language() {
|
||||
local lang="$1"
|
||||
|
||||
case "$lang" in
|
||||
*"Python"*)
|
||||
echo "cd src && pytest && ruff check ."
|
||||
;;
|
||||
*"Rust"*)
|
||||
echo "cargo test && cargo clippy"
|
||||
;;
|
||||
*"JavaScript"*|*"TypeScript"*)
|
||||
echo "npm test && npm run lint"
|
||||
;;
|
||||
*)
|
||||
echo "# Add commands for $lang"
|
||||
;;
|
||||
esac
|
||||
}
|
||||
|
||||
get_language_conventions() {
|
||||
local lang="$1"
|
||||
echo "$lang: Follow standard conventions"
|
||||
}
|
||||
|
||||
create_new_agent_file() {
|
||||
local target_file="$1"
|
||||
local temp_file="$2"
|
||||
local project_name="$3"
|
||||
local current_date="$4"
|
||||
|
||||
if [[ ! -f "$TEMPLATE_FILE" ]]; then
|
||||
log_error "Template not found at $TEMPLATE_FILE"
|
||||
return 1
|
||||
fi
|
||||
|
||||
if [[ ! -r "$TEMPLATE_FILE" ]]; then
|
||||
log_error "Template file is not readable: $TEMPLATE_FILE"
|
||||
return 1
|
||||
fi
|
||||
|
||||
log_info "Creating new agent context file from template..."
|
||||
|
||||
if ! cp "$TEMPLATE_FILE" "$temp_file"; then
|
||||
log_error "Failed to copy template file"
|
||||
return 1
|
||||
fi
|
||||
|
||||
# Replace template placeholders
|
||||
local project_structure
|
||||
project_structure=$(get_project_structure "$NEW_PROJECT_TYPE")
|
||||
|
||||
local commands
|
||||
commands=$(get_commands_for_language "$NEW_LANG")
|
||||
|
||||
local language_conventions
|
||||
language_conventions=$(get_language_conventions "$NEW_LANG")
|
||||
|
||||
# Perform substitutions with error checking using safer approach
|
||||
# Escape characters that are special in a sed replacement: backslash, &, and the | delimiter
local escaped_lang=$(printf '%s\n' "$NEW_LANG" | sed 's/[&|\\]/\\&/g')
local escaped_framework=$(printf '%s\n' "$NEW_FRAMEWORK" | sed 's/[&|\\]/\\&/g')
local escaped_branch=$(printf '%s\n' "$CURRENT_BRANCH" | sed 's/[&|\\]/\\&/g')
|
||||
|
||||
# Build technology stack and recent change strings conditionally
|
||||
local tech_stack
|
||||
if [[ -n "$escaped_lang" && -n "$escaped_framework" ]]; then
|
||||
tech_stack="- $escaped_lang + $escaped_framework ($escaped_branch)"
|
||||
elif [[ -n "$escaped_lang" ]]; then
|
||||
tech_stack="- $escaped_lang ($escaped_branch)"
|
||||
elif [[ -n "$escaped_framework" ]]; then
|
||||
tech_stack="- $escaped_framework ($escaped_branch)"
|
||||
else
|
||||
tech_stack="- ($escaped_branch)"
|
||||
fi
|
||||
|
||||
local recent_change
|
||||
if [[ -n "$escaped_lang" && -n "$escaped_framework" ]]; then
|
||||
recent_change="- $escaped_branch: Added $escaped_lang + $escaped_framework"
|
||||
elif [[ -n "$escaped_lang" ]]; then
|
||||
recent_change="- $escaped_branch: Added $escaped_lang"
|
||||
elif [[ -n "$escaped_framework" ]]; then
|
||||
recent_change="- $escaped_branch: Added $escaped_framework"
|
||||
else
|
||||
recent_change="- $escaped_branch: Added"
|
||||
fi
|
||||
|
||||
local substitutions=(
|
||||
"s|\[PROJECT NAME\]|$project_name|"
|
||||
"s|\[DATE\]|$current_date|"
|
||||
"s|\[EXTRACTED FROM ALL PLAN.MD FILES\]|$tech_stack|"
|
||||
"s|\[ACTUAL STRUCTURE FROM PLANS\]|$project_structure|g"
|
||||
"s|\[ONLY COMMANDS FOR ACTIVE TECHNOLOGIES\]|$commands|"
|
||||
"s|\[LANGUAGE-SPECIFIC, ONLY FOR LANGUAGES IN USE\]|$language_conventions|"
|
||||
"s|\[LAST 3 FEATURES AND WHAT THEY ADDED\]|$recent_change|"
|
||||
)
|
||||
|
||||
for substitution in "${substitutions[@]}"; do
|
||||
if ! sed -i.bak -e "$substitution" "$temp_file"; then
|
||||
log_error "Failed to perform substitution: $substitution"
|
||||
rm -f "$temp_file" "$temp_file.bak"
|
||||
return 1
|
||||
fi
|
||||
done
|
||||
|
||||
# Convert literal \n sequences in the template into real newlines.
# Note: capturing a newline via $(printf '\n') does not work because command
# substitution strips trailing newlines, so embed the newline in the sed script itself.
sed -i.bak2 's/\\n/\
/g' "$temp_file"
|
||||
|
||||
# Clean up backup files
|
||||
rm -f "$temp_file.bak" "$temp_file.bak2"
|
||||
|
||||
return 0
|
||||
}
|
||||
|
||||
|
||||
|
||||
|
||||
update_existing_agent_file() {
|
||||
local target_file="$1"
|
||||
local current_date="$2"
|
||||
|
||||
log_info "Updating existing agent context file..."
|
||||
|
||||
# Use a single temporary file for atomic update
|
||||
local temp_file
|
||||
temp_file=$(mktemp) || {
|
||||
log_error "Failed to create temporary file"
|
||||
return 1
|
||||
}
|
||||
|
||||
# Process the file in one pass
|
||||
local tech_stack=$(format_technology_stack "$NEW_LANG" "$NEW_FRAMEWORK")
|
||||
local new_tech_entries=()
|
||||
local new_change_entry=""
|
||||
|
||||
# Prepare new technology entries
|
||||
if [[ -n "$tech_stack" ]] && ! grep -qF -- "$tech_stack" "$target_file"; then
|
||||
new_tech_entries+=("- $tech_stack ($CURRENT_BRANCH)")
|
||||
fi
|
||||
|
||||
if [[ -n "$NEW_DB" ]] && [[ "$NEW_DB" != "N/A" ]] && [[ "$NEW_DB" != "NEEDS CLARIFICATION" ]] && ! grep -qF -- "$NEW_DB" "$target_file"; then
|
||||
new_tech_entries+=("- $NEW_DB ($CURRENT_BRANCH)")
|
||||
fi
|
||||
|
||||
# Prepare new change entry
|
||||
if [[ -n "$tech_stack" ]]; then
|
||||
new_change_entry="- $CURRENT_BRANCH: Added $tech_stack"
|
||||
elif [[ -n "$NEW_DB" ]] && [[ "$NEW_DB" != "N/A" ]] && [[ "$NEW_DB" != "NEEDS CLARIFICATION" ]]; then
|
||||
new_change_entry="- $CURRENT_BRANCH: Added $NEW_DB"
|
||||
fi
|
||||
|
||||
# Process file line by line
|
||||
local in_tech_section=false
|
||||
local in_changes_section=false
|
||||
local tech_entries_added=false
|
||||
local changes_entries_added=false
|
||||
local existing_changes_count=0
|
||||
|
||||
while IFS= read -r line || [[ -n "$line" ]]; do
|
||||
# Handle Active Technologies section
|
||||
if [[ "$line" == "## Active Technologies" ]]; then
|
||||
echo "$line" >> "$temp_file"
|
||||
in_tech_section=true
|
||||
continue
|
||||
elif [[ $in_tech_section == true ]] && [[ "$line" =~ ^##[[:space:]] ]]; then
|
||||
# Add new tech entries before closing the section
|
||||
if [[ $tech_entries_added == false ]] && [[ ${#new_tech_entries[@]} -gt 0 ]]; then
|
||||
printf '%s\n' "${new_tech_entries[@]}" >> "$temp_file"
|
||||
tech_entries_added=true
|
||||
fi
|
||||
echo "$line" >> "$temp_file"
|
||||
in_tech_section=false
|
||||
continue
|
||||
elif [[ $in_tech_section == true ]] && [[ -z "$line" ]]; then
|
||||
# Add new tech entries before empty line in tech section
|
||||
if [[ $tech_entries_added == false ]] && [[ ${#new_tech_entries[@]} -gt 0 ]]; then
|
||||
printf '%s\n' "${new_tech_entries[@]}" >> "$temp_file"
|
||||
tech_entries_added=true
|
||||
fi
|
||||
echo "$line" >> "$temp_file"
|
||||
continue
|
||||
fi
|
||||
|
||||
# Handle Recent Changes section
|
||||
if [[ "$line" == "## Recent Changes" ]]; then
|
||||
echo "$line" >> "$temp_file"
|
||||
# Add new change entry right after the heading
|
||||
if [[ -n "$new_change_entry" ]]; then
|
||||
echo "$new_change_entry" >> "$temp_file"
|
||||
fi
|
||||
in_changes_section=true
|
||||
changes_entries_added=true
|
||||
continue
|
||||
elif [[ $in_changes_section == true ]] && [[ "$line" =~ ^##[[:space:]] ]]; then
|
||||
echo "$line" >> "$temp_file"
|
||||
in_changes_section=false
|
||||
continue
|
||||
elif [[ $in_changes_section == true ]] && [[ "$line" == "- "* ]]; then
|
||||
# Keep only first 2 existing changes
|
||||
if [[ $existing_changes_count -lt 2 ]]; then
|
||||
echo "$line" >> "$temp_file"
|
||||
((existing_changes_count++))
|
||||
fi
|
||||
continue
|
||||
fi
|
||||
|
||||
# Update timestamp
|
||||
if [[ "$line" =~ \*\*Last\ updated\*\*:.*[0-9][0-9][0-9][0-9]-[0-9][0-9]-[0-9][0-9] ]]; then
|
||||
echo "$line" | sed "s/[0-9][0-9][0-9][0-9]-[0-9][0-9]-[0-9][0-9]/$current_date/" >> "$temp_file"
|
||||
else
|
||||
echo "$line" >> "$temp_file"
|
||||
fi
|
||||
done < "$target_file"
|
||||
|
||||
# Post-loop check: if we're still in the Active Technologies section and haven't added new entries
|
||||
if [[ $in_tech_section == true ]] && [[ $tech_entries_added == false ]] && [[ ${#new_tech_entries[@]} -gt 0 ]]; then
|
||||
printf '%s\n' "${new_tech_entries[@]}" >> "$temp_file"
|
||||
fi
|
||||
|
||||
# Move temp file to target atomically
|
||||
if ! mv "$temp_file" "$target_file"; then
|
||||
log_error "Failed to update target file"
|
||||
rm -f "$temp_file"
|
||||
return 1
|
||||
fi
|
||||
|
||||
return 0
|
||||
}
|
||||
#==============================================================================
|
||||
# Main Agent File Update Function
|
||||
#==============================================================================
|
||||
|
||||
update_agent_file() {
|
||||
local target_file="$1"
|
||||
local agent_name="$2"
|
||||
|
||||
if [[ -z "$target_file" ]] || [[ -z "$agent_name" ]]; then
|
||||
log_error "update_agent_file requires target_file and agent_name parameters"
|
||||
return 1
|
||||
fi
|
||||
|
||||
log_info "Updating $agent_name context file: $target_file"
|
||||
|
||||
local project_name
|
||||
project_name=$(basename "$REPO_ROOT")
|
||||
local current_date
|
||||
current_date=$(date +%Y-%m-%d)
|
||||
|
||||
# Create directory if it doesn't exist
|
||||
local target_dir
|
||||
target_dir=$(dirname "$target_file")
|
||||
if [[ ! -d "$target_dir" ]]; then
|
||||
if ! mkdir -p "$target_dir"; then
|
||||
log_error "Failed to create directory: $target_dir"
|
||||
return 1
|
||||
fi
|
||||
fi
|
||||
|
||||
if [[ ! -f "$target_file" ]]; then
|
||||
# Create new file from template
|
||||
local temp_file
|
||||
temp_file=$(mktemp) || {
|
||||
log_error "Failed to create temporary file"
|
||||
return 1
|
||||
}
|
||||
|
||||
if create_new_agent_file "$target_file" "$temp_file" "$project_name" "$current_date"; then
|
||||
if mv "$temp_file" "$target_file"; then
|
||||
log_success "Created new $agent_name context file"
|
||||
else
|
||||
log_error "Failed to move temporary file to $target_file"
|
||||
rm -f "$temp_file"
|
||||
return 1
|
||||
fi
|
||||
else
|
||||
log_error "Failed to create new agent file"
|
||||
rm -f "$temp_file"
|
||||
return 1
|
||||
fi
|
||||
else
|
||||
# Update existing file
|
||||
if [[ ! -r "$target_file" ]]; then
|
||||
log_error "Cannot read existing file: $target_file"
|
||||
return 1
|
||||
fi
|
||||
|
||||
if [[ ! -w "$target_file" ]]; then
|
||||
log_error "Cannot write to existing file: $target_file"
|
||||
return 1
|
||||
fi
|
||||
|
||||
if update_existing_agent_file "$target_file" "$current_date"; then
|
||||
log_success "Updated existing $agent_name context file"
|
||||
else
|
||||
log_error "Failed to update existing agent file"
|
||||
return 1
|
||||
fi
|
||||
fi
|
||||
|
||||
return 0
|
||||
}
|
||||
|
||||
#==============================================================================
|
||||
# Agent Selection and Processing
|
||||
#==============================================================================
|
||||
|
||||
update_specific_agent() {
|
||||
local agent_type="$1"
|
||||
|
||||
case "$agent_type" in
|
||||
claude)
|
||||
update_agent_file "$CLAUDE_FILE" "Claude Code"
|
||||
;;
|
||||
gemini)
|
||||
update_agent_file "$GEMINI_FILE" "Gemini CLI"
|
||||
;;
|
||||
copilot)
|
||||
update_agent_file "$COPILOT_FILE" "GitHub Copilot"
|
||||
;;
|
||||
cursor)
|
||||
update_agent_file "$CURSOR_FILE" "Cursor IDE"
|
||||
;;
|
||||
qwen)
|
||||
update_agent_file "$QWEN_FILE" "Qwen Code"
|
||||
;;
|
||||
opencode)
|
||||
update_agent_file "$AGENTS_FILE" "opencode"
|
||||
;;
|
||||
codex)
|
||||
update_agent_file "$AGENTS_FILE" "Codex CLI"
|
||||
;;
|
||||
windsurf)
|
||||
update_agent_file "$WINDSURF_FILE" "Windsurf"
|
||||
;;
|
||||
kilocode)
|
||||
update_agent_file "$KILOCODE_FILE" "Kilo Code"
|
||||
;;
|
||||
auggie)
|
||||
update_agent_file "$AUGGIE_FILE" "Auggie CLI"
|
||||
;;
|
||||
roo)
|
||||
update_agent_file "$ROO_FILE" "Roo Code"
|
||||
;;
|
||||
*)
|
||||
log_error "Unknown agent type '$agent_type'"
|
||||
log_error "Expected: claude|gemini|copilot|cursor|qwen|opencode|codex|windsurf|kilocode|auggie|roo"
|
||||
exit 1
|
||||
;;
|
||||
esac
|
||||
}
|
||||
|
||||
update_all_existing_agents() {
|
||||
local found_agent=false
|
||||
|
||||
# Check each possible agent file and update if it exists
|
||||
if [[ -f "$CLAUDE_FILE" ]]; then
|
||||
update_agent_file "$CLAUDE_FILE" "Claude Code"
|
||||
found_agent=true
|
||||
fi
|
||||
|
||||
if [[ -f "$GEMINI_FILE" ]]; then
|
||||
update_agent_file "$GEMINI_FILE" "Gemini CLI"
|
||||
found_agent=true
|
||||
fi
|
||||
|
||||
if [[ -f "$COPILOT_FILE" ]]; then
|
||||
update_agent_file "$COPILOT_FILE" "GitHub Copilot"
|
||||
found_agent=true
|
||||
fi
|
||||
|
||||
if [[ -f "$CURSOR_FILE" ]]; then
|
||||
update_agent_file "$CURSOR_FILE" "Cursor IDE"
|
||||
found_agent=true
|
||||
fi
|
||||
|
||||
if [[ -f "$QWEN_FILE" ]]; then
|
||||
update_agent_file "$QWEN_FILE" "Qwen Code"
|
||||
found_agent=true
|
||||
fi
|
||||
|
||||
if [[ -f "$AGENTS_FILE" ]]; then
|
||||
update_agent_file "$AGENTS_FILE" "Codex/opencode"
|
||||
found_agent=true
|
||||
fi
|
||||
|
||||
if [[ -f "$WINDSURF_FILE" ]]; then
|
||||
update_agent_file "$WINDSURF_FILE" "Windsurf"
|
||||
found_agent=true
|
||||
fi
|
||||
|
||||
if [[ -f "$KILOCODE_FILE" ]]; then
|
||||
update_agent_file "$KILOCODE_FILE" "Kilo Code"
|
||||
found_agent=true
|
||||
fi
|
||||
|
||||
if [[ -f "$AUGGIE_FILE" ]]; then
|
||||
update_agent_file "$AUGGIE_FILE" "Auggie CLI"
|
||||
found_agent=true
|
||||
fi
|
||||
|
||||
if [[ -f "$ROO_FILE" ]]; then
|
||||
update_agent_file "$ROO_FILE" "Roo Code"
|
||||
found_agent=true
|
||||
fi
|
||||
|
||||
# If no agent files exist, create a default Claude file
|
||||
if [[ "$found_agent" == false ]]; then
|
||||
log_info "No existing agent files found, creating default Claude file..."
|
||||
update_agent_file "$CLAUDE_FILE" "Claude Code"
|
||||
fi
|
||||
}
|
||||
print_summary() {
|
||||
echo
|
||||
log_info "Summary of changes:"
|
||||
|
||||
if [[ -n "$NEW_LANG" ]]; then
|
||||
echo " - Added language: $NEW_LANG"
|
||||
fi
|
||||
|
||||
if [[ -n "$NEW_FRAMEWORK" ]]; then
|
||||
echo " - Added framework: $NEW_FRAMEWORK"
|
||||
fi
|
||||
|
||||
if [[ -n "$NEW_DB" ]] && [[ "$NEW_DB" != "N/A" ]]; then
|
||||
echo " - Added database: $NEW_DB"
|
||||
fi
|
||||
|
||||
echo
|
||||
log_info "Usage: $0 [claude|gemini|copilot|cursor|qwen|opencode|codex|windsurf|kilocode|auggie|roo]"
|
||||
}
|
||||
|
||||
#==============================================================================
|
||||
# Main Execution
|
||||
#==============================================================================
|
||||
|
||||
main() {
|
||||
# Validate environment before proceeding
|
||||
validate_environment
|
||||
|
||||
log_info "=== Updating agent context files for feature $CURRENT_BRANCH ==="
|
||||
|
||||
# Parse the plan file to extract project information
|
||||
if ! parse_plan_data "$NEW_PLAN"; then
|
||||
log_error "Failed to parse plan data"
|
||||
exit 1
|
||||
fi
|
||||
|
||||
# Process based on agent type argument
|
||||
local success=true
|
||||
|
||||
if [[ -z "$AGENT_TYPE" ]]; then
|
||||
# No specific agent provided - update all existing agent files
|
||||
log_info "No agent specified, updating all existing agent files..."
|
||||
if ! update_all_existing_agents; then
|
||||
success=false
|
||||
fi
|
||||
else
|
||||
# Specific agent provided - update only that agent
|
||||
log_info "Updating specific agent: $AGENT_TYPE"
|
||||
if ! update_specific_agent "$AGENT_TYPE"; then
|
||||
success=false
|
||||
fi
|
||||
fi
|
||||
|
||||
# Print summary
|
||||
print_summary
|
||||
|
||||
if [[ "$success" == true ]]; then
|
||||
log_success "Agent context update completed successfully"
|
||||
exit 0
|
||||
else
|
||||
log_error "Agent context update completed with errors"
|
||||
exit 1
|
||||
fi
|
||||
}
|
||||
|
||||
# Execute main function if script is run directly
|
||||
if [[ "${BASH_SOURCE[0]}" == "${0}" ]]; then
|
||||
main "$@"
|
||||
fi
|
||||
|
||||
@@ -145,7 +145,8 @@ ios/ or android/
|
||||
- Quickstart test = story validation steps
|
||||
|
||||
5. **Update agent file incrementally** (O(1) operation):
|
||||
- Run `.specify/scripts/bash/update-agent-context.sh gemini` for your AI assistant
|
||||
- Run `.specify/scripts/bash/update-agent-context.sh codex`
|
||||
**IMPORTANT**: Execute it exactly as specified above. Do not add or remove any arguments.
|
||||
- If exists: Add only NEW tech from current plan
|
||||
- Preserve manual additions between markers
|
||||
- Update recent changes (keep last 3)
|
||||
|
||||
10
apps/backend/.editorconfig
Normal file
@@ -0,0 +1,10 @@
|
||||
root = true
|
||||
|
||||
[*]
|
||||
indent_style = space
|
||||
indent_size = 2
|
||||
charset = utf-8
|
||||
trim_trailing_whitespace = true
|
||||
insert_final_newline = true
|
||||
end_of_line = lf
|
||||
max_line_length = null
|
||||
17
apps/backend/.env.example
Normal file
@@ -0,0 +1,17 @@
|
||||
# Database connection string
|
||||
DATABASE_URI=mongodb://127.0.0.1/your-database-name
|
||||
|
||||
# Or use a PG connection string
|
||||
#DATABASE_URI=postgresql://127.0.0.1:5432/your-database-name
|
||||
|
||||
# Used to encrypt JWT tokens
|
||||
PAYLOAD_SECRET=YOUR_SECRET_HERE
|
||||
|
||||
# Used to configure CORS, format links and more. No trailing slash
|
||||
NEXT_PUBLIC_SERVER_URL=http://localhost:3000
|
||||
|
||||
# Secret used to authenticate cron jobs
|
||||
CRON_SECRET=YOUR_CRON_SECRET_HERE
|
||||
|
||||
# Used to validate preview requests
|
||||
PREVIEW_SECRET=YOUR_SECRET_HERE
|
||||
21
apps/backend/.gitignore
vendored
Normal file
@@ -0,0 +1,21 @@
|
||||
build
|
||||
dist
media
|
||||
node_modules
|
||||
.DS_Store
|
||||
.env
|
||||
.next
|
||||
.vercel
|
||||
|
||||
# Payload default media upload directory
|
||||
public/media/
|
||||
|
||||
public/robots.txt
|
||||
public/sitemap*.xml
|
||||
|
||||
|
||||
# Playwright
|
||||
node_modules/
|
||||
/test-results/
|
||||
/playwright-report/
|
||||
/blob-report/
|
||||
/playwright/.cache/
|
||||
2
apps/backend/.npmrc
Normal file
@@ -0,0 +1,2 @@
|
||||
legacy-peer-deps=true
|
||||
enable-pre-post-scripts=true
|
||||
14
apps/backend/.prettierignore
Normal file
@@ -0,0 +1,14 @@
|
||||
**/payload-types.ts
|
||||
.tmp
|
||||
**/.git
|
||||
**/.hg
|
||||
**/.pnp.*
|
||||
**/.svn
|
||||
**/.yarn/**
|
||||
**/build
|
||||
**/dist/**
|
||||
**/node_modules
|
||||
**/temp
|
||||
**/docs/**
|
||||
tsconfig.json
|
||||
|
||||
6
apps/backend/.prettierrc.json
Normal file
@@ -0,0 +1,6 @@
|
||||
{
|
||||
"singleQuote": true,
|
||||
"trailingComma": "all",
|
||||
"printWidth": 100,
|
||||
"semi": false
|
||||
}
|
||||
71
apps/backend/Dockerfile
Normal file
@@ -0,0 +1,71 @@
|
||||
# To use this Dockerfile, you have to set `output: 'standalone'` in your next.config.js file.
|
||||
# From https://github.com/vercel/next.js/blob/canary/examples/with-docker/Dockerfile
|
||||
|
||||
FROM node:22.17.0-alpine AS base
|
||||
|
||||
# Install dependencies only when needed
|
||||
FROM base AS deps
|
||||
# Check https://github.com/nodejs/docker-node/tree/b4117f9333da4138b03a546ec926ef50a31506c3#nodealpine to understand why libc6-compat might be needed.
|
||||
RUN apk add --no-cache libc6-compat
|
||||
WORKDIR /app
|
||||
|
||||
# Install dependencies based on the preferred package manager
|
||||
COPY package.json yarn.lock* package-lock.json* pnpm-lock.yaml* ./
|
||||
RUN \
|
||||
if [ -f yarn.lock ]; then yarn --frozen-lockfile; \
|
||||
elif [ -f package-lock.json ]; then npm ci; \
|
||||
elif [ -f pnpm-lock.yaml ]; then corepack enable pnpm && pnpm i --frozen-lockfile; \
|
||||
else echo "Lockfile not found." && exit 1; \
|
||||
fi
|
||||
|
||||
|
||||
# Rebuild the source code only when needed
|
||||
FROM base AS builder
|
||||
WORKDIR /app
|
||||
COPY --from=deps /app/node_modules ./node_modules
|
||||
COPY . .
|
||||
|
||||
# Next.js collects completely anonymous telemetry data about general usage.
|
||||
# Learn more here: https://nextjs.org/telemetry
|
||||
# Uncomment the following line in case you want to disable telemetry during the build.
|
||||
# ENV NEXT_TELEMETRY_DISABLED 1
|
||||
|
||||
RUN \
|
||||
if [ -f yarn.lock ]; then yarn run build; \
|
||||
elif [ -f package-lock.json ]; then npm run build; \
|
||||
elif [ -f pnpm-lock.yaml ]; then corepack enable pnpm && pnpm run build; \
|
||||
else echo "Lockfile not found." && exit 1; \
|
||||
fi
|
||||
|
||||
# Production image, copy all the files and run next
|
||||
FROM base AS runner
|
||||
WORKDIR /app
|
||||
|
||||
ENV NODE_ENV production
|
||||
# Uncomment the following line in case you want to disable telemetry during runtime.
|
||||
# ENV NEXT_TELEMETRY_DISABLED 1
|
||||
|
||||
RUN addgroup --system --gid 1001 nodejs
|
||||
RUN adduser --system --uid 1001 nextjs
|
||||
|
||||
# Remove this line if you do not have this folder
|
||||
COPY --from=builder /app/public ./public
|
||||
|
||||
# Set the correct permission for prerender cache
|
||||
RUN mkdir .next
|
||||
RUN chown nextjs:nodejs .next
|
||||
|
||||
# Automatically leverage output traces to reduce image size
|
||||
# https://nextjs.org/docs/advanced-features/output-file-tracing
|
||||
COPY --from=builder --chown=nextjs:nodejs /app/.next/standalone ./
|
||||
COPY --from=builder --chown=nextjs:nodejs /app/.next/static ./.next/static
|
||||
|
||||
USER nextjs
|
||||
|
||||
EXPOSE 3000
|
||||
|
||||
ENV PORT 3000
|
||||
|
||||
# server.js is created by next build from the standalone output
|
||||
# https://nextjs.org/docs/pages/api-reference/next-config-js/output
|
||||
CMD HOSTNAME="0.0.0.0" node server.js
|
||||
321
apps/backend/README.md
Normal file
@@ -0,0 +1,321 @@
|
||||
# Payload Website Template
|
||||
|
||||
This is the official [Payload Website Template](https://github.com/payloadcms/payload/blob/main/templates/website). Use it to power websites, blogs, or portfolios from small to enterprise. This repo includes a fully-working backend, enterprise-grade admin panel, and a beautifully designed, production-ready website.
|
||||
|
||||
This template is right for you if you are working on:
|
||||
|
||||
- A personal or enterprise-grade website, blog, or portfolio
|
||||
- A content publishing platform with a fully featured publication workflow
|
||||
- Exploring the capabilities of Payload
|
||||
|
||||
Core features:
|
||||
|
||||
- [Pre-configured Payload Config](#how-it-works)
|
||||
- [Authentication](#users-authentication)
|
||||
- [Access Control](#access-control)
|
||||
- [Layout Builder](#layout-builder)
|
||||
- [Draft Preview](#draft-preview)
|
||||
- [Live Preview](#live-preview)
|
||||
- [On-demand Revalidation](#on-demand-revalidation)
|
||||
- [SEO](#seo)
|
||||
- [Search](#search)
|
||||
- [Redirects](#redirects)
|
||||
- [Jobs and Scheduled Publishing](#jobs-and-scheduled-publish)
|
||||
- [Website](#website)
|
||||
|
||||
## Quick Start
|
||||
|
||||
To spin up this example locally, follow these steps:
|
||||
|
||||
### Clone
|
||||
|
||||
If you have not done so already, you need to have a standalone copy of this repo on your machine. If you've already cloned this repo, skip to [Development](#development).
|
||||
|
||||
#### Method 1 (recommended)
|
||||
|
||||
Go to Payload Cloud and [clone this template](https://payloadcms.com/new/clone/website). This will create a new repository on your GitHub account with this template's code which you can then clone to your own machine.
|
||||
|
||||
#### Method 2
|
||||
|
||||
Use the `create-payload-app` CLI to clone this template directly to your machine:
|
||||
|
||||
```bash
|
||||
pnpx create-payload-app my-project -t website
|
||||
```
|
||||
|
||||
#### Method 3
|
||||
|
||||
Use the `git` CLI to clone this template directly to your machine:
|
||||
|
||||
```bash
|
||||
git clone -n --depth=1 --filter=tree:0 https://github.com/payloadcms/payload my-project && cd my-project && git sparse-checkout set --no-cone templates/website && git checkout && rm -rf .git && git init && git add . && git mv -f templates/website/{.,}* . && git add . && git commit -m "Initial commit"
|
||||
```
|
||||
|
||||
### Development
|
||||
|
||||
1. First [clone the repo](#clone) if you have not done so already
|
||||
1. `cd my-project && cp .env.example .env` to copy the example environment variables
|
||||
1. `pnpm install && pnpm dev` to install dependencies and start the dev server
|
||||
1. open `http://localhost:3000` to open the app in your browser
|
||||
|
||||
That's it! Changes made in `./src` will be reflected in your app. Follow the on-screen instructions to log in and create your first admin user. Then check out [Production](#production) once you're ready to build and serve your app, and [Deployment](#deployment) when you're ready to go live.
|
||||
|
||||
## How it works
|
||||
|
||||
The Payload config is tailored specifically to the needs of most websites. It is pre-configured in the following ways:
|
||||
|
||||
### Collections
|
||||
|
||||
See the [Collections](https://payloadcms.com/docs/configuration/collections) docs for details on how to extend this functionality.
|
||||
|
||||
- #### Users (Authentication)
|
||||
|
||||
Users are auth-enabled collections that have access to the admin panel and unpublished content. See [Access Control](#access-control) for more details.
|
||||
|
||||
For additional help, see the official [Auth Example](https://github.com/payloadcms/payload/tree/main/examples/auth) or the [Authentication](https://payloadcms.com/docs/authentication/overview#authentication-overview) docs.
|
||||
|
||||
- #### Posts
|
||||
|
||||
Posts are used to generate blog posts, news articles, or any other type of content that is published over time. All posts are layout builder enabled so you can generate unique layouts for each post using layout-building blocks; see [Layout Builder](#layout-builder) for more details. Posts are also draft-enabled so you can preview them before publishing them to your website; see [Draft Preview](#draft-preview) for more details.
|
||||
|
||||
- #### Pages
|
||||
|
||||
All pages are layout builder enabled so you can generate unique layouts for each page using layout-building blocks; see [Layout Builder](#layout-builder) for more details. Pages are also draft-enabled so you can preview them before publishing them to your website; see [Draft Preview](#draft-preview) for more details.
|
||||
|
||||
- #### Media
|
||||
|
||||
This is the uploads enabled collection used by pages, posts, and projects to contain media like images, videos, downloads, and other assets. It features pre-configured sizes, focal point and manual resizing to help you manage your pictures.
|
||||
|
||||
- #### Categories
|
||||
|
||||
A taxonomy used to group posts together. Categories can be nested inside of one another, for example "News > Technology". See the official [Payload Nested Docs Plugin](https://payloadcms.com/docs/plugins/nested-docs) for more details.
|
||||
|
||||
### Globals
|
||||
|
||||
See the [Globals](https://payloadcms.com/docs/configuration/globals) docs for details on how to extend this functionality.
|
||||
|
||||
- `Header`
|
||||
|
||||
The data required by the header on your front-end, such as nav links (see the sketch after this list).
|
||||
|
||||
- `Footer`
|
||||
|
||||
Same as above but for the footer of your site.
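
As a minimal sketch of what one of these globals can look like, assuming Payload's `GlobalConfig` type (the `navItems` field names here are illustrative; the actual template ships a richer link field):

```ts
// A minimal sketch of the Header global, assuming Payload's GlobalConfig type.
// The navItems shape is illustrative; adapt it to your own nav structure.
import type { GlobalConfig } from 'payload'

export const Header: GlobalConfig = {
  slug: 'header',
  access: {
    read: () => true, // nav data is public
  },
  fields: [
    {
      name: 'navItems',
      type: 'array',
      fields: [
        { name: 'label', type: 'text', required: true },
        { name: 'url', type: 'text', required: true },
      ],
    },
  ],
}
```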
|
||||
|
||||
## Access control
|
||||
|
||||
Basic access control is set up to limit access to various content based on publishing status.
|
||||
|
||||
- `users`: Users can access the admin panel and create or edit content.
|
||||
- `posts`: Everyone can access published posts, but only users can create, update, or delete them.
|
||||
- `pages`: Everyone can access published pages, but only users can create, update, or delete them.
|
||||
|
||||
For more details on how to extend this functionality, see the [Payload Access Control](https://payloadcms.com/docs/access-control/overview#access-control) docs.
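
As a rough sketch of what this looks like in practice, assuming Payload's `CollectionConfig` type and the `_status` field that draft-enabled collections get (fields are trimmed down from the real config):

```ts
// A minimal sketch of publish-status-based access on a draft-enabled collection.
// Adapt the slug and fields to the collections in your own config.
import type { CollectionConfig } from 'payload'

export const Pages: CollectionConfig = {
  slug: 'pages',
  versions: { drafts: true },
  access: {
    // Everyone can read published documents; authenticated users can also read drafts
    read: ({ req: { user } }) => {
      if (user) return true
      return { _status: { equals: 'published' } }
    },
    // Only authenticated users can create, update, or delete
    create: ({ req: { user } }) => Boolean(user),
    update: ({ req: { user } }) => Boolean(user),
    delete: ({ req: { user } }) => Boolean(user),
  },
  fields: [{ name: 'title', type: 'text', required: true }],
}
```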
|
||||
|
||||
## Layout Builder
|
||||
|
||||
Create unique page layouts for any type of content using a powerful layout builder. This template comes pre-configured with the following layout building blocks:
|
||||
|
||||
- Hero
|
||||
- Content
|
||||
- Media
|
||||
- Call To Action
|
||||
- Archive
|
||||
|
||||
Each block is fully designed and built into the front-end website that comes with this template. See [Website](#website) for more details.
|
||||
|
||||
## Lexical editor
|
||||
|
||||
A deep editorial experience that lets you focus on writing content without breaking out of your flow, with support for Payload blocks, media, links, and other features out of the box. See the [Lexical](https://payloadcms.com/docs/rich-text/overview) docs.
|
||||
|
||||
## Draft Preview
|
||||
|
||||
All posts and pages are draft-enabled so you can preview them before publishing them to your website. To do this, these collections use [Versions](https://payloadcms.com/docs/configuration/collections#versions) with `drafts` set to `true`. This means that when you create a new post, project, or page, it will be saved as a draft and will not be visible on your website until you publish it. This also means that you can preview your draft before publishing it to your website. To do this, we automatically format a custom URL which redirects to your front-end to securely fetch the draft version of your content.
|
||||
|
||||
Since the front-end of this template is statically generated, this also means that pages, posts, and projects will need to be regenerated as changes are made to published documents. To do this, we use an `afterChange` hook to regenerate the front-end when a document has changed and its `_status` is `published`.
|
||||
|
||||
For more details on how to extend this functionality, see the official [Draft Preview Example](https://github.com/payloadcms/payload/tree/examples/draft-preview).
|
||||
|
||||
## Live preview
|
||||
|
||||
In addition to draft previews, you can also enable live preview to view the resulting page as you edit content, with full support for SSR rendering. See the [Live Preview docs](https://payloadcms.com/docs/live-preview/overview) for more details.
|
||||
|
||||
## On-demand Revalidation
|
||||
|
||||
We've added hooks to collections and globals so that changes to your pages, posts, footer, or header are automatically reflected in the frontend via on-demand revalidation, supported by Next.js.
|
||||
|
||||
> Note: if an image has been changed, for example cropped, you will need to republish any page it's used on in order to revalidate the Next.js image cache.
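
A minimal sketch of such a hook, assuming Payload's `CollectionAfterChangeHook` type and Next.js' `revalidatePath`; the slug-to-route mapping (`home` served at `/`) is an assumption, not the template's exact logic:

```ts
// A sketch of an afterChange hook that revalidates a route when a document is published.
// Assumes slugs map 1:1 to routes, with 'home' served at '/'.
import { revalidatePath } from 'next/cache'
import type { CollectionAfterChangeHook } from 'payload'

export const revalidatePage: CollectionAfterChangeHook = ({ doc, previousDoc, req }) => {
  const toPath = (slug: unknown) => (slug === 'home' ? '/' : `/${String(slug ?? '')}`)

  if (doc._status === 'published') {
    const path = toPath(doc.slug)
    req.payload.logger.info(`Revalidating path: ${path}`)
    revalidatePath(path)
  }

  // If a previously published document was unpublished, revalidate its old path too
  if (previousDoc?._status === 'published' && doc._status !== 'published') {
    revalidatePath(toPath(previousDoc.slug))
  }

  return doc
}
```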
|
||||
|
||||
## SEO
|
||||
|
||||
This template comes pre-configured with the official [Payload SEO Plugin](https://payloadcms.com/docs/plugins/seo) for complete SEO control from the admin panel. All SEO data is fully integrated into the front-end website that comes with this template. See [Website](#website) for more details.
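
Registration happens in `payload.config.ts`; this is a sketch assuming the plugin's documented `collections`, `uploadsCollection`, and `generateTitle` options, with the site name and collection slugs as placeholder assumptions:

```ts
// A sketch of registering the SEO plugin; the site name and slugs are assumptions.
import { seoPlugin } from '@payloadcms/plugin-seo'

export const plugins = [
  seoPlugin({
    collections: ['pages', 'posts'],
    uploadsCollection: 'media',
    generateTitle: ({ doc }) => `${String(doc?.title ?? 'Website')} | My Site`,
  }),
]
```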
|
||||
|
||||
## Search
|
||||
|
||||
This template also comes pre-configured with the official [Payload Search Plugin](https://payloadcms.com/docs/plugins/search) to showcase how SSR search features can easily be implemented in Next.js with Payload. See [Website](#website) for more details.
|
||||
|
||||
## Redirects
|
||||
|
||||
If you are migrating an existing site or moving content to a new URL, you can use the `redirects` collection to create a proper redirect from old URLs to new ones. This will ensure that proper request status codes are returned to search engines and that your users are not left with a broken link. This template comes pre-configured with the official [Payload Redirects Plugin](https://payloadcms.com/docs/plugins/redirects) for complete redirect control from the admin panel. All redirects are fully integrated into the front-end website that comes with this template. See [Website](#website) for more details.
|
||||
|
||||
## Jobs and Scheduled Publish
|
||||
|
||||
We have configured [Scheduled Publish](https://payloadcms.com/docs/versions/drafts#scheduled-publish), which uses the [jobs queue](https://payloadcms.com/docs/jobs-queue/jobs) to publish or unpublish your content at a scheduled time. The tasks run on a cron schedule and can also be run as a separate instance if needed.
|
||||
|
||||
> Note: When deployed on Vercel, depending on the plan tier, you may be limited to daily cron only.
|
||||
|
||||
## Website
|
||||
|
||||
This template includes a beautifully designed, production-ready front-end built with the [Next.js App Router](https://nextjs.org), served right alongside your Payload app in a single instance. This means you can deploy both your backend and website wherever you need them.
|
||||
|
||||
Core features:
|
||||
|
||||
- [Next.js App Router](https://nextjs.org)
|
||||
- [TypeScript](https://www.typescriptlang.org)
|
||||
- [React Hook Form](https://react-hook-form.com)
|
||||
- [Payload Admin Bar](https://github.com/payloadcms/payload/tree/main/packages/admin-bar)
|
||||
- [TailwindCSS styling](https://tailwindcss.com/)
|
||||
- [shadcn/ui components](https://ui.shadcn.com/)
|
||||
- User Accounts and Authentication
|
||||
- Fully featured blog
|
||||
- Publication workflow
|
||||
- Dark mode
|
||||
- Pre-made layout building blocks
|
||||
- SEO
|
||||
- Search
|
||||
- Redirects
|
||||
- Live preview
|
||||
|
||||
### Cache
|
||||
|
||||
Although Next.js includes a robust set of caching strategies out of the box, Payload Cloud proxies and caches all files through Cloudflare using the [Official Cloud Plugin](https://www.npmjs.com/package/@payloadcms/payload-cloud). This means that Next.js caching is not needed and is disabled by default. If you are hosting your app outside of Payload Cloud, you can easily re-enable the Next.js caching mechanisms by removing the `no-store` directive from all fetch requests in `./src/app/_api` and then removing all instances of `export const dynamic = 'force-dynamic'` from page files, such as `./src/app/(pages)/[slug]/page.tsx`. For more details, see the official [Next.js Caching Docs](https://nextjs.org/docs/app/building-your-application/caching).
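
For example, a data helper could opt back into Next.js caching like this (a sketch; `getPage` and the query shape are illustrative, not the template's actual helpers):

```ts
// A sketch of re-enabling Next.js data caching for a Payload REST fetch.
// getPage is a hypothetical helper; the real helpers live in ./src/app/_api.
export const getPage = async (slug: string) => {
  const base = process.env.NEXT_PUBLIC_SERVER_URL ?? 'http://localhost:3000'
  const res = await fetch(
    `${base}/api/pages?where[slug][equals]=${encodeURIComponent(slug)}&depth=1`,
    {
      // Cache and revalidate every 60 seconds instead of opting out with `cache: 'no-store'`
      next: { revalidate: 60 },
    },
  )

  if (!res.ok) throw new Error(`Failed to fetch page: ${res.status}`)
  return res.json()
}
```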
|
||||
|
||||
## Development
|
||||
|
||||
To spin up this example locally, follow the [Quick Start](#quick-start). Then [Seed](#seed) the database with a few pages, posts, and projects.
|
||||
|
||||
### Working with Postgres
|
||||
|
||||
Postgres and other SQL-based databases follow a strict schema for managing your data. In comparison to our MongoDB adapter, this means that there are a few extra steps to working with Postgres.
|
||||
|
||||
Note that when making big schema changes you run the risk of losing data if you're not migrating it manually.
|
||||
|
||||
#### Local development
|
||||
|
||||
Ideally we recommend running a local copy of your database so that schema updates are as fast as possible. By default the Postgres adapter has `push: true` for development environments. This will let you add, modify and remove fields and collections without needing to run any data migrations.
|
||||
|
||||
If your database is pointed at production, you will want to set `push: false`; otherwise you risk losing data or having your migrations fall out of sync.
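
A sketch of where that option lives, assuming the `@payloadcms/db-postgres` adapter (this backend currently ships with the MongoDB adapter, so treat this as illustrative):

```ts
// payload.config.ts — a sketch assuming @payloadcms/db-postgres is installed.
// push: false keeps the schema under migration control instead of dev-mode pushes.
import { postgresAdapter } from '@payloadcms/db-postgres'
import { buildConfig } from 'payload'

export default buildConfig({
  // ...
  db: postgresAdapter({
    pool: {
      connectionString: process.env.DATABASE_URI || '',
    },
    push: false, // rely on `pnpm payload migrate` instead of automatic schema push
  }),
  // ...
})
```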
|
||||
|
||||
#### Migrations
|
||||
|
||||
[Migrations](https://payloadcms.com/docs/database/migrations) are essentially versioned SQL files that keep track of your schema. When deploying with Postgres, you will need to make sure you create and then run your migrations.
|
||||
|
||||
Locally, create a migration:
|
||||
|
||||
```bash
|
||||
pnpm payload migrate:create
|
||||
```
|
||||
|
||||
This creates the migration files you will need to push alongside your new configuration.
|
||||
|
||||
On the server, after building and before running `pnpm start`, run your migrations:
|
||||
|
||||
```bash
|
||||
pnpm payload migrate
|
||||
```
|
||||
|
||||
This command checks for any migrations that have not yet been run, runs them, and keeps a record of completed migrations in the database.
|
||||
|
||||
### Docker
|
||||
|
||||
Alternatively, you can use [Docker](https://www.docker.com) to spin up this template locally. To do so, follow these steps:
|
||||
|
||||
1. Follow [steps 1 and 2 from above](#development); the docker-compose file will automatically use the `.env` file in your project root
|
||||
1. Next run `docker-compose up`
|
||||
1. Follow [steps 4 and 5 from above](#development) to login and create your first admin user
|
||||
|
||||
That's it! The Docker instance will help you get up and running quickly while also standardizing the development environment across your teams.
|
||||
|
||||
### Seed
|
||||
|
||||
To seed the database with a few pages, posts, and projects you can click the 'seed database' link from the admin panel.
|
||||
|
||||
The seed script will also create a demo user for demonstration purposes only:
|
||||
|
||||
- Demo Author
|
||||
- Email: `demo-author@payloadcms.com`
|
||||
- Password: `password`
|
||||
|
||||
> NOTICE: seeding the database is destructive because it drops your current database to populate a fresh one from the seed template. Only run this command if you are starting a new project or can afford to lose your current data.
|
||||
|
||||
## Production
|
||||
|
||||
To run Payload in production, you need to build and start the Admin panel. To do so, follow these steps:
|
||||
|
||||
1. Invoke the `next build` script by running `pnpm build` or `npm run build` in your project root. This creates a `.next` directory with a production-ready admin bundle.
|
||||
1. Finally run `pnpm start` or `npm run start` to run Node in production and serve Payload from the `.next` directory.
|
||||
1. When you're ready to go live, see Deployment below for more details.
|
||||
|
||||
### Deploying to Payload Cloud
|
||||
|
||||
The easiest way to deploy your project is to use [Payload Cloud](https://payloadcms.com/new/import), a one-click hosting solution to deploy production-ready instances of your Payload apps directly from your GitHub repo.
|
||||
|
||||
### Deploying to Vercel
|
||||
|
||||
This template can also be deployed to Vercel for free. You can get started by choosing the Vercel DB adapter during the setup of the template or by manually installing and configuring it:
|
||||
|
||||
```bash
|
||||
pnpm add @payloadcms/db-vercel-postgres
|
||||
```
|
||||
|
||||
```ts
|
||||
// payload.config.ts
import { vercelPostgresAdapter } from '@payloadcms/db-vercel-postgres'

export default buildConfig({
  // ...
  db: vercelPostgresAdapter({
    pool: {
      connectionString: process.env.POSTGRES_URL || '',
    },
  }),
  // ...
})
```
|
||||
|
||||
We also support Vercel's blob storage:
|
||||
|
||||
```bash
|
||||
pnpm add @payloadcms/storage-vercel-blob
|
||||
```
|
||||
|
||||
```ts
|
||||
// payload.config.ts
import { vercelBlobStorage } from '@payloadcms/storage-vercel-blob'

export default buildConfig({
  // ...
  plugins: [
    vercelBlobStorage({
      collections: {
        [Media.slug]: true,
      },
      token: process.env.BLOB_READ_WRITE_TOKEN || '',
    }),
  ],
  // ...
})
```
|
||||
|
||||
There is also a simplified [one-click deploy](https://github.com/payloadcms/payload/tree/templates/with-vercel-postgres) to Vercel should you need it.
|
||||
|
||||
### Self-hosting
|
||||
|
||||
Before deploying your app, you need to:
|
||||
|
||||
1. Ensure your app builds and serves in production. See [Production](#production) for more details.
|
||||
2. You can then deploy Payload as you would any other Node.js or Next.js application, whether directly on a VPS, on DigitalOcean's App Platform, via Coolify, or elsewhere. More guides coming soon.
|
||||
|
||||
You can also deploy your app manually; check out the [deployment documentation](https://payloadcms.com/docs/production/deployment) for full details.
|
||||
|
||||
## Questions
|
||||
|
||||
If you have any issues or questions, reach out to us on [Discord](https://discord.com/invite/payload) or start a [GitHub discussion](https://github.com/payloadcms/payload/discussions).
|
||||
17
apps/backend/components.json
Normal file
@@ -0,0 +1,17 @@
|
||||
{
|
||||
"$schema": "https://ui.shadcn.com/schema.json",
|
||||
"style": "default",
|
||||
"rsc": true,
|
||||
"tsx": true,
|
||||
"tailwind": {
|
||||
"config": "tailwind.config.js",
|
||||
"css": "src/app/(frontend)/globals.css",
|
||||
"baseColor": "slate",
|
||||
"cssVariables": true,
|
||||
"prefix": ""
|
||||
},
|
||||
"aliases": {
|
||||
"components": "@/components",
|
||||
"utils": "@/utilities/ui"
|
||||
}
|
||||
}
|
||||
31
apps/backend/docker-compose.yml
Normal file
@@ -0,0 +1,31 @@
|
||||
version: '3'
|
||||
|
||||
services:
|
||||
payload:
|
||||
image: node:18-alpine
|
||||
ports:
|
||||
- '3000:3000'
|
||||
volumes:
|
||||
- .:/home/node/app
|
||||
- node_modules:/home/node/app/node_modules
|
||||
working_dir: /home/node/app/
|
||||
command: sh -c "yarn install && yarn dev"
|
||||
depends_on:
|
||||
- mongo
|
||||
env_file:
|
||||
- .env
|
||||
|
||||
mongo:
|
||||
image: mongo:latest
|
||||
ports:
|
||||
- '27017:27017'
|
||||
command:
|
||||
- --storageEngine=wiredTiger
|
||||
volumes:
|
||||
- data:/data/db
|
||||
logging:
|
||||
driver: none
|
||||
|
||||
volumes:
|
||||
data:
|
||||
node_modules:
|
||||
38
apps/backend/eslint.config.mjs
Normal file
@@ -0,0 +1,38 @@
|
||||
import { dirname } from 'path'
|
||||
import { fileURLToPath } from 'url'
|
||||
import { FlatCompat } from '@eslint/eslintrc'
|
||||
|
||||
const __filename = fileURLToPath(import.meta.url)
|
||||
const __dirname = dirname(__filename)
|
||||
|
||||
const compat = new FlatCompat({
|
||||
baseDirectory: __dirname,
|
||||
})
|
||||
|
||||
const eslintConfig = [
|
||||
...compat.extends('next/core-web-vitals', 'next/typescript'),
|
||||
{
|
||||
rules: {
|
||||
'@typescript-eslint/ban-ts-comment': 'warn',
|
||||
'@typescript-eslint/no-empty-object-type': 'warn',
|
||||
'@typescript-eslint/no-explicit-any': 'warn',
|
||||
'@typescript-eslint/no-unused-vars': [
|
||||
'warn',
|
||||
{
|
||||
vars: 'all',
|
||||
args: 'after-used',
|
||||
ignoreRestSiblings: false,
|
||||
argsIgnorePattern: '^_',
|
||||
varsIgnorePattern: '^_',
|
||||
destructuredArrayIgnorePattern: '^_',
|
||||
caughtErrorsIgnorePattern: '^(_|ignore)',
|
||||
},
|
||||
],
|
||||
},
|
||||
},
|
||||
{
|
||||
ignores: ['.next/'],
|
||||
},
|
||||
]
|
||||
|
||||
export default eslintConfig
|
||||
5
apps/backend/next-env.d.ts
vendored
Normal file
@@ -0,0 +1,5 @@
|
||||
/// <reference types="next" />
|
||||
/// <reference types="next/image-types/global" />
|
||||
|
||||
// NOTE: This file should not be edited
|
||||
// see https://nextjs.org/docs/app/api-reference/config/typescript for more information.
|
||||
20
apps/backend/next-sitemap.config.cjs
Normal file
@@ -0,0 +1,20 @@
|
||||
const SITE_URL =
|
||||
process.env.NEXT_PUBLIC_SERVER_URL ||
|
||||
process.env.VERCEL_PROJECT_PRODUCTION_URL ||
|
||||
'https://example.com'
|
||||
|
||||
/** @type {import('next-sitemap').IConfig} */
|
||||
module.exports = {
|
||||
siteUrl: SITE_URL,
|
||||
generateRobotsTxt: true,
|
||||
exclude: ['/posts-sitemap.xml', '/pages-sitemap.xml', '/*', '/posts/*'],
|
||||
robotsTxtOptions: {
|
||||
policies: [
|
||||
{
|
||||
userAgent: '*',
|
||||
disallow: '/admin/*',
|
||||
},
|
||||
],
|
||||
additionalSitemaps: [`${SITE_URL}/pages-sitemap.xml`, `${SITE_URL}/posts-sitemap.xml`],
|
||||
},
|
||||
}
|
||||
36
apps/backend/next.config.js
Normal file
@@ -0,0 +1,36 @@
|
||||
import { withPayload } from '@payloadcms/next/withPayload'
|
||||
|
||||
import redirects from './redirects.js'
|
||||
|
||||
const NEXT_PUBLIC_SERVER_URL = process.env.VERCEL_PROJECT_PRODUCTION_URL
|
||||
? `https://${process.env.VERCEL_PROJECT_PRODUCTION_URL}`
|
||||
: undefined || process.env.__NEXT_PRIVATE_ORIGIN || 'http://localhost:3000'
|
||||
|
||||
/** @type {import('next').NextConfig} */
|
||||
const nextConfig = {
|
||||
images: {
|
||||
remotePatterns: [
|
||||
...[NEXT_PUBLIC_SERVER_URL /* 'https://example.com' */].map((item) => {
|
||||
const url = new URL(item)
|
||||
|
||||
return {
|
||||
hostname: url.hostname,
|
||||
protocol: url.protocol.replace(':', ''),
|
||||
}
|
||||
}),
|
||||
],
|
||||
},
|
||||
webpack: (webpackConfig) => {
|
||||
webpackConfig.resolve.extensionAlias = {
|
||||
'.cjs': ['.cts', '.cjs'],
|
||||
'.js': ['.ts', '.tsx', '.js', '.jsx'],
|
||||
'.mjs': ['.mts', '.mjs'],
|
||||
}
|
||||
|
||||
return webpackConfig
|
||||
},
|
||||
reactStrictMode: true,
|
||||
redirects,
|
||||
}
|
||||
|
||||
export default withPayload(nextConfig, { devBundleServerPackages: false })
|
||||
94
apps/backend/package.json
Normal file
@@ -0,0 +1,94 @@
|
||||
{
|
||||
"name": "",
|
||||
"version": "1.0.0",
|
||||
"description": "Website template for Payload",
|
||||
"license": "MIT",
|
||||
"type": "module",
|
||||
"scripts": {
|
||||
"build": "cross-env NODE_OPTIONS=--no-deprecation next build",
|
||||
"postbuild": "next-sitemap --config next-sitemap.config.cjs",
|
||||
"dev": "cross-env NODE_OPTIONS=--no-deprecation next dev",
|
||||
"dev:prod": "cross-env NODE_OPTIONS=--no-deprecation rm -rf .next && pnpm build && pnpm start",
|
||||
"generate:importmap": "cross-env NODE_OPTIONS=--no-deprecation payload generate:importmap",
|
||||
"generate:types": "cross-env NODE_OPTIONS=--no-deprecation payload generate:types",
|
||||
"ii": "cross-env NODE_OPTIONS=--no-deprecation pnpm --ignore-workspace install",
|
||||
"lint": "cross-env NODE_OPTIONS=--no-deprecation next lint",
|
||||
"lint:fix": "cross-env NODE_OPTIONS=--no-deprecation next lint --fix",
|
||||
"payload": "cross-env NODE_OPTIONS=--no-deprecation payload",
|
||||
"reinstall": "cross-env NODE_OPTIONS=--no-deprecation rm -rf node_modules && rm pnpm-lock.yaml && pnpm --ignore-workspace install",
|
||||
"start": "cross-env NODE_OPTIONS=--no-deprecation next start",
|
||||
"test": "pnpm run test:int && pnpm run test:e2e",
|
||||
"test:e2e": "cross-env NODE_OPTIONS=\"--no-deprecation --no-experimental-strip-types\" pnpm exec playwright test --config=playwright.config.ts",
|
||||
"test:int": "cross-env NODE_OPTIONS=--no-deprecation vitest run --config ./vitest.config.mts"
|
||||
},
|
||||
"dependencies": {
|
||||
"@payloadcms/admin-bar": "3.56.0",
|
||||
"@payloadcms/db-mongodb": "3.56.0",
|
||||
"@payloadcms/live-preview-react": "3.56.0",
|
||||
"@payloadcms/next": "3.56.0",
|
||||
"@payloadcms/payload-cloud": "3.56.0",
|
||||
"@payloadcms/plugin-form-builder": "3.56.0",
|
||||
"@payloadcms/plugin-nested-docs": "3.56.0",
|
||||
"@payloadcms/plugin-redirects": "3.56.0",
|
||||
"@payloadcms/plugin-search": "3.56.0",
|
||||
"@payloadcms/plugin-seo": "3.56.0",
|
||||
"@payloadcms/richtext-lexical": "3.56.0",
|
||||
"@payloadcms/ui": "3.56.0",
|
||||
"@radix-ui/react-checkbox": "^1.0.4",
|
||||
"@radix-ui/react-label": "^2.0.2",
|
||||
"@radix-ui/react-select": "^2.0.0",
|
||||
"@radix-ui/react-slot": "^1.0.2",
|
||||
"class-variance-authority": "^0.7.0",
|
||||
"clsx": "^2.1.1",
|
||||
"cross-env": "^7.0.3",
|
||||
"dotenv": "16.4.7",
|
||||
"geist": "^1.3.0",
|
||||
"graphql": "^16.8.2",
|
||||
"lucide-react": "^0.378.0",
|
||||
"next": "15.4.4",
|
||||
"next-sitemap": "^4.2.3",
|
||||
"payload": "3.56.0",
|
||||
"prism-react-renderer": "^2.3.1",
|
||||
"react": "19.1.0",
|
||||
"react-dom": "19.1.0",
|
||||
"react-hook-form": "7.45.4",
|
||||
"sharp": "0.34.2",
|
||||
"tailwind-merge": "^2.3.0",
|
||||
"tailwindcss-animate": "^1.0.7"
|
||||
},
|
||||
"devDependencies": {
|
||||
"@eslint/eslintrc": "^3.2.0",
|
||||
"@playwright/test": "1.54.1",
|
||||
"@tailwindcss/typography": "^0.5.13",
|
||||
"@testing-library/react": "16.3.0",
|
||||
"@types/escape-html": "^1.0.2",
|
||||
"@types/node": "22.5.4",
|
||||
"@types/react": "19.1.8",
|
||||
"@types/react-dom": "19.1.6",
|
||||
"@vitejs/plugin-react": "4.5.2",
|
||||
"autoprefixer": "^10.4.19",
|
||||
"copyfiles": "^2.4.1",
|
||||
"eslint": "^9.16.0",
|
||||
"eslint-config-next": "15.4.4",
|
||||
"jsdom": "26.1.0",
|
||||
"playwright": "1.54.1",
|
||||
"playwright-core": "1.54.1",
|
||||
"postcss": "^8.4.38",
|
||||
"prettier": "^3.4.2",
|
||||
"tailwindcss": "^3.4.3",
|
||||
"typescript": "5.7.3",
|
||||
"vite-tsconfig-paths": "5.1.4",
|
||||
"vitest": "3.2.3"
|
||||
},
|
||||
"engines": {
|
||||
"node": "^18.20.2 || >=20.9.0",
|
||||
"pnpm": "^9 || ^10"
|
||||
},
|
||||
"pnpm": {
|
||||
"onlyBuiltDependencies": [
|
||||
"sharp",
|
||||
"esbuild",
|
||||
"unrs-resolver"
|
||||
]
|
||||
}
|
||||
}
|
||||
41
apps/backend/playwright.config.ts
Normal file
@@ -0,0 +1,41 @@
|
||||
import { defineConfig, devices } from '@playwright/test'
|
||||
|
||||
/**
|
||||
* Read environment variables from file.
|
||||
* https://github.com/motdotla/dotenv
|
||||
*/
|
||||
import 'dotenv/config'
|
||||
|
||||
/**
|
||||
* See https://playwright.dev/docs/test-configuration.
|
||||
*/
|
||||
export default defineConfig({
|
||||
testDir: './tests/e2e',
|
||||
/* Fail the build on CI if you accidentally left test.only in the source code. */
|
||||
forbidOnly: !!process.env.CI,
|
||||
/* Retry on CI only */
|
||||
retries: process.env.CI ? 2 : 0,
|
||||
/* Opt out of parallel tests on CI. */
|
||||
workers: process.env.CI ? 1 : undefined,
|
||||
/* Reporter to use. See https://playwright.dev/docs/test-reporters */
|
||||
reporter: 'html',
|
||||
/* Shared settings for all the projects below. See https://playwright.dev/docs/api/class-testoptions. */
|
||||
use: {
|
||||
/* Base URL to use in actions like `await page.goto('/')`. */
|
||||
// baseURL: 'http://localhost:3000',
|
||||
|
||||
/* Collect trace when retrying the failed test. See https://playwright.dev/docs/trace-viewer */
|
||||
trace: 'on-first-retry',
|
||||
},
|
||||
projects: [
|
||||
{
|
||||
name: 'chromium',
|
||||
use: { ...devices['Desktop Chrome'] },
|
||||
},
|
||||
],
|
||||
webServer: {
|
||||
command: 'pnpm dev',
|
||||
reuseExistingServer: true,
|
||||
url: 'http://localhost:3000',
|
||||
},
|
||||
})
|
||||
8
apps/backend/postcss.config.js
Normal file
@@ -0,0 +1,8 @@
const config = {
  plugins: {
    tailwindcss: {},
    autoprefixer: {},
  },
}

export default config
BIN
apps/backend/public/favicon.ico
Normal file
Binary file not shown. (15 KiB)
23
apps/backend/public/favicon.svg
Normal file
@@ -0,0 +1,23 @@
|
||||
<svg xmlns="http://www.w3.org/2000/svg" version="1.1" xmlns:xlink="http://www.w3.org/1999/xlink" xmlns:svgjs="http://svgjs.dev/svgjs" width="1000" height="1000"><style>
|
||||
#light-icon {
|
||||
display: inline;
|
||||
}
|
||||
#dark-icon {
|
||||
display: none;
|
||||
}
|
||||
|
||||
@media (prefers-color-scheme: dark) {
|
||||
#light-icon {
|
||||
display: none;
|
||||
}
|
||||
#dark-icon {
|
||||
display: inline;
|
||||
}
|
||||
}
|
||||
</style><g id="light-icon"><svg xmlns="http://www.w3.org/2000/svg" version="1.1" xmlns:xlink="http://www.w3.org/1999/xlink" xmlns:svgjs="http://svgjs.dev/svgjs" width="1000" height="1000"><g clip-path="url(#SvgjsClipPath1059)"><rect width="1000" height="1000" fill="#000000"></rect><g transform="matrix(5,0,0,5,192.5,150)"><svg xmlns="http://www.w3.org/2000/svg" version="1.1" xmlns:xlink="http://www.w3.org/1999/xlink" xmlns:svgjs="http://svgjs.dev/svgjs" width="123" height="140"><svg width="123" height="140" viewBox="0 0 123 140" fill="none" xmlns="http://www.w3.org/2000/svg">
|
||||
<path d="M60.2569 118.758L18.9035 94.9917C18.4016 94.6917 18.067 94.1583 18.067 93.5583V56.825C18.067 56.1917 18.7696 55.7917 19.3049 56.0917L67.3164 83.6917C67.9855 84.0917 68.822 83.5917 68.822 82.825V64.925C68.822 64.225 68.4539 63.5583 67.8182 63.1917L10.0707 29.9917C9.56883 29.6917 8.89968 29.6917 8.39782 29.9917L0.836436 34.3583C0.334574 34.6583 0 35.1917 0 35.7917V104.025C0 104.625 0.334574 105.158 0.836436 105.458L60.1565 139.592C60.6583 139.892 61.3275 139.892 61.8293 139.592L111.647 110.925C112.317 110.525 112.317 109.592 111.647 109.192L96.1232 100.258C95.4875 99.8917 94.7515 99.8917 94.1158 100.258L61.9632 118.758C61.4613 119.058 60.7922 119.058 60.2903 118.758H60.2569Z" fill="white"></path>
|
||||
<path d="M121.149 34.325L61.8294 0.225C61.3275 -0.075 60.6584 -0.075 60.1565 0.225L28.8069 18.2583C28.1378 18.6583 28.1378 19.5917 28.8069 19.9917L44.1973 28.8583C44.833 29.225 45.5691 29.225 46.2048 28.8583L60.2569 20.7917C60.7588 20.4917 61.4279 20.4917 61.9298 20.7917L103.283 44.5583C103.785 44.8583 104.12 45.3917 104.12 45.9917V82.8917C104.12 83.5917 104.488 84.2583 105.123 84.625L120.514 93.4583C121.183 93.8583 122.019 93.3583 122.019 92.5917V35.7917C122.019 35.1917 121.685 34.6583 121.183 34.3583L121.149 34.325Z" fill="white"></path>
|
||||
</svg></svg></g></g><defs><clipPath id="SvgjsClipPath1059"><rect width="1000" height="1000" x="0" y="0" rx="350" ry="350"></rect></clipPath></defs></svg></g><g id="dark-icon"><svg xmlns="http://www.w3.org/2000/svg" version="1.1" xmlns:xlink="http://www.w3.org/1999/xlink" xmlns:svgjs="http://svgjs.dev/svgjs" width="1000" height="1000"><g clip-path="url(#SvgjsClipPath1060)"><rect width="1000" height="1000" fill="#000000"></rect><g transform="matrix(5,0,0,5,192.5,150)"><svg xmlns="http://www.w3.org/2000/svg" version="1.1" xmlns:xlink="http://www.w3.org/1999/xlink" xmlns:svgjs="http://svgjs.dev/svgjs" width="123" height="140"><svg width="123" height="140" viewBox="0 0 123 140" fill="none" xmlns="http://www.w3.org/2000/svg">
|
||||
<path d="M60.2569 118.758L18.9035 94.9917C18.4016 94.6917 18.067 94.1583 18.067 93.5583V56.825C18.067 56.1917 18.7696 55.7917 19.3049 56.0917L67.3164 83.6917C67.9855 84.0917 68.822 83.5917 68.822 82.825V64.925C68.822 64.225 68.4539 63.5583 67.8182 63.1917L10.0707 29.9917C9.56883 29.6917 8.89968 29.6917 8.39782 29.9917L0.836436 34.3583C0.334574 34.6583 0 35.1917 0 35.7917V104.025C0 104.625 0.334574 105.158 0.836436 105.458L60.1565 139.592C60.6583 139.892 61.3275 139.892 61.8293 139.592L111.647 110.925C112.317 110.525 112.317 109.592 111.647 109.192L96.1232 100.258C95.4875 99.8917 94.7515 99.8917 94.1158 100.258L61.9632 118.758C61.4613 119.058 60.7922 119.058 60.2903 118.758H60.2569Z" fill="white"></path>
|
||||
<path d="M121.149 34.325L61.8294 0.225C61.3275 -0.075 60.6584 -0.075 60.1565 0.225L28.8069 18.2583C28.1378 18.6583 28.1378 19.5917 28.8069 19.9917L44.1973 28.8583C44.833 29.225 45.5691 29.225 46.2048 28.8583L60.2569 20.7917C60.7588 20.4917 61.4279 20.4917 61.9298 20.7917L103.283 44.5583C103.785 44.8583 104.12 45.3917 104.12 45.9917V82.8917C104.12 83.5917 104.488 84.2583 105.123 84.625L120.514 93.4583C121.183 93.8583 122.019 93.3583 122.019 92.5917V35.7917C122.019 35.1917 121.685 34.6583 121.183 34.3583L121.149 34.325Z" fill="white"></path>
|
||||
</svg></svg></g></g><defs><clipPath id="SvgjsClipPath1060"><rect width="1000" height="1000" x="0" y="0" rx="350" ry="350"></rect></clipPath></defs></svg></g></svg>
|
||||
BIN
apps/backend/public/website-template-OG.webp
Normal file
Binary file not shown. (99 KiB)
20
apps/backend/redirects.js
Normal file
@@ -0,0 +1,20 @@
const redirects = async () => {
  const internetExplorerRedirect = {
    destination: '/ie-incompatible.html',
    has: [
      {
        type: 'header',
        key: 'user-agent',
        value: '(.*Trident.*)', // all ie browsers
      },
    ],
    permanent: false,
    source: '/:path((?!ie-incompatible.html$).*)', // all pages except the incompatibility page
  }

  const redirects = [internetExplorerRedirect]

  return redirects
}

export default redirects
34
apps/backend/src/Footer/Component.tsx
Normal file
@@ -0,0 +1,34 @@
|
||||
import { getCachedGlobal } from '@/utilities/getGlobals'
|
||||
import Link from 'next/link'
|
||||
import React from 'react'
|
||||
|
||||
import type { Footer } from '@/payload-types'
|
||||
|
||||
import { ThemeSelector } from '@/providers/Theme/ThemeSelector'
|
||||
import { CMSLink } from '@/components/Link'
|
||||
import { Logo } from '@/components/Logo/Logo'
|
||||
|
||||
export async function Footer() {
|
||||
const footerData: Footer = await getCachedGlobal('footer', 1)()
|
||||
|
||||
const navItems = footerData?.navItems || []
|
||||
|
||||
return (
|
||||
<footer className="mt-auto border-t border-border bg-black dark:bg-card text-white">
|
||||
<div className="container py-8 gap-8 flex flex-col md:flex-row md:justify-between">
|
||||
<Link className="flex items-center" href="/">
|
||||
<Logo />
|
||||
</Link>
|
||||
|
||||
<div className="flex flex-col-reverse items-start md:flex-row gap-4 md:items-center">
|
||||
<ThemeSelector />
|
||||
<nav className="flex flex-col md:flex-row gap-4">
|
||||
{navItems.map(({ link }, i) => {
|
||||
return <CMSLink className="text-white" key={i} {...link} />
|
||||
})}
|
||||
</nav>
|
||||
</div>
|
||||
</div>
|
||||
</footer>
|
||||
)
|
||||
}
|
||||
13
apps/backend/src/Footer/RowLabel.tsx
Normal file
@@ -0,0 +1,13 @@
'use client'
import { Header } from '@/payload-types'
import { RowLabelProps, useRowLabel } from '@payloadcms/ui'

export const RowLabel: React.FC<RowLabelProps> = () => {
  const data = useRowLabel<NonNullable<Header['navItems']>[number]>()

  const label = data?.data?.link?.label
    ? `Nav item ${data.rowNumber !== undefined ? data.rowNumber + 1 : ''}: ${data?.data?.link?.label}`
    : 'Row'

  return <div>{label}</div>
}
32
apps/backend/src/Footer/config.ts
Normal file
@@ -0,0 +1,32 @@
|
||||
import type { GlobalConfig } from 'payload'
|
||||
|
||||
import { link } from '@/fields/link'
|
||||
import { revalidateFooter } from './hooks/revalidateFooter'
|
||||
|
||||
export const Footer: GlobalConfig = {
|
||||
slug: 'footer',
|
||||
access: {
|
||||
read: () => true,
|
||||
},
|
||||
fields: [
|
||||
{
|
||||
name: 'navItems',
|
||||
type: 'array',
|
||||
fields: [
|
||||
link({
|
||||
appearances: false,
|
||||
}),
|
||||
],
|
||||
maxRows: 6,
|
||||
admin: {
|
||||
initCollapsed: true,
|
||||
components: {
|
||||
RowLabel: '@/Footer/RowLabel#RowLabel',
|
||||
},
|
||||
},
|
||||
},
|
||||
],
|
||||
hooks: {
|
||||
afterChange: [revalidateFooter],
|
||||
},
|
||||
}
|
||||
13
apps/backend/src/Footer/hooks/revalidateFooter.ts
Normal file
@@ -0,0 +1,13 @@
import type { GlobalAfterChangeHook } from 'payload'

import { revalidateTag } from 'next/cache'

export const revalidateFooter: GlobalAfterChangeHook = ({ doc, req: { payload, context } }) => {
  if (!context.disableRevalidate) {
    payload.logger.info(`Revalidating footer`)

    revalidateTag('global_footer')
  }

  return doc
}
42
apps/backend/src/Header/Component.client.tsx
Normal file
@@ -0,0 +1,42 @@
|
||||
'use client'
|
||||
import { useHeaderTheme } from '@/providers/HeaderTheme'
|
||||
import Link from 'next/link'
|
||||
import { usePathname } from 'next/navigation'
|
||||
import React, { useEffect, useState } from 'react'
|
||||
|
||||
import type { Header } from '@/payload-types'
|
||||
|
||||
import { Logo } from '@/components/Logo/Logo'
|
||||
import { HeaderNav } from './Nav'
|
||||
|
||||
interface HeaderClientProps {
|
||||
data: Header
|
||||
}
|
||||
|
||||
export const HeaderClient: React.FC<HeaderClientProps> = ({ data }) => {
|
||||
/* Storing the value in a useState to avoid hydration errors */
|
||||
const [theme, setTheme] = useState<string | null>(null)
|
||||
const { headerTheme, setHeaderTheme } = useHeaderTheme()
|
||||
const pathname = usePathname()
|
||||
|
||||
useEffect(() => {
|
||||
setHeaderTheme(null)
|
||||
// eslint-disable-next-line react-hooks/exhaustive-deps
|
||||
}, [pathname])
|
||||
|
||||
useEffect(() => {
|
||||
if (headerTheme && headerTheme !== theme) setTheme(headerTheme)
|
||||
// eslint-disable-next-line react-hooks/exhaustive-deps
|
||||
}, [headerTheme])
|
||||
|
||||
return (
|
||||
<header className="container relative z-20 " {...(theme ? { 'data-theme': theme } : {})}>
|
||||
<div className="py-8 flex justify-between">
|
||||
<Link href="/">
|
||||
<Logo loading="eager" priority="high" className="invert dark:invert-0" />
|
||||
</Link>
|
||||
<HeaderNav data={data} />
|
||||
</div>
|
||||
</header>
|
||||
)
|
||||
}
|
||||
11
apps/backend/src/Header/Component.tsx
Normal file
@@ -0,0 +1,11 @@
import { HeaderClient } from './Component.client'
import { getCachedGlobal } from '@/utilities/getGlobals'
import React from 'react'

import type { Header } from '@/payload-types'

export async function Header() {
  const headerData: Header = await getCachedGlobal('header', 1)()

  return <HeaderClient data={headerData} />
}
25
apps/backend/src/Header/Nav/index.tsx
Normal file
@@ -0,0 +1,25 @@
|
||||
'use client'
|
||||
|
||||
import React from 'react'
|
||||
|
||||
import type { Header as HeaderType } from '@/payload-types'
|
||||
|
||||
import { CMSLink } from '@/components/Link'
|
||||
import Link from 'next/link'
|
||||
import { SearchIcon } from 'lucide-react'
|
||||
|
||||
export const HeaderNav: React.FC<{ data: HeaderType }> = ({ data }) => {
|
||||
const navItems = data?.navItems || []
|
||||
|
||||
return (
|
||||
<nav className="flex gap-3 items-center">
|
||||
{navItems.map(({ link }, i) => {
|
||||
return <CMSLink key={i} {...link} appearance="link" />
|
||||
})}
|
||||
<Link href="/search">
|
||||
<span className="sr-only">Search</span>
|
||||
<SearchIcon className="w-5 text-primary" />
|
||||
</Link>
|
||||
</nav>
|
||||
)
|
||||
}
|
||||
13
apps/backend/src/Header/RowLabel.tsx
Normal file
@@ -0,0 +1,13 @@
'use client'
import { Header } from '@/payload-types'
import { RowLabelProps, useRowLabel } from '@payloadcms/ui'

export const RowLabel: React.FC<RowLabelProps> = () => {
  const data = useRowLabel<NonNullable<Header['navItems']>[number]>()

  const label = data?.data?.link?.label
    ? `Nav item ${data.rowNumber !== undefined ? data.rowNumber + 1 : ''}: ${data?.data?.link?.label}`
    : 'Row'

  return <div>{label}</div>
}
32
apps/backend/src/Header/config.ts
Normal file
@@ -0,0 +1,32 @@
|
||||
import type { GlobalConfig } from 'payload'
|
||||
|
||||
import { link } from '@/fields/link'
|
||||
import { revalidateHeader } from './hooks/revalidateHeader'
|
||||
|
||||
export const Header: GlobalConfig = {
|
||||
slug: 'header',
|
||||
access: {
|
||||
read: () => true,
|
||||
},
|
||||
fields: [
|
||||
{
|
||||
name: 'navItems',
|
||||
type: 'array',
|
||||
fields: [
|
||||
link({
|
||||
appearances: false,
|
||||
}),
|
||||
],
|
||||
maxRows: 6,
|
||||
admin: {
|
||||
initCollapsed: true,
|
||||
components: {
|
||||
RowLabel: '@/Header/RowLabel#RowLabel',
|
||||
},
|
||||
},
|
||||
},
|
||||
],
|
||||
hooks: {
|
||||
afterChange: [revalidateHeader],
|
||||
},
|
||||
}
|
||||
13
apps/backend/src/Header/hooks/revalidateHeader.ts
Normal file
@@ -0,0 +1,13 @@
import type { GlobalAfterChangeHook } from 'payload'

import { revalidateTag } from 'next/cache'

export const revalidateHeader: GlobalAfterChangeHook = ({ doc, req: { payload, context } }) => {
  if (!context.disableRevalidate) {
    payload.logger.info(`Revalidating header`)

    revalidateTag('global_header')
  }

  return doc
}
3
apps/backend/src/access/anyone.ts
Normal file
@@ -0,0 +1,3 @@
import type { Access } from 'payload'

export const anyone: Access = () => true
9
apps/backend/src/access/authenticated.ts
Normal file
@@ -0,0 +1,9 @@
import type { AccessArgs } from 'payload'

import type { User } from '@/payload-types'

type isAuthenticated = (args: AccessArgs<User>) => boolean

export const authenticated: isAuthenticated = ({ req: { user } }) => {
  return Boolean(user)
}
13
apps/backend/src/access/authenticatedOrPublished.ts
Normal file
@@ -0,0 +1,13 @@
import type { Access } from 'payload'

export const authenticatedOrPublished: Access = ({ req: { user } }) => {
  if (user) {
    return true
  }

  return {
    _status: {
      equals: 'published',
    },
  }
}
@@ -0,0 +1,68 @@
|
||||
import { getServerSideSitemap } from 'next-sitemap'
|
||||
import { getPayload } from 'payload'
|
||||
import config from '@payload-config'
|
||||
import { unstable_cache } from 'next/cache'
|
||||
|
||||
const getPagesSitemap = unstable_cache(
|
||||
async () => {
|
||||
const payload = await getPayload({ config })
|
||||
const SITE_URL =
|
||||
process.env.NEXT_PUBLIC_SERVER_URL ||
|
||||
process.env.VERCEL_PROJECT_PRODUCTION_URL ||
|
||||
'https://example.com'
|
||||
|
||||
const results = await payload.find({
|
||||
collection: 'pages',
|
||||
overrideAccess: false,
|
||||
draft: false,
|
||||
depth: 0,
|
||||
limit: 1000,
|
||||
pagination: false,
|
||||
where: {
|
||||
_status: {
|
||||
equals: 'published',
|
||||
},
|
||||
},
|
||||
select: {
|
||||
slug: true,
|
||||
updatedAt: true,
|
||||
},
|
||||
})
|
||||
|
||||
const dateFallback = new Date().toISOString()
|
||||
|
||||
const defaultSitemap = [
|
||||
{
|
||||
loc: `${SITE_URL}/search`,
|
||||
lastmod: dateFallback,
|
||||
},
|
||||
{
|
||||
loc: `${SITE_URL}/posts`,
|
||||
lastmod: dateFallback,
|
||||
},
|
||||
]
|
||||
|
||||
const sitemap = results.docs
|
||||
? results.docs
|
||||
.filter((page) => Boolean(page?.slug))
|
||||
.map((page) => {
|
||||
return {
|
||||
loc: page?.slug === 'home' ? `${SITE_URL}/` : `${SITE_URL}/${page?.slug}`,
|
||||
lastmod: page.updatedAt || dateFallback,
|
||||
}
|
||||
})
|
||||
: []
|
||||
|
||||
return [...defaultSitemap, ...sitemap]
|
||||
},
|
||||
['pages-sitemap'],
|
||||
{
|
||||
tags: ['pages-sitemap'],
|
||||
},
|
||||
)
|
||||
|
||||
export async function GET() {
|
||||
const sitemap = await getPagesSitemap()
|
||||
|
||||
return getServerSideSitemap(sitemap)
|
||||
}
|
||||
@@ -0,0 +1,55 @@
|
||||
import { getServerSideSitemap } from 'next-sitemap'
|
||||
import { getPayload } from 'payload'
|
||||
import config from '@payload-config'
|
||||
import { unstable_cache } from 'next/cache'
|
||||
|
||||
const getPostsSitemap = unstable_cache(
|
||||
async () => {
|
||||
const payload = await getPayload({ config })
|
||||
const SITE_URL =
|
||||
process.env.NEXT_PUBLIC_SERVER_URL ||
|
||||
process.env.VERCEL_PROJECT_PRODUCTION_URL ||
|
||||
'https://example.com'
|
||||
|
||||
const results = await payload.find({
|
||||
collection: 'posts',
|
||||
overrideAccess: false,
|
||||
draft: false,
|
||||
depth: 0,
|
||||
limit: 1000,
|
||||
pagination: false,
|
||||
where: {
|
||||
_status: {
|
||||
equals: 'published',
|
||||
},
|
||||
},
|
||||
select: {
|
||||
slug: true,
|
||||
updatedAt: true,
|
||||
},
|
||||
})
|
||||
|
||||
const dateFallback = new Date().toISOString()
|
||||
|
||||
const sitemap = results.docs
|
||||
? results.docs
|
||||
.filter((post) => Boolean(post?.slug))
|
||||
.map((post) => ({
|
||||
loc: `${SITE_URL}/posts/${post?.slug}`,
|
||||
lastmod: post.updatedAt || dateFallback,
|
||||
}))
|
||||
: []
|
||||
|
||||
return sitemap
|
||||
},
|
||||
['posts-sitemap'],
|
||||
{
|
||||
tags: ['posts-sitemap'],
|
||||
},
|
||||
)
|
||||
|
||||
export async function GET() {
|
||||
const sitemap = await getPostsSitemap()
|
||||
|
||||
return getServerSideSitemap(sitemap)
|
||||
}
|
||||
15
apps/backend/src/app/(frontend)/[slug]/page.client.tsx
Normal file
@@ -0,0 +1,15 @@
|
||||
'use client'
|
||||
import { useHeaderTheme } from '@/providers/HeaderTheme'
|
||||
import React, { useEffect } from 'react'
|
||||
|
||||
const PageClient: React.FC = () => {
|
||||
/* Force the header to be dark mode while we have an image behind it */
|
||||
const { setHeaderTheme } = useHeaderTheme()
|
||||
|
||||
useEffect(() => {
|
||||
setHeaderTheme('light')
|
||||
}, [setHeaderTheme])
|
||||
return <React.Fragment />
|
||||
}
|
||||
|
||||
export default PageClient
|
||||
110
apps/backend/src/app/(frontend)/[slug]/page.tsx
Normal file
@@ -0,0 +1,110 @@
|
||||
import type { Metadata } from 'next'
|
||||
|
||||
import { PayloadRedirects } from '@/components/PayloadRedirects'
|
||||
import configPromise from '@payload-config'
|
||||
import { getPayload, type RequiredDataFromCollectionSlug } from 'payload'
|
||||
import { draftMode } from 'next/headers'
|
||||
import React, { cache } from 'react'
|
||||
import { homeStatic } from '@/endpoints/seed/home-static'
|
||||
|
||||
import { RenderBlocks } from '@/blocks/RenderBlocks'
|
||||
import { RenderHero } from '@/heros/RenderHero'
|
||||
import { generateMeta } from '@/utilities/generateMeta'
|
||||
import PageClient from './page.client'
|
||||
import { LivePreviewListener } from '@/components/LivePreviewListener'
|
||||
|
||||
export async function generateStaticParams() {
|
||||
const payload = await getPayload({ config: configPromise })
|
||||
const pages = await payload.find({
|
||||
collection: 'pages',
|
||||
draft: false,
|
||||
limit: 1000,
|
||||
overrideAccess: false,
|
||||
pagination: false,
|
||||
select: {
|
||||
slug: true,
|
||||
},
|
||||
})
|
||||
|
||||
const params = pages.docs
|
||||
?.filter((doc) => {
|
||||
return doc.slug !== 'home'
|
||||
})
|
||||
.map(({ slug }) => {
|
||||
return { slug }
|
||||
})
|
||||
|
||||
return params
|
||||
}
|
||||
|
||||
type Args = {
|
||||
params: Promise<{
|
||||
slug?: string
|
||||
}>
|
||||
}
|
||||
|
||||
export default async function Page({ params: paramsPromise }: Args) {
|
||||
const { isEnabled: draft } = await draftMode()
|
||||
const { slug = 'home' } = await paramsPromise
|
||||
const url = '/' + slug
|
||||
|
||||
let page: RequiredDataFromCollectionSlug<'pages'> | null
|
||||
|
||||
page = await queryPageBySlug({
|
||||
slug,
|
||||
})
|
||||
|
||||
// Remove this code once your website is seeded
|
||||
if (!page && slug === 'home') {
|
||||
page = homeStatic
|
||||
}
|
||||
|
||||
if (!page) {
|
||||
return <PayloadRedirects url={url} />
|
||||
}
|
||||
|
||||
const { hero, layout } = page
|
||||
|
||||
return (
|
||||
<article className="pt-16 pb-24">
|
||||
<PageClient />
|
||||
{/* Allows redirects for valid pages too */}
|
||||
<PayloadRedirects disableNotFound url={url} />
|
||||
|
||||
{draft && <LivePreviewListener />}
|
||||
|
||||
<RenderHero {...hero} />
|
||||
<RenderBlocks blocks={layout} />
|
||||
</article>
|
||||
)
|
||||
}
|
||||
|
||||
export async function generateMetadata({ params: paramsPromise }: Args): Promise<Metadata> {
|
||||
const { slug = 'home' } = await paramsPromise
|
||||
const page = await queryPageBySlug({
|
||||
slug,
|
||||
})
|
||||
|
||||
return generateMeta({ doc: page })
|
||||
}
|
||||
|
||||
const queryPageBySlug = cache(async ({ slug }: { slug: string }) => {
|
||||
const { isEnabled: draft } = await draftMode()
|
||||
|
||||
const payload = await getPayload({ config: configPromise })
|
||||
|
||||
const result = await payload.find({
|
||||
collection: 'pages',
|
||||
draft,
|
||||
limit: 1,
|
||||
pagination: false,
|
||||
overrideAccess: draft,
|
||||
where: {
|
||||
slug: {
|
||||
equals: slug,
|
||||
},
|
||||
},
|
||||
})
|
||||
|
||||
return result.docs?.[0] || null
|
||||
})
|
||||
103
apps/backend/src/app/(frontend)/globals.css
Normal file
@@ -0,0 +1,103 @@
|
||||
@tailwind base;
|
||||
@tailwind components;
|
||||
@tailwind utilities;
|
||||
|
||||
@layer base {
|
||||
h1,
|
||||
h2,
|
||||
h3,
|
||||
h4,
|
||||
h5,
|
||||
h6 {
|
||||
font-size: unset;
|
||||
font-weight: unset;
|
||||
}
|
||||
|
||||
:root {
|
||||
--background: 0 0% 100%;
|
||||
--foreground: 222.2 84% 4.9%;
|
||||
|
||||
--card: 240 5% 96%;
|
||||
--card-foreground: 222.2 84% 4.9%;
|
||||
|
||||
--popover: 0 0% 100%;
|
||||
--popover-foreground: 222.2 84% 4.9%;
|
||||
|
||||
--primary: 222.2 47.4% 11.2%;
|
||||
--primary-foreground: 210 40% 98%;
|
||||
|
||||
--secondary: 210 40% 96.1%;
|
||||
--secondary-foreground: 222.2 47.4% 11.2%;
|
||||
|
||||
--muted: 210 40% 96.1%;
|
||||
--muted-foreground: 215.4 16.3% 46.9%;
|
||||
|
||||
--accent: 210 40% 96.1%;
|
||||
--accent-foreground: 222.2 47.4% 11.2%;
|
||||
|
||||
--destructive: 0 84.2% 60.2%;
|
||||
--destructive-foreground: 210 40% 98%;
|
||||
|
||||
--border: 240 6% 80%;
|
||||
--input: 214.3 31.8% 91.4%;
|
||||
--ring: 222.2 84% 4.9%;
|
||||
|
||||
--radius: 0.2rem;
|
||||
|
||||
--success: 196 52% 74%;
|
||||
--warning: 34 89% 85%;
|
||||
--error: 10 100% 86%;
|
||||
}
|
||||
|
||||
[data-theme='dark'] {
|
||||
--background: 0 0% 0%;
|
||||
--foreground: 210 40% 98%;
|
||||
|
||||
--card: 0 0% 4%;
|
||||
--card-foreground: 210 40% 98%;
|
||||
|
||||
--popover: 222.2 84% 4.9%;
|
||||
--popover-foreground: 210 40% 98%;
|
||||
|
||||
--primary: 210 40% 98%;
|
||||
--primary-foreground: 222.2 47.4% 11.2%;
|
||||
|
||||
--secondary: 217.2 32.6% 17.5%;
|
||||
--secondary-foreground: 210 40% 98%;
|
||||
|
||||
--muted: 217.2 32.6% 17.5%;
|
||||
--muted-foreground: 215 20.2% 65.1%;
|
||||
|
||||
--accent: 217.2 32.6% 17.5%;
|
||||
--accent-foreground: 210 40% 98%;
|
||||
|
||||
--destructive: 0 62.8% 30.6%;
|
||||
--destructive-foreground: 210 40% 98%;
|
||||
|
||||
--border: 0, 0%, 15%, 0.8;
|
||||
--input: 217.2 32.6% 17.5%;
|
||||
--ring: 212.7 26.8% 83.9%;
|
||||
|
||||
--success: 196 100% 14%;
|
||||
--warning: 34 51% 25%;
|
||||
--error: 10 39% 43%;
|
||||
}
|
||||
}
|
||||
|
||||
@layer base {
|
||||
* {
|
||||
@apply border-border;
|
||||
}
|
||||
body {
|
||||
@apply bg-background text-foreground min-h-[100vh] flex flex-col;
|
||||
}
|
||||
}
|
||||
|
||||
html {
|
||||
opacity: 0;
|
||||
}
|
||||
|
||||
html[data-theme='dark'],
|
||||
html[data-theme='light'] {
|
||||
opacity: initial;
|
||||
}
|
||||
53
apps/backend/src/app/(frontend)/layout.tsx
Normal file
@@ -0,0 +1,53 @@
|
||||
import type { Metadata } from 'next'
|
||||
|
||||
import { cn } from '@/utilities/ui'
|
||||
import { GeistMono } from 'geist/font/mono'
|
||||
import { GeistSans } from 'geist/font/sans'
|
||||
import React from 'react'
|
||||
|
||||
import { AdminBar } from '@/components/AdminBar'
|
||||
import { Footer } from '@/Footer/Component'
|
||||
import { Header } from '@/Header/Component'
|
||||
import { Providers } from '@/providers'
|
||||
import { InitTheme } from '@/providers/Theme/InitTheme'
|
||||
import { mergeOpenGraph } from '@/utilities/mergeOpenGraph'
|
||||
import { draftMode } from 'next/headers'
|
||||
|
||||
import './globals.css'
|
||||
import { getServerSideURL } from '@/utilities/getURL'
|
||||
|
||||
export default async function RootLayout({ children }: { children: React.ReactNode }) {
|
||||
const { isEnabled } = await draftMode()
|
||||
|
||||
return (
|
||||
<html className={cn(GeistSans.variable, GeistMono.variable)} lang="en" suppressHydrationWarning>
|
||||
<head>
|
||||
<InitTheme />
|
||||
<link href="/favicon.ico" rel="icon" sizes="32x32" />
|
||||
<link href="/favicon.svg" rel="icon" type="image/svg+xml" />
|
||||
</head>
|
||||
<body>
|
||||
<Providers>
|
||||
<AdminBar
|
||||
adminBarProps={{
|
||||
preview: isEnabled,
|
||||
}}
|
||||
/>
|
||||
|
||||
<Header />
|
||||
{children}
|
||||
<Footer />
|
||||
</Providers>
|
||||
</body>
|
||||
</html>
|
||||
)
|
||||
}
|
||||
|
||||
export const metadata: Metadata = {
|
||||
metadataBase: new URL(getServerSideURL()),
|
||||
openGraph: mergeOpenGraph(),
|
||||
twitter: {
|
||||
card: 'summary_large_image',
|
||||
creator: '@payloadcms',
|
||||
},
|
||||
}
|
||||
@@ -0,0 +1,7 @@
import { draftMode } from 'next/headers'

export async function GET(): Promise<Response> {
  const draft = await draftMode()
  draft.disable()
  return new Response('Draft mode is disabled')
}
56
apps/backend/src/app/(frontend)/next/preview/route.ts
Normal file
@@ -0,0 +1,56 @@
|
||||
import type { CollectionSlug, PayloadRequest } from 'payload'
|
||||
import { getPayload } from 'payload'
|
||||
|
||||
import { draftMode } from 'next/headers'
|
||||
import { redirect } from 'next/navigation'
|
||||
import { NextRequest } from 'next/server'
|
||||
|
||||
import configPromise from '@payload-config'
|
||||
|
||||
export async function GET(req: NextRequest): Promise<Response> {
|
||||
const payload = await getPayload({ config: configPromise })
|
||||
|
||||
const { searchParams } = new URL(req.url)
|
||||
|
||||
const path = searchParams.get('path')
|
||||
const collection = searchParams.get('collection') as CollectionSlug
|
||||
const slug = searchParams.get('slug')
|
||||
const previewSecret = searchParams.get('previewSecret')
|
||||
|
||||
if (previewSecret !== process.env.PREVIEW_SECRET) {
|
||||
return new Response('You are not allowed to preview this page', { status: 403 })
|
||||
}
|
||||
|
||||
if (!path || !collection || !slug) {
|
||||
return new Response('Insufficient search params', { status: 404 })
|
||||
}
|
||||
|
||||
if (!path.startsWith('/')) {
|
||||
return new Response('This endpoint can only be used for relative previews', { status: 500 })
|
||||
}
|
||||
|
||||
let user
|
||||
|
||||
try {
|
||||
user = await payload.auth({
|
||||
req: req as unknown as PayloadRequest,
|
||||
headers: req.headers,
|
||||
})
|
||||
} catch (error) {
|
||||
payload.logger.error({ err: error }, 'Error verifying token for live preview')
|
||||
return new Response('You are not allowed to preview this page', { status: 403 })
|
||||
}
|
||||
|
||||
const draft = await draftMode()
|
||||
|
||||
if (!user) {
|
||||
draft.disable()
|
||||
return new Response('You are not allowed to preview this page', { status: 403 })
|
||||
}
|
||||
|
||||
// You can add additional checks here to see if the user is allowed to preview this page
|
||||
|
||||
draft.enable()
|
||||
|
||||
redirect(path)
|
||||
}
|
||||
31
apps/backend/src/app/(frontend)/next/seed/route.ts
Normal file
@@ -0,0 +1,31 @@
|
||||
import { createLocalReq, getPayload } from 'payload'
|
||||
import { seed } from '@/endpoints/seed'
|
||||
import config from '@payload-config'
|
||||
import { headers } from 'next/headers'
|
||||
|
||||
export const maxDuration = 60 // This function can run for a maximum of 60 seconds
|
||||
|
||||
export async function POST(): Promise<Response> {
|
||||
const payload = await getPayload({ config })
|
||||
const requestHeaders = await headers()
|
||||
|
||||
// Authenticate by passing request headers
|
||||
const { user } = await payload.auth({ headers: requestHeaders })
|
||||
|
||||
if (!user) {
|
||||
return new Response('Action forbidden.', { status: 403 })
|
||||
}
|
||||
|
||||
try {
|
||||
// Create a Payload request object to pass to the Local API for transactions
|
||||
// At this point you should pass in a user, locale, and any other context you need for the Local API
|
||||
const payloadReq = await createLocalReq({ user }, payload)
|
||||
|
||||
await seed({ payload, req: payloadReq })
|
||||
|
||||
return Response.json({ success: true })
|
||||
} catch (e) {
|
||||
payload.logger.error({ err: e, message: 'Error seeding data' })
|
||||
return new Response('Error seeding data.', { status: 500 })
|
||||
}
|
||||
}
|
||||
18
apps/backend/src/app/(frontend)/not-found.tsx
Normal file
@@ -0,0 +1,18 @@
|
||||
import Link from 'next/link'
|
||||
import React from 'react'
|
||||
|
||||
import { Button } from '@/components/ui/button'
|
||||
|
||||
export default function NotFound() {
|
||||
return (
|
||||
<div className="container py-28">
|
||||
<div className="prose max-w-none">
|
||||
<h1 style={{ marginBottom: 0 }}>404</h1>
|
||||
<p className="mb-4">This page could not be found.</p>
|
||||
</div>
|
||||
<Button asChild variant="default">
|
||||
<Link href="/">Go home</Link>
|
||||
</Button>
|
||||
</div>
|
||||
)
|
||||
}
|
||||
5
apps/backend/src/app/(frontend)/page.tsx
Normal file
@@ -0,0 +1,5 @@
import PageTemplate, { generateMetadata } from './[slug]/page'

export default PageTemplate

export { generateMetadata }
15
apps/backend/src/app/(frontend)/posts/[slug]/page.client.tsx
Normal file
@@ -0,0 +1,15 @@
|
||||
'use client'
|
||||
import { useHeaderTheme } from '@/providers/HeaderTheme'
|
||||
import React, { useEffect } from 'react'
|
||||
|
||||
const PageClient: React.FC = () => {
|
||||
/* Force the header to be dark mode while we have an image behind it */
|
||||
const { setHeaderTheme } = useHeaderTheme()
|
||||
|
||||
useEffect(() => {
|
||||
setHeaderTheme('dark')
|
||||
}, [setHeaderTheme])
|
||||
return <React.Fragment />
|
||||
}
|
||||
|
||||
export default PageClient
|
||||
104
apps/backend/src/app/(frontend)/posts/[slug]/page.tsx
Normal file
@@ -0,0 +1,104 @@
|
||||
import type { Metadata } from 'next'
|
||||
|
||||
import { RelatedPosts } from '@/blocks/RelatedPosts/Component'
|
||||
import { PayloadRedirects } from '@/components/PayloadRedirects'
|
||||
import configPromise from '@payload-config'
|
||||
import { getPayload } from 'payload'
|
||||
import { draftMode } from 'next/headers'
|
||||
import React, { cache } from 'react'
|
||||
import RichText from '@/components/RichText'
|
||||
|
||||
import type { Post } from '@/payload-types'
|
||||
|
||||
import { PostHero } from '@/heros/PostHero'
|
||||
import { generateMeta } from '@/utilities/generateMeta'
|
||||
import PageClient from './page.client'
|
||||
import { LivePreviewListener } from '@/components/LivePreviewListener'
|
||||
|
||||
export async function generateStaticParams() {
|
||||
const payload = await getPayload({ config: configPromise })
|
||||
const posts = await payload.find({
|
||||
collection: 'posts',
|
||||
draft: false,
|
||||
limit: 1000,
|
||||
overrideAccess: false,
|
||||
pagination: false,
|
||||
select: {
|
||||
slug: true,
|
||||
},
|
||||
})
|
||||
|
||||
const params = posts.docs.map(({ slug }) => {
|
||||
return { slug }
|
||||
})
|
||||
|
||||
return params
|
||||
}
|
||||
|
||||
type Args = {
|
||||
params: Promise<{
|
||||
slug?: string
|
||||
}>
|
||||
}
|
||||
|
||||
export default async function Post({ params: paramsPromise }: Args) {
|
||||
const { isEnabled: draft } = await draftMode()
|
||||
const { slug = '' } = await paramsPromise
|
||||
const url = '/posts/' + slug
|
||||
const post = await queryPostBySlug({ slug })
|
||||
|
||||
if (!post) return <PayloadRedirects url={url} />
|
||||
|
||||
return (
|
||||
<article className="pt-16 pb-16">
|
||||
<PageClient />
|
||||
|
||||
{/* Allows redirects for valid pages too */}
|
||||
<PayloadRedirects disableNotFound url={url} />
|
||||
|
||||
{draft && <LivePreviewListener />}
|
||||
|
||||
<PostHero post={post} />
|
||||
|
||||
<div className="flex flex-col items-center gap-4 pt-8">
|
||||
<div className="container">
|
||||
<RichText className="max-w-[48rem] mx-auto" data={post.content} enableGutter={false} />
|
||||
{post.relatedPosts && post.relatedPosts.length > 0 && (
|
||||
<RelatedPosts
|
||||
className="mt-12 max-w-[52rem] lg:grid lg:grid-cols-subgrid col-start-1 col-span-3 grid-rows-[2fr]"
|
||||
docs={post.relatedPosts.filter((post) => typeof post === 'object')}
|
||||
/>
|
||||
)}
|
||||
</div>
|
||||
</div>
|
||||
</article>
|
||||
)
|
||||
}
|
||||
|
||||
export async function generateMetadata({ params: paramsPromise }: Args): Promise<Metadata> {
|
||||
const { slug = '' } = await paramsPromise
|
||||
const post = await queryPostBySlug({ slug })
|
||||
|
||||
return generateMeta({ doc: post })
|
||||
}
|
||||
|
||||
const queryPostBySlug = cache(async ({ slug }: { slug: string }) => {
|
||||
const { isEnabled: draft } = await draftMode()
|
||||
|
||||
const payload = await getPayload({ config: configPromise })
|
||||
|
||||
const result = await payload.find({
|
||||
collection: 'posts',
|
||||
draft,
|
||||
limit: 1,
|
||||
overrideAccess: draft,
|
||||
pagination: false,
|
||||
where: {
|
||||
slug: {
|
||||
equals: slug,
|
||||
},
|
||||
},
|
||||
})
|
||||
|
||||
return result.docs?.[0] || null
|
||||
})
|
||||
15
apps/backend/src/app/(frontend)/posts/page.client.tsx
Normal file
@@ -0,0 +1,15 @@
|
||||
'use client'
|
||||
import { useHeaderTheme } from '@/providers/HeaderTheme'
|
||||
import React, { useEffect } from 'react'
|
||||
|
||||
const PageClient: React.FC = () => {
|
||||
/* Force the header to be dark mode while we have an image behind it */
|
||||
const { setHeaderTheme } = useHeaderTheme()
|
||||
|
||||
useEffect(() => {
|
||||
setHeaderTheme('light')
|
||||
}, [setHeaderTheme])
|
||||
return <React.Fragment />
|
||||
}
|
||||
|
||||
export default PageClient
|
||||
63
apps/backend/src/app/(frontend)/posts/page.tsx
Normal file
@@ -0,0 +1,63 @@
|
||||
import type { Metadata } from 'next/types'
|
||||
|
||||
import { CollectionArchive } from '@/components/CollectionArchive'
|
||||
import { PageRange } from '@/components/PageRange'
|
||||
import { Pagination } from '@/components/Pagination'
|
||||
import configPromise from '@payload-config'
|
||||
import { getPayload } from 'payload'
|
||||
import React from 'react'
|
||||
import PageClient from './page.client'
|
||||
|
||||
export const dynamic = 'force-static'
|
||||
export const revalidate = 600
|
||||
|
||||
export default async function Page() {
|
||||
const payload = await getPayload({ config: configPromise })
|
||||
|
||||
const posts = await payload.find({
|
||||
collection: 'posts',
|
||||
depth: 1,
|
||||
limit: 12,
|
||||
overrideAccess: false,
|
||||
select: {
|
||||
title: true,
|
||||
slug: true,
|
||||
categories: true,
|
||||
meta: true,
|
||||
},
|
||||
})
|
||||
|
||||
return (
|
||||
<div className="pt-24 pb-24">
|
||||
<PageClient />
|
||||
<div className="container mb-16">
|
||||
<div className="prose dark:prose-invert max-w-none">
|
||||
<h1>Posts</h1>
|
||||
</div>
|
||||
</div>
|
||||
|
||||
<div className="container mb-8">
|
||||
<PageRange
|
||||
collection="posts"
|
||||
currentPage={posts.page}
|
||||
limit={12}
|
||||
totalDocs={posts.totalDocs}
|
||||
/>
|
||||
</div>
|
||||
|
||||
<CollectionArchive posts={posts.docs} />
|
||||
|
||||
<div className="container">
|
||||
{posts.totalPages > 1 && posts.page && (
|
||||
<Pagination page={posts.page} totalPages={posts.totalPages} />
|
||||
)}
|
||||
</div>
|
||||
</div>
|
||||
)
|
||||
}
|
||||
|
||||
export function generateMetadata(): Metadata {
|
||||
return {
|
||||
title: `Payload Website Template Posts`,
|
||||
}
|
||||
}
|
||||
@@ -0,0 +1,15 @@
|
||||
'use client'
|
||||
import { useHeaderTheme } from '@/providers/HeaderTheme'
|
||||
import React, { useEffect } from 'react'
|
||||
|
||||
const PageClient: React.FC = () => {
|
||||
/* Force the header to be dark mode while we have an image behind it */
|
||||
const { setHeaderTheme } = useHeaderTheme()
|
||||
|
||||
useEffect(() => {
|
||||
setHeaderTheme('light')
|
||||
}, [setHeaderTheme])
|
||||
return <React.Fragment />
|
||||
}
|
||||
|
||||
export default PageClient
|
||||
@@ -0,0 +1,88 @@
|
||||
import type { Metadata } from 'next/types'
|
||||
|
||||
import { CollectionArchive } from '@/components/CollectionArchive'
|
||||
import { PageRange } from '@/components/PageRange'
|
||||
import { Pagination } from '@/components/Pagination'
|
||||
import configPromise from '@payload-config'
|
||||
import { getPayload } from 'payload'
|
||||
import React from 'react'
|
||||
import PageClient from './page.client'
|
||||
import { notFound } from 'next/navigation'
|
||||
|
||||
export const revalidate = 600
|
||||
|
||||
type Args = {
|
||||
params: Promise<{
|
||||
pageNumber: string
|
||||
}>
|
||||
}
|
||||
|
||||
export default async function Page({ params: paramsPromise }: Args) {
|
||||
const { pageNumber } = await paramsPromise
|
||||
const payload = await getPayload({ config: configPromise })
|
||||
|
||||
const sanitizedPageNumber = Number(pageNumber)
|
||||
|
||||
if (!Number.isInteger(sanitizedPageNumber)) notFound()
|
||||
|
||||
const posts = await payload.find({
|
||||
collection: 'posts',
|
||||
depth: 1,
|
||||
limit: 12,
|
||||
page: sanitizedPageNumber,
|
||||
overrideAccess: false,
|
||||
})
|
||||
|
||||
return (
|
||||
<div className="pt-24 pb-24">
|
||||
<PageClient />
|
||||
<div className="container mb-16">
|
||||
<div className="prose dark:prose-invert max-w-none">
|
||||
<h1>Posts</h1>
|
||||
</div>
|
||||
</div>
|
||||
|
||||
<div className="container mb-8">
|
||||
<PageRange
|
||||
collection="posts"
|
||||
currentPage={posts.page}
|
||||
limit={12}
|
||||
totalDocs={posts.totalDocs}
|
||||
/>
|
||||
</div>
|
||||
|
||||
<CollectionArchive posts={posts.docs} />
|
||||
|
||||
<div className="container">
|
||||
{posts?.page && posts?.totalPages > 1 && (
|
||||
<Pagination page={posts.page} totalPages={posts.totalPages} />
|
||||
)}
|
||||
</div>
|
||||
</div>
|
||||
)
|
||||
}
|
||||
|
||||
export async function generateMetadata({ params: paramsPromise }: Args): Promise<Metadata> {
|
||||
const { pageNumber } = await paramsPromise
|
||||
return {
|
||||
title: `Payload Website Template Posts Page ${pageNumber || ''}`,
|
||||
}
|
||||
}
|
||||
|
||||
export async function generateStaticParams() {
|
||||
const payload = await getPayload({ config: configPromise })
|
||||
const { totalDocs } = await payload.count({
|
||||
collection: 'posts',
|
||||
overrideAccess: false,
|
||||
})
|
||||
|
||||
const totalPages = Math.ceil(totalDocs / 10)
|
||||
|
||||
const pages: { pageNumber: string }[] = []
|
||||
|
||||
for (let i = 1; i <= totalPages; i++) {
|
||||
pages.push({ pageNumber: String(i) })
|
||||
}
|
||||
|
||||
return pages
|
||||
}
|
||||
15
apps/backend/src/app/(frontend)/search/page.client.tsx
Normal file
@@ -0,0 +1,15 @@
|
||||
'use client'
|
||||
import { useHeaderTheme } from '@/providers/HeaderTheme'
|
||||
import React, { useEffect } from 'react'
|
||||
|
||||
const PageClient: React.FC = () => {
|
||||
/* Force the header to be dark mode while we have an image behind it */
|
||||
const { setHeaderTheme } = useHeaderTheme()
|
||||
|
||||
useEffect(() => {
|
||||
setHeaderTheme('light')
|
||||
}, [setHeaderTheme])
|
||||
return <React.Fragment />
|
||||
}
|
||||
|
||||
export default PageClient
|
||||
88
apps/backend/src/app/(frontend)/search/page.tsx
Normal file
@@ -0,0 +1,88 @@
|
||||
import type { Metadata } from 'next/types'
|
||||
|
||||
import { CollectionArchive } from '@/components/CollectionArchive'
|
||||
import configPromise from '@payload-config'
|
||||
import { getPayload } from 'payload'
|
||||
import React from 'react'
|
||||
import { Search } from '@/search/Component'
|
||||
import PageClient from './page.client'
|
||||
import { CardPostData } from '@/components/Card'
|
||||
|
||||
type Args = {
|
||||
searchParams: Promise<{
|
||||
q: string
|
||||
}>
|
||||
}
|
||||
export default async function Page({ searchParams: searchParamsPromise }: Args) {
|
||||
const { q: query } = await searchParamsPromise
|
||||
const payload = await getPayload({ config: configPromise })
|
||||
|
||||
const posts = await payload.find({
|
||||
collection: 'search',
|
||||
depth: 1,
|
||||
limit: 12,
|
||||
select: {
|
||||
title: true,
|
||||
slug: true,
|
||||
categories: true,
|
||||
meta: true,
|
||||
},
|
||||
// pagination: false reduces overhead if you don't need totalDocs
|
||||
pagination: false,
|
||||
...(query
|
||||
? {
|
||||
where: {
|
||||
or: [
|
||||
{
|
||||
title: {
|
||||
like: query,
|
||||
},
|
||||
},
|
||||
{
|
||||
'meta.description': {
|
||||
like: query,
|
||||
},
|
||||
},
|
||||
{
|
||||
'meta.title': {
|
||||
like: query,
|
||||
},
|
||||
},
|
||||
{
|
||||
slug: {
|
||||
like: query,
|
||||
},
|
||||
},
|
||||
],
|
||||
},
|
||||
}
|
||||
: {}),
|
||||
})
|
||||
|
||||
return (
|
||||
<div className="pt-24 pb-24">
|
||||
<PageClient />
|
||||
<div className="container mb-16">
|
||||
<div className="prose dark:prose-invert max-w-none text-center">
|
||||
<h1 className="mb-8 lg:mb-16">Search</h1>
|
||||
|
||||
<div className="max-w-[50rem] mx-auto">
|
||||
<Search />
|
||||
</div>
|
||||
</div>
|
||||
</div>
|
||||
|
||||
{posts.totalDocs > 0 ? (
|
||||
<CollectionArchive posts={posts.docs as CardPostData[]} />
|
||||
) : (
|
||||
<div className="container">No results found.</div>
|
||||
)}
|
||||
</div>
|
||||
)
|
||||
}
|
||||
|
||||
export function generateMetadata(): Metadata {
|
||||
return {
|
||||
title: `Payload Website Template Search`,
|
||||
}
|
||||
}
|
||||
@@ -0,0 +1,24 @@
|
||||
/* THIS FILE WAS GENERATED AUTOMATICALLY BY PAYLOAD. */
|
||||
/* DO NOT MODIFY IT BECAUSE IT COULD BE REWRITTEN AT ANY TIME. */
|
||||
import type { Metadata } from 'next'
|
||||
|
||||
import config from '@payload-config'
|
||||
import { NotFoundPage, generatePageMetadata } from '@payloadcms/next/views'
|
||||
import { importMap } from '../importMap'
|
||||
|
||||
type Args = {
|
||||
params: Promise<{
|
||||
segments: string[]
|
||||
}>
|
||||
searchParams: Promise<{
|
||||
[key: string]: string | string[]
|
||||
}>
|
||||
}
|
||||
|
||||
export const generateMetadata = ({ params, searchParams }: Args): Promise<Metadata> =>
|
||||
generatePageMetadata({ config, params, searchParams })
|
||||
|
||||
const NotFound = ({ params, searchParams }: Args) =>
|
||||
NotFoundPage({ config, params, searchParams, importMap })
|
||||
|
||||
export default NotFound
|
||||
@@ -0,0 +1,24 @@
|
||||
/* THIS FILE WAS GENERATED AUTOMATICALLY BY PAYLOAD. */
|
||||
/* DO NOT MODIFY IT BECAUSE IT COULD BE REWRITTEN AT ANY TIME. */
|
||||
import type { Metadata } from 'next'
|
||||
|
||||
import config from '@payload-config'
|
||||
import { RootPage, generatePageMetadata } from '@payloadcms/next/views'
|
||||
import { importMap } from '../importMap'
|
||||
|
||||
type Args = {
|
||||
params: Promise<{
|
||||
segments: string[]
|
||||
}>
|
||||
searchParams: Promise<{
|
||||
[key: string]: string | string[]
|
||||
}>
|
||||
}
|
||||
|
||||
export const generateMetadata = ({ params, searchParams }: Args): Promise<Metadata> =>
|
||||
generatePageMetadata({ config, params, searchParams })
|
||||
|
||||
const Page = ({ params, searchParams }: Args) =>
|
||||
RootPage({ config, params, searchParams, importMap })
|
||||
|
||||
export default Page
|
||||
71
apps/backend/src/app/(payload)/admin/importMap.js
Normal file
@@ -0,0 +1,71 @@
|
||||
import { RscEntryLexicalCell as RscEntryLexicalCell_44fe37237e0ebf4470c9990d8cb7b07e } from '@payloadcms/richtext-lexical/rsc'
|
||||
import { RscEntryLexicalField as RscEntryLexicalField_44fe37237e0ebf4470c9990d8cb7b07e } from '@payloadcms/richtext-lexical/rsc'
|
||||
import { LexicalDiffComponent as LexicalDiffComponent_44fe37237e0ebf4470c9990d8cb7b07e } from '@payloadcms/richtext-lexical/rsc'
|
||||
import { InlineToolbarFeatureClient as InlineToolbarFeatureClient_e70f5e05f09f93e00b997edb1ef0c864 } from '@payloadcms/richtext-lexical/client'
|
||||
import { FixedToolbarFeatureClient as FixedToolbarFeatureClient_e70f5e05f09f93e00b997edb1ef0c864 } from '@payloadcms/richtext-lexical/client'
|
||||
import { HeadingFeatureClient as HeadingFeatureClient_e70f5e05f09f93e00b997edb1ef0c864 } from '@payloadcms/richtext-lexical/client'
|
||||
import { ParagraphFeatureClient as ParagraphFeatureClient_e70f5e05f09f93e00b997edb1ef0c864 } from '@payloadcms/richtext-lexical/client'
|
||||
import { UnderlineFeatureClient as UnderlineFeatureClient_e70f5e05f09f93e00b997edb1ef0c864 } from '@payloadcms/richtext-lexical/client'
|
||||
import { BoldFeatureClient as BoldFeatureClient_e70f5e05f09f93e00b997edb1ef0c864 } from '@payloadcms/richtext-lexical/client'
|
||||
import { ItalicFeatureClient as ItalicFeatureClient_e70f5e05f09f93e00b997edb1ef0c864 } from '@payloadcms/richtext-lexical/client'
|
||||
import { LinkFeatureClient as LinkFeatureClient_e70f5e05f09f93e00b997edb1ef0c864 } from '@payloadcms/richtext-lexical/client'
|
||||
import { OverviewComponent as OverviewComponent_a8a977ebc872c5d5ea7ee689724c0860 } from '@payloadcms/plugin-seo/client'
|
||||
import { MetaTitleComponent as MetaTitleComponent_a8a977ebc872c5d5ea7ee689724c0860 } from '@payloadcms/plugin-seo/client'
|
||||
import { MetaImageComponent as MetaImageComponent_a8a977ebc872c5d5ea7ee689724c0860 } from '@payloadcms/plugin-seo/client'
|
||||
import { MetaDescriptionComponent as MetaDescriptionComponent_a8a977ebc872c5d5ea7ee689724c0860 } from '@payloadcms/plugin-seo/client'
|
||||
import { PreviewComponent as PreviewComponent_a8a977ebc872c5d5ea7ee689724c0860 } from '@payloadcms/plugin-seo/client'
|
||||
import { SlugComponent as SlugComponent_92cc057d0a2abb4f6cf0307edf59f986 } from '@/fields/slug/SlugComponent'
|
||||
import { HorizontalRuleFeatureClient as HorizontalRuleFeatureClient_e70f5e05f09f93e00b997edb1ef0c864 } from '@payloadcms/richtext-lexical/client'
|
||||
import { BlocksFeatureClient as BlocksFeatureClient_e70f5e05f09f93e00b997edb1ef0c864 } from '@payloadcms/richtext-lexical/client'
|
||||
import { LinkToDoc as LinkToDoc_aead06e4cbf6b2620c5c51c9ab283634 } from '@payloadcms/plugin-search/client'
|
||||
import { ReindexButton as ReindexButton_aead06e4cbf6b2620c5c51c9ab283634 } from '@payloadcms/plugin-search/client'
|
||||
import { RowLabel as RowLabel_ec255a65fa6fa8d1faeb09cf35284224 } from '@/Header/RowLabel'
|
||||
import { RowLabel as RowLabel_1f6ff6ff633e3695d348f4f3c58f1466 } from '@/Footer/RowLabel'
|
||||
import { default as default_1a7510af427896d367a49dbf838d2de6 } from '@/components/BeforeDashboard'
|
||||
import { default as default_8a7ab0eb7ab5c511aba12e68480bfe5e } from '@/components/BeforeLogin'
|
||||
|
||||
export const importMap = {
|
||||
'@payloadcms/richtext-lexical/rsc#RscEntryLexicalCell':
|
||||
RscEntryLexicalCell_44fe37237e0ebf4470c9990d8cb7b07e,
|
||||
'@payloadcms/richtext-lexical/rsc#RscEntryLexicalField':
|
||||
RscEntryLexicalField_44fe37237e0ebf4470c9990d8cb7b07e,
|
||||
'@payloadcms/richtext-lexical/rsc#LexicalDiffComponent':
|
||||
LexicalDiffComponent_44fe37237e0ebf4470c9990d8cb7b07e,
|
||||
'@payloadcms/richtext-lexical/client#InlineToolbarFeatureClient':
|
||||
InlineToolbarFeatureClient_e70f5e05f09f93e00b997edb1ef0c864,
|
||||
'@payloadcms/richtext-lexical/client#FixedToolbarFeatureClient':
|
||||
FixedToolbarFeatureClient_e70f5e05f09f93e00b997edb1ef0c864,
|
||||
'@payloadcms/richtext-lexical/client#HeadingFeatureClient':
|
||||
HeadingFeatureClient_e70f5e05f09f93e00b997edb1ef0c864,
|
||||
'@payloadcms/richtext-lexical/client#ParagraphFeatureClient':
|
||||
ParagraphFeatureClient_e70f5e05f09f93e00b997edb1ef0c864,
|
||||
'@payloadcms/richtext-lexical/client#UnderlineFeatureClient':
|
||||
UnderlineFeatureClient_e70f5e05f09f93e00b997edb1ef0c864,
|
||||
'@payloadcms/richtext-lexical/client#BoldFeatureClient':
|
||||
BoldFeatureClient_e70f5e05f09f93e00b997edb1ef0c864,
|
||||
'@payloadcms/richtext-lexical/client#ItalicFeatureClient':
|
||||
ItalicFeatureClient_e70f5e05f09f93e00b997edb1ef0c864,
|
||||
'@payloadcms/richtext-lexical/client#LinkFeatureClient':
|
||||
LinkFeatureClient_e70f5e05f09f93e00b997edb1ef0c864,
|
||||
'@payloadcms/plugin-seo/client#OverviewComponent':
|
||||
OverviewComponent_a8a977ebc872c5d5ea7ee689724c0860,
|
||||
'@payloadcms/plugin-seo/client#MetaTitleComponent':
|
||||
MetaTitleComponent_a8a977ebc872c5d5ea7ee689724c0860,
|
||||
'@payloadcms/plugin-seo/client#MetaImageComponent':
|
||||
MetaImageComponent_a8a977ebc872c5d5ea7ee689724c0860,
|
||||
'@payloadcms/plugin-seo/client#MetaDescriptionComponent':
|
||||
MetaDescriptionComponent_a8a977ebc872c5d5ea7ee689724c0860,
|
||||
'@payloadcms/plugin-seo/client#PreviewComponent':
|
||||
PreviewComponent_a8a977ebc872c5d5ea7ee689724c0860,
|
||||
'@/fields/slug/SlugComponent#SlugComponent': SlugComponent_92cc057d0a2abb4f6cf0307edf59f986,
|
||||
'@payloadcms/richtext-lexical/client#HorizontalRuleFeatureClient':
|
||||
HorizontalRuleFeatureClient_e70f5e05f09f93e00b997edb1ef0c864,
|
||||
'@payloadcms/richtext-lexical/client#BlocksFeatureClient':
|
||||
BlocksFeatureClient_e70f5e05f09f93e00b997edb1ef0c864,
|
||||
'@payloadcms/plugin-search/client#LinkToDoc': LinkToDoc_aead06e4cbf6b2620c5c51c9ab283634,
|
||||
'@payloadcms/plugin-search/client#ReindexButton': ReindexButton_aead06e4cbf6b2620c5c51c9ab283634,
|
||||
'@/Header/RowLabel#RowLabel': RowLabel_ec255a65fa6fa8d1faeb09cf35284224,
|
||||
'@/Footer/RowLabel#RowLabel': RowLabel_1f6ff6ff633e3695d348f4f3c58f1466,
|
||||
'@/components/BeforeDashboard#default': default_1a7510af427896d367a49dbf838d2de6,
|
||||
'@/components/BeforeLogin#default': default_8a7ab0eb7ab5c511aba12e68480bfe5e,
|
||||
}
|
||||
20
apps/backend/src/app/(payload)/api/[...slug]/route.ts
Normal file
@@ -0,0 +1,20 @@
|
||||
/* THIS FILE WAS GENERATED AUTOMATICALLY BY PAYLOAD. */
|
||||
/* DO NOT MODIFY IT BECAUSE IT COULD BE REWRITTEN AT ANY TIME. */
|
||||
import config from '@payload-config'
|
||||
import '@payloadcms/next/css'
|
||||
import {
|
||||
REST_DELETE,
|
||||
REST_GET,
|
||||
REST_OPTIONS,
|
||||
REST_PATCH,
|
||||
REST_POST,
|
||||
REST_PUT,
|
||||
} from '@payloadcms/next/routes'
|
||||
|
||||
export const GET = REST_GET(config)
|
||||
export const POST = REST_POST(config)
|
||||
export const DELETE = REST_DELETE(config)
|
||||
export const PATCH = REST_PATCH(config)
|
||||
|
||||
export const PUT = REST_PUT(config)
|
||||
export const OPTIONS = REST_OPTIONS(config)
|
||||
@@ -0,0 +1,7 @@
/* THIS FILE WAS GENERATED AUTOMATICALLY BY PAYLOAD. */
/* DO NOT MODIFY IT BECAUSE IT COULD BE REWRITTEN AT ANY TIME. */
import config from '@payload-config'
import '@payloadcms/next/css'
import { GRAPHQL_PLAYGROUND_GET } from '@payloadcms/next/routes'

export const GET = GRAPHQL_PLAYGROUND_GET(config)
8
apps/backend/src/app/(payload)/api/graphql/route.ts
Normal file
@@ -0,0 +1,8 @@
/* THIS FILE WAS GENERATED AUTOMATICALLY BY PAYLOAD. */
/* DO NOT MODIFY IT BECAUSE IT COULD BE REWRITTEN AT ANY TIME. */
import config from '@payload-config'
import { GRAPHQL_POST, REST_OPTIONS } from '@payloadcms/next/routes'

export const POST = GRAPHQL_POST(config)

export const OPTIONS = REST_OPTIONS(config)
0
apps/backend/src/app/(payload)/custom.scss
Normal file
31
apps/backend/src/app/(payload)/layout.tsx
Normal file
@@ -0,0 +1,31 @@
|
||||
/* THIS FILE WAS GENERATED AUTOMATICALLY BY PAYLOAD. */
|
||||
/* DO NOT MODIFY IT BECAUSE IT COULD BE REWRITTEN AT ANY TIME. */
|
||||
import config from '@payload-config'
|
||||
import '@payloadcms/next/css'
|
||||
import type { ServerFunctionClient } from 'payload'
|
||||
import { handleServerFunctions, RootLayout } from '@payloadcms/next/layouts'
|
||||
import React from 'react'
|
||||
|
||||
import { importMap } from './admin/importMap.js'
|
||||
import './custom.scss'
|
||||
|
||||
type Args = {
|
||||
children: React.ReactNode
|
||||
}
|
||||
|
||||
const serverFunction: ServerFunctionClient = async function (args) {
|
||||
'use server'
|
||||
return handleServerFunctions({
|
||||
...args,
|
||||
config,
|
||||
importMap,
|
||||
})
|
||||
}
|
||||
|
||||
const Layout = ({ children }: Args) => (
|
||||
<RootLayout config={config} importMap={importMap} serverFunction={serverFunction}>
|
||||
{children}
|
||||
</RootLayout>
|
||||
)
|
||||
|
||||
export default Layout
|
||||
65
apps/backend/src/blocks/ArchiveBlock/Component.tsx
Normal file
@@ -0,0 +1,65 @@
|
||||
import type { Post, ArchiveBlock as ArchiveBlockProps } from '@/payload-types'
|
||||
|
||||
import configPromise from '@payload-config'
|
||||
import { getPayload } from 'payload'
|
||||
import React from 'react'
|
||||
import RichText from '@/components/RichText'
|
||||
|
||||
import { CollectionArchive } from '@/components/CollectionArchive'
|
||||
|
||||
export const ArchiveBlock: React.FC<
|
||||
ArchiveBlockProps & {
|
||||
id?: string
|
||||
}
|
||||
> = async (props) => {
|
||||
const { id, categories, introContent, limit: limitFromProps, populateBy, selectedDocs } = props
|
||||
|
||||
const limit = limitFromProps || 3
|
||||
|
||||
let posts: Post[] = []
|
||||
|
||||
if (populateBy === 'collection') {
|
||||
const payload = await getPayload({ config: configPromise })
|
||||
|
||||
const flattenedCategories = categories?.map((category) => {
|
||||
if (typeof category === 'object') return category.id
|
||||
else return category
|
||||
})
|
||||
|
||||
const fetchedPosts = await payload.find({
|
||||
collection: 'posts',
|
||||
depth: 1,
|
||||
limit,
|
||||
...(flattenedCategories && flattenedCategories.length > 0
|
||||
? {
|
||||
where: {
|
||||
categories: {
|
||||
in: flattenedCategories,
|
||||
},
|
||||
},
|
||||
}
|
||||
: {}),
|
||||
})
|
||||
|
||||
posts = fetchedPosts.docs
|
||||
} else {
|
||||
if (selectedDocs?.length) {
|
||||
const filteredSelectedPosts = selectedDocs.map((post) => {
|
||||
if (typeof post.value === 'object') return post.value
|
||||
}) as Post[]
|
||||
|
||||
posts = filteredSelectedPosts
|
||||
}
|
||||
}
|
||||
|
||||
return (
|
||||
<div className="my-16" id={`block-${id}`}>
|
||||
{introContent && (
|
||||
<div className="container mb-16">
|
||||
<RichText className="ms-0 max-w-[48rem]" data={introContent} enableGutter={false} />
|
||||
</div>
|
||||
)}
|
||||
<CollectionArchive posts={posts} />
|
||||
</div>
|
||||
)
|
||||
}
|
||||
94
apps/backend/src/blocks/ArchiveBlock/config.ts
Normal file
@@ -0,0 +1,94 @@
import type { Block } from 'payload'

import {
  FixedToolbarFeature,
  HeadingFeature,
  InlineToolbarFeature,
  lexicalEditor,
} from '@payloadcms/richtext-lexical'

export const Archive: Block = {
  slug: 'archive',
  interfaceName: 'ArchiveBlock',
  fields: [
    {
      name: 'introContent',
      type: 'richText',
      editor: lexicalEditor({
        features: ({ rootFeatures }) => {
          return [
            ...rootFeatures,
            HeadingFeature({ enabledHeadingSizes: ['h1', 'h2', 'h3', 'h4'] }),
            FixedToolbarFeature(),
            InlineToolbarFeature(),
          ]
        },
      }),
      label: 'Intro Content',
    },
    {
      name: 'populateBy',
      type: 'select',
      defaultValue: 'collection',
      options: [
        {
          label: 'Collection',
          value: 'collection',
        },
        {
          label: 'Individual Selection',
          value: 'selection',
        },
      ],
    },
    {
      name: 'relationTo',
      type: 'select',
      admin: {
        condition: (_, siblingData) => siblingData.populateBy === 'collection',
      },
      defaultValue: 'posts',
      label: 'Collections To Show',
      options: [
        {
          label: 'Posts',
          value: 'posts',
        },
      ],
    },
    {
      name: 'categories',
      type: 'relationship',
      admin: {
        condition: (_, siblingData) => siblingData.populateBy === 'collection',
      },
      hasMany: true,
      label: 'Categories To Show',
      relationTo: 'categories',
    },
    {
      name: 'limit',
      type: 'number',
      admin: {
        condition: (_, siblingData) => siblingData.populateBy === 'collection',
        step: 1,
      },
      defaultValue: 10,
      label: 'Limit',
    },
    {
      name: 'selectedDocs',
      type: 'relationship',
      admin: {
        condition: (_, siblingData) => siblingData.populateBy === 'selection',
      },
      hasMany: true,
      label: 'Selection',
      relationTo: ['posts'],
    },
  ],
  labels: {
    plural: 'Archives',
    singular: 'Archive',
  },
}
26
apps/backend/src/blocks/Banner/Component.tsx
Normal file
@@ -0,0 +1,26 @@
import type { BannerBlock as BannerBlockProps } from 'src/payload-types'

import { cn } from '@/utilities/ui'
import React from 'react'
import RichText from '@/components/RichText'

type Props = {
  className?: string
} & BannerBlockProps

export const BannerBlock: React.FC<Props> = ({ className, content, style }) => {
  return (
    <div className={cn('mx-auto my-8 w-full', className)}>
      <div
        className={cn('border py-3 px-6 flex items-center rounded', {
          'border-border bg-card': style === 'info',
          'border-error bg-error/30': style === 'error',
          'border-success bg-success/30': style === 'success',
          'border-warning bg-warning/30': style === 'warning',
        })}
      >
        <RichText data={content} enableGutter={false} enableProse={false} />
      </div>
    </div>
  )
}
37
apps/backend/src/blocks/Banner/config.ts
Normal file
@@ -0,0 +1,37 @@
import type { Block } from 'payload'

import {
  FixedToolbarFeature,
  InlineToolbarFeature,
  lexicalEditor,
} from '@payloadcms/richtext-lexical'

export const Banner: Block = {
  slug: 'banner',
  fields: [
    {
      name: 'style',
      type: 'select',
      defaultValue: 'info',
      options: [
        { label: 'Info', value: 'info' },
        { label: 'Warning', value: 'warning' },
        { label: 'Error', value: 'error' },
        { label: 'Success', value: 'success' },
      ],
      required: true,
    },
    {
      name: 'content',
      type: 'richText',
      editor: lexicalEditor({
        features: ({ rootFeatures }) => {
          return [...rootFeatures, FixedToolbarFeature(), InlineToolbarFeature()]
        },
      }),
      label: false,
      required: true,
    },
  ],
  interfaceName: 'BannerBlock',
}
23
apps/backend/src/blocks/CallToAction/Component.tsx
Normal file
@@ -0,0 +1,23 @@
import React from 'react'

import type { CallToActionBlock as CTABlockProps } from '@/payload-types'

import RichText from '@/components/RichText'
import { CMSLink } from '@/components/Link'

export const CallToActionBlock: React.FC<CTABlockProps> = ({ links, richText }) => {
  return (
    <div className="container">
      <div className="bg-card rounded border-border border p-4 flex flex-col gap-8 md:flex-row md:justify-between md:items-center">
        <div className="max-w-[48rem] flex items-center">
          {richText && <RichText className="mb-0" data={richText} enableGutter={false} />}
        </div>
        <div className="flex flex-col gap-8">
          {(links || []).map(({ link }, i) => {
            return <CMSLink key={i} size="lg" {...link} />
          })}
        </div>
      </div>
    </div>
  )
}
42
apps/backend/src/blocks/CallToAction/config.ts
Normal file
@@ -0,0 +1,42 @@
import type { Block } from 'payload'

import {
  FixedToolbarFeature,
  HeadingFeature,
  InlineToolbarFeature,
  lexicalEditor,
} from '@payloadcms/richtext-lexical'

import { linkGroup } from '../../fields/linkGroup'

export const CallToAction: Block = {
  slug: 'cta',
  interfaceName: 'CallToActionBlock',
  fields: [
    {
      name: 'richText',
      type: 'richText',
      editor: lexicalEditor({
        features: ({ rootFeatures }) => {
          return [
            ...rootFeatures,
            HeadingFeature({ enabledHeadingSizes: ['h1', 'h2', 'h3', 'h4'] }),
            FixedToolbarFeature(),
            InlineToolbarFeature(),
          ]
        },
      }),
      label: false,
    },
    linkGroup({
      appearances: ['default', 'outline'],
      overrides: {
        maxRows: 2,
      },
    }),
  ],
  labels: {
    plural: 'Calls to Action',
    singular: 'Call to Action',
  },
}
33
apps/backend/src/blocks/Code/Component.client.tsx
Normal file
@@ -0,0 +1,33 @@
'use client'
import { Highlight, themes } from 'prism-react-renderer'
import React from 'react'
import { CopyButton } from './CopyButton'

type Props = {
  code: string
  language?: string
}

export const Code: React.FC<Props> = ({ code, language = '' }) => {
  if (!code) return null

  return (
    <Highlight code={code} language={language} theme={themes.vsDark}>
      {({ getLineProps, getTokenProps, tokens }) => (
        <pre className="bg-black p-4 border text-xs border-border rounded overflow-x-auto">
          {tokens.map((line, i) => (
            <div key={i} {...getLineProps({ className: 'table-row', line })}>
              <span className="table-cell select-none text-right text-white/25">{i + 1}</span>
              <span className="table-cell pl-4">
                {line.map((token, key) => (
                  <span key={key} {...getTokenProps({ token })} />
                ))}
              </span>
            </div>
          ))}
          <CopyButton code={code} />
        </pre>
      )}
    </Highlight>
  )
}
21
apps/backend/src/blocks/Code/Component.tsx
Normal file
@@ -0,0 +1,21 @@
import React from 'react'

import { Code } from './Component.client'

export type CodeBlockProps = {
  code: string
  language?: string
  blockType: 'code'
}

type Props = CodeBlockProps & {
  className?: string
}

export const CodeBlock: React.FC<Props> = ({ className, code, language }) => {
  return (
    <div className={[className, 'not-prose'].filter(Boolean).join(' ')}>
      <Code code={code} language={language} />
    </div>
  )
}
33
apps/backend/src/blocks/Code/CopyButton.tsx
Normal file
@@ -0,0 +1,33 @@
'use client'
import { Button } from '@/components/ui/button'
import { CopyIcon } from '@payloadcms/ui/icons/Copy'
import { useState } from 'react'

export function CopyButton({ code }: { code: string }) {
  const [text, setText] = useState('Copy')

  function updateCopyStatus() {
    if (text === 'Copy') {
      setText(() => 'Copied!')
      setTimeout(() => {
        setText(() => 'Copy')
      }, 1000)
    }
  }

  return (
    <div className="flex justify-end align-middle">
      <Button
        className="flex gap-1"
        variant={'secondary'}
        onClick={async () => {
          await navigator.clipboard.writeText(code)
          updateCopyStatus()
        }}
      >
        <p>{text}</p>
        <CopyIcon />
      </Button>
    </div>
  )
}
33
apps/backend/src/blocks/Code/config.ts
Normal file
@@ -0,0 +1,33 @@
import type { Block } from 'payload'

export const Code: Block = {
  slug: 'code',
  interfaceName: 'CodeBlock',
  fields: [
    {
      name: 'language',
      type: 'select',
      defaultValue: 'typescript',
      options: [
        {
          label: 'Typescript',
          value: 'typescript',
        },
        {
          label: 'Javascript',
          value: 'javascript',
        },
        {
          label: 'CSS',
          value: 'css',
        },
      ],
    },
    {
      name: 'code',
      type: 'code',
      label: false,
      required: true,
    },
  ],
}
43
apps/backend/src/blocks/Content/Component.tsx
Normal file
@@ -0,0 +1,43 @@
import { cn } from '@/utilities/ui'
import React from 'react'
import RichText from '@/components/RichText'

import type { ContentBlock as ContentBlockProps } from '@/payload-types'

import { CMSLink } from '../../components/Link'

export const ContentBlock: React.FC<ContentBlockProps> = (props) => {
  const { columns } = props

  const colsSpanClasses = {
    full: '12',
    half: '6',
    oneThird: '4',
    twoThirds: '8',
  }

  return (
    <div className="container my-16">
      <div className="grid grid-cols-4 lg:grid-cols-12 gap-y-8 gap-x-16">
        {columns &&
          columns.length > 0 &&
          columns.map((col, index) => {
            const { enableLink, link, richText, size } = col

            return (
              <div
                className={cn(`col-span-4 lg:col-span-${colsSpanClasses[size!]}`, {
                  'md:col-span-2': size !== 'full',
                })}
                key={index}
              >
                {richText && <RichText data={richText} enableGutter={false} />}

                {enableLink && <CMSLink {...link} />}
              </div>
            )
          })}
      </div>
    </div>
  )
}
79
apps/backend/src/blocks/Content/config.ts
Normal file
@@ -0,0 +1,79 @@
import type { Block, Field } from 'payload'

import {
  FixedToolbarFeature,
  HeadingFeature,
  InlineToolbarFeature,
  lexicalEditor,
} from '@payloadcms/richtext-lexical'

import { link } from '@/fields/link'

const columnFields: Field[] = [
  {
    name: 'size',
    type: 'select',
    defaultValue: 'oneThird',
    options: [
      {
        label: 'One Third',
        value: 'oneThird',
      },
      {
        label: 'Half',
        value: 'half',
      },
      {
        label: 'Two Thirds',
        value: 'twoThirds',
      },
      {
        label: 'Full',
        value: 'full',
      },
    ],
  },
  {
    name: 'richText',
    type: 'richText',
    editor: lexicalEditor({
      features: ({ rootFeatures }) => {
        return [
          ...rootFeatures,
          HeadingFeature({ enabledHeadingSizes: ['h2', 'h3', 'h4'] }),
          FixedToolbarFeature(),
          InlineToolbarFeature(),
        ]
      },
    }),
    label: false,
  },
  {
    name: 'enableLink',
    type: 'checkbox',
  },
  link({
    overrides: {
      admin: {
        condition: (_data, siblingData) => {
          return Boolean(siblingData?.enableLink)
        },
      },
    },
  }),
]

export const Content: Block = {
  slug: 'content',
  interfaceName: 'ContentBlock',
  fields: [
    {
      name: 'columns',
      type: 'array',
      admin: {
        initCollapsed: true,
      },
      fields: columnFields,
    },
  ],
}
45
apps/backend/src/blocks/Form/Checkbox/index.tsx
Normal file
@@ -0,0 +1,45 @@
import type { CheckboxField } from '@payloadcms/plugin-form-builder/types'
import type { FieldErrorsImpl, FieldValues, UseFormRegister } from 'react-hook-form'

import { useFormContext } from 'react-hook-form'

import { Checkbox as CheckboxUi } from '@/components/ui/checkbox'
import { Label } from '@/components/ui/label'
import React from 'react'

import { Error } from '../Error'
import { Width } from '../Width'

export const Checkbox: React.FC<
  CheckboxField & {
    errors: Partial<FieldErrorsImpl>
    register: UseFormRegister<FieldValues>
  }
> = ({ name, defaultValue, errors, label, register, required, width }) => {
  const props = register(name, { required: required })
  const { setValue } = useFormContext()

  return (
    <Width width={width}>
      <div className="flex items-center gap-2">
        <CheckboxUi
          defaultChecked={defaultValue}
          id={name}
          {...props}
          onCheckedChange={(checked) => {
            setValue(props.name, checked)
          }}
        />
        <Label htmlFor={name}>
          {required && (
            <span className="required">
              * <span className="sr-only">(required)</span>
            </span>
          )}
          {label}
        </Label>
      </div>
      {errors[name] && <Error name={name} />}
    </Width>
  )
}
163
apps/backend/src/blocks/Form/Component.tsx
Normal file
@@ -0,0 +1,163 @@
'use client'
import type { FormFieldBlock, Form as FormType } from '@payloadcms/plugin-form-builder/types'

import { useRouter } from 'next/navigation'
import React, { useCallback, useState } from 'react'
import { useForm, FormProvider } from 'react-hook-form'
import RichText from '@/components/RichText'
import { Button } from '@/components/ui/button'
import type { DefaultTypedEditorState } from '@payloadcms/richtext-lexical'

import { fields } from './fields'
import { getClientSideURL } from '@/utilities/getURL'

export type FormBlockType = {
  blockName?: string
  blockType?: 'formBlock'
  enableIntro: boolean
  form: FormType
  introContent?: DefaultTypedEditorState
}

export const FormBlock: React.FC<
  {
    id?: string
  } & FormBlockType
> = (props) => {
  const {
    enableIntro,
    form: formFromProps,
    form: { id: formID, confirmationMessage, confirmationType, redirect, submitButtonLabel } = {},
    introContent,
  } = props

  const formMethods = useForm({
    defaultValues: formFromProps.fields,
  })
  const {
    control,
    formState: { errors },
    handleSubmit,
    register,
  } = formMethods

  const [isLoading, setIsLoading] = useState(false)
  const [hasSubmitted, setHasSubmitted] = useState<boolean>()
  const [error, setError] = useState<{ message: string; status?: string } | undefined>()
  const router = useRouter()

  const onSubmit = useCallback(
    (data: FormFieldBlock[]) => {
      let loadingTimerID: ReturnType<typeof setTimeout>
      const submitForm = async () => {
        setError(undefined)

        const dataToSend = Object.entries(data).map(([name, value]) => ({
          field: name,
          value,
        }))

        // delay loading indicator by 1s
        loadingTimerID = setTimeout(() => {
          setIsLoading(true)
        }, 1000)

        try {
          const req = await fetch(`${getClientSideURL()}/api/form-submissions`, {
            body: JSON.stringify({
              form: formID,
              submissionData: dataToSend,
            }),
            headers: {
              'Content-Type': 'application/json',
            },
            method: 'POST',
          })

          const res = await req.json()

          clearTimeout(loadingTimerID)

          if (req.status >= 400) {
            setIsLoading(false)

            setError({
              message: res.errors?.[0]?.message || 'Internal Server Error',
              status: res.status,
            })

            return
          }

          setIsLoading(false)
          setHasSubmitted(true)

          if (confirmationType === 'redirect' && redirect) {
            const { url } = redirect

            const redirectUrl = url

            if (redirectUrl) router.push(redirectUrl)
          }
        } catch (err) {
          console.warn(err)
          setIsLoading(false)
          setError({
            message: 'Something went wrong.',
          })
        }
      }

      void submitForm()
    },
    [router, formID, redirect, confirmationType],
  )

  return (
    <div className="container lg:max-w-[48rem]">
      {enableIntro && introContent && !hasSubmitted && (
        <RichText className="mb-8 lg:mb-12" data={introContent} enableGutter={false} />
      )}
      <div className="p-4 lg:p-6 border border-border rounded-[0.8rem]">
        <FormProvider {...formMethods}>
          {!isLoading && hasSubmitted && confirmationType === 'message' && (
            <RichText data={confirmationMessage} />
          )}
          {isLoading && !hasSubmitted && <p>Loading, please wait...</p>}
          {error && <div>{`${error.status || '500'}: ${error.message || ''}`}</div>}
          {!hasSubmitted && (
            <form id={formID} onSubmit={handleSubmit(onSubmit)}>
              <div className="mb-4 last:mb-0">
                {formFromProps &&
                  formFromProps.fields &&
                  formFromProps.fields?.map((field, index) => {
                    // eslint-disable-next-line @typescript-eslint/no-explicit-any
                    const Field: React.FC<any> = fields?.[field.blockType as keyof typeof fields]
                    if (Field) {
                      return (
                        <div className="mb-6 last:mb-0" key={index}>
                          <Field
                            form={formFromProps}
                            {...field}
                            {...formMethods}
                            control={control}
                            errors={errors}
                            register={register}
                          />
                        </div>
                      )
                    }
                    return null
                  })}
              </div>

              <Button form={formID} type="submit" variant="default">
                {submitButtonLabel}
              </Button>
            </form>
          )}
        </FormProvider>
      </div>
    </div>
  )
}
65
apps/backend/src/blocks/Form/Country/index.tsx
Normal file
@@ -0,0 +1,65 @@
import type { CountryField } from '@payloadcms/plugin-form-builder/types'
import type { Control, FieldErrorsImpl } from 'react-hook-form'

import { Label } from '@/components/ui/label'
import {
  Select,
  SelectContent,
  SelectItem,
  SelectTrigger,
  SelectValue,
} from '@/components/ui/select'
import React from 'react'
import { Controller } from 'react-hook-form'

import { Error } from '../Error'
import { Width } from '../Width'
import { countryOptions } from './options'

export const Country: React.FC<
  CountryField & {
    control: Control
    errors: Partial<FieldErrorsImpl>
  }
> = ({ name, control, errors, label, required, width }) => {
  return (
    <Width width={width}>
      <Label className="" htmlFor={name}>
        {label}

        {required && (
          <span className="required">
            * <span className="sr-only">(required)</span>
          </span>
        )}
      </Label>
      <Controller
        control={control}
        defaultValue=""
        name={name}
        render={({ field: { onChange, value } }) => {
          const controlledValue = countryOptions.find((t) => t.value === value)

          return (
            <Select onValueChange={(val) => onChange(val)} value={controlledValue?.value}>
              <SelectTrigger className="w-full" id={name}>
                <SelectValue placeholder={label} />
              </SelectTrigger>
              <SelectContent>
                {countryOptions.map(({ label, value }) => {
                  return (
                    <SelectItem key={value} value={value}>
                      {label}
                    </SelectItem>
                  )
                })}
              </SelectContent>
            </Select>
          )
        }}
        rules={{ required }}
      />
      {errors[name] && <Error name={name} />}
    </Width>
  )
}
982
apps/backend/src/blocks/Form/Country/options.ts
Normal file
@@ -0,0 +1,982 @@
export const countryOptions = [
  { label: 'Afghanistan', value: 'AF' },
  { label: 'Åland Islands', value: 'AX' },
  { label: 'Albania', value: 'AL' },
  { label: 'Algeria', value: 'DZ' },
  { label: 'American Samoa', value: 'AS' },
  { label: 'Andorra', value: 'AD' },
  { label: 'Angola', value: 'AO' },
  { label: 'Anguilla', value: 'AI' },
  { label: 'Antarctica', value: 'AQ' },
  { label: 'Antigua and Barbuda', value: 'AG' },
  { label: 'Argentina', value: 'AR' },
  { label: 'Armenia', value: 'AM' },
  { label: 'Aruba', value: 'AW' },
  { label: 'Australia', value: 'AU' },
  { label: 'Austria', value: 'AT' },
  { label: 'Azerbaijan', value: 'AZ' },
  { label: 'Bahamas', value: 'BS' },
  { label: 'Bahrain', value: 'BH' },
  { label: 'Bangladesh', value: 'BD' },
  { label: 'Barbados', value: 'BB' },
  { label: 'Belarus', value: 'BY' },
  { label: 'Belgium', value: 'BE' },
  { label: 'Belize', value: 'BZ' },
  { label: 'Benin', value: 'BJ' },
  { label: 'Bermuda', value: 'BM' },
  { label: 'Bhutan', value: 'BT' },
  { label: 'Bolivia', value: 'BO' },
  { label: 'Bosnia and Herzegovina', value: 'BA' },
  { label: 'Botswana', value: 'BW' },
  { label: 'Bouvet Island', value: 'BV' },
  { label: 'Brazil', value: 'BR' },
  { label: 'British Indian Ocean Territory', value: 'IO' },
  { label: 'Brunei Darussalam', value: 'BN' },
  { label: 'Bulgaria', value: 'BG' },
  { label: 'Burkina Faso', value: 'BF' },
  { label: 'Burundi', value: 'BI' },
  { label: 'Cambodia', value: 'KH' },
  { label: 'Cameroon', value: 'CM' },
  { label: 'Canada', value: 'CA' },
  { label: 'Cape Verde', value: 'CV' },
  { label: 'Cayman Islands', value: 'KY' },
  { label: 'Central African Republic', value: 'CF' },
  { label: 'Chad', value: 'TD' },
  { label: 'Chile', value: 'CL' },
  { label: 'China', value: 'CN' },
  { label: 'Christmas Island', value: 'CX' },
  { label: 'Cocos (Keeling) Islands', value: 'CC' },
  { label: 'Colombia', value: 'CO' },
  { label: 'Comoros', value: 'KM' },
  { label: 'Congo', value: 'CG' },
  { label: 'Congo, The Democratic Republic of the', value: 'CD' },
  { label: 'Cook Islands', value: 'CK' },
  { label: 'Costa Rica', value: 'CR' },
  { label: "Cote D'Ivoire", value: 'CI' },
  { label: 'Croatia', value: 'HR' },
  { label: 'Cuba', value: 'CU' },
  { label: 'Cyprus', value: 'CY' },
  { label: 'Czech Republic', value: 'CZ' },
  { label: 'Denmark', value: 'DK' },
  { label: 'Djibouti', value: 'DJ' },
  { label: 'Dominica', value: 'DM' },
  { label: 'Dominican Republic', value: 'DO' },
  { label: 'Ecuador', value: 'EC' },
  { label: 'Egypt', value: 'EG' },
  { label: 'El Salvador', value: 'SV' },
  { label: 'Equatorial Guinea', value: 'GQ' },
  { label: 'Eritrea', value: 'ER' },
  { label: 'Estonia', value: 'EE' },
  { label: 'Ethiopia', value: 'ET' },
  { label: 'Falkland Islands (Malvinas)', value: 'FK' },
  { label: 'Faroe Islands', value: 'FO' },
  { label: 'Fiji', value: 'FJ' },
  { label: 'Finland', value: 'FI' },
  { label: 'France', value: 'FR' },
  { label: 'French Guiana', value: 'GF' },
  { label: 'French Polynesia', value: 'PF' },
  { label: 'French Southern Territories', value: 'TF' },
  { label: 'Gabon', value: 'GA' },
  { label: 'Gambia', value: 'GM' },
  { label: 'Georgia', value: 'GE' },
  { label: 'Germany', value: 'DE' },
  { label: 'Ghana', value: 'GH' },
  { label: 'Gibraltar', value: 'GI' },
  { label: 'Greece', value: 'GR' },
  { label: 'Greenland', value: 'GL' },
  { label: 'Grenada', value: 'GD' },
  { label: 'Guadeloupe', value: 'GP' },
  { label: 'Guam', value: 'GU' },
  { label: 'Guatemala', value: 'GT' },
  { label: 'Guernsey', value: 'GG' },
  { label: 'Guinea', value: 'GN' },
  { label: 'Guinea-Bissau', value: 'GW' },
  { label: 'Guyana', value: 'GY' },
  { label: 'Haiti', value: 'HT' },
  { label: 'Heard Island and Mcdonald Islands', value: 'HM' },
  { label: 'Holy See (Vatican City State)', value: 'VA' },
  { label: 'Honduras', value: 'HN' },
  { label: 'Hong Kong', value: 'HK' },
  { label: 'Hungary', value: 'HU' },
  { label: 'Iceland', value: 'IS' },
  { label: 'India', value: 'IN' },
  { label: 'Indonesia', value: 'ID' },
  { label: 'Iran, Islamic Republic Of', value: 'IR' },
  { label: 'Iraq', value: 'IQ' },
  { label: 'Ireland', value: 'IE' },
  { label: 'Isle of Man', value: 'IM' },
  { label: 'Israel', value: 'IL' },
  { label: 'Italy', value: 'IT' },
  { label: 'Jamaica', value: 'JM' },
  { label: 'Japan', value: 'JP' },
  { label: 'Jersey', value: 'JE' },
  { label: 'Jordan', value: 'JO' },
  { label: 'Kazakhstan', value: 'KZ' },
  { label: 'Kenya', value: 'KE' },
  { label: 'Kiribati', value: 'KI' },
  { label: "Democratic People's Republic of Korea", value: 'KP' },
  { label: 'Korea, Republic of', value: 'KR' },
  { label: 'Kosovo', value: 'XK' },
  { label: 'Kuwait', value: 'KW' },
  { label: 'Kyrgyzstan', value: 'KG' },
  { label: "Lao People's Democratic Republic", value: 'LA' },
  { label: 'Latvia', value: 'LV' },
  { label: 'Lebanon', value: 'LB' },
  { label: 'Lesotho', value: 'LS' },
  { label: 'Liberia', value: 'LR' },
  { label: 'Libyan Arab Jamahiriya', value: 'LY' },
  { label: 'Liechtenstein', value: 'LI' },
  { label: 'Lithuania', value: 'LT' },
  { label: 'Luxembourg', value: 'LU' },
  { label: 'Macao', value: 'MO' },
  { label: 'Macedonia, The Former Yugoslav Republic of', value: 'MK' },
  { label: 'Madagascar', value: 'MG' },
  { label: 'Malawi', value: 'MW' },
  { label: 'Malaysia', value: 'MY' },
  { label: 'Maldives', value: 'MV' },
  { label: 'Mali', value: 'ML' },
  { label: 'Malta', value: 'MT' },
  { label: 'Marshall Islands', value: 'MH' },
  { label: 'Martinique', value: 'MQ' },
  { label: 'Mauritania', value: 'MR' },
  { label: 'Mauritius', value: 'MU' },
  { label: 'Mayotte', value: 'YT' },
  { label: 'Mexico', value: 'MX' },
  { label: 'Micronesia, Federated States of', value: 'FM' },
  { label: 'Moldova, Republic of', value: 'MD' },
  { label: 'Monaco', value: 'MC' },
  { label: 'Mongolia', value: 'MN' },
  { label: 'Montenegro', value: 'ME' },
  { label: 'Montserrat', value: 'MS' },
  { label: 'Morocco', value: 'MA' },
  { label: 'Mozambique', value: 'MZ' },
  { label: 'Myanmar', value: 'MM' },
  { label: 'Namibia', value: 'NA' },
  { label: 'Nauru', value: 'NR' },
  { label: 'Nepal', value: 'NP' },
  { label: 'Netherlands', value: 'NL' },
  { label: 'Netherlands Antilles', value: 'AN' },
  { label: 'New Caledonia', value: 'NC' },
  { label: 'New Zealand', value: 'NZ' },
  { label: 'Nicaragua', value: 'NI' },
  { label: 'Niger', value: 'NE' },
  { label: 'Nigeria', value: 'NG' },
  { label: 'Niue', value: 'NU' },
  { label: 'Norfolk Island', value: 'NF' },
  { label: 'Northern Mariana Islands', value: 'MP' },
  { label: 'Norway', value: 'NO' },
  { label: 'Oman', value: 'OM' },
  { label: 'Pakistan', value: 'PK' },
  { label: 'Palau', value: 'PW' },
  { label: 'Palestinian Territory, Occupied', value: 'PS' },
  { label: 'Panama', value: 'PA' },
  { label: 'Papua New Guinea', value: 'PG' },
  { label: 'Paraguay', value: 'PY' },
  { label: 'Peru', value: 'PE' },
  { label: 'Philippines', value: 'PH' },
  { label: 'Pitcairn', value: 'PN' },
  { label: 'Poland', value: 'PL' },
  { label: 'Portugal', value: 'PT' },
  { label: 'Puerto Rico', value: 'PR' },
  { label: 'Qatar', value: 'QA' },
  { label: 'Reunion', value: 'RE' },
  { label: 'Romania', value: 'RO' },
  { label: 'Russian Federation', value: 'RU' },
  { label: 'Rwanda', value: 'RW' },
  { label: 'Saint Helena', value: 'SH' },
  { label: 'Saint Kitts and Nevis', value: 'KN' },
  { label: 'Saint Lucia', value: 'LC' },
  { label: 'Saint Pierre and Miquelon', value: 'PM' },
  { label: 'Saint Vincent and the Grenadines', value: 'VC' },
  { label: 'Samoa', value: 'WS' },
  { label: 'San Marino', value: 'SM' },
  { label: 'Sao Tome and Principe', value: 'ST' },
  { label: 'Saudi Arabia', value: 'SA' },
  { label: 'Senegal', value: 'SN' },
  { label: 'Serbia', value: 'RS' },
  { label: 'Seychelles', value: 'SC' },
  { label: 'Sierra Leone', value: 'SL' },
  { label: 'Singapore', value: 'SG' },
  { label: 'Slovakia', value: 'SK' },
  { label: 'Slovenia', value: 'SI' },
  { label: 'Solomon Islands', value: 'SB' },
  { label: 'Somalia', value: 'SO' },
  { label: 'South Africa', value: 'ZA' },
  { label: 'South Georgia and the South Sandwich Islands', value: 'GS' },
  { label: 'Spain', value: 'ES' },
  { label: 'Sri Lanka', value: 'LK' },
  { label: 'Sudan', value: 'SD' },
  { label: 'Suriname', value: 'SR' },
  { label: 'Svalbard and Jan Mayen', value: 'SJ' },
  { label: 'Swaziland', value: 'SZ' },
  { label: 'Sweden', value: 'SE' },
  { label: 'Switzerland', value: 'CH' },
  { label: 'Syrian Arab Republic', value: 'SY' },
  { label: 'Taiwan', value: 'TW' },
  { label: 'Tajikistan', value: 'TJ' },
  { label: 'Tanzania, United Republic of', value: 'TZ' },
  { label: 'Thailand', value: 'TH' },
  { label: 'Timor-Leste', value: 'TL' },
  { label: 'Togo', value: 'TG' },
  { label: 'Tokelau', value: 'TK' },
  { label: 'Tonga', value: 'TO' },
  { label: 'Trinidad and Tobago', value: 'TT' },
  { label: 'Tunisia', value: 'TN' },
  { label: 'Turkey', value: 'TR' },
  { label: 'Turkmenistan', value: 'TM' },
  { label: 'Turks and Caicos Islands', value: 'TC' },
  { label: 'Tuvalu', value: 'TV' },
  { label: 'Uganda', value: 'UG' },
  { label: 'Ukraine', value: 'UA' },
  { label: 'United Arab Emirates', value: 'AE' },
  { label: 'United Kingdom', value: 'GB' },
  { label: 'United States', value: 'US' },
  { label: 'United States Minor Outlying Islands', value: 'UM' },
  { label: 'Uruguay', value: 'UY' },
  { label: 'Uzbekistan', value: 'UZ' },
  { label: 'Vanuatu', value: 'VU' },
  { label: 'Venezuela', value: 'VE' },
  { label: 'Viet Nam', value: 'VN' },
  { label: 'Virgin Islands, British', value: 'VG' },
  { label: 'Virgin Islands, U.S.', value: 'VI' },
  { label: 'Wallis and Futuna', value: 'WF' },
  { label: 'Western Sahara', value: 'EH' },
  { label: 'Yemen', value: 'YE' },
  { label: 'Zambia', value: 'ZM' },
  { label: 'Zimbabwe', value: 'ZW' },
]
38
apps/backend/src/blocks/Form/Email/index.tsx
Normal file
@@ -0,0 +1,38 @@
import type { EmailField } from '@payloadcms/plugin-form-builder/types'
import type { FieldErrorsImpl, FieldValues, UseFormRegister } from 'react-hook-form'

import { Input } from '@/components/ui/input'
import { Label } from '@/components/ui/label'
import React from 'react'

import { Error } from '../Error'
import { Width } from '../Width'

export const Email: React.FC<
  EmailField & {
    errors: Partial<FieldErrorsImpl>
    register: UseFormRegister<FieldValues>
  }
> = ({ name, defaultValue, errors, label, register, required, width }) => {
  return (
    <Width width={width}>
      <Label htmlFor={name}>
        {label}

        {required && (
          <span className="required">
            * <span className="sr-only">(required)</span>
          </span>
        )}
      </Label>
      <Input
        defaultValue={defaultValue}
        id={name}
        type="text"
        {...register(name, { pattern: /^\S[^\s@]*@\S+$/, required })}
      />

      {errors[name] && <Error name={name} />}
    </Width>
  )
}
15
apps/backend/src/blocks/Form/Error/index.tsx
Normal file
@@ -0,0 +1,15 @@
'use client'

import * as React from 'react'
import { useFormContext } from 'react-hook-form'

export const Error = ({ name }: { name: string }) => {
  const {
    formState: { errors },
  } = useFormContext()
  return (
    <div className="mt-2 text-red-500 text-sm">
      {(errors[name]?.message as string) || 'This field is required'}
    </div>
  )
}
13
apps/backend/src/blocks/Form/Message/index.tsx
Normal file
@@ -0,0 +1,13 @@
import RichText from '@/components/RichText'
import React from 'react'

import { Width } from '../Width'
import { DefaultTypedEditorState } from '@payloadcms/richtext-lexical'

export const Message: React.FC<{ message: DefaultTypedEditorState }> = ({ message }) => {
  return (
    <Width className="my-12" width="100">
      {message && <RichText data={message} />}
    </Width>
  )
}
Some files were not shown because too many files have changed in this diff.