commit bba4bb40c854b477e46f49a014d355e9f7b442c3
Author: chuan
Date: Tue Nov 11 01:56:44 2025 +0800
feat: init
diff --git a/.codespellrc b/.codespellrc
new file mode 100644
index 00000000..ffe730b7
--- /dev/null
+++ b/.codespellrc
@@ -0,0 +1,7 @@
+[codespell]
+# Ref: https://github.com/codespell-project/codespell#using-a-config-file
+skip = .git*,*.svg,i18n,*-lock.yaml,*.css,.codespellrc,migrations,*.js,*.map,*.mjs
+check-hidden = true
+# ignore all CamelCase and camelCase
+ignore-regex = \b[A-Za-z][a-z]+[A-Z][a-zA-Z]+\b
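+# e.g. "camelCase" and "CamelCase" are ignored, while all-lowercase or ALL-CAPS words are still checked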
+ignore-words-list = tread
diff --git a/.dockerignore b/.dockerignore
new file mode 100644
index 00000000..fe11e95b
--- /dev/null
+++ b/.dockerignore
@@ -0,0 +1,69 @@
+.git
+*.pyc
+.env
+venv
+.venv
+node_modules/
+**/node_modules/
+npm-debug.log
+.next/
+**/.next/
+.turbo/
+**/.turbo/
+build/
+**/build/
+out/
+**/out/
+dist/
+**/dist/
+# Logs
+npm-debug.log*
+pnpm-debug.log*
+.pnpm-debug.log*
+yarn-debug.log*
+yarn-error.log*
+
+# OS junk
+.DS_Store
+Thumbs.db
+
+# Editor settings
+.vscode
+.idea
+
+# Coverage and test output
+coverage/
+**/coverage/
+*.lcov
+.junit/
+test-results/
+
+# Caches and build artifacts
+.cache/
+**/.cache/
+storybook-static/
+*storybook.log
+*.tsbuildinfo
+
+# Local env and secrets
+.env.local
+.env.development.local
+.env.test.local
+.env.production.local
+.secrets
+tmp/
+temp/
+
+# Database/cache dumps
+*.rdb
+*.rdb.gz
+
+# Misc
+*.pem
+*.key
+
+# React Router - https://github.com/remix-run/react-router-templates/blob/dc79b1a065f59f3bfd840d4ef75cc27689b611e6/default/.dockerignore
+.react-router/
+build/
+node_modules/
+README.md
\ No newline at end of file
diff --git a/.env.example b/.env.example
new file mode 100644
index 00000000..90efa8b4
--- /dev/null
+++ b/.env.example
@@ -0,0 +1,56 @@
+# Database Settings
+POSTGRES_USER="plane"
+POSTGRES_PASSWORD="plane"
+POSTGRES_DB="plane"
+PGDATA="/var/lib/postgresql/data"
+
+# Redis Settings
+REDIS_HOST="plane-redis"
+REDIS_PORT="6379"
+
+# RabbitMQ Settings
+RABBITMQ_HOST="plane-mq"
+RABBITMQ_PORT="5672"
+RABBITMQ_USER="plane"
+RABBITMQ_PASSWORD="plane"
+RABBITMQ_VHOST="plane"
+
+LISTEN_HTTP_PORT=80
+LISTEN_HTTPS_PORT=443
+
+# AWS Settings
+AWS_REGION=""
+AWS_ACCESS_KEY_ID="access-key"
+AWS_SECRET_ACCESS_KEY="secret-key"
+AWS_S3_ENDPOINT_URL="http://plane-minio:9000"
+# Changing this requires a matching change in the proxy config for uploads when using the MinIO setup
+AWS_S3_BUCKET_NAME="uploads"
+# Maximum file upload limit (in bytes; 5242880 = 5 MiB)
+FILE_SIZE_LIMIT=5242880
+
+# GPT settings
+OPENAI_API_BASE="https://api.openai.com/v1" # deprecated
+OPENAI_API_KEY="sk-" # deprecated
+GPT_ENGINE="gpt-3.5-turbo" # deprecated
+
+# Settings related to Docker
+DOCKERIZED=1 # deprecated
+
+# Set to 1 if using the pre-configured MinIO setup
+USE_MINIO=1
+
+# If an SSL cert is to be generated, set CERT_EMAIL="<your-email>"
+CERT_ACME_CA=https://acme-v02.api.letsencrypt.org/directory
+TRUSTED_PROXIES=0.0.0.0/0
+SITE_ADDRESS=:80
+CERT_EMAIL=
+
+# For DNS-challenge-based certificate generation, set CERT_ACME_DNS and CERT_EMAIL
+# e.g. CERT_ACME_DNS="<your-acme-dns-provider>"
+CERT_ACME_DNS=
+
+# Force HTTPS for handling SSL Termination
+MINIO_ENDPOINT_SSL=0
+
+# API key rate limit
+API_KEY_RATE_LIMIT="60/minute"
diff --git a/.gitattributes b/.gitattributes
new file mode 100644
index 00000000..526c8a38
--- /dev/null
+++ b/.gitattributes
@@ -0,0 +1 @@
+*.sh text eol=lf
\ No newline at end of file
diff --git a/.github/ISSUE_TEMPLATE/--bug-report.yaml b/.github/ISSUE_TEMPLATE/--bug-report.yaml
new file mode 100644
index 00000000..ec037692
--- /dev/null
+++ b/.github/ISSUE_TEMPLATE/--bug-report.yaml
@@ -0,0 +1,73 @@
+name: Bug report
+description: Create a bug report to help us improve Plane
+title: "[bug]: "
+labels: [🐛bug]
+assignees: [vihar, pushya22]
+body:
+- type: markdown
+ attributes:
+ value: |
+ Thank you for taking the time to fill out this bug report.
+- type: checkboxes
+ attributes:
+ label: Is there an existing issue for this?
+ description: Please search to see if an issue already exists for the bug you encountered
+ options:
+ - label: I have searched the existing issues
+ required: true
+- type: textarea
+ attributes:
+ label: Current behavior
+ description: A concise description of what you're experiencing and what you expect
+ placeholder: |
+      When I do <X>, <Y> happens and I see the error message attached below:
+      ```...```
+      What I expect is <Z>
+ validations:
+ required: true
+- type: textarea
+ attributes:
+ label: Steps to reproduce
+    description: Add steps to reproduce this behaviour, including console or network logs and screenshots
+ placeholder: |
+ 1. Go to '...'
+ 2. Click on '....'
+ 3. Scroll down to '....'
+ 4. See error
+ validations:
+ required: true
+- type: dropdown
+ id: env
+ attributes:
+ label: Environment
+ options:
+ - Production
+ - Deploy preview
+ validations:
+ required: true
+- type: dropdown
+ id: browser
+ attributes:
+ label: Browser
+ options:
+ - Google Chrome
+ - Mozilla Firefox
+ - Safari
+ - Other
+- type: dropdown
+ id: variant
+ attributes:
+ label: Variant
+ options:
+ - Cloud
+ - Self-hosted
+ - Local
+ validations:
+ required: true
+- type: input
+ id: version
+ attributes:
+ label: Version
+ placeholder: v0.17.0-dev
+ validations:
+ required: true
\ No newline at end of file
diff --git a/.github/ISSUE_TEMPLATE/--feature-request.yaml b/.github/ISSUE_TEMPLATE/--feature-request.yaml
new file mode 100644
index 00000000..390c95aa
--- /dev/null
+++ b/.github/ISSUE_TEMPLATE/--feature-request.yaml
@@ -0,0 +1,29 @@
+name: Feature request
+description: Suggest a feature to improve Plane
+title: "[feature]: "
+labels: [✨feature]
+assignees: [vihar, pushya22]
+body:
+- type: markdown
+ attributes:
+ value: |
+ Thank you for taking the time to request a feature for Plane
+- type: checkboxes
+ attributes:
+ label: Is there an existing issue for this?
+ description: Please search to see if an issue related to this feature request already exists
+ options:
+ - label: I have searched the existing issues
+ required: true
+- type: textarea
+ attributes:
+ label: Summary
+ description: One paragraph description of the feature
+ validations:
+ required: true
+- type: textarea
+ attributes:
+ label: Why should this be worked on?
+ description: A concise description of the problems or use cases for this feature request
+ validations:
+ required: true
\ No newline at end of file
diff --git a/.github/ISSUE_TEMPLATE/config.yaml b/.github/ISSUE_TEMPLATE/config.yaml
new file mode 100644
index 00000000..29c26783
--- /dev/null
+++ b/.github/ISSUE_TEMPLATE/config.yaml
@@ -0,0 +1,6 @@
+contact_links:
+  - name: Help and support
+    url: https://discord.com/invite/A92xrEGCge
+    about: Reach out to us on our Discord server or GitHub discussions.
+ - name: Dedicated support
+ url: mailto:support@plane.so
+ about: Write to us if you'd like dedicated support using Plane
diff --git a/.github/pull_request_template.md b/.github/pull_request_template.md
new file mode 100644
index 00000000..fa445360
--- /dev/null
+++ b/.github/pull_request_template.md
@@ -0,0 +1,20 @@
+### Description
+
+
+### Type of Change
+
+- [ ] Bug fix (non-breaking change which fixes an issue)
+- [ ] Feature (non-breaking change which adds functionality)
+- [ ] Improvement (enhancement to existing functionality)
+- [ ] Code refactoring
+- [ ] Performance improvements
+- [ ] Documentation update
+
+### Screenshots and Media (if applicable)
+
+
+### Test Scenarios
+
+
+### References
+
\ No newline at end of file
diff --git a/.github/workflows/build-branch.yml b/.github/workflows/build-branch.yml
new file mode 100644
index 00000000..087a012d
--- /dev/null
+++ b/.github/workflows/build-branch.yml
@@ -0,0 +1,409 @@
+name: Branch Build CE
+
+on:
+ workflow_dispatch:
+ inputs:
+ build_type:
+ description: "Type of build to run"
+ required: true
+ type: choice
+ default: "Build"
+ options:
+ - "Build"
+ - "Release"
+ releaseVersion:
+ description: "Release Version"
+ type: string
+ default: v0.0.0
+ isPrerelease:
+ description: "Is Pre-release"
+ type: boolean
+ default: false
+ required: true
+ arm64:
+ description: "Build for ARM64 architecture"
+ required: false
+ default: false
+ type: boolean
+ aio_build:
+ description: "Build for AIO docker image"
+ required: false
+ default: false
+ type: boolean
+ push:
+ branches:
+ - preview
+ - canary
+
+concurrency:
+ group: ${{ github.workflow }}-${{ github.ref }}
+ cancel-in-progress: true
+
+env:
+ TARGET_BRANCH: ${{ github.ref_name }}
+ ARM64_BUILD: ${{ github.event.inputs.arm64 }}
+ BUILD_TYPE: ${{ github.event.inputs.build_type }}
+ RELEASE_VERSION: ${{ github.event.inputs.releaseVersion }}
+ IS_PRERELEASE: ${{ github.event.inputs.isPrerelease }}
+ AIO_BUILD: ${{ github.event.inputs.aio_build }}
+
+jobs:
+ branch_build_setup:
+ name: Build Setup
+ runs-on: ubuntu-22.04
+ outputs:
+ gh_branch_name: ${{ steps.set_env_variables.outputs.TARGET_BRANCH }}
+ gh_buildx_driver: ${{ steps.set_env_variables.outputs.BUILDX_DRIVER }}
+ gh_buildx_version: ${{ steps.set_env_variables.outputs.BUILDX_VERSION }}
+ gh_buildx_platforms: ${{ steps.set_env_variables.outputs.BUILDX_PLATFORMS }}
+ gh_buildx_endpoint: ${{ steps.set_env_variables.outputs.BUILDX_ENDPOINT }}
+
+ dh_img_web: ${{ steps.set_env_variables.outputs.DH_IMG_WEB }}
+ dh_img_space: ${{ steps.set_env_variables.outputs.DH_IMG_SPACE }}
+ dh_img_admin: ${{ steps.set_env_variables.outputs.DH_IMG_ADMIN }}
+ dh_img_live: ${{ steps.set_env_variables.outputs.DH_IMG_LIVE }}
+ dh_img_backend: ${{ steps.set_env_variables.outputs.DH_IMG_BACKEND }}
+ dh_img_proxy: ${{ steps.set_env_variables.outputs.DH_IMG_PROXY }}
+ dh_img_aio: ${{ steps.set_env_variables.outputs.DH_IMG_AIO }}
+
+ build_type: ${{steps.set_env_variables.outputs.BUILD_TYPE}}
+ build_release: ${{ steps.set_env_variables.outputs.BUILD_RELEASE }}
+ build_prerelease: ${{ steps.set_env_variables.outputs.BUILD_PRERELEASE }}
+ release_version: ${{ steps.set_env_variables.outputs.RELEASE_VERSION }}
+ aio_build: ${{ steps.set_env_variables.outputs.AIO_BUILD }}
+
+ steps:
+ - id: set_env_variables
+ name: Set Environment Variables
+ run: |
+ if [ "${{ env.ARM64_BUILD }}" == "true" ] || ([ "${{ env.BUILD_TYPE }}" == "Release" ] && [ "${{ env.IS_PRERELEASE }}" != "true" ]); then
+ echo "BUILDX_DRIVER=cloud" >> $GITHUB_OUTPUT
+ echo "BUILDX_VERSION=lab:latest" >> $GITHUB_OUTPUT
+ echo "BUILDX_PLATFORMS=linux/amd64,linux/arm64" >> $GITHUB_OUTPUT
+ echo "BUILDX_ENDPOINT=makeplane/plane-dev" >> $GITHUB_OUTPUT
+ else
+ echo "BUILDX_DRIVER=docker-container" >> $GITHUB_OUTPUT
+ echo "BUILDX_VERSION=latest" >> $GITHUB_OUTPUT
+ echo "BUILDX_PLATFORMS=linux/amd64" >> $GITHUB_OUTPUT
+ echo "BUILDX_ENDPOINT=" >> $GITHUB_OUTPUT
+ fi
+          BR_NAME=$(echo "${{ env.TARGET_BRANCH }}" | sed 's/[^a-zA-Z0-9.-]//g')
+ echo "TARGET_BRANCH=$BR_NAME" >> $GITHUB_OUTPUT
+
+ echo "DH_IMG_WEB=plane-frontend" >> $GITHUB_OUTPUT
+ echo "DH_IMG_SPACE=plane-space" >> $GITHUB_OUTPUT
+ echo "DH_IMG_ADMIN=plane-admin" >> $GITHUB_OUTPUT
+ echo "DH_IMG_LIVE=plane-live" >> $GITHUB_OUTPUT
+ echo "DH_IMG_BACKEND=plane-backend" >> $GITHUB_OUTPUT
+ echo "DH_IMG_PROXY=plane-proxy" >> $GITHUB_OUTPUT
+ echo "DH_IMG_AIO=plane-aio-community" >> $GITHUB_OUTPUT
+
+ echo "BUILD_TYPE=${{env.BUILD_TYPE}}" >> $GITHUB_OUTPUT
+ BUILD_RELEASE=false
+ BUILD_PRERELEASE=false
+ RELVERSION="latest"
+
+ BUILD_AIO=${{ env.AIO_BUILD }}
+
+ if [ "${{ env.BUILD_TYPE }}" == "Release" ]; then
+ FLAT_RELEASE_VERSION=$(echo "${{ env.RELEASE_VERSION }}" | sed 's/[^a-zA-Z0-9.-]//g')
+ echo "FLAT_RELEASE_VERSION=${FLAT_RELEASE_VERSION}" >> $GITHUB_OUTPUT
+
+ semver_regex="^v([0-9]+)\.([0-9]+)\.([0-9]+)(-[a-zA-Z0-9]+(-[a-zA-Z0-9]+)*)?$"
+ if [[ ! $FLAT_RELEASE_VERSION =~ $semver_regex ]]; then
+ echo "Invalid Release Version Format : $FLAT_RELEASE_VERSION"
+ echo "Please provide a valid SemVer version"
+ echo "e.g. v1.2.3 or v1.2.3-alpha-1"
+ echo "Exiting the build process"
+ exit 1 # Exit with status 1 to fail the step
+ fi
+ BUILD_RELEASE=true
+ RELVERSION=$FLAT_RELEASE_VERSION
+
+ if [ "${{ env.IS_PRERELEASE }}" == "true" ]; then
+ BUILD_PRERELEASE=true
+ fi
+
+ BUILD_AIO=true
+ fi
+
+ echo "BUILD_RELEASE=${BUILD_RELEASE}" >> $GITHUB_OUTPUT
+ echo "BUILD_PRERELEASE=${BUILD_PRERELEASE}" >> $GITHUB_OUTPUT
+ echo "RELEASE_VERSION=${RELVERSION}" >> $GITHUB_OUTPUT
+ echo "AIO_BUILD=${BUILD_AIO}" >> $GITHUB_OUTPUT
+
+ - id: checkout_files
+ name: Checkout Files
+ uses: actions/checkout@v4
+
+ branch_build_push_admin:
+ name: Build-Push Admin Docker Image
+ runs-on: ubuntu-22.04
+ needs: [branch_build_setup]
+ steps:
+ - name: Admin Build and Push
+ uses: makeplane/actions/build-push@v1.0.0
+ with:
+ build-release: ${{ needs.branch_build_setup.outputs.build_release }}
+ build-prerelease: ${{ needs.branch_build_setup.outputs.build_prerelease }}
+ release-version: ${{ needs.branch_build_setup.outputs.release_version }}
+ dockerhub-username: ${{ secrets.DOCKERHUB_USERNAME }}
+ dockerhub-token: ${{ secrets.DOCKERHUB_TOKEN }}
+ docker-image-owner: makeplane
+ docker-image-name: ${{ needs.branch_build_setup.outputs.dh_img_admin }}
+ build-context: .
+ dockerfile-path: ./apps/admin/Dockerfile.admin
+ buildx-driver: ${{ needs.branch_build_setup.outputs.gh_buildx_driver }}
+ buildx-version: ${{ needs.branch_build_setup.outputs.gh_buildx_version }}
+ buildx-platforms: ${{ needs.branch_build_setup.outputs.gh_buildx_platforms }}
+ buildx-endpoint: ${{ needs.branch_build_setup.outputs.gh_buildx_endpoint }}
+
+ branch_build_push_web:
+ name: Build-Push Web Docker Image
+ runs-on: ubuntu-22.04
+ needs: [branch_build_setup]
+ steps:
+ - name: Web Build and Push
+ uses: makeplane/actions/build-push@v1.0.0
+ with:
+ build-release: ${{ needs.branch_build_setup.outputs.build_release }}
+ build-prerelease: ${{ needs.branch_build_setup.outputs.build_prerelease }}
+ release-version: ${{ needs.branch_build_setup.outputs.release_version }}
+ dockerhub-username: ${{ secrets.DOCKERHUB_USERNAME }}
+ dockerhub-token: ${{ secrets.DOCKERHUB_TOKEN }}
+ docker-image-owner: makeplane
+ docker-image-name: ${{ needs.branch_build_setup.outputs.dh_img_web }}
+ build-context: .
+ dockerfile-path: ./apps/web/Dockerfile.web
+ buildx-driver: ${{ needs.branch_build_setup.outputs.gh_buildx_driver }}
+ buildx-version: ${{ needs.branch_build_setup.outputs.gh_buildx_version }}
+ buildx-platforms: ${{ needs.branch_build_setup.outputs.gh_buildx_platforms }}
+ buildx-endpoint: ${{ needs.branch_build_setup.outputs.gh_buildx_endpoint }}
+
+ branch_build_push_space:
+ name: Build-Push Space Docker Image
+ runs-on: ubuntu-22.04
+ needs: [branch_build_setup]
+ steps:
+ - name: Space Build and Push
+ uses: makeplane/actions/build-push@v1.0.0
+ with:
+ build-release: ${{ needs.branch_build_setup.outputs.build_release }}
+ build-prerelease: ${{ needs.branch_build_setup.outputs.build_prerelease }}
+ release-version: ${{ needs.branch_build_setup.outputs.release_version }}
+ dockerhub-username: ${{ secrets.DOCKERHUB_USERNAME }}
+ dockerhub-token: ${{ secrets.DOCKERHUB_TOKEN }}
+ docker-image-owner: makeplane
+ docker-image-name: ${{ needs.branch_build_setup.outputs.dh_img_space }}
+ build-context: .
+ dockerfile-path: ./apps/space/Dockerfile.space
+ buildx-driver: ${{ needs.branch_build_setup.outputs.gh_buildx_driver }}
+ buildx-version: ${{ needs.branch_build_setup.outputs.gh_buildx_version }}
+ buildx-platforms: ${{ needs.branch_build_setup.outputs.gh_buildx_platforms }}
+ buildx-endpoint: ${{ needs.branch_build_setup.outputs.gh_buildx_endpoint }}
+
+ branch_build_push_live:
+ name: Build-Push Live Collaboration Docker Image
+ runs-on: ubuntu-22.04
+ needs: [branch_build_setup]
+ steps:
+ - name: Live Build and Push
+ uses: makeplane/actions/build-push@v1.0.0
+ with:
+ build-release: ${{ needs.branch_build_setup.outputs.build_release }}
+ build-prerelease: ${{ needs.branch_build_setup.outputs.build_prerelease }}
+ release-version: ${{ needs.branch_build_setup.outputs.release_version }}
+ dockerhub-username: ${{ secrets.DOCKERHUB_USERNAME }}
+ dockerhub-token: ${{ secrets.DOCKERHUB_TOKEN }}
+ docker-image-owner: makeplane
+ docker-image-name: ${{ needs.branch_build_setup.outputs.dh_img_live }}
+ build-context: .
+ dockerfile-path: ./apps/live/Dockerfile.live
+ buildx-driver: ${{ needs.branch_build_setup.outputs.gh_buildx_driver }}
+ buildx-version: ${{ needs.branch_build_setup.outputs.gh_buildx_version }}
+ buildx-platforms: ${{ needs.branch_build_setup.outputs.gh_buildx_platforms }}
+ buildx-endpoint: ${{ needs.branch_build_setup.outputs.gh_buildx_endpoint }}
+
+ branch_build_push_api:
+ name: Build-Push API Server Docker Image
+ runs-on: ubuntu-22.04
+ needs: [branch_build_setup]
+ steps:
+ - name: Backend Build and Push
+ uses: makeplane/actions/build-push@v1.0.0
+ with:
+ build-release: ${{ needs.branch_build_setup.outputs.build_release }}
+ build-prerelease: ${{ needs.branch_build_setup.outputs.build_prerelease }}
+ release-version: ${{ needs.branch_build_setup.outputs.release_version }}
+ dockerhub-username: ${{ secrets.DOCKERHUB_USERNAME }}
+ dockerhub-token: ${{ secrets.DOCKERHUB_TOKEN }}
+ docker-image-owner: makeplane
+ docker-image-name: ${{ needs.branch_build_setup.outputs.dh_img_backend }}
+ build-context: ./apps/api
+ dockerfile-path: ./apps/api/Dockerfile.api
+ buildx-driver: ${{ needs.branch_build_setup.outputs.gh_buildx_driver }}
+ buildx-version: ${{ needs.branch_build_setup.outputs.gh_buildx_version }}
+ buildx-platforms: ${{ needs.branch_build_setup.outputs.gh_buildx_platforms }}
+ buildx-endpoint: ${{ needs.branch_build_setup.outputs.gh_buildx_endpoint }}
+
+ branch_build_push_proxy:
+ name: Build-Push Proxy Docker Image
+ runs-on: ubuntu-22.04
+ needs: [branch_build_setup]
+ steps:
+ - name: Proxy Build and Push
+ uses: makeplane/actions/build-push@v1.0.0
+ with:
+ build-release: ${{ needs.branch_build_setup.outputs.build_release }}
+ build-prerelease: ${{ needs.branch_build_setup.outputs.build_prerelease }}
+ release-version: ${{ needs.branch_build_setup.outputs.release_version }}
+ dockerhub-username: ${{ secrets.DOCKERHUB_USERNAME }}
+ dockerhub-token: ${{ secrets.DOCKERHUB_TOKEN }}
+ docker-image-owner: makeplane
+ docker-image-name: ${{ needs.branch_build_setup.outputs.dh_img_proxy }}
+ build-context: ./apps/proxy
+ dockerfile-path: ./apps/proxy/Dockerfile.ce
+ buildx-driver: ${{ needs.branch_build_setup.outputs.gh_buildx_driver }}
+ buildx-version: ${{ needs.branch_build_setup.outputs.gh_buildx_version }}
+ buildx-platforms: ${{ needs.branch_build_setup.outputs.gh_buildx_platforms }}
+ buildx-endpoint: ${{ needs.branch_build_setup.outputs.gh_buildx_endpoint }}
+
+ branch_build_push_aio:
+ if: ${{ needs.branch_build_setup.outputs.aio_build == 'true' }}
+ name: Build-Push AIO Docker Image
+ runs-on: ubuntu-22.04
+ needs:
+ - branch_build_setup
+ - branch_build_push_admin
+ - branch_build_push_web
+ - branch_build_push_space
+ - branch_build_push_live
+ - branch_build_push_api
+ - branch_build_push_proxy
+ steps:
+ - name: Checkout Files
+ uses: actions/checkout@v4
+
+ - name: Prepare AIO Assets
+ id: prepare_aio_assets
+ run: |
+ cd deployments/aio/community
+
+ if [ "${{ needs.branch_build_setup.outputs.build_type }}" == "Release" ]; then
+ aio_version=${{ needs.branch_build_setup.outputs.release_version }}
+ else
+ aio_version=${{ needs.branch_build_setup.outputs.gh_branch_name }}
+ fi
+ bash ./build.sh --release $aio_version
+ echo "AIO_BUILD_VERSION=${aio_version}" >> $GITHUB_OUTPUT
+
+ - name: Upload AIO Assets
+ uses: actions/upload-artifact@v4
+ with:
+ path: ./deployments/aio/community/dist
+ name: aio-assets-dist
+
+ - name: AIO Build and Push
+ uses: makeplane/actions/build-push@v1.1.0
+ with:
+ build-release: ${{ needs.branch_build_setup.outputs.build_release }}
+ build-prerelease: ${{ needs.branch_build_setup.outputs.build_prerelease }}
+ release-version: ${{ needs.branch_build_setup.outputs.release_version }}
+ dockerhub-username: ${{ secrets.DOCKERHUB_USERNAME }}
+ dockerhub-token: ${{ secrets.DOCKERHUB_TOKEN }}
+ docker-image-owner: makeplane
+ docker-image-name: ${{ needs.branch_build_setup.outputs.dh_img_aio }}
+ build-context: ./deployments/aio/community
+ dockerfile-path: ./deployments/aio/community/Dockerfile
+ buildx-driver: ${{ needs.branch_build_setup.outputs.gh_buildx_driver }}
+ buildx-version: ${{ needs.branch_build_setup.outputs.gh_buildx_version }}
+ buildx-platforms: ${{ needs.branch_build_setup.outputs.gh_buildx_platforms }}
+ buildx-endpoint: ${{ needs.branch_build_setup.outputs.gh_buildx_endpoint }}
+ additional-assets: aio-assets-dist
+ additional-assets-dir: ./deployments/aio/community/dist
+ build-args: |
+ PLANE_VERSION=${{ steps.prepare_aio_assets.outputs.AIO_BUILD_VERSION }}
+
+ upload_build_assets:
+ name: Upload Build Assets
+ runs-on: ubuntu-22.04
+ needs:
+ - branch_build_setup
+ - branch_build_push_admin
+ - branch_build_push_web
+ - branch_build_push_space
+ - branch_build_push_live
+ - branch_build_push_api
+ - branch_build_push_proxy
+ steps:
+ - name: Checkout Files
+ uses: actions/checkout@v4
+
+ - name: Update Assets
+ run: |
+ if [ "${{ needs.branch_build_setup.outputs.build_type }}" == "Release" ]; then
+ REL_VERSION=${{ needs.branch_build_setup.outputs.release_version }}
+ else
+ REL_VERSION=${{ needs.branch_build_setup.outputs.gh_branch_name }}
+ fi
+
+ cp ./deployments/cli/community/install.sh deployments/cli/community/setup.sh
+ sed -i 's/${APP_RELEASE:-stable}/${APP_RELEASE:-'${REL_VERSION}'}/g' deployments/cli/community/docker-compose.yml
+ # sed -i 's/APP_RELEASE=stable/APP_RELEASE='${REL_VERSION}'/g' deployments/cli/community/variables.env
+
+ - name: Upload Assets
+ uses: actions/upload-artifact@v4
+ with:
+ name: community-assets
+ path: |
+ ./deployments/cli/community/setup.sh
+ ./deployments/cli/community/restore.sh
+ ./deployments/cli/community/restore-airgapped.sh
+ ./deployments/cli/community/docker-compose.yml
+ ./deployments/cli/community/variables.env
+ ./deployments/swarm/community/swarm.sh
+
+ publish_release:
+ if: ${{ needs.branch_build_setup.outputs.build_type == 'Release' }}
+ name: Build Release
+ runs-on: ubuntu-22.04
+ needs:
+ [
+ branch_build_setup,
+ branch_build_push_admin,
+ branch_build_push_web,
+ branch_build_push_space,
+ branch_build_push_live,
+ branch_build_push_api,
+ branch_build_push_proxy,
+ ]
+ env:
+ REL_VERSION: ${{ needs.branch_build_setup.outputs.release_version }}
+ steps:
+ - name: Checkout
+ uses: actions/checkout@v4
+
+ - name: Update Assets
+ run: |
+ cp ./deployments/cli/community/install.sh deployments/cli/community/setup.sh
+ sed -i 's/${APP_RELEASE:-stable}/${APP_RELEASE:-'${REL_VERSION}'}/g' deployments/cli/community/docker-compose.yml
+ # sed -i 's/APP_RELEASE=stable/APP_RELEASE='${REL_VERSION}'/g' deployments/cli/community/variables.env
+
+ - name: Create Release
+ id: create_release
+ uses: softprops/action-gh-release@v2.1.0
+ env:
+ GITHUB_TOKEN: ${{ secrets.GITHUB_TOKEN }} # This token is provided by Actions, you do not need to create your own token
+ with:
+ tag_name: ${{ env.REL_VERSION }}
+ name: ${{ env.REL_VERSION }}
+ draft: false
+ prerelease: ${{ env.IS_PRERELEASE }}
+ generate_release_notes: true
+ files: |
+ ${{ github.workspace }}/deployments/cli/community/setup.sh
+ ${{ github.workspace }}/deployments/cli/community/restore.sh
+ ${{ github.workspace }}/deployments/cli/community/restore-airgapped.sh
+ ${{ github.workspace }}/deployments/cli/community/docker-compose.yml
+ ${{ github.workspace }}/deployments/cli/community/variables.env
+ ${{ github.workspace }}/deployments/swarm/community/swarm.sh
diff --git a/.github/workflows/check-version.yml b/.github/workflows/check-version.yml
new file mode 100644
index 00000000..855ee359
--- /dev/null
+++ b/.github/workflows/check-version.yml
@@ -0,0 +1,43 @@
+name: Version Change Before Release
+
+on:
+ pull_request:
+ branches:
+ - master
+
+jobs:
+ check-version:
+ runs-on: ubuntu-latest
+ steps:
+ - name: Checkout repository
+ uses: actions/checkout@v4
+ with:
+ ref: ${{ github.head_ref }}
+ fetch-depth: 0
+
+ - name: Setup Node.js
+ uses: actions/setup-node@v4
+
+ - name: Get PR Branch version
+ run: echo "PR_VERSION=$(node -p "require('./package.json').version")" >> $GITHUB_ENV
+
+ - name: Fetch base branch
+ run: git fetch origin master:master
+
+ - name: Get Master Branch version
+ run: |
+ git checkout master
+ echo "MASTER_VERSION=$(node -p "require('./package.json').version")" >> $GITHUB_ENV
+
+ - name: Get master branch version and compare
+ run: |
+ echo "Comparing versions: PR version is $PR_VERSION, Master version is $MASTER_VERSION"
+ if [ "$PR_VERSION" == "$MASTER_VERSION" ]; then
+ echo "Version in PR branch is the same as in master. Failing the CI."
+ exit 1
+ else
+ echo "Version check passed. Versions are different."
+ fi
+ env:
+ PR_VERSION: ${{ env.PR_VERSION }}
+ MASTER_VERSION: ${{ env.MASTER_VERSION }}
diff --git a/.github/workflows/codeql.yml b/.github/workflows/codeql.yml
new file mode 100644
index 00000000..e3aba5cf
--- /dev/null
+++ b/.github/workflows/codeql.yml
@@ -0,0 +1,62 @@
+name: "CodeQL"
+
+on:
+ workflow_dispatch:
+ push:
+ branches: ["preview", "canary", "master"]
+ pull_request:
+ branches: ["preview", "canary", "master"]
+
+jobs:
+ analyze:
+ name: Analyze
+ runs-on: ubuntu-latest
+ permissions:
+ actions: read
+ contents: read
+ security-events: write
+
+ strategy:
+ fail-fast: false
+ matrix:
+ language: ["python", "javascript"]
+ # CodeQL supports [ 'cpp', 'csharp', 'go', 'java', 'javascript', 'python', 'ruby' ]
+ # Use only 'java' to analyze code written in Java, Kotlin or both
+ # Use only 'javascript' to analyze code written in JavaScript, TypeScript or both
+ # Learn more about CodeQL language support at https://aka.ms/codeql-docs/language-support
+
+ steps:
+ - name: Checkout repository
+ uses: actions/checkout@v4
+
+ # Initializes the CodeQL tools for scanning.
+ - name: Initialize CodeQL
+ uses: github/codeql-action/init@v3
+ with:
+ languages: ${{ matrix.language }}
+ # If you wish to specify custom queries, you can do so here or in a config file.
+ # By default, queries listed here will override any specified in a config file.
+ # Prefix the list here with "+" to use these queries and those in the config file.
+
+ # For more details on CodeQL's query packs, refer to: https://docs.github.com/en/code-security/code-scanning/automatically-scanning-your-code-for-vulnerabilities-and-errors/configuring-code-scanning#using-queries-in-ql-packs
+ # queries: security-extended,security-and-quality
+
+ # Autobuild attempts to build any compiled languages (C/C++, C#, Go, Java, or Swift).
+ # If this step fails, then you should remove it and run the build manually (see below)
+ - name: Autobuild
+ uses: github/codeql-action/autobuild@v3
+
+ # ℹ️ Command-line programs to run using the OS shell.
+ # 📚 See https://docs.github.com/en/actions/using-workflows/workflow-syntax-for-github-actions#jobsjob_idstepsrun
+
+ # If the Autobuild fails above, remove it and uncomment the following three lines.
+      # and modify them (or add more) to build your code if your project requires it; refer to the EXAMPLE below for guidance.
+
+ # - run: |
+ # echo "Run, Build Application using script"
+ # ./location_of_script_within_repo/buildscript.sh
+
+ - name: Perform CodeQL Analysis
+ uses: github/codeql-action/analyze@v3
+ with:
+ category: "/language:${{matrix.language}}"
diff --git a/.github/workflows/codespell.yml b/.github/workflows/codespell.yml
new file mode 100644
index 00000000..ca87dc93
--- /dev/null
+++ b/.github/workflows/codespell.yml
@@ -0,0 +1,25 @@
+# Codespell configuration is within .codespellrc
+---
+name: Codespell
+
+on:
+ push:
+ branches: [preview]
+ pull_request:
+ branches: [preview]
+
+permissions:
+ contents: read
+
+jobs:
+ codespell:
+ name: Check for spelling errors
+ runs-on: ubuntu-latest
+
+ steps:
+ - name: Checkout
+ uses: actions/checkout@v4
+ - name: Annotate locations with typos
+ uses: codespell-project/codespell-problem-matcher@v1
+ - name: Codespell
+ uses: codespell-project/actions-codespell@v2
diff --git a/.github/workflows/feature-deployment.yml b/.github/workflows/feature-deployment.yml
new file mode 100644
index 00000000..dad3489d
--- /dev/null
+++ b/.github/workflows/feature-deployment.yml
@@ -0,0 +1,168 @@
+name: Feature Preview
+
+on:
+ workflow_dispatch:
+ inputs:
+ base_tag_name:
+ description: 'Base Tag Name'
+ required: false
+ default: 'preview'
+
+env:
+ TARGET_BRANCH: ${{ github.ref_name }}
+
+jobs:
+ branch_build_setup:
+ name: Build Setup
+ runs-on: ubuntu-latest
+ outputs:
+ gh_branch_name: ${{ steps.set_env_variables.outputs.TARGET_BRANCH }}
+ flat_branch_name: ${{ steps.set_env_variables.outputs.FLAT_BRANCH_NAME }}
+ gh_buildx_driver: ${{ steps.set_env_variables.outputs.BUILDX_DRIVER }}
+ gh_buildx_version: ${{ steps.set_env_variables.outputs.BUILDX_VERSION }}
+ gh_buildx_platforms: ${{ steps.set_env_variables.outputs.BUILDX_PLATFORMS }}
+ gh_buildx_endpoint: ${{ steps.set_env_variables.outputs.BUILDX_ENDPOINT }}
+ aio_base_tag: ${{ steps.set_env_variables.outputs.AIO_BASE_TAG }}
+ do_full_build: ${{ steps.set_env_variables.outputs.DO_FULL_BUILD }}
+ do_slim_build: ${{ steps.set_env_variables.outputs.DO_SLIM_BUILD }}
+
+ steps:
+ - id: set_env_variables
+ name: Set Environment Variables
+ run: |
+ echo "BUILDX_DRIVER=docker-container" >> $GITHUB_OUTPUT
+ echo "BUILDX_VERSION=latest" >> $GITHUB_OUTPUT
+ echo "BUILDX_PLATFORMS=linux/amd64" >> $GITHUB_OUTPUT
+ echo "BUILDX_ENDPOINT=" >> $GITHUB_OUTPUT
+
+ if [ "${{ github.event.inputs.base_tag_name }}" != "" ]; then
+ echo "AIO_BASE_TAG=${{ github.event.inputs.base_tag_name }}" >> $GITHUB_OUTPUT
+ else
+ echo "AIO_BASE_TAG=develop" >> $GITHUB_OUTPUT
+ fi
+
+ echo "TARGET_BRANCH=${{ env.TARGET_BRANCH }}" >> $GITHUB_OUTPUT
+
+ FLAT_BRANCH_NAME=$(echo "${{ env.TARGET_BRANCH }}" | sed 's/[^a-zA-Z0-9]/-/g')
+ echo "FLAT_BRANCH_NAME=$FLAT_BRANCH_NAME" >> $GITHUB_OUTPUT
+
+ - id: checkout_files
+ name: Checkout Files
+ uses: actions/checkout@v4
+
+ full_build_push:
+ runs-on: ubuntu-22.04
+ needs: [branch_build_setup]
+ env:
+ BUILD_TYPE: full
+ AIO_BASE_TAG: ${{ needs.branch_build_setup.outputs.aio_base_tag }}
+ AIO_IMAGE_TAGS: makeplane/plane-aio-feature:${{ needs.branch_build_setup.outputs.flat_branch_name }}
+ BUILDX_DRIVER: ${{ needs.branch_build_setup.outputs.gh_buildx_driver }}
+ BUILDX_VERSION: ${{ needs.branch_build_setup.outputs.gh_buildx_version }}
+ BUILDX_PLATFORMS: ${{ needs.branch_build_setup.outputs.gh_buildx_platforms }}
+ BUILDX_ENDPOINT: ${{ needs.branch_build_setup.outputs.gh_buildx_endpoint }}
+ steps:
+ - name: Login to Docker Hub
+ uses: docker/login-action@v3
+ with:
+ username: ${{ secrets.DOCKERHUB_USERNAME }}
+ password: ${{ secrets.DOCKERHUB_TOKEN }}
+
+ - name: Set up Docker Buildx
+ uses: docker/setup-buildx-action@v3
+ with:
+ driver: ${{ env.BUILDX_DRIVER }}
+ version: ${{ env.BUILDX_VERSION }}
+ endpoint: ${{ env.BUILDX_ENDPOINT }}
+
+ - name: Check out the repo
+ uses: actions/checkout@v4
+
+ - name: Build and Push to Docker Hub
+ uses: docker/build-push-action@v6.9.0
+ with:
+ context: .
+ file: ./aio/Dockerfile-app
+ platforms: ${{ env.BUILDX_PLATFORMS }}
+ tags: ${{ env.AIO_IMAGE_TAGS }}
+ push: true
+          build-args: |
+            BUILD_TAG=${{ env.AIO_BASE_TAG }}
+            BUILD_TYPE=${{ env.BUILD_TYPE }}
+ # cache-from: type=gha
+ # cache-to: type=gha,mode=max
+ env:
+ DOCKER_BUILDKIT: 1
+ DOCKER_USERNAME: ${{ secrets.DOCKERHUB_USERNAME }}
+ DOCKER_PASSWORD: ${{ secrets.DOCKERHUB_TOKEN }}
+    outputs:
+      AIO_IMAGE_TAGS: ${{ env.AIO_IMAGE_TAGS }}
+
+ feature-deploy:
+ needs: [branch_build_setup, full_build_push]
+ name: Feature Deploy
+ runs-on: ubuntu-latest
+ env:
+ KUBE_CONFIG_FILE: ${{ secrets.FEATURE_PREVIEW_KUBE_CONFIG }}
+ DEPLOYMENT_NAME: ${{ needs.branch_build_setup.outputs.flat_branch_name }}
+ steps:
+ - name: Install AWS cli
+ run: |
+ sudo apt-get update
+ sudo apt-get install -y python3-pip
+ pip3 install awscli
+ - name: Tailscale
+ uses: tailscale/github-action@v2
+ with:
+ oauth-client-id: ${{ secrets.TAILSCALE_OAUTH_CLIENT_ID }}
+ oauth-secret: ${{ secrets.TAILSCALE_OAUTH_SECRET }}
+ tags: tag:ci
+ - name: Kubectl Setup
+ run: |
+ curl -LO "https://dl.k8s.io/release/${{ vars.FEATURE_PREVIEW_KUBE_VERSION }}/bin/linux/amd64/kubectl"
+ chmod +x kubectl
+
+ mkdir -p ~/.kube
+ echo "$KUBE_CONFIG_FILE" > ~/.kube/config
+ chmod 600 ~/.kube/config
+ - name: HELM Setup
+ run: |
+ curl -fsSL -o get_helm.sh https://raw.githubusercontent.com/helm/helm/main/scripts/get-helm-3
+ chmod 700 get_helm.sh
+ ./get_helm.sh
+ - name: App Deploy
+ run: |
+ helm --kube-insecure-skip-tls-verify repo add feature-preview ${{ vars.FEATURE_PREVIEW_HELM_CHART_URL }}
+
+ APP_NAMESPACE="${{ vars.FEATURE_PREVIEW_NAMESPACE }}"
+
+ helm --kube-insecure-skip-tls-verify uninstall \
+ ${{ env.DEPLOYMENT_NAME }} \
+ --namespace $APP_NAMESPACE \
+ --timeout 10m0s \
+ --wait \
+ --ignore-not-found
+
+ METADATA=$(helm --kube-insecure-skip-tls-verify upgrade \
+ --install=true \
+ --namespace $APP_NAMESPACE \
+ --set dockerhub.loginid=${{ secrets.DOCKERHUB_USERNAME }} \
+ --set dockerhub.password=${{ secrets.DOCKERHUB_TOKEN_RO}} \
+ --set config.feature_branch=${{ env.DEPLOYMENT_NAME }} \
+ --set ingress.primaryDomain=${{vars.FEATURE_PREVIEW_PRIMARY_DOMAIN || 'feature.plane.tools' }} \
+ --set ingress.tls_secret=${{vars.FEATURE_PREVIEW_INGRESS_TLS_SECRET || '' }} \
+ --output json \
+ --timeout 10m0s \
+ --wait \
+ ${{ env.DEPLOYMENT_NAME }} feature-preview/${{ vars.FEATURE_PREVIEW_HELM_CHART_NAME }} )
+
+ APP_NAME=$(echo $METADATA | jq -r '.name')
+
+ INGRESS_HOSTNAME=$(kubectl get ingress -n $APP_NAMESPACE --insecure-skip-tls-verify \
+ -o jsonpath='{.items[?(@.metadata.annotations.meta\.helm\.sh\/release-name=="'$APP_NAME'")]}' | \
+ jq -r '.spec.rules[0].host')
+
+ echo "****************************************"
+ echo "APP NAME ::: $APP_NAME"
+ echo "INGRESS HOSTNAME ::: $INGRESS_HOSTNAME"
+ echo "****************************************"
diff --git a/.github/workflows/pull-request-build-lint-api.yml b/.github/workflows/pull-request-build-lint-api.yml
new file mode 100644
index 00000000..50d105ef
--- /dev/null
+++ b/.github/workflows/pull-request-build-lint-api.yml
@@ -0,0 +1,40 @@
+name: Build and lint API
+
+on:
+ workflow_dispatch:
+ pull_request:
+ branches:
+ - "preview"
+ types:
+ - "opened"
+ - "synchronize"
+ - "ready_for_review"
+ - "review_requested"
+ - "reopened"
+ paths:
+ - "apps/api/**"
+
+concurrency:
+ group: ${{ github.workflow }}-${{ github.ref }}
+ cancel-in-progress: true
+
+jobs:
+ lint-api:
+ name: Lint API
+ runs-on: ubuntu-latest
+ timeout-minutes: 25
+ if: |
+ github.event.pull_request.draft == false &&
+ github.event.pull_request.requested_reviewers != null
+ steps:
+ - uses: actions/checkout@v4
+ - name: Set up Python
+ uses: actions/setup-python@v5
+ with:
+ python-version: "3.x"
+      - name: Install Ruff
+ run: python -m pip install ruff
+ - name: Install API Dependencies
+ run: cd apps/api && pip install -r requirements.txt
+ - name: Lint apps/api
+ run: ruff check --fix apps/api
diff --git a/.github/workflows/pull-request-build-lint-web-apps.yml b/.github/workflows/pull-request-build-lint-web-apps.yml
new file mode 100644
index 00000000..435ec209
--- /dev/null
+++ b/.github/workflows/pull-request-build-lint-web-apps.yml
@@ -0,0 +1,53 @@
+name: Build and lint web apps
+
+on:
+ workflow_dispatch:
+ pull_request:
+ branches:
+ - "preview"
+ types:
+ - "opened"
+ - "synchronize"
+ - "ready_for_review"
+ - "review_requested"
+ - "reopened"
+
+concurrency:
+ group: ${{ github.workflow }}-${{ github.ref }}
+ cancel-in-progress: true
+
+jobs:
+ build-and-lint:
+ name: Build and lint web apps
+ runs-on: ubuntu-latest
+ timeout-minutes: 25
+ if: |
+ github.event.pull_request.draft == false &&
+ github.event.pull_request.requested_reviewers != null
+ env:
+ TURBO_SCM_BASE: ${{ github.event.pull_request.base.sha }}
+ TURBO_SCM_HEAD: ${{ github.sha }}
+ steps:
+ - name: Checkout code
+ uses: actions/checkout@v4
+ with:
+ fetch-depth: 50
+ filter: blob:none
+
+ - name: Set up Node.js
+ uses: actions/setup-node@v4
+
+ - name: Enable Corepack and pnpm
+ run: corepack enable pnpm
+
+ - name: Install dependencies
+ run: pnpm install --frozen-lockfile
+
+ - name: Lint Affected
+ run: pnpm turbo run check:lint --affected
+
+ - name: Check Affected format
+ run: pnpm turbo run check:format --affected
+
+ - name: Build Affected
+ run: pnpm turbo run build --affected
diff --git a/.github/workflows/sync-repo-pr.yml b/.github/workflows/sync-repo-pr.yml
new file mode 100644
index 00000000..548ccbf4
--- /dev/null
+++ b/.github/workflows/sync-repo-pr.yml
@@ -0,0 +1,52 @@
+name: Create PR on Sync
+
+on:
+ workflow_dispatch:
+ push:
+ branches:
+ - "sync/**"
+
+env:
+ CURRENT_BRANCH: ${{ github.ref_name }}
+  TARGET_BRANCH: "preview" # The target branch to merge changes into (e.g., develop)
+ GITHUB_TOKEN: ${{ secrets.ACCESS_TOKEN }} # Personal access token required to modify contents and workflows
+ ACCOUNT_USER_NAME: ${{ vars.ACCOUNT_USER_NAME }}
+ ACCOUNT_USER_EMAIL: ${{ vars.ACCOUNT_USER_EMAIL }}
+
+jobs:
+ create_pull_request:
+ runs-on: ubuntu-latest
+ permissions:
+ pull-requests: write
+ contents: write
+ steps:
+ - name: Checkout code
+ uses: actions/checkout@v4
+ with:
+ fetch-depth: 0 # Fetch all history for all branches and tags
+
+ - name: Setup Git
+ run: |
+ git config user.name "$ACCOUNT_USER_NAME"
+ git config user.email "$ACCOUNT_USER_EMAIL"
+
+ - name: Setup GH CLI and Git Config
+ run: |
+ type -p curl >/dev/null || (sudo apt update && sudo apt install curl -y)
+ curl -fsSL https://cli.github.com/packages/githubcli-archive-keyring.gpg | sudo dd of=/usr/share/keyrings/githubcli-archive-keyring.gpg
+ sudo chmod go+r /usr/share/keyrings/githubcli-archive-keyring.gpg
+ echo "deb [arch=$(dpkg --print-architecture) signed-by=/usr/share/keyrings/githubcli-archive-keyring.gpg] https://cli.github.com/packages stable main" | sudo tee /etc/apt/sources.list.d/github-cli.list > /dev/null
+ sudo apt update
+ sudo apt install gh -y
+
+ - name: Create PR to Target Branch
+ run: |
+ # get all pull requests and check if there is already a PR
+ PR_EXISTS=$(gh pr list --base $TARGET_BRANCH --head $CURRENT_BRANCH --state open --json number | jq '.[] | .number')
+ if [ -n "$PR_EXISTS" ]; then
+ echo "Pull Request already exists: $PR_EXISTS"
+ else
+ echo "Creating new pull request"
+ PR_URL=$(gh pr create --base $TARGET_BRANCH --head $CURRENT_BRANCH --title "${{ vars.SYNC_PR_TITLE }}" --body "")
+ echo "Pull Request created: $PR_URL"
+ fi
diff --git a/.github/workflows/sync-repo.yml b/.github/workflows/sync-repo.yml
new file mode 100644
index 00000000..5d6c72cb
--- /dev/null
+++ b/.github/workflows/sync-repo.yml
@@ -0,0 +1,44 @@
+name: Sync Repositories
+
+on:
+ workflow_dispatch:
+ push:
+ branches:
+ - preview
+
+env:
+ SOURCE_BRANCH_NAME: ${{ github.ref_name }}
+
+jobs:
+ sync_changes:
+ runs-on: ubuntu-22.04
+ permissions:
+ pull-requests: write
+ contents: read
+ steps:
+ - name: Checkout Code
+ uses: actions/checkout@v4
+ with:
+ persist-credentials: false
+ fetch-depth: 0
+
+ - name: Setup GH CLI
+ run: |
+ type -p curl >/dev/null || (sudo apt update && sudo apt install curl -y)
+ curl -fsSL https://cli.github.com/packages/githubcli-archive-keyring.gpg | sudo dd of=/usr/share/keyrings/githubcli-archive-keyring.gpg
+ sudo chmod go+r /usr/share/keyrings/githubcli-archive-keyring.gpg
+ echo "deb [arch=$(dpkg --print-architecture) signed-by=/usr/share/keyrings/githubcli-archive-keyring.gpg] https://cli.github.com/packages stable main" | sudo tee /etc/apt/sources.list.d/github-cli.list > /dev/null
+ sudo apt update
+ sudo apt install gh -y
+
+ - name: Push Changes to Target Repo
+ env:
+ GH_TOKEN: ${{ secrets.ACCESS_TOKEN }}
+ run: |
+ TARGET_REPO="${{ vars.SYNC_TARGET_REPO }}"
+ TARGET_BRANCH="${{ vars.SYNC_TARGET_BRANCH_NAME }}"
+ SOURCE_BRANCH="${{ env.SOURCE_BRANCH_NAME }}"
+
+ git checkout $SOURCE_BRANCH
+ git remote add target-origin-a "https://$GH_TOKEN@github.com/$TARGET_REPO.git"
+ git push target-origin-a $SOURCE_BRANCH:$TARGET_BRANCH
diff --git a/.gitignore b/.gitignore
new file mode 100644
index 00000000..0edc47dc
--- /dev/null
+++ b/.gitignore
@@ -0,0 +1,104 @@
+node_modules
+.next
+.yarn
+
+### NextJS ###
+# Dependencies
+/node_modules
+/.pnp
+.pnp.js
+
+# Testing
+/coverage
+
+# Next.js
+/.next/
+/out/
+
+# Production
+dist/
+out/
+build/
+.react-router/
+
+# Misc
+.DS_Store
+*.pem
+.history
+tsconfig.tsbuildinfo
+
+# Debug
+npm-debug.log*
+yarn-debug.log*
+yarn-error.log*
+pnpm-debug.log*
+.pnpm-debug.log*
+
+# Local env files
+.env
+.env.local
+.env.development.local
+.env.test.local
+.env.production.local
+
+# Vercel
+.vercel
+
+# Turborepo
+.turbo
+
+## Django ##
+venv
+.venv
+*.pyc
+staticfiles
+mediafiles
+.env
+.DS_Store
+logs/
+htmlcov/
+.coverage
+
+node_modules/
+assets/dist/
+npm-debug.log
+yarn-error.log
+pnpm-debug.log
+
+# Editor directories and files
+.idea
+*.suo
+*.ntvs*
+*.njsproj
+*.sln
+package-lock.json
+.vscode
+
+# Sentry
+.sentryclirc
+
+# lock files
+package-lock.json
+
+
+
+.secrets
+tmp/
+
+## packages
+dist
+.temp/
+deploy/selfhost/plane-app/
+
+## Storybook
+*storybook.log
+output.css
+
+dev-editor
+# Redis
+*.rdb
+*.rdb.gz
+
+storybook-static
+
+CLAUDE.md
diff --git a/.idx/dev.nix b/.idx/dev.nix
new file mode 100644
index 00000000..f150f679
--- /dev/null
+++ b/.idx/dev.nix
@@ -0,0 +1,16 @@
+{ pkgs, ... }: {
+
+ # Which nixpkgs channel to use.
+ channel = "stable-23.11"; # or "unstable"
+
+ # Use https://search.nixos.org/packages to find packages
+ packages = [
+ pkgs.nodejs_20
+ pkgs.python3
+ ];
+
+ services.docker.enable = true;
+ services.postgres.enable = true;
+ services.redis.enable = true;
+
+}
\ No newline at end of file
diff --git a/.mise.toml b/.mise.toml
new file mode 100644
index 00000000..716b1b5b
--- /dev/null
+++ b/.mise.toml
@@ -0,0 +1,2 @@
+[tools]
+node = "22.18.0"
diff --git a/.npmrc b/.npmrc
new file mode 100644
index 00000000..d652acc3
--- /dev/null
+++ b/.npmrc
@@ -0,0 +1,34 @@
+# Enforce pnpm workspace behavior and allow Turbo's lifecycle hooks if scripts are disabled
+# This repo uses pnpm with workspaces.
+
+# Prefer linking local workspace packages when available
+prefer-workspace-packages=true
+link-workspace-packages=true
+shared-workspace-lockfile=true
+
+# Make peer installs smoother across the monorepo
+auto-install-peers=true
+strict-peer-dependencies=false
+
+# If scripts are disabled (e.g., CI with --ignore-scripts), allowlisted packages can still run their hooks
+# Turbo occasionally performs postinstall tasks for optimal performance
+# moved to pnpm-workspace.yaml: onlyBuiltDependencies (e.g., allow turbo)
+
+public-hoist-pattern[]=*eslint*
+public-hoist-pattern[]=prettier
+public-hoist-pattern[]=typescript
+
+# Reproducible installs across CI and dev
+prefer-frozen-lockfile=true
+
+# Prefer resolving to highest versions in monorepo to reduce duplication
+resolution-mode=highest
+
+# Speed up native module builds by caching side effects
+side-effects-cache=true
+
+# Speed up local dev by reusing local store when possible
+prefer-offline=true
+
+# Ensure workspace protocol is used when adding internal deps
+save-workspace-protocol=true
diff --git a/CODE_OF_CONDUCT.md b/CODE_OF_CONDUCT.md
new file mode 100644
index 00000000..9fa847b6
--- /dev/null
+++ b/CODE_OF_CONDUCT.md
@@ -0,0 +1,128 @@
+# Contributor Covenant Code of Conduct
+
+## Our Pledge
+
+We as members, contributors, and leaders pledge to make participation in our
+community a harassment-free experience for everyone, regardless of age, body
+size, visible or invisible disability, ethnicity, sex characteristics, gender
+identity and expression, level of experience, education, socio-economic status,
+nationality, personal appearance, race, religion, or sexual identity
+and orientation.
+
+We pledge to act and interact in ways that contribute to an open, welcoming,
+diverse, inclusive, and healthy community.
+
+## Our Standards
+
+Examples of behavior that contributes to a positive environment for our
+community include:
+
+- Demonstrating empathy and kindness toward other people
+- Being respectful of differing opinions, viewpoints, and experiences
+- Giving and gracefully accepting constructive feedback
+- Accepting responsibility and apologizing to those affected by our mistakes,
+ and learning from the experience
+- Focusing on what is best not just for us as individuals, but for the
+ overall community
+
+Examples of unacceptable behavior include:
+
+- The use of sexualized language or imagery, and sexual attention or
+ advances of any kind
+- Trolling, insulting or derogatory comments, and personal or political attacks
+- Public or private harassment
+- Publishing others' private information, such as a physical or email
+ address, without their explicit permission
+- Other conduct which could reasonably be considered inappropriate in a
+ professional setting
+
+## Enforcement Responsibilities
+
+Community leaders are responsible for clarifying and enforcing our standards of
+acceptable behavior and will take appropriate and fair corrective action in
+response to any behavior that they deem inappropriate, threatening, offensive,
+or harmful.
+
+Community leaders have the right and responsibility to remove, edit, or reject
+comments, commits, code, wiki edits, issues, and other contributions that are
+not aligned to this Code of Conduct, and will communicate reasons for moderation
+decisions when appropriate.
+
+## Scope
+
+This Code of Conduct applies within all community spaces, and also applies when
+an individual is officially representing the community in public spaces.
+Examples of representing our community include using an official e-mail address,
+posting via an official social media account, or acting as an appointed
+representative at an online or offline event.
+
+## Enforcement
+
+Instances of abusive, harassing, or otherwise unacceptable behavior may be
+reported to the community leaders responsible for enforcement at
+squawk@plane.so.
+All complaints will be reviewed and investigated promptly and fairly.
+
+All community leaders are obligated to respect the privacy and security of the
+reporter of any incident.
+
+## Enforcement Guidelines
+
+Community leaders will follow these Community Impact Guidelines in determining
+the consequences for any action they deem in violation of this Code of Conduct:
+
+### 1. Correction
+
+**Community Impact**: Use of inappropriate language or other behavior deemed
+unprofessional or unwelcome in the community.
+
+**Consequence**: A private, written warning from community leaders, providing
+clarity around the nature of the violation and an explanation of why the
+behavior was inappropriate. A public apology may be requested.
+
+### 2. Warning
+
+**Community Impact**: A violation through a single incident or series
+of actions.
+
+**Consequence**: A warning with consequences for continued behavior. No
+interaction with the people involved, including unsolicited interaction with
+those enforcing the Code of Conduct, for a specified period of time. This
+includes avoiding interactions in community spaces as well as external channels
+like social media. Violating these terms may lead to a temporary or
+permanent ban.
+
+### 3. Temporary Ban
+
+**Community Impact**: A serious violation of community standards, including
+sustained inappropriate behavior.
+
+**Consequence**: A temporary ban from any sort of interaction or public
+communication with the community for a specified period of time. No public or
+private interaction with the people involved, including unsolicited interaction
+with those enforcing the Code of Conduct, is allowed during this period.
+Violating these terms may lead to a permanent ban.
+
+### 4. Permanent Ban
+
+**Community Impact**: Demonstrating a pattern of violation of community
+standards, including sustained inappropriate behavior, harassment of an
+individual, or aggression toward or disparagement of classes of individuals.
+
+**Consequence**: A permanent ban from any sort of public interaction within
+the community.
+
+## Attribution
+
+This Code of Conduct is adapted from the [Contributor Covenant][homepage],
+version 2.0, available at
+https://www.contributor-covenant.org/version/2/0/code_of_conduct.html.
+
+Community Impact Guidelines were inspired by [Mozilla's code of conduct
+enforcement ladder](https://github.com/mozilla/diversity).
+
+[homepage]: https://www.contributor-covenant.org
+
+For answers to common questions about this code of conduct, see the FAQ at
+https://www.contributor-covenant.org/faq. Translations are available at
+https://www.contributor-covenant.org/translations.
diff --git a/CONTRIBUTING.md b/CONTRIBUTING.md
new file mode 100644
index 00000000..39eb4e80
--- /dev/null
+++ b/CONTRIBUTING.md
@@ -0,0 +1,245 @@
+# Contributing to Plane
+
+Thank you for showing an interest in contributing to Plane! All kinds of contributions are valuable to us. In this guide, we will cover how you can quickly onboard and make your first contribution.
+
+## Submitting an issue
+
+Before submitting a new issue, please search the [issues](https://github.com/makeplane/plane/issues) tab. An issue or discussion may already exist and might inform you of workarounds; if so, you can add new information there.
+
+While we want to fix all the [issues](https://github.com/makeplane/plane/issues), before fixing a bug we need to be able to reproduce and confirm it. Please provide us with a minimal reproduction scenario using a repository or [Gist](https://gist.github.com/). Having a live, reproducible scenario gives us the information we need without a lot of back and forth on questions like:
+
+- 3rd-party libraries being used and their versions
+- a use-case that fails
+
+Without a minimal reproduction, we won't be able to investigate the issue, and it might not be resolved.
+
+You can open a new issue with this [issue form](https://github.com/makeplane/plane/issues/new).
+
+### Naming conventions for issues
+
+When opening a new issue, please use a clear and concise title that follows this format:
+
+- For bugs: `🐛 Bug: [short description]`
+- For features: `🚀 Feature: [short description]`
+- For improvements: `🛠️ Improvement: [short description]`
+- For documentation: `📘 Docs: [short description]`
+
+**Examples:**
+
+- `🐛 Bug: API token expiry time not saving correctly`
+- `📘 Docs: Clarify RAM requirement for local setup`
+- `🚀 Feature: Allow custom time selection for token expiration`
+
+This helps us triage and manage issues more efficiently.
+
+## Project setup and architecture
+
+### Requirements
+
+- Docker Engine installed and running
+- Node.js version 20+ [LTS version](https://nodejs.org/en/about/previous-releases)
+- Python version 3.8+
+- Postgres v14
+- Redis v6.2.7
+- **Memory**: Minimum **12 GB RAM** recommended
+ > ⚠️ Running the project on a system with only 8 GB RAM may lead to setup failures or memory crashes (especially during Docker container build/start or dependency install). Use cloud environments like GitHub Codespaces or upgrade local RAM if possible.
+
+### Setup the project
+
+The project is a monorepo, with the backend API and the frontend apps in a single repository.
+
+The backend is a Django project that lives in `apps/api`.
+
+1. Clone the repo
+
+```bash
+git clone https://github.com/makeplane/plane.git [folder-name]
+cd [folder-name]
+chmod +x setup.sh
+```
+
+2. Run setup.sh
+
+```bash
+./setup.sh
+```
+
+3. Start the containers
+
+```bash
+docker compose -f docker-compose-local.yml up
+```
+
+4. Start web apps:
+
+```bash
+pnpm dev
+```
+
+5. Open your browser to http://localhost:3001/god-mode/ and register yourself as the instance admin
+6. Open http://localhost:3000 in your browser, then log in with the same credentials from the previous step
+
+That’s it! You’re all set to begin coding. Remember to refresh your browser if changes don’t auto-reload. Happy contributing! 🎉
+
+## Missing a Feature?
+
+If a feature is missing, you can directly _request_ a new one [here](https://github.com/makeplane/plane/issues/new?assignees=&labels=feature&template=feature_request.yml&title=%F0%9F%9A%80+Feature%3A+). You also can do the same by choosing "🚀 Feature" when raising a [New Issue](https://github.com/makeplane/plane/issues/new/choose) on our GitHub Repository.
+If you would like to _implement_ it, an issue with your proposal must be submitted first, to be sure that we can use it. Please consider the guidelines given below.
+
+## Coding guidelines
+
+To ensure consistency throughout the source code, please keep these rules in mind as you are working:
+
+- All features or bug fixes must be tested by one or more specs (unit tests).
+- We follow the [ESLint default rule guide](https://eslint.org/docs/rules/) with minor changes. An automated formatter is available via Prettier.
+
+## Ways to contribute
+
+- Try Plane Cloud and the self-hosted platform, and give feedback
+- Add new integrations
+- Add or update translations
+- Help with open [issues](https://github.com/makeplane/plane/issues) or [create your own](https://github.com/makeplane/plane/issues/new/choose)
+- Share your thoughts and suggestions with us
+- Help create tutorials and blog posts
+- Request a feature by submitting a proposal
+- Report a bug
+- **Improve documentation** - fix incomplete or missing [docs](https://docs.plane.so/), bad wording, and unclear examples or explanations.
+
+## Contributing to language support
+
+This guide is designed to help contributors understand how to add or update translations in the application.
+
+### Understanding translation structure
+
+#### File organization
+
+Translations are organized by language in the locales directory. Each language has its own folder containing JSON files for translations. Here's how it looks:
+
+```
+packages/i18n/src/locales/
+ ├── en/
+ │ ├── core.json # Critical translations
+ │ └── translations.json
+ ├── fr/
+ │ └── translations.json
+ └── [language]/
+ └── translations.json
+```
+
+#### Nested structure
+
+To keep translations organized, we use a nested structure for keys, which are referenced by their dot-separated paths (e.g., `issue.title.label`). This makes it easier to manage and locate specific translations. For example:
+
+```json
+{
+ "issue": {
+ "label": "Work item",
+ "title": {
+ "label": "Work item title"
+ }
+ }
+}
+```
+
+### Translation formatting guide
+
+We use [IntlMessageFormat](https://formatjs.github.io/docs/intl-messageformat/) to handle dynamic content, such as variables and pluralization. Here's how to format your translations:
+
+#### Examples
+
+- **Simple variables**
+
+ ```json
+ {
+ "greeting": "Hello, {name}!"
+ }
+ ```
+
+- **Pluralization**
+ ```json
+ {
+ "items": "{count, plural, one {Work item} other {Work items}}"
+ }
+ ```
+
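+Below is a minimal sketch of how these messages resolve at runtime, assuming the `intl-messageformat` package from FormatJS (the library linked above):
+
+```ts
+import { IntlMessageFormat } from "intl-messageformat";
+
+// Simple variables are interpolated from the values object.
+const greeting = new IntlMessageFormat("Hello, {name}!", "en");
+greeting.format({ name: "Plane" }); // "Hello, Plane!"
+
+// Plural branches are selected based on the count value.
+const items = new IntlMessageFormat(
+  "{count, plural, one {Work item} other {Work items}}",
+  "en"
+);
+items.format({ count: 1 }); // "Work item"
+items.format({ count: 4 }); // "Work items"
+```
+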
+### Contributing guidelines
+
+#### Updating existing translations
+
+1. Locate the key in `locales/[language]/translations.json`.
+
+2. Update the value while ensuring the key structure remains intact.
+3. Preserve any existing ICU formats (e.g., variables, pluralization).
+
+#### Adding new translation keys
+
+1. When introducing a new key, ensure it is added to **all** language files, even if translations are not immediately available. Use English as a placeholder if needed.
+
+2. Keep the nesting structure consistent across all languages.
+
+3. If the new key requires dynamic content (e.g., variables or pluralization), ensure the ICU format is applied uniformly across all languages.
+
+### Adding new languages
+
+Adding a new language involves several steps to ensure it integrates seamlessly with the project. Follow these instructions carefully:
+
+1. **Update type definitions**
+ Add the new language to the TLanguage type in the language definitions file:
+
+```ts
+ // packages/i18n/src/types/language.ts
+ export type TLanguage = "en" | "fr" | "your-lang";
+```
+
+2. **Add language configuration**
+ Include the new language in the list of supported languages:
+```ts
+ // packages/i18n/src/constants/language.ts
+ export const SUPPORTED_LANGUAGES: ILanguageOption[] = [
+ { label: "English", value: "en" },
+ { label: "Your Language", value: "your-lang" }
+ ];
+```
+
+3. **Create translation files**
+ 1. Create a new folder for your language under locales (e.g., `locales/your-lang/`).
+
+ 2. Add a `translations.json` file inside the folder.
+
+ 3. Copy the structure from an existing translation file and translate all keys.
+
+4. **Update import logic**
+ Modify the language import logic to include your new language:
+```ts
+  private importLanguageFile(language: TLanguage): Promise<any> {
+ switch (language) {
+ case "your-lang":
+ return import("../locales/your-lang/translations.json");
+ // ...
+ }
+ }
+```
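+
+   Using a dynamic `import()` per language keeps each locale in its own chunk with most bundlers, so users only download the translations for the language they actually select.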
+
+### Quality checklist
+
+Before submitting your contribution, please ensure the following:
+
+- All translation keys exist in every language file.
+- Nested structures match across all language files.
+- ICU message formats are correctly implemented.
+- All languages load without errors in the application.
+- Dynamic values and pluralization work as expected.
+- There are no missing or untranslated keys.
+
+#### Pro tips
+
+- When in doubt, refer to the English translations for context.
+- Verify pluralization works with different numbers.
+- Ensure dynamic values (e.g., `{name}`) are correctly interpolated.
+- Double-check that nested key access paths are accurate.
+
+Happy translating! 🌍✨
+
+## Need help? Questions and suggestions
+
+Questions, suggestions, and thoughts are most welcome. We can also be reached in our [Discord Server](https://discord.com/invite/A92xrEGCge).
diff --git a/LICENSE.txt b/LICENSE.txt
new file mode 100644
index 00000000..5087e61e
--- /dev/null
+++ b/LICENSE.txt
@@ -0,0 +1,661 @@
+ GNU AFFERO GENERAL PUBLIC LICENSE
+ Version 3, 19 November 2007
+
+ Copyright (C) 2007 Free Software Foundation, Inc. <https://fsf.org/>
+ Everyone is permitted to copy and distribute verbatim copies
+ of this license document, but changing it is not allowed.
+
+ Preamble
+
+ The GNU Affero General Public License is a free, copyleft license for
+software and other kinds of works, specifically designed to ensure
+cooperation with the community in the case of network server software.
+
+ The licenses for most software and other practical works are designed
+to take away your freedom to share and change the works. By contrast,
+our General Public Licenses are intended to guarantee your freedom to
+share and change all versions of a program--to make sure it remains free
+software for all its users.
+
+ When we speak of free software, we are referring to freedom, not
+price. Our General Public Licenses are designed to make sure that you
+have the freedom to distribute copies of free software (and charge for
+them if you wish), that you receive source code or can get it if you
+want it, that you can change the software or use pieces of it in new
+free programs, and that you know you can do these things.
+
+ Developers that use our General Public Licenses protect your rights
+with two steps: (1) assert copyright on the software, and (2) offer
+you this License which gives you legal permission to copy, distribute
+and/or modify the software.
+
+ A secondary benefit of defending all users' freedom is that
+improvements made in alternate versions of the program, if they
+receive widespread use, become available for other developers to
+incorporate. Many developers of free software are heartened and
+encouraged by the resulting cooperation. However, in the case of
+software used on network servers, this result may fail to come about.
+The GNU General Public License permits making a modified version and
+letting the public access it on a server without ever releasing its
+source code to the public.
+
+ The GNU Affero General Public License is designed specifically to
+ensure that, in such cases, the modified source code becomes available
+to the community. It requires the operator of a network server to
+provide the source code of the modified version running there to the
+users of that server. Therefore, public use of a modified version, on
+a publicly accessible server, gives the public access to the source
+code of the modified version.
+
+ An older license, called the Affero General Public License and
+published by Affero, was designed to accomplish similar goals. This is
+a different license, not a version of the Affero GPL, but Affero has
+released a new version of the Affero GPL which permits relicensing under
+this license.
+
+ The precise terms and conditions for copying, distribution and
+modification follow.
+
+ TERMS AND CONDITIONS
+
+ 0. Definitions.
+
+ "This License" refers to version 3 of the GNU Affero General Public License.
+
+ "Copyright" also means copyright-like laws that apply to other kinds of
+works, such as semiconductor masks.
+
+ "The Program" refers to any copyrightable work licensed under this
+License. Each licensee is addressed as "you". "Licensees" and
+"recipients" may be individuals or organizations.
+
+ To "modify" a work means to copy from or adapt all or part of the work
+in a fashion requiring copyright permission, other than the making of an
+exact copy. The resulting work is called a "modified version" of the
+earlier work or a work "based on" the earlier work.
+
+ A "covered work" means either the unmodified Program or a work based
+on the Program.
+
+ To "propagate" a work means to do anything with it that, without
+permission, would make you directly or secondarily liable for
+infringement under applicable copyright law, except executing it on a
+computer or modifying a private copy. Propagation includes copying,
+distribution (with or without modification), making available to the
+public, and in some countries other activities as well.
+
+ To "convey" a work means any kind of propagation that enables other
+parties to make or receive copies. Mere interaction with a user through
+a computer network, with no transfer of a copy, is not conveying.
+
+ An interactive user interface displays "Appropriate Legal Notices"
+to the extent that it includes a convenient and prominently visible
+feature that (1) displays an appropriate copyright notice, and (2)
+tells the user that there is no warranty for the work (except to the
+extent that warranties are provided), that licensees may convey the
+work under this License, and how to view a copy of this License. If
+the interface presents a list of user commands or options, such as a
+menu, a prominent item in the list meets this criterion.
+
+ 1. Source Code.
+
+ The "source code" for a work means the preferred form of the work
+for making modifications to it. "Object code" means any non-source
+form of a work.
+
+ A "Standard Interface" means an interface that either is an official
+standard defined by a recognized standards body, or, in the case of
+interfaces specified for a particular programming language, one that
+is widely used among developers working in that language.
+
+ The "System Libraries" of an executable work include anything, other
+than the work as a whole, that (a) is included in the normal form of
+packaging a Major Component, but which is not part of that Major
+Component, and (b) serves only to enable use of the work with that
+Major Component, or to implement a Standard Interface for which an
+implementation is available to the public in source code form. A
+"Major Component", in this context, means a major essential component
+(kernel, window system, and so on) of the specific operating system
+(if any) on which the executable work runs, or a compiler used to
+produce the work, or an object code interpreter used to run it.
+
+ The "Corresponding Source" for a work in object code form means all
+the source code needed to generate, install, and (for an executable
+work) run the object code and to modify the work, including scripts to
+control those activities. However, it does not include the work's
+System Libraries, or general-purpose tools or generally available free
+programs which are used unmodified in performing those activities but
+which are not part of the work. For example, Corresponding Source
+includes interface definition files associated with source files for
+the work, and the source code for shared libraries and dynamically
+linked subprograms that the work is specifically designed to require,
+such as by intimate data communication or control flow between those
+subprograms and other parts of the work.
+
+ The Corresponding Source need not include anything that users
+can regenerate automatically from other parts of the Corresponding
+Source.
+
+ The Corresponding Source for a work in source code form is that
+same work.
+
+ 2. Basic Permissions.
+
+ All rights granted under this License are granted for the term of
+copyright on the Program, and are irrevocable provided the stated
+conditions are met. This License explicitly affirms your unlimited
+permission to run the unmodified Program. The output from running a
+covered work is covered by this License only if the output, given its
+content, constitutes a covered work. This License acknowledges your
+rights of fair use or other equivalent, as provided by copyright law.
+
+ You may make, run and propagate covered works that you do not
+convey, without conditions so long as your license otherwise remains
+in force. You may convey covered works to others for the sole purpose
+of having them make modifications exclusively for you, or provide you
+with facilities for running those works, provided that you comply with
+the terms of this License in conveying all material for which you do
+not control copyright. Those thus making or running the covered works
+for you must do so exclusively on your behalf, under your direction
+and control, on terms that prohibit them from making any copies of
+your copyrighted material outside their relationship with you.
+
+ Conveying under any other circumstances is permitted solely under
+the conditions stated below. Sublicensing is not allowed; section 10
+makes it unnecessary.
+
+ 3. Protecting Users' Legal Rights From Anti-Circumvention Law.
+
+ No covered work shall be deemed part of an effective technological
+measure under any applicable law fulfilling obligations under article
+11 of the WIPO copyright treaty adopted on 20 December 1996, or
+similar laws prohibiting or restricting circumvention of such
+measures.
+
+ When you convey a covered work, you waive any legal power to forbid
+circumvention of technological measures to the extent such circumvention
+is effected by exercising rights under this License with respect to
+the covered work, and you disclaim any intention to limit operation or
+modification of the work as a means of enforcing, against the work's
+users, your or third parties' legal rights to forbid circumvention of
+technological measures.
+
+ 4. Conveying Verbatim Copies.
+
+ You may convey verbatim copies of the Program's source code as you
+receive it, in any medium, provided that you conspicuously and
+appropriately publish on each copy an appropriate copyright notice;
+keep intact all notices stating that this License and any
+non-permissive terms added in accord with section 7 apply to the code;
+keep intact all notices of the absence of any warranty; and give all
+recipients a copy of this License along with the Program.
+
+ You may charge any price or no price for each copy that you convey,
+and you may offer support or warranty protection for a fee.
+
+ 5. Conveying Modified Source Versions.
+
+ You may convey a work based on the Program, or the modifications to
+produce it from the Program, in the form of source code under the
+terms of section 4, provided that you also meet all of these conditions:
+
+ a) The work must carry prominent notices stating that you modified
+ it, and giving a relevant date.
+
+ b) The work must carry prominent notices stating that it is
+ released under this License and any conditions added under section
+ 7. This requirement modifies the requirement in section 4 to
+ "keep intact all notices".
+
+ c) You must license the entire work, as a whole, under this
+ License to anyone who comes into possession of a copy. This
+ License will therefore apply, along with any applicable section 7
+ additional terms, to the whole of the work, and all its parts,
+ regardless of how they are packaged. This License gives no
+ permission to license the work in any other way, but it does not
+ invalidate such permission if you have separately received it.
+
+ d) If the work has interactive user interfaces, each must display
+ Appropriate Legal Notices; however, if the Program has interactive
+ interfaces that do not display Appropriate Legal Notices, your
+ work need not make them do so.
+
+ A compilation of a covered work with other separate and independent
+works, which are not by their nature extensions of the covered work,
+and which are not combined with it such as to form a larger program,
+in or on a volume of a storage or distribution medium, is called an
+"aggregate" if the compilation and its resulting copyright are not
+used to limit the access or legal rights of the compilation's users
+beyond what the individual works permit. Inclusion of a covered work
+in an aggregate does not cause this License to apply to the other
+parts of the aggregate.
+
+ 6. Conveying Non-Source Forms.
+
+ You may convey a covered work in object code form under the terms
+of sections 4 and 5, provided that you also convey the
+machine-readable Corresponding Source under the terms of this License,
+in one of these ways:
+
+ a) Convey the object code in, or embodied in, a physical product
+ (including a physical distribution medium), accompanied by the
+ Corresponding Source fixed on a durable physical medium
+ customarily used for software interchange.
+
+ b) Convey the object code in, or embodied in, a physical product
+ (including a physical distribution medium), accompanied by a
+ written offer, valid for at least three years and valid for as
+ long as you offer spare parts or customer support for that product
+ model, to give anyone who possesses the object code either (1) a
+ copy of the Corresponding Source for all the software in the
+ product that is covered by this License, on a durable physical
+ medium customarily used for software interchange, for a price no
+ more than your reasonable cost of physically performing this
+ conveying of source, or (2) access to copy the
+ Corresponding Source from a network server at no charge.
+
+ c) Convey individual copies of the object code with a copy of the
+ written offer to provide the Corresponding Source. This
+ alternative is allowed only occasionally and noncommercially, and
+ only if you received the object code with such an offer, in accord
+ with subsection 6b.
+
+ d) Convey the object code by offering access from a designated
+ place (gratis or for a charge), and offer equivalent access to the
+ Corresponding Source in the same way through the same place at no
+ further charge. You need not require recipients to copy the
+ Corresponding Source along with the object code. If the place to
+ copy the object code is a network server, the Corresponding Source
+ may be on a different server (operated by you or a third party)
+ that supports equivalent copying facilities, provided you maintain
+ clear directions next to the object code saying where to find the
+ Corresponding Source. Regardless of what server hosts the
+ Corresponding Source, you remain obligated to ensure that it is
+ available for as long as needed to satisfy these requirements.
+
+ e) Convey the object code using peer-to-peer transmission, provided
+ you inform other peers where the object code and Corresponding
+ Source of the work are being offered to the general public at no
+ charge under subsection 6d.
+
+ A separable portion of the object code, whose source code is excluded
+from the Corresponding Source as a System Library, need not be
+included in conveying the object code work.
+
+ A "User Product" is either (1) a "consumer product", which means any
+tangible personal property which is normally used for personal, family,
+or household purposes, or (2) anything designed or sold for incorporation
+into a dwelling. In determining whether a product is a consumer product,
+doubtful cases shall be resolved in favor of coverage. For a particular
+product received by a particular user, "normally used" refers to a
+typical or common use of that class of product, regardless of the status
+of the particular user or of the way in which the particular user
+actually uses, or expects or is expected to use, the product. A product
+is a consumer product regardless of whether the product has substantial
+commercial, industrial or non-consumer uses, unless such uses represent
+the only significant mode of use of the product.
+
+ "Installation Information" for a User Product means any methods,
+procedures, authorization keys, or other information required to install
+and execute modified versions of a covered work in that User Product from
+a modified version of its Corresponding Source. The information must
+suffice to ensure that the continued functioning of the modified object
+code is in no case prevented or interfered with solely because
+modification has been made.
+
+ If you convey an object code work under this section in, or with, or
+specifically for use in, a User Product, and the conveying occurs as
+part of a transaction in which the right of possession and use of the
+User Product is transferred to the recipient in perpetuity or for a
+fixed term (regardless of how the transaction is characterized), the
+Corresponding Source conveyed under this section must be accompanied
+by the Installation Information. But this requirement does not apply
+if neither you nor any third party retains the ability to install
+modified object code on the User Product (for example, the work has
+been installed in ROM).
+
+ The requirement to provide Installation Information does not include a
+requirement to continue to provide support service, warranty, or updates
+for a work that has been modified or installed by the recipient, or for
+the User Product in which it has been modified or installed. Access to a
+network may be denied when the modification itself materially and
+adversely affects the operation of the network or violates the rules and
+protocols for communication across the network.
+
+ Corresponding Source conveyed, and Installation Information provided,
+in accord with this section must be in a format that is publicly
+documented (and with an implementation available to the public in
+source code form), and must require no special password or key for
+unpacking, reading or copying.
+
+ 7. Additional Terms.
+
+ "Additional permissions" are terms that supplement the terms of this
+License by making exceptions from one or more of its conditions.
+Additional permissions that are applicable to the entire Program shall
+be treated as though they were included in this License, to the extent
+that they are valid under applicable law. If additional permissions
+apply only to part of the Program, that part may be used separately
+under those permissions, but the entire Program remains governed by
+this License without regard to the additional permissions.
+
+ When you convey a copy of a covered work, you may at your option
+remove any additional permissions from that copy, or from any part of
+it. (Additional permissions may be written to require their own
+removal in certain cases when you modify the work.) You may place
+additional permissions on material, added by you to a covered work,
+for which you have or can give appropriate copyright permission.
+
+ Notwithstanding any other provision of this License, for material you
+add to a covered work, you may (if authorized by the copyright holders of
+that material) supplement the terms of this License with terms:
+
+ a) Disclaiming warranty or limiting liability differently from the
+ terms of sections 15 and 16 of this License; or
+
+ b) Requiring preservation of specified reasonable legal notices or
+ author attributions in that material or in the Appropriate Legal
+ Notices displayed by works containing it; or
+
+ c) Prohibiting misrepresentation of the origin of that material, or
+ requiring that modified versions of such material be marked in
+ reasonable ways as different from the original version; or
+
+ d) Limiting the use for publicity purposes of names of licensors or
+ authors of the material; or
+
+ e) Declining to grant rights under trademark law for use of some
+ trade names, trademarks, or service marks; or
+
+ f) Requiring indemnification of licensors and authors of that
+ material by anyone who conveys the material (or modified versions of
+ it) with contractual assumptions of liability to the recipient, for
+ any liability that these contractual assumptions directly impose on
+ those licensors and authors.
+
+ All other non-permissive additional terms are considered "further
+restrictions" within the meaning of section 10. If the Program as you
+received it, or any part of it, contains a notice stating that it is
+governed by this License along with a term that is a further
+restriction, you may remove that term. If a license document contains
+a further restriction but permits relicensing or conveying under this
+License, you may add to a covered work material governed by the terms
+of that license document, provided that the further restriction does
+not survive such relicensing or conveying.
+
+ If you add terms to a covered work in accord with this section, you
+must place, in the relevant source files, a statement of the
+additional terms that apply to those files, or a notice indicating
+where to find the applicable terms.
+
+ Additional terms, permissive or non-permissive, may be stated in the
+form of a separately written license, or stated as exceptions;
+the above requirements apply either way.
+
+ 8. Termination.
+
+ You may not propagate or modify a covered work except as expressly
+provided under this License. Any attempt otherwise to propagate or
+modify it is void, and will automatically terminate your rights under
+this License (including any patent licenses granted under the third
+paragraph of section 11).
+
+ However, if you cease all violation of this License, then your
+license from a particular copyright holder is reinstated (a)
+provisionally, unless and until the copyright holder explicitly and
+finally terminates your license, and (b) permanently, if the copyright
+holder fails to notify you of the violation by some reasonable means
+prior to 60 days after the cessation.
+
+ Moreover, your license from a particular copyright holder is
+reinstated permanently if the copyright holder notifies you of the
+violation by some reasonable means, this is the first time you have
+received notice of violation of this License (for any work) from that
+copyright holder, and you cure the violation prior to 30 days after
+your receipt of the notice.
+
+ Termination of your rights under this section does not terminate the
+licenses of parties who have received copies or rights from you under
+this License. If your rights have been terminated and not permanently
+reinstated, you do not qualify to receive new licenses for the same
+material under section 10.
+
+ 9. Acceptance Not Required for Having Copies.
+
+ You are not required to accept this License in order to receive or
+run a copy of the Program. Ancillary propagation of a covered work
+occurring solely as a consequence of using peer-to-peer transmission
+to receive a copy likewise does not require acceptance. However,
+nothing other than this License grants you permission to propagate or
+modify any covered work. These actions infringe copyright if you do
+not accept this License. Therefore, by modifying or propagating a
+covered work, you indicate your acceptance of this License to do so.
+
+ 10. Automatic Licensing of Downstream Recipients.
+
+ Each time you convey a covered work, the recipient automatically
+receives a license from the original licensors, to run, modify and
+propagate that work, subject to this License. You are not responsible
+for enforcing compliance by third parties with this License.
+
+ An "entity transaction" is a transaction transferring control of an
+organization, or substantially all assets of one, or subdividing an
+organization, or merging organizations. If propagation of a covered
+work results from an entity transaction, each party to that
+transaction who receives a copy of the work also receives whatever
+licenses to the work the party's predecessor in interest had or could
+give under the previous paragraph, plus a right to possession of the
+Corresponding Source of the work from the predecessor in interest, if
+the predecessor has it or can get it with reasonable efforts.
+
+ You may not impose any further restrictions on the exercise of the
+rights granted or affirmed under this License. For example, you may
+not impose a license fee, royalty, or other charge for exercise of
+rights granted under this License, and you may not initiate litigation
+(including a cross-claim or counterclaim in a lawsuit) alleging that
+any patent claim is infringed by making, using, selling, offering for
+sale, or importing the Program or any portion of it.
+
+ 11. Patents.
+
+ A "contributor" is a copyright holder who authorizes use under this
+License of the Program or a work on which the Program is based. The
+work thus licensed is called the contributor's "contributor version".
+
+ A contributor's "essential patent claims" are all patent claims
+owned or controlled by the contributor, whether already acquired or
+hereafter acquired, that would be infringed by some manner, permitted
+by this License, of making, using, or selling its contributor version,
+but do not include claims that would be infringed only as a
+consequence of further modification of the contributor version. For
+purposes of this definition, "control" includes the right to grant
+patent sublicenses in a manner consistent with the requirements of
+this License.
+
+ Each contributor grants you a non-exclusive, worldwide, royalty-free
+patent license under the contributor's essential patent claims, to
+make, use, sell, offer for sale, import and otherwise run, modify and
+propagate the contents of its contributor version.
+
+ In the following three paragraphs, a "patent license" is any express
+agreement or commitment, however denominated, not to enforce a patent
+(such as an express permission to practice a patent or covenant not to
+sue for patent infringement). To "grant" such a patent license to a
+party means to make such an agreement or commitment not to enforce a
+patent against the party.
+
+ If you convey a covered work, knowingly relying on a patent license,
+and the Corresponding Source of the work is not available for anyone
+to copy, free of charge and under the terms of this License, through a
+publicly available network server or other readily accessible means,
+then you must either (1) cause the Corresponding Source to be so
+available, or (2) arrange to deprive yourself of the benefit of the
+patent license for this particular work, or (3) arrange, in a manner
+consistent with the requirements of this License, to extend the patent
+license to downstream recipients. "Knowingly relying" means you have
+actual knowledge that, but for the patent license, your conveying the
+covered work in a country, or your recipient's use of the covered work
+in a country, would infringe one or more identifiable patents in that
+country that you have reason to believe are valid.
+
+ If, pursuant to or in connection with a single transaction or
+arrangement, you convey, or propagate by procuring conveyance of, a
+covered work, and grant a patent license to some of the parties
+receiving the covered work authorizing them to use, propagate, modify
+or convey a specific copy of the covered work, then the patent license
+you grant is automatically extended to all recipients of the covered
+work and works based on it.
+
+ A patent license is "discriminatory" if it does not include within
+the scope of its coverage, prohibits the exercise of, or is
+conditioned on the non-exercise of one or more of the rights that are
+specifically granted under this License. You may not convey a covered
+work if you are a party to an arrangement with a third party that is
+in the business of distributing software, under which you make payment
+to the third party based on the extent of your activity of conveying
+the work, and under which the third party grants, to any of the
+parties who would receive the covered work from you, a discriminatory
+patent license (a) in connection with copies of the covered work
+conveyed by you (or copies made from those copies), or (b) primarily
+for and in connection with specific products or compilations that
+contain the covered work, unless you entered into that arrangement,
+or that patent license was granted, prior to 28 March 2007.
+
+ Nothing in this License shall be construed as excluding or limiting
+any implied license or other defenses to infringement that may
+otherwise be available to you under applicable patent law.
+
+ 12. No Surrender of Others' Freedom.
+
+ If conditions are imposed on you (whether by court order, agreement or
+otherwise) that contradict the conditions of this License, they do not
+excuse you from the conditions of this License. If you cannot convey a
+covered work so as to satisfy simultaneously your obligations under this
+License and any other pertinent obligations, then as a consequence you may
+not convey it at all. For example, if you agree to terms that obligate you
+to collect a royalty for further conveying from those to whom you convey
+the Program, the only way you could satisfy both those terms and this
+License would be to refrain entirely from conveying the Program.
+
+ 13. Remote Network Interaction; Use with the GNU General Public License.
+
+ Notwithstanding any other provision of this License, if you modify the
+Program, your modified version must prominently offer all users
+interacting with it remotely through a computer network (if your version
+supports such interaction) an opportunity to receive the Corresponding
+Source of your version by providing access to the Corresponding Source
+from a network server at no charge, through some standard or customary
+means of facilitating copying of software. This Corresponding Source
+shall include the Corresponding Source for any work covered by version 3
+of the GNU General Public License that is incorporated pursuant to the
+following paragraph.
+
+ Notwithstanding any other provision of this License, you have
+permission to link or combine any covered work with a work licensed
+under version 3 of the GNU General Public License into a single
+combined work, and to convey the resulting work. The terms of this
+License will continue to apply to the part which is the covered work,
+but the work with which it is combined will remain governed by version
+3 of the GNU General Public License.
+
+ 14. Revised Versions of this License.
+
+ The Free Software Foundation may publish revised and/or new versions of
+the GNU Affero General Public License from time to time. Such new versions
+will be similar in spirit to the present version, but may differ in detail to
+address new problems or concerns.
+
+ Each version is given a distinguishing version number. If the
+Program specifies that a certain numbered version of the GNU Affero General
+Public License "or any later version" applies to it, you have the
+option of following the terms and conditions either of that numbered
+version or of any later version published by the Free Software
+Foundation. If the Program does not specify a version number of the
+GNU Affero General Public License, you may choose any version ever published
+by the Free Software Foundation.
+
+ If the Program specifies that a proxy can decide which future
+versions of the GNU Affero General Public License can be used, that proxy's
+public statement of acceptance of a version permanently authorizes you
+to choose that version for the Program.
+
+ Later license versions may give you additional or different
+permissions. However, no additional obligations are imposed on any
+author or copyright holder as a result of your choosing to follow a
+later version.
+
+ 15. Disclaimer of Warranty.
+
+ THERE IS NO WARRANTY FOR THE PROGRAM, TO THE EXTENT PERMITTED BY
+APPLICABLE LAW. EXCEPT WHEN OTHERWISE STATED IN WRITING THE COPYRIGHT
+HOLDERS AND/OR OTHER PARTIES PROVIDE THE PROGRAM "AS IS" WITHOUT WARRANTY
+OF ANY KIND, EITHER EXPRESSED OR IMPLIED, INCLUDING, BUT NOT LIMITED TO,
+THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR
+PURPOSE. THE ENTIRE RISK AS TO THE QUALITY AND PERFORMANCE OF THE PROGRAM
+IS WITH YOU. SHOULD THE PROGRAM PROVE DEFECTIVE, YOU ASSUME THE COST OF
+ALL NECESSARY SERVICING, REPAIR OR CORRECTION.
+
+ 16. Limitation of Liability.
+
+ IN NO EVENT UNLESS REQUIRED BY APPLICABLE LAW OR AGREED TO IN WRITING
+WILL ANY COPYRIGHT HOLDER, OR ANY OTHER PARTY WHO MODIFIES AND/OR CONVEYS
+THE PROGRAM AS PERMITTED ABOVE, BE LIABLE TO YOU FOR DAMAGES, INCLUDING ANY
+GENERAL, SPECIAL, INCIDENTAL OR CONSEQUENTIAL DAMAGES ARISING OUT OF THE
+USE OR INABILITY TO USE THE PROGRAM (INCLUDING BUT NOT LIMITED TO LOSS OF
+DATA OR DATA BEING RENDERED INACCURATE OR LOSSES SUSTAINED BY YOU OR THIRD
+PARTIES OR A FAILURE OF THE PROGRAM TO OPERATE WITH ANY OTHER PROGRAMS),
+EVEN IF SUCH HOLDER OR OTHER PARTY HAS BEEN ADVISED OF THE POSSIBILITY OF
+SUCH DAMAGES.
+
+ 17. Interpretation of Sections 15 and 16.
+
+ If the disclaimer of warranty and limitation of liability provided
+above cannot be given local legal effect according to their terms,
+reviewing courts shall apply local law that most closely approximates
+an absolute waiver of all civil liability in connection with the
+Program, unless a warranty or assumption of liability accompanies a
+copy of the Program in return for a fee.
+
+ END OF TERMS AND CONDITIONS
+
+ How to Apply These Terms to Your New Programs
+
+ If you develop a new program, and you want it to be of the greatest
+possible use to the public, the best way to achieve this is to make it
+free software which everyone can redistribute and change under these terms.
+
+ To do so, attach the following notices to the program. It is safest
+to attach them to the start of each source file to most effectively
+state the exclusion of warranty; and each file should have at least
+the "copyright" line and a pointer to where the full notice is found.
+
+    <one line to give the program's name and a brief idea of what it does.>
+    Copyright (C) <year>  <name of author>
+
+ This program is free software: you can redistribute it and/or modify
+ it under the terms of the GNU Affero General Public License as published
+ by the Free Software Foundation, either version 3 of the License, or
+ (at your option) any later version.
+
+ This program is distributed in the hope that it will be useful,
+ but WITHOUT ANY WARRANTY; without even the implied warranty of
+ MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
+ GNU Affero General Public License for more details.
+
+ You should have received a copy of the GNU Affero General Public License
+    along with this program.  If not, see <https://www.gnu.org/licenses/>.
+
+Also add information on how to contact you by electronic and paper mail.
+
+ If your software can interact with users remotely through a computer
+network, you should also make sure that it provides a way for users to
+get its source. For example, if your program is a web application, its
+interface could display a "Source" link that leads users to an archive
+of the code. There are many ways you could offer source, and different
+solutions will be better for different programs; see section 13 for the
+specific requirements.
+
+ You should also get your employer (if you work as a programmer) or school,
+if any, to sign a "copyright disclaimer" for the program, if necessary.
+For more information on this, and how to apply and follow the GNU AGPL, see
+ <https://www.gnu.org/licenses/>.
\ No newline at end of file
diff --git a/README.md b/README.md
new file mode 100644
index 00000000..f6b364be
--- /dev/null
+++ b/README.md
@@ -0,0 +1,174 @@
+
+
+
+
+
+
+
+Modern project management for all teams
+
+
+
+
+
+
+
+
+
+ Website •
+ Releases •
+ Twitter •
+ Documentation
+
+
+
+
+
+
+
+
+Meet [Plane](https://plane.so/), an open-source project management tool to track issues, run ~sprints~ cycles, and manage product roadmaps without the chaos of managing the tool itself. 🧘♀️
+
+> Plane is evolving every day. Your suggestions, ideas, and reported bugs help us immensely. Do not hesitate to join in the conversation on [Discord](https://discord.com/invite/A92xrEGCge) or raise a GitHub issue. We read everything and respond to most.
+
+## 🚀 Installation
+
+Getting started with Plane is simple. Choose the setup that works best for you:
+
+- **Plane Cloud**
+ Sign up for a free account on [Plane Cloud](https://app.plane.so)—it's the fastest way to get up and running without worrying about infrastructure.
+
+- **Self-host Plane**
+ Prefer full control over your data and infrastructure? Install and run Plane on your own servers. Follow our detailed [deployment guides](https://developers.plane.so/self-hosting/overview) to get started.
+
+| Installation methods | Docs link                                                                                |
+| -------------------- | ---------------------------------------------------------------------------------------- |
+| Docker               | [Docker Compose setup](https://developers.plane.so/self-hosting/methods/docker-compose)   |
+| Kubernetes           | [Kubernetes setup](https://developers.plane.so/self-hosting/methods/kubernetes)           |
+
+`Instance admins` can configure instance settings with [God mode](https://developers.plane.so/self-hosting/govern/instance-admin).
+
+## 🌟 Features
+
+- **Issues**
+ Efficiently create and manage tasks with a robust rich text editor that supports file uploads. Enhance organization and tracking by adding sub-properties and referencing related issues.
+
+- **Cycles**
+ Maintain your team’s momentum with Cycles. Track progress effortlessly using burn-down charts and other insightful tools.
+
+- **Modules**
+ Simplify complex projects by dividing them into smaller, manageable modules.
+
+- **Views**
+ Customize your workflow by creating filters to display only the most relevant issues. Save and share these views with ease.
+
+- **Pages**
+ Capture and organize ideas using Plane Pages, complete with AI capabilities and a rich text editor. Format text, insert images, add hyperlinks, or convert your notes into actionable items.
+
+- **Analytics**
+ Access real-time insights across all your Plane data. Visualize trends, remove blockers, and keep your projects moving forward.
+
+- **Drive** (_coming soon_)
+  Share documents, images, videos, or any other files with your team, and stay aligned on the problem and solution.
+
+## 🛠️ Local development
+
+See [CONTRIBUTING.md](./CONTRIBUTING.md) for instructions on setting up Plane for local development.
+
+## ⚙️ Built with
+
+[](https://nextjs.org/)
+[](https://www.djangoproject.com/)
+[](https://nodejs.org/en)
+
+## 📸 Screenshots
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+## 📝 Documentation
+
+Explore Plane's [product documentation](https://docs.plane.so/) and [developer documentation](https://developers.plane.so/) to learn about features, setup, and usage.
+
+## ❤️ Community
+
+Join the Plane community on [GitHub Discussions](https://github.com/orgs/makeplane/discussions) and our [Discord server](https://discord.com/invite/A92xrEGCge). We follow a [Code of conduct](https://github.com/makeplane/plane/blob/master/CODE_OF_CONDUCT.md) in all our community channels.
+
+Feel free to ask questions, report bugs, participate in discussions, share ideas, request features, or showcase your projects. We’d love to hear from you!
+
+## 🛡️ Security
+
+If you discover a security vulnerability in Plane, please report it responsibly instead of opening a public issue. We take all legitimate reports seriously and will investigate them promptly. See [Security policy](https://github.com/makeplane/plane/blob/master/SECURITY.md) for more info.
+
+To disclose any security issues, please email us at security@plane.so.
+
+## 🤝 Contributing
+
+There are many ways you can contribute to Plane:
+
+- Report [bugs](https://github.com/makeplane/plane/issues/new?assignees=srinivaspendem%2Cpushya22&labels=%F0%9F%90%9Bbug&projects=&template=--bug-report.yaml&title=%5Bbug%5D%3A+) or submit [feature requests](https://github.com/makeplane/plane/issues/new?assignees=srinivaspendem%2Cpushya22&labels=%E2%9C%A8feature&projects=&template=--feature-request.yaml&title=%5Bfeature%5D%3A+).
+- Review the [documentation](https://docs.plane.so/) and submit [pull requests](https://github.com/makeplane/docs) to improve it—whether it's fixing typos or adding new content.
+- Talk or write about Plane or any other ecosystem integration and [let us know](https://discord.com/invite/A92xrEGCge)!
+- Show your support by upvoting [popular feature requests](https://github.com/makeplane/plane/issues).
+
+Please read [CONTRIBUTING.md](https://github.com/makeplane/plane/blob/master/CONTRIBUTING.md) for details on the process for submitting pull requests to us.
+
+### Repo activity
+
+
+
+### We couldn't have done this without you.
+
+
+
+
+
+## License
+
+This project is licensed under the [GNU Affero General Public License v3.0](https://github.com/makeplane/plane/blob/master/LICENSE.txt).
diff --git a/SECURITY.md b/SECURITY.md
new file mode 100644
index 00000000..0e11bbb5
--- /dev/null
+++ b/SECURITY.md
@@ -0,0 +1,39 @@
+# Security policy
+This document outlines the security protocols and vulnerability reporting guidelines for the Plane project. Ensuring the security of our systems is a top priority, and while we work diligently to maintain robust protection, vulnerabilities may still occur. We highly value the community’s role in identifying and reporting security concerns to uphold the integrity of our systems and safeguard our users.
+
+## Reporting a vulnerability
+If you have identified a security vulnerability, submit your findings to [security@plane.so](mailto:security@plane.so).
+Ensure your report includes all relevant information needed for us to reproduce and assess the issue. Include the IP address or URL of the affected system.
+
+To ensure a responsible and effective disclosure process, please adhere to the following:
+
+- Maintain confidentiality and refrain from publicly disclosing the vulnerability until we have had the opportunity to investigate and address the issue.
+- Refrain from running automated vulnerability scans on our infrastructure or dashboard without prior consent. Contact us to set up a sandbox environment if necessary.
+- Do not exploit any discovered vulnerabilities for malicious purposes, such as accessing or altering user data.
+- Do not engage in physical security attacks, social engineering, distributed denial of service (DDoS) attacks, spam campaigns, or attacks on third-party applications as part of your vulnerability testing.
+
+## Out of scope
+While we appreciate all efforts to assist in improving our security, please note that the following types of vulnerabilities are considered out of scope:
+
+- Vulnerabilities requiring man-in-the-middle (MITM) attacks or physical access to a user’s device.
+- Content spoofing or text injection issues without a clear attack vector or the ability to modify HTML/CSS.
+- Issues related to email spoofing.
+- Missing DNSSEC, CAA, or CSP headers.
+- Absence of secure or HTTP-only flags on non-sensitive cookies.
+
+## Our commitment
+
+At Plane, we are committed to maintaining transparent and collaborative communication throughout the vulnerability resolution process. Here's what you can expect from us:
+
+- **Response Time**
+We will acknowledge receipt of your vulnerability report within three business days and provide an estimated timeline for resolution.
+- **Legal Protection**
+We will not initiate legal action against you for reporting vulnerabilities, provided you adhere to the reporting guidelines.
+- **Confidentiality**
+Your report will be treated with confidentiality. We will not disclose your personal information to third parties without your consent.
+- **Recognition**
+With your permission, we are happy to publicly acknowledge your contribution to improving our security once the issue is resolved.
+- **Timely Resolution**
+We are committed to working closely with you throughout the resolution process, providing timely updates as necessary. Our goal is to address all reported vulnerabilities swiftly, and we will actively engage with you to coordinate a responsible disclosure once the issue is fully resolved.
+
+We appreciate your help in ensuring the security of our platform. Your contributions are crucial to protecting our users and maintaining a secure environment. Thank you for working with us to keep Plane safe.
\ No newline at end of file
diff --git a/apps/admin/.env.example b/apps/admin/.env.example
new file mode 100644
index 00000000..15d7a36a
--- /dev/null
+++ b/apps/admin/.env.example
@@ -0,0 +1,12 @@
+NEXT_PUBLIC_API_BASE_URL="http://localhost:8000"
+
+NEXT_PUBLIC_WEB_BASE_URL="http://localhost:3000"
+
+NEXT_PUBLIC_ADMIN_BASE_URL="http://localhost:3001"
+NEXT_PUBLIC_ADMIN_BASE_PATH="/god-mode"
+
+NEXT_PUBLIC_SPACE_BASE_URL="http://localhost:3002"
+NEXT_PUBLIC_SPACE_BASE_PATH="/spaces"
+
+NEXT_PUBLIC_LIVE_BASE_URL="http://localhost:3100"
+NEXT_PUBLIC_LIVE_BASE_PATH="/live"
diff --git a/apps/admin/.eslintignore b/apps/admin/.eslintignore
new file mode 100644
index 00000000..27e50ad7
--- /dev/null
+++ b/apps/admin/.eslintignore
@@ -0,0 +1,12 @@
+.next/*
+out/*
+public/*
+dist/*
+node_modules/*
+.turbo/*
+.env*
+.env
+.env.local
+.env.development
+.env.production
+.env.test
\ No newline at end of file
diff --git a/apps/admin/.eslintrc.js b/apps/admin/.eslintrc.js
new file mode 100644
index 00000000..a0bc76d5
--- /dev/null
+++ b/apps/admin/.eslintrc.js
@@ -0,0 +1,18 @@
+module.exports = {
+ root: true,
+ extends: ["@plane/eslint-config/next.js"],
+ rules: {
+ "no-duplicate-imports": "off",
+ "import/no-duplicates": ["error", { "prefer-inline": false }],
+ "import/consistent-type-specifier-style": ["error", "prefer-top-level"],
+ "@typescript-eslint/no-import-type-side-effects": "error",
+ "@typescript-eslint/consistent-type-imports": [
+ "error",
+ {
+ prefer: "type-imports",
+ fixStyle: "separate-type-imports",
+ disallowTypeAnnotations: false,
+ },
+ ],
+ },
+};
diff --git a/apps/admin/.prettierignore b/apps/admin/.prettierignore
new file mode 100644
index 00000000..3cd6b08a
--- /dev/null
+++ b/apps/admin/.prettierignore
@@ -0,0 +1,6 @@
+.next
+.vercel
+.turbo
+out/
+dist/
+build/
diff --git a/apps/admin/.prettierrc b/apps/admin/.prettierrc
new file mode 100644
index 00000000..87d988f1
--- /dev/null
+++ b/apps/admin/.prettierrc
@@ -0,0 +1,5 @@
+{
+ "printWidth": 120,
+ "tabWidth": 2,
+ "trailingComma": "es5"
+}
diff --git a/apps/admin/Dockerfile.admin b/apps/admin/Dockerfile.admin
new file mode 100644
index 00000000..6bfa0765
--- /dev/null
+++ b/apps/admin/Dockerfile.admin
@@ -0,0 +1,103 @@
+# syntax=docker/dockerfile:1.7
+FROM node:22-alpine AS base
+
+# Setup pnpm package manager with corepack and configure global bin directory for caching
+ENV PNPM_HOME="/pnpm"
+ENV PATH="$PNPM_HOME:$PATH"
+RUN corepack enable
+
+# *****************************************************************************
+# STAGE 1: Build the project
+# *****************************************************************************
+FROM base AS builder
+RUN apk add --no-cache libc6-compat
+WORKDIR /app
+
+ARG TURBO_VERSION=2.5.6
+RUN corepack enable pnpm && pnpm add -g turbo@${TURBO_VERSION}
+COPY . .
+
+RUN turbo prune --scope=admin --docker
+
+# *****************************************************************************
+# STAGE 2: Install dependencies & build the project
+# *****************************************************************************
+FROM base AS installer
+
+RUN apk add --no-cache libc6-compat
+WORKDIR /app
+
+COPY .gitignore .gitignore
+COPY --from=builder /app/out/json/ .
+COPY --from=builder /app/out/pnpm-lock.yaml ./pnpm-lock.yaml
+RUN corepack enable pnpm
+RUN --mount=type=cache,id=pnpm-store,target=/pnpm/store pnpm fetch --store-dir=/pnpm/store
+
+COPY --from=builder /app/out/full/ .
+COPY turbo.json turbo.json
+RUN --mount=type=cache,id=pnpm-store,target=/pnpm/store pnpm install --offline --frozen-lockfile --store-dir=/pnpm/store
+
+ARG NEXT_PUBLIC_API_BASE_URL=""
+ENV NEXT_PUBLIC_API_BASE_URL=$NEXT_PUBLIC_API_BASE_URL
+
+ARG NEXT_PUBLIC_ADMIN_BASE_URL=""
+ENV NEXT_PUBLIC_ADMIN_BASE_URL=$NEXT_PUBLIC_ADMIN_BASE_URL
+
+ARG NEXT_PUBLIC_ADMIN_BASE_PATH="/god-mode"
+ENV NEXT_PUBLIC_ADMIN_BASE_PATH=$NEXT_PUBLIC_ADMIN_BASE_PATH
+
+ARG NEXT_PUBLIC_SPACE_BASE_URL=""
+ENV NEXT_PUBLIC_SPACE_BASE_URL=$NEXT_PUBLIC_SPACE_BASE_URL
+
+ARG NEXT_PUBLIC_SPACE_BASE_PATH="/spaces"
+ENV NEXT_PUBLIC_SPACE_BASE_PATH=$NEXT_PUBLIC_SPACE_BASE_PATH
+
+ARG NEXT_PUBLIC_WEB_BASE_URL=""
+ENV NEXT_PUBLIC_WEB_BASE_URL=$NEXT_PUBLIC_WEB_BASE_URL
+
+ENV NEXT_TELEMETRY_DISABLED=1
+ENV TURBO_TELEMETRY_DISABLED=1
+
+RUN pnpm turbo run build --filter=admin
+
+# *****************************************************************************
+# STAGE 3: Copy the project and start it
+# *****************************************************************************
+FROM base AS runner
+WORKDIR /app
+
+# Don't run production as root
+RUN addgroup --system --gid 1001 nodejs
+RUN adduser --system --uid 1001 nextjs
+USER nextjs
+
+# Automatically leverage output traces to reduce image size
+# https://nextjs.org/docs/advanced-features/output-file-tracing
+COPY --from=installer /app/apps/admin/.next/standalone ./
+COPY --from=installer /app/apps/admin/.next/static ./apps/admin/.next/static
+COPY --from=installer /app/apps/admin/public ./apps/admin/public
+
+ARG NEXT_PUBLIC_API_BASE_URL=""
+ENV NEXT_PUBLIC_API_BASE_URL=$NEXT_PUBLIC_API_BASE_URL
+
+ARG NEXT_PUBLIC_ADMIN_BASE_URL=""
+ENV NEXT_PUBLIC_ADMIN_BASE_URL=$NEXT_PUBLIC_ADMIN_BASE_URL
+
+ARG NEXT_PUBLIC_ADMIN_BASE_PATH="/god-mode"
+ENV NEXT_PUBLIC_ADMIN_BASE_PATH=$NEXT_PUBLIC_ADMIN_BASE_PATH
+
+ARG NEXT_PUBLIC_SPACE_BASE_URL=""
+ENV NEXT_PUBLIC_SPACE_BASE_URL=$NEXT_PUBLIC_SPACE_BASE_URL
+
+ARG NEXT_PUBLIC_SPACE_BASE_PATH="/spaces"
+ENV NEXT_PUBLIC_SPACE_BASE_PATH=$NEXT_PUBLIC_SPACE_BASE_PATH
+
+ARG NEXT_PUBLIC_WEB_BASE_URL=""
+ENV NEXT_PUBLIC_WEB_BASE_URL=$NEXT_PUBLIC_WEB_BASE_URL
+
+ENV NEXT_TELEMETRY_DISABLED=1
+ENV TURBO_TELEMETRY_DISABLED=1
+
+EXPOSE 3000
+
+CMD ["node", "apps/admin/server.js"]
diff --git a/apps/admin/Dockerfile.dev b/apps/admin/Dockerfile.dev
new file mode 100644
index 00000000..0b82669c
--- /dev/null
+++ b/apps/admin/Dockerfile.dev
@@ -0,0 +1,17 @@
+FROM node:22-alpine
+RUN apk add --no-cache libc6-compat
+# Set working directory
+WORKDIR /app
+
+COPY . .
+
+RUN corepack enable pnpm && pnpm add -g turbo
+RUN pnpm install
+
+ENV NEXT_PUBLIC_ADMIN_BASE_PATH="/god-mode"
+
+EXPOSE 3000
+
+VOLUME [ "/app/node_modules", "/app/apps/admin/node_modules" ]
+
+CMD ["pnpm", "dev", "--filter=admin"]
diff --git a/apps/admin/app/(all)/(dashboard)/ai/form.tsx b/apps/admin/app/(all)/(dashboard)/ai/form.tsx
new file mode 100644
index 00000000..64970a54
--- /dev/null
+++ b/apps/admin/app/(all)/(dashboard)/ai/form.tsx
@@ -0,0 +1,136 @@
+"use client";
+import type { FC } from "react";
+import { useForm } from "react-hook-form";
+import { Lightbulb } from "lucide-react";
+import { Button } from "@plane/propel/button";
+import { TOAST_TYPE, setToast } from "@plane/propel/toast";
+import type { IFormattedInstanceConfiguration, TInstanceAIConfigurationKeys } from "@plane/types";
+// components
+import type { TControllerInputFormField } from "@/components/common/controller-input";
+import { ControllerInput } from "@/components/common/controller-input";
+// hooks
+import { useInstance } from "@/hooks/store";
+
+type IInstanceAIForm = {
+ config: IFormattedInstanceConfiguration;
+};
+
+type AIFormValues = Record<TInstanceAIConfigurationKeys, string>;
+
+export const InstanceAIForm: FC<IInstanceAIForm> = (props) => {
+ const { config } = props;
+ // store
+ const { updateInstanceConfigurations } = useInstance();
+ // form data
+ const {
+ handleSubmit,
+ control,
+ formState: { errors, isSubmitting },
+  } = useForm<AIFormValues>({
+ defaultValues: {
+ LLM_API_KEY: config["LLM_API_KEY"],
+ LLM_MODEL: config["LLM_MODEL"],
+ },
+ });
+
+ const aiFormFields: TControllerInputFormField[] = [
+ {
+ key: "LLM_MODEL",
+ type: "text",
+ label: "LLM Model",
+ description: (
+ <>
+ Choose an OpenAI engine.{" "}
+
+ Learn more
+
+ >
+ ),
+ placeholder: "gpt-4o-mini",
+ error: Boolean(errors.LLM_MODEL),
+ required: false,
+ },
+ {
+ key: "LLM_API_KEY",
+ type: "password",
+ label: "API key",
+ description: (
+ <>
+ You will find your API key{" "}
+
+ here.
+
+ >
+ ),
+ placeholder: "sk-asddassdfasdefqsdfasd23das3dasdcasd",
+ error: Boolean(errors.LLM_API_KEY),
+ required: false,
+ },
+ ];
+
+ const onSubmit = async (formData: AIFormValues) => {
+    const payload: Partial<AIFormValues> = { ...formData };
+
+ await updateInstanceConfigurations(payload)
+ .then(() =>
+ setToast({
+ type: TOAST_TYPE.SUCCESS,
+ title: "Success",
+ message: "AI Settings updated successfully",
+ })
+ )
+ .catch((err) => console.error(err));
+ };
+
+ return (
+
+
+
+
OpenAI
+
If you use ChatGPT, this is for you.
+
+
+ {aiFormFields.map((field) => (
+
+ ))}
+
+
+
+
+
+ {isSubmitting ? "Saving..." : "Save changes"}
+
+
+
+
+
+ );
+};
diff --git a/apps/admin/app/(all)/(dashboard)/ai/layout.tsx b/apps/admin/app/(all)/(dashboard)/ai/layout.tsx
new file mode 100644
index 00000000..303ed560
--- /dev/null
+++ b/apps/admin/app/(all)/(dashboard)/ai/layout.tsx
@@ -0,0 +1,10 @@
+import type { ReactNode } from "react";
+import type { Metadata } from "next";
+
+export const metadata: Metadata = {
+ title: "Artificial Intelligence Settings - God Mode",
+};
+
+export default function AILayout({ children }: { children: ReactNode }) {
+  return <>{children}</>;
+}
diff --git a/apps/admin/app/(all)/(dashboard)/ai/page.tsx b/apps/admin/app/(all)/(dashboard)/ai/page.tsx
new file mode 100644
index 00000000..2a074777
--- /dev/null
+++ b/apps/admin/app/(all)/(dashboard)/ai/page.tsx
@@ -0,0 +1,45 @@
+"use client";
+
+import { observer } from "mobx-react";
+import useSWR from "swr";
+import { Loader } from "@plane/ui";
+// hooks
+import { useInstance } from "@/hooks/store";
+// components
+import { InstanceAIForm } from "./form";
+
+const InstanceAIPage = observer(() => {
+ // store
+ const { fetchInstanceConfigurations, formattedConfig } = useInstance();
+
+ useSWR("INSTANCE_CONFIGURATIONS", () => fetchInstanceConfigurations());
+
+ return (
+ <>
+
+
+
AI features for all your workspaces
+
+ Configure your AI API credentials so Plane AI features are turned on for all your workspaces.
+
+
+
+ {formattedConfig ? (
+
+ ) : (
+
+
+
+
+
+
+
+
+ )}
+
+
+ >
+ );
+});
+
+export default InstanceAIPage;
diff --git a/apps/admin/app/(all)/(dashboard)/authentication/github/form.tsx b/apps/admin/app/(all)/(dashboard)/authentication/github/form.tsx
new file mode 100644
index 00000000..ae0f54c4
--- /dev/null
+++ b/apps/admin/app/(all)/(dashboard)/authentication/github/form.tsx
@@ -0,0 +1,249 @@
+"use client";
+
+import type { FC } from "react";
+import { useState } from "react";
+import { isEmpty } from "lodash-es";
+import Link from "next/link";
+import { useForm } from "react-hook-form";
+import { Monitor } from "lucide-react";
+// plane internal packages
+import { API_BASE_URL } from "@plane/constants";
+import { Button, getButtonStyling } from "@plane/propel/button";
+import { TOAST_TYPE, setToast } from "@plane/propel/toast";
+import type { IFormattedInstanceConfiguration, TInstanceGithubAuthenticationConfigurationKeys } from "@plane/types";
+
+import { cn } from "@plane/utils";
+// components
+import { CodeBlock } from "@/components/common/code-block";
+import { ConfirmDiscardModal } from "@/components/common/confirm-discard-modal";
+import type { TControllerInputFormField } from "@/components/common/controller-input";
+import { ControllerInput } from "@/components/common/controller-input";
+import type { TCopyField } from "@/components/common/copy-field";
+import { CopyField } from "@/components/common/copy-field";
+// hooks
+import { useInstance } from "@/hooks/store";
+
+type Props = {
+ config: IFormattedInstanceConfiguration;
+};
+
+type GithubConfigFormValues = Record<TInstanceGithubAuthenticationConfigurationKeys, string>;
+
+export const InstanceGithubConfigForm: FC<Props> = (props) => {
+ const { config } = props;
+ // states
+ const [isDiscardChangesModalOpen, setIsDiscardChangesModalOpen] = useState(false);
+ // store hooks
+ const { updateInstanceConfigurations } = useInstance();
+ // form data
+ const {
+ handleSubmit,
+ control,
+ reset,
+ formState: { errors, isDirty, isSubmitting },
+  } = useForm<GithubConfigFormValues>({
+ defaultValues: {
+ GITHUB_CLIENT_ID: config["GITHUB_CLIENT_ID"],
+ GITHUB_CLIENT_SECRET: config["GITHUB_CLIENT_SECRET"],
+ GITHUB_ORGANIZATION_ID: config["GITHUB_ORGANIZATION_ID"],
+ },
+ });
+
+ const originURL = !isEmpty(API_BASE_URL) ? API_BASE_URL : typeof window !== "undefined" ? window.location.origin : "";
+
+ const GITHUB_FORM_FIELDS: TControllerInputFormField[] = [
+ {
+ key: "GITHUB_CLIENT_ID",
+ type: "text",
+ label: "Client ID",
+ description: (
+ <>
+ You will get this from your{" "}
+
+ GitHub OAuth application settings.
+
+ >
+ ),
+ placeholder: "70a44354520df8bd9bcd",
+ error: Boolean(errors.GITHUB_CLIENT_ID),
+ required: true,
+ },
+ {
+ key: "GITHUB_CLIENT_SECRET",
+ type: "password",
+ label: "Client secret",
+      description: (
+        <>
+          Your client secret is also found in your{" "}
+          <Link
+            href="https://github.com/settings/applications/new"
+            target="_blank"
+            rel="noreferrer noopener"
+            className="text-custom-primary-100 hover:underline"
+          >
+            GitHub OAuth application settings.
+          </Link>
+        </>
+      ),
+ placeholder: "9b0050f94ec1b744e32ce79ea4ffacd40d4119cb",
+ error: Boolean(errors.GITHUB_CLIENT_SECRET),
+ required: true,
+ },
+ {
+ key: "GITHUB_ORGANIZATION_ID",
+ type: "text",
+ label: "Organization ID",
+      description: <>The organization's GitHub ID.</>,
+ placeholder: "123456789",
+ error: Boolean(errors.GITHUB_ORGANIZATION_ID),
+ required: false,
+ },
+ ];
+
+ const GITHUB_COMMON_SERVICE_DETAILS: TCopyField[] = [
+ {
+ key: "Origin_URL",
+ label: "Origin URL",
+ url: originURL,
+      description: (
+        <>
+          We will auto-generate this. Paste this into the <CodeBlock>Authorized origin URL</CodeBlock> field{" "}
+          <Link
+            href="https://github.com/settings/applications/new"
+            target="_blank"
+            rel="noreferrer noopener"
+            className="text-custom-primary-100 hover:underline"
+          >
+            here.
+          </Link>
+        </>
+      ),
+ },
+ ];
+
+ const GITHUB_SERVICE_DETAILS: TCopyField[] = [
+ {
+ key: "Callback_URI",
+ label: "Callback URI",
+ url: `${originURL}/auth/github/callback/`,
+      description: (
+        <>
+          We will auto-generate this. Paste this into your <CodeBlock>Authorized Callback URI</CodeBlock>{" "}
+          field{" "}
+          <Link
+            href="https://github.com/settings/applications/new"
+            target="_blank"
+            rel="noreferrer noopener"
+            className="text-custom-primary-100 hover:underline"
+          >
+            here.
+          </Link>
+        </>
+      ),
+ },
+ ];
+
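+  // On a successful save, the form is reset with the values echoed back by the
+  // server, so isDirty returns to false and "Save changes" is disabled again.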
+ const onSubmit = async (formData: GithubConfigFormValues) => {
+    const payload: Partial<GithubConfigFormValues> = { ...formData };
+
+ await updateInstanceConfigurations(payload)
+ .then((response = []) => {
+ setToast({
+ type: TOAST_TYPE.SUCCESS,
+ title: "Done!",
+ message: "Your GitHub authentication is configured. You should test it now.",
+ });
+ reset({
+ GITHUB_CLIENT_ID: response.find((item) => item.key === "GITHUB_CLIENT_ID")?.value,
+ GITHUB_CLIENT_SECRET: response.find((item) => item.key === "GITHUB_CLIENT_SECRET")?.value,
+ GITHUB_ORGANIZATION_ID: response.find((item) => item.key === "GITHUB_ORGANIZATION_ID")?.value,
+ });
+ })
+ .catch((err) => console.error(err));
+ };
+
+ const handleGoBack = (e: React.MouseEvent) => {
+ if (isDirty) {
+ e.preventDefault();
+ setIsDiscardChangesModalOpen(true);
+ }
+ };
+
+  return (
+    <>
+      <ConfirmDiscardModal
+        isOpen={isDiscardChangesModalOpen}
+        onDiscardHref="/authentication"
+        handleClose={() => setIsDiscardChangesModalOpen(false)}
+      />
+      <div className="flex flex-col gap-8">
+        <div className="grid grid-cols-2 gap-x-12 gap-y-8 w-full">
+          <div className="flex flex-col gap-y-4 col-span-2 md:col-span-1">
+            <div className="pt-2 text-xl font-medium">GitHub-provided details for Plane</div>
+            {GITHUB_FORM_FIELDS.map((field) => (
+              <ControllerInput
+                key={field.key}
+                control={control}
+                type={field.type}
+                name={field.key}
+                label={field.label}
+                description={field.description}
+                placeholder={field.placeholder}
+                error={field.error}
+                required={field.required}
+              />
+            ))}
+            <div className="flex items-center gap-4 pt-4">
+              <Button variant="primary" onClick={handleSubmit(onSubmit)} loading={isSubmitting} disabled={!isDirty}>
+                {isSubmitting ? "Saving..." : "Save changes"}
+              </Button>
+              <Link
+                href="/authentication"
+                className={cn(getButtonStyling("link-neutral", "md"), "font-medium")}
+                onClick={handleGoBack}
+              >
+                Go back
+              </Link>
+            </div>
+          </div>
+          <div className="col-span-2 md:col-span-1">
+            <div className="flex flex-col gap-y-4 px-6 py-4 my-2 bg-custom-background-80/60 rounded-lg">
+              <div className="pt-2 text-xl font-medium">Plane-provided details for GitHub</div>
+              {/* common service details */}
+              <div className="flex flex-col gap-y-4">
+                {GITHUB_COMMON_SERVICE_DETAILS.map((field) => (
+                  <CopyField key={field.key} label={field.label} url={field.url} description={field.description} />
+                ))}
+              </div>
+              {/* web service details */}
+              <div className="flex flex-col gap-y-4 pt-2">
+                <div className="flex items-center gap-2 pb-1">
+                  <Monitor className="h-4 w-4 text-custom-text-200" />
+                  <span className="text-sm font-medium">Web</span>
+                </div>
+                {GITHUB_SERVICE_DETAILS.map((field) => (
+                  <CopyField key={field.key} label={field.label} url={field.url} description={field.description} />
+                ))}
+              </div>
+            </div>
+          </div>
+        </div>
+      </div>
+    </>
+  );
+};
diff --git a/apps/admin/app/(all)/(dashboard)/authentication/github/layout.tsx b/apps/admin/app/(all)/(dashboard)/authentication/github/layout.tsx
new file mode 100644
index 00000000..2da5a903
--- /dev/null
+++ b/apps/admin/app/(all)/(dashboard)/authentication/github/layout.tsx
@@ -0,0 +1,10 @@
+import type { ReactNode } from "react";
+import type { Metadata } from "next";
+
+export const metadata: Metadata = {
+ title: "GitHub Authentication - God Mode",
+};
+
+export default function GitHubAuthenticationLayout({ children }: { children: ReactNode }) {
+  return <>{children}</>;
+}
diff --git a/apps/admin/app/(all)/(dashboard)/authentication/github/page.tsx b/apps/admin/app/(all)/(dashboard)/authentication/github/page.tsx
new file mode 100644
index 00000000..5709ba4b
--- /dev/null
+++ b/apps/admin/app/(all)/(dashboard)/authentication/github/page.tsx
@@ -0,0 +1,114 @@
+"use client";
+
+import { useState } from "react";
+import { observer } from "mobx-react";
+import Image from "next/image";
+import { useTheme } from "next-themes";
+import useSWR from "swr";
+// plane internal packages
+import { setPromiseToast } from "@plane/propel/toast";
+import { Loader, ToggleSwitch } from "@plane/ui";
+import { resolveGeneralTheme } from "@plane/utils";
+// components
+import { AuthenticationMethodCard } from "@/components/authentication/authentication-method-card";
+// hooks
+import { useInstance } from "@/hooks/store";
+// icons
+import githubLightModeImage from "@/public/logos/github-black.png";
+import githubDarkModeImage from "@/public/logos/github-white.png";
+// local components
+import { InstanceGithubConfigForm } from "./form";
+
+const InstanceGithubAuthenticationPage = observer(() => {
+ // store
+ const { fetchInstanceConfigurations, formattedConfig, updateInstanceConfigurations } = useInstance();
+ // state
+ const [isSubmitting, setIsSubmitting] = useState(false);
+ // theme
+ const { resolvedTheme } = useTheme();
+ // config
+ const enableGithubConfig = formattedConfig?.IS_GITHUB_ENABLED ?? "";
+
+ useSWR("INSTANCE_CONFIGURATIONS", () => fetchInstanceConfigurations());
+
+ const updateConfig = async (key: "IS_GITHUB_ENABLED", value: string) => {
+ setIsSubmitting(true);
+
+ const payload = {
+ [key]: value,
+ };
+
+ const updateConfigPromise = updateInstanceConfigurations(payload);
+
+ setPromiseToast(updateConfigPromise, {
+ loading: "Saving Configuration...",
+ success: {
+ title: "Configuration saved",
+        message: () => `GitHub authentication is now ${value === "1" ? "active" : "disabled"}.`,
+ },
+ error: {
+ title: "Error",
+ message: () => "Failed to save configuration",
+ },
+ });
+
+ await updateConfigPromise
+ .then(() => {
+ setIsSubmitting(false);
+ })
+ .catch((err) => {
+ console.error(err);
+ setIsSubmitting(false);
+ });
+ };
+
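+  // Instance configuration values are stored as strings, so the enabled flag is
+  // the string "1" rather than a boolean (e.g. "1" → enabled, "0" → disabled).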
+ const isGithubEnabled = enableGithubConfig === "1";
+
+  return (
+    <>
+      <div className="relative container mx-auto w-full h-full p-4 py-4 space-y-6 flex flex-col">
+        <div className="border-b border-custom-border-100 mx-4 py-4 space-y-1 flex-shrink-0">
+          <AuthenticationMethodCard
+            name="GitHub"
+            description="Allow members to login or sign up to Plane with their GitHub accounts."
+            icon={
+              <Image
+                src={resolveGeneralTheme(resolvedTheme) === "dark" ? githubDarkModeImage : githubLightModeImage}
+                height={24}
+                width={24}
+                alt="GitHub Logo"
+              />
+            }
+            config={
+              <ToggleSwitch
+                value={isGithubEnabled}
+                onChange={() => {
+                  updateConfig("IS_GITHUB_ENABLED", isGithubEnabled ? "0" : "1");
+                }}
+                size="sm"
+                disabled={isSubmitting || !formattedConfig}
+              />
+            }
+            disabled={isSubmitting || !formattedConfig}
+            withBorder={false}
+          />
+        </div>
+        <div className="flex-grow overflow-hidden overflow-y-scroll vertical-scrollbar scrollbar-md p-4">
+          {formattedConfig ? (
+            <InstanceGithubConfigForm config={formattedConfig} />
+          ) : (
+            <Loader className="space-y-8">
+              <Loader.Item height="50px" />
+              <div className="grid grid-cols-2 gap-x-8 gap-y-4">
+                <Loader.Item height="50px" />
+                <Loader.Item height="50px" />
+                <Loader.Item height="50px" />
+                <Loader.Item height="50px" />
+              </div>
+            </Loader>
+          )}
+        </div>
+      </div>
+    </>
+  );
+});
+
+export default InstanceGithubAuthenticationPage;
diff --git a/apps/admin/app/(all)/(dashboard)/authentication/gitlab/form.tsx b/apps/admin/app/(all)/(dashboard)/authentication/gitlab/form.tsx
new file mode 100644
index 00000000..91e4ee8e
--- /dev/null
+++ b/apps/admin/app/(all)/(dashboard)/authentication/gitlab/form.tsx
@@ -0,0 +1,212 @@
+import type { FC } from "react";
+import { useState } from "react";
+import { isEmpty } from "lodash-es";
+import Link from "next/link";
+import { useForm } from "react-hook-form";
+// plane internal packages
+import { API_BASE_URL } from "@plane/constants";
+import { Button, getButtonStyling } from "@plane/propel/button";
+import { TOAST_TYPE, setToast } from "@plane/propel/toast";
+import type { IFormattedInstanceConfiguration, TInstanceGitlabAuthenticationConfigurationKeys } from "@plane/types";
+import { cn } from "@plane/utils";
+// components
+import { CodeBlock } from "@/components/common/code-block";
+import { ConfirmDiscardModal } from "@/components/common/confirm-discard-modal";
+import type { TControllerInputFormField } from "@/components/common/controller-input";
+import { ControllerInput } from "@/components/common/controller-input";
+import type { TCopyField } from "@/components/common/copy-field";
+import { CopyField } from "@/components/common/copy-field";
+// hooks
+import { useInstance } from "@/hooks/store";
+
+type Props = {
+ config: IFormattedInstanceConfiguration;
+};
+
+type GitlabConfigFormValues = Record<TInstanceGitlabAuthenticationConfigurationKeys, string>;
+
+export const InstanceGitlabConfigForm: FC<Props> = (props) => {
+ const { config } = props;
+ // states
+ const [isDiscardChangesModalOpen, setIsDiscardChangesModalOpen] = useState(false);
+ // store hooks
+ const { updateInstanceConfigurations } = useInstance();
+ // form data
+ const {
+ handleSubmit,
+ control,
+ reset,
+ formState: { errors, isDirty, isSubmitting },
+  } = useForm<GitlabConfigFormValues>({
+ defaultValues: {
+ GITLAB_HOST: config["GITLAB_HOST"],
+ GITLAB_CLIENT_ID: config["GITLAB_CLIENT_ID"],
+ GITLAB_CLIENT_SECRET: config["GITLAB_CLIENT_SECRET"],
+ },
+ });
+
+ const originURL = !isEmpty(API_BASE_URL) ? API_BASE_URL : typeof window !== "undefined" ? window.location.origin : "";
+
+ const GITLAB_FORM_FIELDS: TControllerInputFormField[] = [
+ {
+ key: "GITLAB_HOST",
+ type: "text",
+ label: "Host",
+      description: (
+        <>
+          This is either <CodeBlock>https://gitlab.com</CodeBlock> or the <CodeBlock>domain.tld</CodeBlock> where you
+          host GitLab.
+        </>
+      ),
+ placeholder: "https://gitlab.com",
+ error: Boolean(errors.GITLAB_HOST),
+ required: true,
+ },
+ {
+ key: "GITLAB_CLIENT_ID",
+ type: "text",
+ label: "Application ID",
+      description: (
+        <>
+          Get this from your{" "}
+          <Link
+            href="https://docs.gitlab.com/ee/integration/oauth_provider.html"
+            target="_blank"
+            rel="noreferrer noopener"
+            className="text-custom-primary-100 hover:underline"
+          >
+            GitLab OAuth application settings
+          </Link>
+          .
+        </>
+      ),
+ placeholder: "c2ef2e7fc4e9d15aa7630f5637d59e8e4a27ff01dceebdb26b0d267b9adcf3c3",
+ error: Boolean(errors.GITLAB_CLIENT_ID),
+ required: true,
+ },
+ {
+ key: "GITLAB_CLIENT_SECRET",
+ type: "password",
+ label: "Secret",
+      description: (
+        <>
+          The client secret is also found in your{" "}
+          <Link
+            href="https://docs.gitlab.com/ee/integration/oauth_provider.html"
+            target="_blank"
+            rel="noreferrer noopener"
+            className="text-custom-primary-100 hover:underline"
+          >
+            GitLab OAuth application settings
+          </Link>
+          .
+        </>
+      ),
+ placeholder: "gloas-f79cfa9a03c97f6ffab303177a5a6778a53c61e3914ba093412f68a9298a1b28",
+ error: Boolean(errors.GITLAB_CLIENT_SECRET),
+ required: true,
+ },
+ ];
+
+ const GITLAB_SERVICE_FIELD: TCopyField[] = [
+ {
+ key: "Callback_URL",
+ label: "Callback URL",
+ url: `${originURL}/auth/gitlab/callback/`,
+      description: (
+        <>
+          We will auto-generate this. Paste this into the <CodeBlock>Redirect URI</CodeBlock> field of your{" "}
+          <Link
+            href="https://docs.gitlab.com/ee/integration/oauth_provider.html"
+            target="_blank"
+            rel="noreferrer noopener"
+            className="text-custom-primary-100 hover:underline"
+          >
+            GitLab OAuth application
+          </Link>
+          .
+        </>
+      ),
+ },
+ ];
+
+ const onSubmit = async (formData: GitlabConfigFormValues) => {
+    const payload: Partial<GitlabConfigFormValues> = { ...formData };
+
+ await updateInstanceConfigurations(payload)
+ .then((response = []) => {
+ setToast({
+ type: TOAST_TYPE.SUCCESS,
+ title: "Done!",
+ message: "Your GitLab authentication is configured. You should test it now.",
+ });
+ reset({
+ GITLAB_HOST: response.find((item) => item.key === "GITLAB_HOST")?.value,
+ GITLAB_CLIENT_ID: response.find((item) => item.key === "GITLAB_CLIENT_ID")?.value,
+ GITLAB_CLIENT_SECRET: response.find((item) => item.key === "GITLAB_CLIENT_SECRET")?.value,
+ });
+ })
+ .catch((err) => console.error(err));
+ };
+
+ const handleGoBack = (e: React.MouseEvent) => {
+ if (isDirty) {
+ e.preventDefault();
+ setIsDiscardChangesModalOpen(true);
+ }
+ };
+
+  return (
+    <>
+      <ConfirmDiscardModal
+        isOpen={isDiscardChangesModalOpen}
+        onDiscardHref="/authentication"
+        handleClose={() => setIsDiscardChangesModalOpen(false)}
+      />
+      <div className="flex flex-col gap-8">
+        <div className="grid grid-cols-2 gap-x-12 gap-y-8 w-full">
+          <div className="flex flex-col gap-y-4 col-span-2 md:col-span-1">
+            <div className="pt-2 text-xl font-medium">GitLab-provided details for Plane</div>
+            {GITLAB_FORM_FIELDS.map((field) => (
+              <ControllerInput
+                key={field.key}
+                control={control}
+                type={field.type}
+                name={field.key}
+                label={field.label}
+                description={field.description}
+                placeholder={field.placeholder}
+                error={field.error}
+                required={field.required}
+              />
+            ))}
+            <div className="flex items-center gap-4 pt-4">
+              <Button variant="primary" onClick={handleSubmit(onSubmit)} loading={isSubmitting} disabled={!isDirty}>
+                {isSubmitting ? "Saving..." : "Save changes"}
+              </Button>
+              <Link
+                href="/authentication"
+                className={cn(getButtonStyling("link-neutral", "md"), "font-medium")}
+                onClick={handleGoBack}
+              >
+                Go back
+              </Link>
+            </div>
+          </div>
+          <div className="col-span-2 md:col-span-1">
+            <div className="flex flex-col gap-y-4 px-6 py-4 my-2 bg-custom-background-80/60 rounded-lg">
+              <div className="pt-2 text-xl font-medium">Plane-provided details for GitLab</div>
+              {GITLAB_SERVICE_FIELD.map((field) => (
+                <CopyField key={field.key} label={field.label} url={field.url} description={field.description} />
+              ))}
+            </div>
+          </div>
+        </div>
+      </div>
+    </>
+  );
+};
diff --git a/apps/admin/app/(all)/(dashboard)/authentication/gitlab/layout.tsx b/apps/admin/app/(all)/(dashboard)/authentication/gitlab/layout.tsx
new file mode 100644
index 00000000..79b5de5a
--- /dev/null
+++ b/apps/admin/app/(all)/(dashboard)/authentication/gitlab/layout.tsx
@@ -0,0 +1,10 @@
+import type { ReactNode } from "react";
+import type { Metadata } from "next";
+
+export const metadata: Metadata = {
+ title: "GitLab Authentication - God Mode",
+};
+
+export default function GitlabAuthenticationLayout({ children }: { children: ReactNode }) {
+  return <>{children}</>;
+}
diff --git a/apps/admin/app/(all)/(dashboard)/authentication/gitlab/page.tsx b/apps/admin/app/(all)/(dashboard)/authentication/gitlab/page.tsx
new file mode 100644
index 00000000..ae85168a
--- /dev/null
+++ b/apps/admin/app/(all)/(dashboard)/authentication/gitlab/page.tsx
@@ -0,0 +1,102 @@
+"use client";
+
+import { useState } from "react";
+import { observer } from "mobx-react";
+import Image from "next/image";
+import useSWR from "swr";
+import { setPromiseToast } from "@plane/propel/toast";
+import { Loader, ToggleSwitch } from "@plane/ui";
+// components
+import { AuthenticationMethodCard } from "@/components/authentication/authentication-method-card";
+// hooks
+import { useInstance } from "@/hooks/store";
+// icons
+import GitlabLogo from "@/public/logos/gitlab-logo.svg";
+// local components
+import { InstanceGitlabConfigForm } from "./form";
+
+const InstanceGitlabAuthenticationPage = observer(() => {
+ // store
+ const { fetchInstanceConfigurations, formattedConfig, updateInstanceConfigurations } = useInstance();
+ // state
+ const [isSubmitting, setIsSubmitting] = useState(false);
+ // config
+ const enableGitlabConfig = formattedConfig?.IS_GITLAB_ENABLED ?? "";
+
+ useSWR("INSTANCE_CONFIGURATIONS", () => fetchInstanceConfigurations());
+
+ const updateConfig = async (key: "IS_GITLAB_ENABLED", value: string) => {
+ setIsSubmitting(true);
+
+ const payload = {
+ [key]: value,
+ };
+
+ const updateConfigPromise = updateInstanceConfigurations(payload);
+
+ setPromiseToast(updateConfigPromise, {
+ loading: "Saving Configuration...",
+ success: {
+ title: "Configuration saved",
+        message: () => `GitLab authentication is now ${value === "1" ? "active" : "disabled"}.`,
+ },
+ error: {
+ title: "Error",
+ message: () => "Failed to save configuration",
+ },
+ });
+
+ await updateConfigPromise
+ .then(() => {
+ setIsSubmitting(false);
+ })
+ .catch((err) => {
+ console.error(err);
+ setIsSubmitting(false);
+ });
+ };
+
+  return (
+    <>
+      <div className="relative container mx-auto w-full h-full p-4 py-4 space-y-6 flex flex-col">
+        <div className="border-b border-custom-border-100 mx-4 py-4 space-y-1 flex-shrink-0">
+          <AuthenticationMethodCard
+            name="GitLab"
+            description="Allow members to login or sign up to Plane with their GitLab accounts."
+            icon={<Image src={GitlabLogo} height={24} width={24} alt="GitLab Logo" />}
+            config={
+              <ToggleSwitch
+                value={Boolean(parseInt(enableGitlabConfig))}
+                onChange={() => {
+                  if (Boolean(parseInt(enableGitlabConfig)) === true) {
+                    updateConfig("IS_GITLAB_ENABLED", "0");
+                  } else {
+                    updateConfig("IS_GITLAB_ENABLED", "1");
+                  }
+                }}
+                size="sm"
+                disabled={isSubmitting || !formattedConfig}
+              />
+            }
+            disabled={isSubmitting || !formattedConfig}
+            withBorder={false}
+          />
+        </div>
+        <div className="flex-grow overflow-hidden overflow-y-scroll vertical-scrollbar scrollbar-md p-4">
+          {formattedConfig ? (
+            <InstanceGitlabConfigForm config={formattedConfig} />
+          ) : (
+            <Loader className="space-y-8">
+              <Loader.Item height="50px" />
+              <div className="grid grid-cols-2 gap-x-8 gap-y-4">
+                <Loader.Item height="50px" />
+                <Loader.Item height="50px" />
+                <Loader.Item height="50px" />
+                <Loader.Item height="50px" />
+              </div>
+            </Loader>
+          )}
+        </div>
+      </div>
+    </>
+  );
+});
+
+export default InstanceGitlabAuthenticationPage;
diff --git a/apps/admin/app/(all)/(dashboard)/authentication/google/form.tsx b/apps/admin/app/(all)/(dashboard)/authentication/google/form.tsx
new file mode 100644
index 00000000..d9c3646b
--- /dev/null
+++ b/apps/admin/app/(all)/(dashboard)/authentication/google/form.tsx
@@ -0,0 +1,235 @@
+"use client";
+import type { FC } from "react";
+import { useState } from "react";
+import { isEmpty } from "lodash-es";
+import Link from "next/link";
+import { useForm } from "react-hook-form";
+import { Monitor } from "lucide-react";
+// plane internal packages
+import { API_BASE_URL } from "@plane/constants";
+import { Button, getButtonStyling } from "@plane/propel/button";
+import { TOAST_TYPE, setToast } from "@plane/propel/toast";
+import type { IFormattedInstanceConfiguration, TInstanceGoogleAuthenticationConfigurationKeys } from "@plane/types";
+import { cn } from "@plane/utils";
+// components
+import { CodeBlock } from "@/components/common/code-block";
+import { ConfirmDiscardModal } from "@/components/common/confirm-discard-modal";
+import type { TControllerInputFormField } from "@/components/common/controller-input";
+import { ControllerInput } from "@/components/common/controller-input";
+import type { TCopyField } from "@/components/common/copy-field";
+import { CopyField } from "@/components/common/copy-field";
+// hooks
+import { useInstance } from "@/hooks/store";
+
+type Props = {
+ config: IFormattedInstanceConfiguration;
+};
+
+type GoogleConfigFormValues = Record<TInstanceGoogleAuthenticationConfigurationKeys, string>;
+
+export const InstanceGoogleConfigForm: FC<Props> = (props) => {
+ const { config } = props;
+ // states
+ const [isDiscardChangesModalOpen, setIsDiscardChangesModalOpen] = useState(false);
+ // store hooks
+ const { updateInstanceConfigurations } = useInstance();
+ // form data
+ const {
+ handleSubmit,
+ control,
+ reset,
+ formState: { errors, isDirty, isSubmitting },
+  } = useForm<GoogleConfigFormValues>({
+ defaultValues: {
+ GOOGLE_CLIENT_ID: config["GOOGLE_CLIENT_ID"],
+ GOOGLE_CLIENT_SECRET: config["GOOGLE_CLIENT_SECRET"],
+ },
+ });
+
+ const originURL = !isEmpty(API_BASE_URL) ? API_BASE_URL : typeof window !== "undefined" ? window.location.origin : "";
+
+ const GOOGLE_FORM_FIELDS: TControllerInputFormField[] = [
+ {
+ key: "GOOGLE_CLIENT_ID",
+ type: "text",
+ label: "Client ID",
+      description: (
+        <>
+          Your client ID lives in your Google API Console.{" "}
+          <Link
+            href="https://developers.google.com/identity/protocols/oauth2"
+            target="_blank"
+            rel="noreferrer noopener"
+            className="text-custom-primary-100 hover:underline"
+          >
+            Learn more
+          </Link>
+        </>
+      ),
+ placeholder: "840195096245-0p2tstej9j5nc4l8o1ah2dqondscqc1g.apps.googleusercontent.com",
+ error: Boolean(errors.GOOGLE_CLIENT_ID),
+ required: true,
+ },
+ {
+ key: "GOOGLE_CLIENT_SECRET",
+ type: "password",
+ label: "Client secret",
+      description: (
+        <>
+          Your client secret should also be in your Google API Console.{" "}
+          <Link
+            href="https://developers.google.com/identity/protocols/oauth2"
+            target="_blank"
+            rel="noreferrer noopener"
+            className="text-custom-primary-100 hover:underline"
+          >
+            Learn more
+          </Link>
+        </>
+      ),
+ placeholder: "GOCShX-ADp4cI0kPqav1gGCBg5bE02E",
+ error: Boolean(errors.GOOGLE_CLIENT_SECRET),
+ required: true,
+ },
+ ];
+
+ const GOOGLE_COMMON_SERVICE_DETAILS: TCopyField[] = [
+ {
+ key: "Origin_URL",
+ label: "Origin URL",
+ url: originURL,
+      description: (
+        <>
+          We will auto-generate this. Paste this into your{" "}
+          <CodeBlock>Authorized JavaScript origins</CodeBlock> field. For this OAuth client{" "}
+          <Link
+            href="https://console.cloud.google.com/apis/credentials"
+            target="_blank"
+            rel="noreferrer noopener"
+            className="text-custom-primary-100 hover:underline"
+          >
+            here.
+          </Link>
+        </>
+      ),
+ },
+ ];
+
+ const GOOGLE_SERVICE_DETAILS: TCopyField[] = [
+ {
+ key: "Callback_URI",
+ label: "Callback URI",
+ url: `${originURL}/auth/google/callback/`,
+      description: (
+        <>
+          We will auto-generate this. Paste this into your <CodeBlock>Authorized Redirect URI</CodeBlock>{" "}
+          field. For this OAuth client{" "}
+          <Link
+            href="https://console.cloud.google.com/apis/credentials"
+            target="_blank"
+            rel="noreferrer noopener"
+            className="text-custom-primary-100 hover:underline"
+          >
+            here.
+          </Link>
+        </>
+      ),
+ },
+ ];
+
+ const onSubmit = async (formData: GoogleConfigFormValues) => {
+    const payload: Partial<GoogleConfigFormValues> = { ...formData };
+
+ await updateInstanceConfigurations(payload)
+ .then((response = []) => {
+ setToast({
+ type: TOAST_TYPE.SUCCESS,
+ title: "Done!",
+ message: "Your Google authentication is configured. You should test it now.",
+ });
+ reset({
+ GOOGLE_CLIENT_ID: response.find((item) => item.key === "GOOGLE_CLIENT_ID")?.value,
+ GOOGLE_CLIENT_SECRET: response.find((item) => item.key === "GOOGLE_CLIENT_SECRET")?.value,
+ });
+ })
+ .catch((err) => console.error(err));
+ };
+
+ const handleGoBack = (e: React.MouseEvent) => {
+ if (isDirty) {
+ e.preventDefault();
+ setIsDiscardChangesModalOpen(true);
+ }
+ };
+
+  return (
+    <>
+      <ConfirmDiscardModal
+        isOpen={isDiscardChangesModalOpen}
+        onDiscardHref="/authentication"
+        handleClose={() => setIsDiscardChangesModalOpen(false)}
+      />
+      <div className="flex flex-col gap-8">
+        <div className="grid grid-cols-2 gap-x-12 gap-y-8 w-full">
+          <div className="flex flex-col gap-y-4 col-span-2 md:col-span-1">
+            <div className="pt-2 text-xl font-medium">Google-provided details for Plane</div>
+            {GOOGLE_FORM_FIELDS.map((field) => (
+              <ControllerInput
+                key={field.key}
+                control={control}
+                type={field.type}
+                name={field.key}
+                label={field.label}
+                description={field.description}
+                placeholder={field.placeholder}
+                error={field.error}
+                required={field.required}
+              />
+            ))}
+            <div className="flex items-center gap-4 pt-4">
+              <Button variant="primary" onClick={handleSubmit(onSubmit)} loading={isSubmitting} disabled={!isDirty}>
+                {isSubmitting ? "Saving..." : "Save changes"}
+              </Button>
+              <Link
+                href="/authentication"
+                className={cn(getButtonStyling("link-neutral", "md"), "font-medium")}
+                onClick={handleGoBack}
+              >
+                Go back
+              </Link>
+            </div>
+          </div>
+          <div className="col-span-2 md:col-span-1">
+            <div className="flex flex-col gap-y-4 px-6 py-4 my-2 bg-custom-background-80/60 rounded-lg">
+              <div className="pt-2 text-xl font-medium">Plane-provided details for Google</div>
+              {/* common service details */}
+              <div className="flex flex-col gap-y-4">
+                {GOOGLE_COMMON_SERVICE_DETAILS.map((field) => (
+                  <CopyField key={field.key} label={field.label} url={field.url} description={field.description} />
+                ))}
+              </div>
+              {/* web service details */}
+              <div className="flex flex-col gap-y-4 pt-2">
+                <div className="flex items-center gap-2 pb-1">
+                  <Monitor className="h-4 w-4 text-custom-text-200" />
+                  <span className="text-sm font-medium">Web</span>
+                </div>
+                {GOOGLE_SERVICE_DETAILS.map((field) => (
+                  <CopyField key={field.key} label={field.label} url={field.url} description={field.description} />
+                ))}
+              </div>
+            </div>
+          </div>
+        </div>
+      </div>
+    </>
+  );
+};
diff --git a/apps/admin/app/(all)/(dashboard)/authentication/google/layout.tsx b/apps/admin/app/(all)/(dashboard)/authentication/google/layout.tsx
new file mode 100644
index 00000000..ddc0cff4
--- /dev/null
+++ b/apps/admin/app/(all)/(dashboard)/authentication/google/layout.tsx
@@ -0,0 +1,10 @@
+import type { ReactNode } from "react";
+import type { Metadata } from "next";
+
+export const metadata: Metadata = {
+ title: "Google Authentication - God Mode",
+};
+
+export default function GoogleAuthenticationLayout({ children }: { children: ReactNode }) {
+  return <>{children}</>;
+}
diff --git a/apps/admin/app/(all)/(dashboard)/authentication/google/page.tsx b/apps/admin/app/(all)/(dashboard)/authentication/google/page.tsx
new file mode 100644
index 00000000..d6ca370d
--- /dev/null
+++ b/apps/admin/app/(all)/(dashboard)/authentication/google/page.tsx
@@ -0,0 +1,103 @@
+"use client";
+
+import { useState } from "react";
+import { observer } from "mobx-react";
+import Image from "next/image";
+import useSWR from "swr";
+import { setPromiseToast } from "@plane/propel/toast";
+import { Loader, ToggleSwitch } from "@plane/ui";
+// components
+import { AuthenticationMethodCard } from "@/components/authentication/authentication-method-card";
+// hooks
+import { useInstance } from "@/hooks/store";
+// icons
+import GoogleLogo from "@/public/logos/google-logo.svg";
+// local components
+import { InstanceGoogleConfigForm } from "./form";
+
+const InstanceGoogleAuthenticationPage = observer(() => {
+ // store
+ const { fetchInstanceConfigurations, formattedConfig, updateInstanceConfigurations } = useInstance();
+ // state
+ const [isSubmitting, setIsSubmitting] = useState(false);
+ // config
+ const enableGoogleConfig = formattedConfig?.IS_GOOGLE_ENABLED ?? "";
+
+ useSWR("INSTANCE_CONFIGURATIONS", () => fetchInstanceConfigurations());
+
+ const updateConfig = async (key: "IS_GOOGLE_ENABLED", value: string) => {
+ setIsSubmitting(true);
+
+ const payload = {
+ [key]: value,
+ };
+
+ const updateConfigPromise = updateInstanceConfigurations(payload);
+
+ setPromiseToast(updateConfigPromise, {
+ loading: "Saving Configuration...",
+ success: {
+ title: "Configuration saved",
+        message: () => `Google authentication is now ${value === "1" ? "active" : "disabled"}.`,
+ },
+ error: {
+ title: "Error",
+ message: () => "Failed to save configuration",
+ },
+ });
+
+ await updateConfigPromise
+ .then(() => {
+ setIsSubmitting(false);
+ })
+ .catch((err) => {
+ console.error(err);
+ setIsSubmitting(false);
+ });
+ };
+
+  return (
+    <>
+      <div className="relative container mx-auto w-full h-full p-4 py-4 space-y-6 flex flex-col">
+        <div className="border-b border-custom-border-100 mx-4 py-4 space-y-1 flex-shrink-0">
+          <AuthenticationMethodCard
+            name="Google"
+            description="Allow members to login or sign up to Plane with their Google accounts."
+            icon={<Image src={GoogleLogo} height={24} width={24} alt="Google Logo" />}
+            config={
+              <ToggleSwitch
+                value={Boolean(parseInt(enableGoogleConfig))}
+                onChange={() => {
+                  if (Boolean(parseInt(enableGoogleConfig)) === true) {
+                    updateConfig("IS_GOOGLE_ENABLED", "0");
+                  } else {
+                    updateConfig("IS_GOOGLE_ENABLED", "1");
+                  }
+                }}
+                size="sm"
+                disabled={isSubmitting || !formattedConfig}
+              />
+            }
+            disabled={isSubmitting || !formattedConfig}
+            withBorder={false}
+          />
+        </div>
+        <div className="flex-grow overflow-hidden overflow-y-scroll vertical-scrollbar scrollbar-md p-4">
+          {formattedConfig ? (
+            <InstanceGoogleConfigForm config={formattedConfig} />
+          ) : (
+            <Loader className="space-y-8">
+              <Loader.Item height="50px" />
+              <div className="grid grid-cols-2 gap-x-8 gap-y-4">
+                <Loader.Item height="50px" />
+                <Loader.Item height="50px" />
+                <Loader.Item height="50px" />
+                <Loader.Item height="50px" />
+              </div>
+            </Loader>
+          )}
+        </div>
+      </div>
+    </>
+  );
+});
+
+export default InstanceGoogleAuthenticationPage;
diff --git a/apps/admin/app/(all)/(dashboard)/authentication/layout.tsx b/apps/admin/app/(all)/(dashboard)/authentication/layout.tsx
new file mode 100644
index 00000000..bed80f22
--- /dev/null
+++ b/apps/admin/app/(all)/(dashboard)/authentication/layout.tsx
@@ -0,0 +1,10 @@
+import type { ReactNode } from "react";
+import type { Metadata } from "next";
+
+export const metadata: Metadata = {
+ title: "Authentication Settings - Plane Web",
+};
+
+export default function AuthenticationLayout({ children }: { children: ReactNode }) {
+  return <>{children}</>;
+}
diff --git a/apps/admin/app/(all)/(dashboard)/authentication/page.tsx b/apps/admin/app/(all)/(dashboard)/authentication/page.tsx
new file mode 100644
index 00000000..16be71e5
--- /dev/null
+++ b/apps/admin/app/(all)/(dashboard)/authentication/page.tsx
@@ -0,0 +1,114 @@
+"use client";
+
+import { useState } from "react";
+import { observer } from "mobx-react";
+import useSWR from "swr";
+// plane internal packages
+import { setPromiseToast } from "@plane/propel/toast";
+import type { TInstanceConfigurationKeys } from "@plane/types";
+import { Loader, ToggleSwitch } from "@plane/ui";
+import { cn } from "@plane/utils";
+// hooks
+import { useInstance } from "@/hooks/store";
+// plane admin components
+import { AuthenticationModes } from "@/plane-admin/components/authentication";
+
+const InstanceAuthenticationPage = observer(() => {
+ // store
+ const { fetchInstanceConfigurations, formattedConfig, updateInstanceConfigurations } = useInstance();
+
+ useSWR("INSTANCE_CONFIGURATIONS", () => fetchInstanceConfigurations());
+
+ // state
+ const [isSubmitting, setIsSubmitting] = useState(false);
+ // derived values
+ const enableSignUpConfig = formattedConfig?.ENABLE_SIGNUP ?? "";
+
+ const updateConfig = async (key: TInstanceConfigurationKeys, value: string) => {
+ setIsSubmitting(true);
+
+ const payload = {
+ [key]: value,
+ };
+
+ const updateConfigPromise = updateInstanceConfigurations(payload);
+
+ setPromiseToast(updateConfigPromise, {
+ loading: "Saving configuration",
+ success: {
+ title: "Success",
+ message: () => "Configuration saved successfully",
+ },
+ error: {
+ title: "Error",
+ message: () => "Failed to save configuration",
+ },
+ });
+
+ await updateConfigPromise
+ .then(() => {
+ setIsSubmitting(false);
+ })
+ .catch((err) => {
+ console.error(err);
+ setIsSubmitting(false);
+ });
+ };
+
+  return (
+    <>
+      <div className="relative container mx-auto w-full h-full p-4 py-4 space-y-6 flex flex-col">
+        <div className="border-b border-custom-border-100 mx-4 py-4 space-y-1 flex-shrink-0">
+          <div className="text-xl font-medium text-custom-text-100">Manage authentication modes for your instance</div>
+          <div className="text-sm font-normal text-custom-text-300">
+            Configure authentication modes for your team and restrict sign-ups to be invite only.
+          </div>
+        </div>
+        <div className="flex-grow overflow-hidden overflow-y-scroll vertical-scrollbar scrollbar-md px-4">
+          {formattedConfig ? (
+            <div className="space-y-3">
+              <div
+                className={cn(
+                  "w-full flex items-center gap-14 rounded border border-custom-border-200 bg-custom-background-90 px-4 py-3",
+                  { "opacity-70": isSubmitting }
+                )}
+              >
+                <div className="flex grow items-center gap-4">
+                  <div className="grow">
+                    <div className="text-sm font-medium text-custom-text-100">
+                      Allow anyone to sign up even without an invite
+                    </div>
+                    <div className="text-xs font-normal text-custom-text-300">
+                      Toggling this off will only let users sign up when they are invited.
+                    </div>
+                  </div>
+                </div>
+                <div className="shrink-0">
+                  <ToggleSwitch
+                    value={Boolean(parseInt(enableSignUpConfig))}
+                    onChange={() => {
+                      if (Boolean(parseInt(enableSignUpConfig)) === true) {
+                        updateConfig("ENABLE_SIGNUP", "0");
+                      } else {
+                        updateConfig("ENABLE_SIGNUP", "1");
+                      }
+                    }}
+                    size="sm"
+                    disabled={isSubmitting}
+                  />
+                </div>
+              </div>
+              <div className="text-lg font-medium pt-6">Available authentication modes</div>
+              <AuthenticationModes disabled={isSubmitting} updateConfig={updateConfig} />
+            </div>
+          ) : (
+            <Loader className="space-y-10">
+              <Loader.Item height="50px" width="75%" />
+              <Loader.Item height="50px" width="75%" />
+              <Loader.Item height="50px" width="40%" />
+              <Loader.Item height="50px" width="40%" />
+              <Loader.Item height="50px" width="20%" />
+            </Loader>
+          )}
+        </div>
+      </div>
+    </>
+  );
+});
+
+export default InstanceAuthenticationPage;
diff --git a/apps/admin/app/(all)/(dashboard)/email/email-config-form.tsx b/apps/admin/app/(all)/(dashboard)/email/email-config-form.tsx
new file mode 100644
index 00000000..450a5f4e
--- /dev/null
+++ b/apps/admin/app/(all)/(dashboard)/email/email-config-form.tsx
@@ -0,0 +1,228 @@
+"use client";
+
+import type { FC } from "react";
+import React, { useMemo, useState } from "react";
+import { useForm } from "react-hook-form";
+// types
+import { Button } from "@plane/propel/button";
+import { TOAST_TYPE, setToast } from "@plane/propel/toast";
+import type { IFormattedInstanceConfiguration, TInstanceEmailConfigurationKeys } from "@plane/types";
+// ui
+import { CustomSelect } from "@plane/ui";
+// components
+import type { TControllerInputFormField } from "@/components/common/controller-input";
+import { ControllerInput } from "@/components/common/controller-input";
+// hooks
+import { useInstance } from "@/hooks/store";
+// local components
+import { SendTestEmailModal } from "./test-email-modal";
+
+type IInstanceEmailForm = {
+ config: IFormattedInstanceConfiguration;
+};
+
+type EmailFormValues = Record<TInstanceEmailConfigurationKeys, string>;
+
+type TEmailSecurityKeys = "EMAIL_USE_TLS" | "EMAIL_USE_SSL" | "NONE";
+
+const EMAIL_SECURITY_OPTIONS: { [key in TEmailSecurityKeys]: string } = {
+ EMAIL_USE_TLS: "TLS",
+ EMAIL_USE_SSL: "SSL",
+ NONE: "No email security",
+};
+
+export const InstanceEmailForm: FC<IInstanceEmailForm> = (props) => {
+ const { config } = props;
+ // states
+ const [isSendTestEmailModalOpen, setIsSendTestEmailModalOpen] = useState(false);
+ // store hooks
+ const { updateInstanceConfigurations } = useInstance();
+ // form data
+ const {
+ handleSubmit,
+ watch,
+ setValue,
+ control,
+ formState: { errors, isValid, isDirty, isSubmitting },
+  } = useForm<EmailFormValues>({
+ defaultValues: {
+ EMAIL_HOST: config["EMAIL_HOST"],
+ EMAIL_PORT: config["EMAIL_PORT"],
+ EMAIL_HOST_USER: config["EMAIL_HOST_USER"],
+ EMAIL_HOST_PASSWORD: config["EMAIL_HOST_PASSWORD"],
+ EMAIL_USE_TLS: config["EMAIL_USE_TLS"],
+ EMAIL_USE_SSL: config["EMAIL_USE_SSL"],
+ EMAIL_FROM: config["EMAIL_FROM"],
+ ENABLE_SMTP: config["ENABLE_SMTP"],
+ },
+ });
+ const emailFormFields: TControllerInputFormField[] = [
+ {
+ key: "EMAIL_HOST",
+ type: "text",
+ label: "Host",
+ placeholder: "email.google.com",
+ error: Boolean(errors.EMAIL_HOST),
+ required: true,
+ },
+ {
+ key: "EMAIL_PORT",
+ type: "text",
+ label: "Port",
+ placeholder: "8080",
+ error: Boolean(errors.EMAIL_PORT),
+ required: true,
+ },
+ {
+ key: "EMAIL_FROM",
+ type: "text",
+ label: "Sender's email address",
+ description:
+ "This is the email address your users will see when getting emails from this instance. You will need to verify this address.",
+ placeholder: "no-reply@projectplane.so",
+ error: Boolean(errors.EMAIL_FROM),
+ required: true,
+ },
+ ];
+
+ const OptionalEmailFormFields: TControllerInputFormField[] = [
+ {
+ key: "EMAIL_HOST_USER",
+ type: "text",
+ label: "Username",
+ placeholder: "getitdone@projectplane.so",
+ error: Boolean(errors.EMAIL_HOST_USER),
+ required: false,
+ },
+ {
+ key: "EMAIL_HOST_PASSWORD",
+ type: "password",
+ label: "Password",
+ placeholder: "Password",
+ error: Boolean(errors.EMAIL_HOST_PASSWORD),
+ required: false,
+ },
+ ];
+
+ const onSubmit = async (formData: EmailFormValues) => {
+ const payload: Partial = { ...formData, ENABLE_SMTP: "1" };
+
+ await updateInstanceConfigurations(payload)
+ .then(() =>
+ setToast({
+ type: TOAST_TYPE.SUCCESS,
+ title: "Success",
+ message: "Email Settings updated successfully",
+ })
+ )
+ .catch((err) => console.error(err));
+ };
+
+ const useTLSValue = watch("EMAIL_USE_TLS");
+ const useSSLValue = watch("EMAIL_USE_SSL");
+ const emailSecurityKey: TEmailSecurityKeys = useMemo(() => {
+ if (useTLSValue === "1") return "EMAIL_USE_TLS";
+ if (useSSLValue === "1") return "EMAIL_USE_SSL";
+ return "NONE";
+ }, [useTLSValue, useSSLValue]);
+
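+  // TLS and SSL are mutually exclusive: selecting one clears the other, and
+  // "NONE" clears both flags.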
+ const handleEmailSecurityChange = (key: TEmailSecurityKeys) => {
+ if (key === "EMAIL_USE_SSL") {
+ setValue("EMAIL_USE_TLS", "0");
+ setValue("EMAIL_USE_SSL", "1");
+ }
+ if (key === "EMAIL_USE_TLS") {
+ setValue("EMAIL_USE_TLS", "1");
+ setValue("EMAIL_USE_SSL", "0");
+ }
+ if (key === "NONE") {
+ setValue("EMAIL_USE_TLS", "0");
+ setValue("EMAIL_USE_SSL", "0");
+ }
+ };
+
+  return (
+    <div className="space-y-8">
+      <div className="grid grid-cols-2 gap-x-8 gap-y-4">
+        <SendTestEmailModal isOpen={isSendTestEmailModalOpen} handleClose={() => setIsSendTestEmailModalOpen(false)} />
+        {emailFormFields.map((field) => (
+          <ControllerInput
+            key={field.key}
+            control={control}
+            type={field.type}
+            name={field.key}
+            label={field.label}
+            description={field.description}
+            placeholder={field.placeholder}
+            error={field.error}
+            required={field.required}
+          />
+        ))}
+        <div className="flex flex-col gap-1">
+          <h4 className="text-sm text-custom-text-300">Email security</h4>
+          <CustomSelect
+            value={emailSecurityKey}
+            label={EMAIL_SECURITY_OPTIONS[emailSecurityKey]}
+            onChange={handleEmailSecurityChange}
+            buttonClassName="rounded-md border-custom-border-200"
+            optionsClassName="w-full"
+            input
+          >
+            {Object.entries(EMAIL_SECURITY_OPTIONS).map(([key, value]) => (
+              <CustomSelect.Option key={key} value={key} className="w-full">
+                {value}
+              </CustomSelect.Option>
+            ))}
+          </CustomSelect>
+        </div>
+      </div>
+      <div className="flex flex-col gap-6">
+        <div className="border-y border-custom-border-100 py-3">
+          <div className="text-sm font-medium text-custom-text-100">Authentication</div>
+          <div className="text-xs font-normal text-custom-text-300">
+            This is optional, but we recommend setting up a username and a password for your SMTP server.
+          </div>
+        </div>
+        <div className="grid grid-cols-2 gap-x-8 gap-y-4">
+          {OptionalEmailFormFields.map((field) => (
+            <ControllerInput
+              key={field.key}
+              control={control}
+              type={field.type}
+              name={field.key}
+              label={field.label}
+              placeholder={field.placeholder}
+              error={field.error}
+              required={field.required}
+            />
+          ))}
+        </div>
+      </div>
+      <div className="flex items-center gap-4">
+        <Button variant="primary" onClick={handleSubmit(onSubmit)} loading={isSubmitting} disabled={!isValid || !isDirty}>
+          {isSubmitting ? "Saving..." : "Save changes"}
+        </Button>
+        <Button
+          variant="outline-primary"
+          onClick={() => setIsSendTestEmailModalOpen(true)}
+          loading={isSubmitting}
+          disabled={!isValid}
+        >
+          Send test email
+        </Button>
+      </div>
+    </div>
+  );
+};
diff --git a/apps/admin/app/(all)/(dashboard)/email/layout.tsx b/apps/admin/app/(all)/(dashboard)/email/layout.tsx
new file mode 100644
index 00000000..0e6fc06c
--- /dev/null
+++ b/apps/admin/app/(all)/(dashboard)/email/layout.tsx
@@ -0,0 +1,14 @@
+import type { ReactNode } from "react";
+import type { Metadata } from "next";
+
+interface EmailLayoutProps {
+ children: ReactNode;
+}
+
+export const metadata: Metadata = {
+ title: "Email Settings - God Mode",
+};
+
+export default function EmailLayout({ children }: EmailLayoutProps) {
+  return <>{children}</>;
+}
diff --git a/apps/admin/app/(all)/(dashboard)/email/page.tsx b/apps/admin/app/(all)/(dashboard)/email/page.tsx
new file mode 100644
index 00000000..a509f6d2
--- /dev/null
+++ b/apps/admin/app/(all)/(dashboard)/email/page.tsx
@@ -0,0 +1,94 @@
+"use client";
+
+import { useEffect, useState } from "react";
+import { observer } from "mobx-react";
+import useSWR from "swr";
+import { TOAST_TYPE, setToast } from "@plane/propel/toast";
+import { Loader, ToggleSwitch } from "@plane/ui";
+// hooks
+import { useInstance } from "@/hooks/store";
+// components
+import { InstanceEmailForm } from "./email-config-form";
+
+const InstanceEmailPage: React.FC = observer(() => {
+ // store
+ const { fetchInstanceConfigurations, formattedConfig, disableEmail } = useInstance();
+
+ const { isLoading } = useSWR("INSTANCE_CONFIGURATIONS", () => fetchInstanceConfigurations());
+
+ const [isSubmitting, setIsSubmitting] = useState(false);
+ const [isSMTPEnabled, setIsSMTPEnabled] = useState(false);
+
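+  // Disabling SMTP calls the API immediately; enabling only reveals the form
+  // below, and ENABLE_SMTP is persisted when the form itself is saved.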
+ const handleToggle = async () => {
+ if (isSMTPEnabled) {
+ setIsSubmitting(true);
+ try {
+ await disableEmail();
+ setIsSMTPEnabled(false);
+ setToast({
+ title: "Email feature disabled",
+ message: "Email feature has been disabled",
+ type: TOAST_TYPE.SUCCESS,
+ });
+ } catch (_error) {
+ setToast({
+ title: "Error disabling email",
+ message: "Failed to disable email feature. Please try again.",
+ type: TOAST_TYPE.ERROR,
+ });
+ } finally {
+ setIsSubmitting(false);
+ }
+ return;
+ }
+ setIsSMTPEnabled(true);
+ };
+ useEffect(() => {
+ if (formattedConfig) {
+ setIsSMTPEnabled(formattedConfig.ENABLE_SMTP === "1");
+ }
+ }, [formattedConfig]);
+
+  return (
+    <>
+      <div className="relative container mx-auto w-full h-full p-4 py-4 space-y-6 flex flex-col">
+        <div className="border-b border-custom-border-100 mx-4 py-4 flex items-start justify-between gap-4 flex-shrink-0">
+          <div className="space-y-1">
+            <div className="text-xl font-medium text-custom-text-100">Secure emails from your own instance</div>
+            <div className="text-sm font-normal text-custom-text-300">
+              Plane can send useful emails to you and your users from your own instance without talking to the
+              Internet.
+            </div>
+            <div className="text-sm font-normal text-custom-text-300">
+              Set it up below and please test your settings before you save them.{" "}
+              <span className="text-red-400">Misconfigs can lead to email bounces and errors.</span>
+            </div>
+          </div>
+          <div className="flex-shrink-0 pt-1">
+            {isLoading ? (
+              <Loader>
+                <Loader.Item height="26px" width="40px" />
+              </Loader>
+            ) : (
+              <ToggleSwitch value={isSMTPEnabled} onChange={handleToggle} size="sm" disabled={isSubmitting} />
+            )}
+          </div>
+        </div>
+        {isSMTPEnabled && !isLoading && (
+          <div className="flex-grow overflow-hidden overflow-y-scroll vertical-scrollbar scrollbar-md px-4">
+            {formattedConfig ? (
+              <InstanceEmailForm config={formattedConfig} />
+            ) : (
+              <Loader className="space-y-8">
+                <Loader.Item height="50px" />
+                <div className="grid grid-cols-2 gap-x-8 gap-y-4">
+                  <Loader.Item height="50px" />
+                  <Loader.Item height="50px" />
+                  <Loader.Item height="50px" />
+                  <Loader.Item height="50px" />
+                </div>
+              </Loader>
+            )}
+          </div>
+        )}
+      </div>
+    </>
+  );
+});
+
+export default InstanceEmailPage;
diff --git a/apps/admin/app/(all)/(dashboard)/email/test-email-modal.tsx b/apps/admin/app/(all)/(dashboard)/email/test-email-modal.tsx
new file mode 100644
index 00000000..09117096
--- /dev/null
+++ b/apps/admin/app/(all)/(dashboard)/email/test-email-modal.tsx
@@ -0,0 +1,137 @@
+import type { FC } from "react";
+import React, { useEffect, useState } from "react";
+import { Dialog, Transition } from "@headlessui/react";
+// plane imports
+import { Button } from "@plane/propel/button";
+import { InstanceService } from "@plane/services";
+// ui
+import { Input } from "@plane/ui";
+
+type Props = {
+ isOpen: boolean;
+ handleClose: () => void;
+};
+
+enum ESendEmailSteps {
+ SEND_EMAIL = "SEND_EMAIL",
+ SUCCESS = "SUCCESS",
+ FAILED = "FAILED",
+}
+
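+// A single module-level service instance is enough here; it holds no per-render
+// state, so it avoids re-instantiating the client on every render.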
+const instanceService = new InstanceService();
+
+export const SendTestEmailModal: FC<Props> = (props) => {
+ const { isOpen, handleClose } = props;
+
+ // state
+ const [receiverEmail, setReceiverEmail] = useState("");
+ const [sendEmailStep, setSendEmailStep] = useState(ESendEmailSteps.SEND_EMAIL);
+ const [isLoading, setIsLoading] = useState(false);
+ const [error, setError] = useState("");
+
+ // reset state
+ const resetState = () => {
+ setReceiverEmail("");
+ setSendEmailStep(ESendEmailSteps.SEND_EMAIL);
+ setIsLoading(false);
+ setError("");
+ };
+
+ useEffect(() => {
+ if (!isOpen) {
+ resetState();
+ }
+ }, [isOpen]);
+
+ const handleSubmit = async (e: React.MouseEvent) => {
+ e.preventDefault();
+
+ setIsLoading(true);
+ await instanceService
+ .sendTestEmail(receiverEmail)
+ .then(() => {
+ setSendEmailStep(ESendEmailSteps.SUCCESS);
+ })
+ .catch((error) => {
+ setError(error?.error || "Failed to send email");
+ setSendEmailStep(ESendEmailSteps.FAILED);
+ })
+ .finally(() => {
+ setIsLoading(false);
+ });
+ };
+
+  return (
+    <Transition.Root show={isOpen} as={React.Fragment}>
+      <Dialog as="div" className="relative z-20" onClose={handleClose}>
+        <Transition.Child
+          as={React.Fragment}
+          enter="ease-out duration-300"
+          enterFrom="opacity-0"
+          enterTo="opacity-100"
+          leave="ease-in duration-200"
+          leaveFrom="opacity-100"
+          leaveTo="opacity-0"
+        >
+          <div className="fixed inset-0 bg-custom-backdrop transition-opacity" />
+        </Transition.Child>
+        <div className="fixed inset-0 z-20 overflow-y-auto">
+          <div className="flex min-h-full items-center justify-center p-4 text-center sm:p-0">
+            <Transition.Child
+              as={React.Fragment}
+              enter="ease-out duration-300"
+              enterFrom="opacity-0 translate-y-4 sm:translate-y-0 sm:scale-95"
+              enterTo="opacity-100 translate-y-0 sm:scale-100"
+              leave="ease-in duration-200"
+              leaveFrom="opacity-100 translate-y-0 sm:scale-100"
+              leaveTo="opacity-0 translate-y-4 sm:translate-y-0 sm:scale-95"
+            >
+              <Dialog.Panel className="relative transform overflow-hidden rounded-lg bg-custom-background-100 px-4 pb-4 pt-5 text-left shadow-custom-shadow-md transition-all sm:my-8 sm:w-full sm:max-w-xl sm:p-6">
+                <div className="space-y-5">
+                  <Dialog.Title as="h3" className="text-lg font-medium leading-6 text-custom-text-100">
+                    {sendEmailStep === ESendEmailSteps.SEND_EMAIL
+                      ? "Send test email"
+                      : sendEmailStep === ESendEmailSteps.SUCCESS
+                        ? "Email sent"
+                        : "Failed"}{" "}
+                  </Dialog.Title>
+                  {sendEmailStep === ESendEmailSteps.SEND_EMAIL && (
+                    <Input
+                      id="receiver_email"
+                      type="email"
+                      value={receiverEmail}
+                      onChange={(e) => setReceiverEmail(e.target.value)}
+                      placeholder="Receiver email"
+                      className="w-full resize-none text-lg"
+                      tabIndex={1}
+                    />
+                  )}
+                  {sendEmailStep === ESendEmailSteps.SUCCESS && (
+                    <div className="flex flex-col gap-2 text-sm text-custom-text-200">
+                      <span>
+                        We have sent the test email to {receiverEmail}. Please check your spam folder if you cannot
+                        find it.
+                      </span>
+                      <span>If you still cannot find it, recheck your SMTP configuration and trigger a new test email.</span>
+                    </div>
+                  )}
+                  {sendEmailStep === ESendEmailSteps.FAILED && <div className="text-sm text-red-500">{error}</div>}
+                  <div className="flex items-center justify-end gap-2">
+                    <Button variant="neutral-primary" size="sm" onClick={handleClose} tabIndex={2}>
+                      {sendEmailStep === ESendEmailSteps.SEND_EMAIL ? "Cancel" : "Close"}
+                    </Button>
+                    {sendEmailStep === ESendEmailSteps.SEND_EMAIL && (
+                      <Button variant="primary" size="sm" loading={isLoading} onClick={handleSubmit} tabIndex={3}>
+                        {isLoading ? "Sending email..." : "Send email"}
+                      </Button>
+                    )}
+                  </div>
+                </div>
+              </Dialog.Panel>
+            </Transition.Child>
+          </div>
+        </div>
+      </Dialog>
+    </Transition.Root>
+  );
+};
diff --git a/apps/admin/app/(all)/(dashboard)/general/form.tsx b/apps/admin/app/(all)/(dashboard)/general/form.tsx
new file mode 100644
index 00000000..c91069b5
--- /dev/null
+++ b/apps/admin/app/(all)/(dashboard)/general/form.tsx
@@ -0,0 +1,157 @@
+"use client";
+import type { FC } from "react";
+import { observer } from "mobx-react";
+import { Controller, useForm } from "react-hook-form";
+import { Telescope } from "lucide-react";
+// types
+import { Button } from "@plane/propel/button";
+import { TOAST_TYPE, setToast } from "@plane/propel/toast";
+import type { IInstance, IInstanceAdmin } from "@plane/types";
+// ui
+import { Input, ToggleSwitch } from "@plane/ui";
+// components
+import { ControllerInput } from "@/components/common/controller-input";
+import { useInstance } from "@/hooks/store";
+import { IntercomConfig } from "./intercom";
+// hooks
+
+export interface IGeneralConfigurationForm {
+ instance: IInstance;
+ instanceAdmins: IInstanceAdmin[];
+}
+
+export const GeneralConfigurationForm: FC = observer((props) => {
+ const { instance, instanceAdmins } = props;
+ // hooks
+ const { instanceConfigurations, updateInstanceInfo, updateInstanceConfigurations } = useInstance();
+
+ // form data
+ const {
+ handleSubmit,
+ control,
+ watch,
+ formState: { errors, isSubmitting },
+  } = useForm<Partial<IInstance>>({
+ defaultValues: {
+ instance_name: instance?.instance_name,
+ is_telemetry_enabled: instance?.is_telemetry_enabled,
+ },
+ });
+
+  const onSubmit = async (formData: Partial<IInstance>) => {
+    const payload: Partial<IInstance> = { ...formData };
+
+ // update the intercom configuration
+ const isIntercomEnabled =
+ instanceConfigurations?.find((config) => config.key === "IS_INTERCOM_ENABLED")?.value === "1";
+ if (!payload.is_telemetry_enabled && isIntercomEnabled) {
+ try {
+ await updateInstanceConfigurations({ IS_INTERCOM_ENABLED: "0" });
+ } catch (error) {
+ console.error(error);
+ }
+ }
+
+ await updateInstanceInfo(payload)
+ .then(() =>
+ setToast({
+ type: TOAST_TYPE.SUCCESS,
+ title: "Success",
+ message: "Settings updated successfully",
+ })
+ )
+ .catch((err) => console.error(err));
+ };
+
+  return (
+    <div className="space-y-8">
+      <div className="space-y-3">
+        <div className="text-lg font-medium text-custom-text-100">Instance details</div>
+        <div className="grid w-full grid-cols-1 items-center gap-x-16 gap-y-8 lg:grid-cols-2 2xl:grid-cols-3">
+          <ControllerInput
+            control={control}
+            type="text"
+            name="instance_name"
+            label="Name of instance"
+            placeholder="Instance name"
+            error={Boolean(errors.instance_name)}
+            required
+          />
+          <div className="flex flex-col gap-1">
+            <h4 className="text-sm text-custom-text-300">Email</h4>
+            <Input
+              id="email"
+              name="email"
+              type="email"
+              value={instanceAdmins?.[0]?.user_detail?.email ?? ""}
+              placeholder="Admin email"
+              className="w-full cursor-not-allowed !text-custom-text-400"
+              disabled
+            />
+          </div>
+          <div className="flex flex-col gap-1">
+            <h4 className="text-sm text-custom-text-300">Instance ID</h4>
+            <Input
+              id="instance_id"
+              name="instance_id"
+              type="text"
+              value={instance.instance_id}
+              className="w-full cursor-not-allowed rounded-md font-medium !text-custom-text-400"
+              disabled
+            />
+          </div>
+        </div>
+      </div>
+      <div className="space-y-3">
+        <div className="text-lg font-medium text-custom-text-100">Chat + telemetry</div>
+        <div className="flex items-center gap-14 rounded border border-custom-border-100 bg-custom-background-90 px-4 py-3">
+          <div className="flex grow items-center gap-4">
+            <div className="shrink-0">
+              <Telescope className="h-6 w-6 p-0.5 text-custom-text-200" />
+            </div>
+            <div className="grow">
+              <div className="text-sm font-medium text-custom-text-100">Let Plane collect anonymous usage data</div>
+              <div className="text-xs font-normal text-custom-text-300">
+                No PII is collected. This anonymized data is used to understand how you use Plane and build new
+                features in line with{" "}
+                <a
+                  href="https://docs.plane.so/self-hosting/telemetry"
+                  target="_blank"
+                  rel="noreferrer noopener"
+                  className="text-custom-primary-100 hover:underline"
+                >
+                  our Telemetry Policy.
+                </a>
+              </div>
+            </div>
+          </div>
+          <div className="shrink-0">
+            <Controller
+              control={control}
+              name="is_telemetry_enabled"
+              render={({ field: { value, onChange } }) => (
+                <ToggleSwitch value={Boolean(value)} onChange={onChange} size="sm" />
+              )}
+            />
+          </div>
+        </div>
+        <IntercomConfig isTelemetryEnabled={Boolean(watch("is_telemetry_enabled"))} />
+      </div>
+      <div>
+        <Button variant="primary" onClick={handleSubmit(onSubmit)} loading={isSubmitting}>
+          {isSubmitting ? "Saving..." : "Save changes"}
+        </Button>
+      </div>
+    </div>
+  );
+});
diff --git a/apps/admin/app/(all)/(dashboard)/general/intercom.tsx b/apps/admin/app/(all)/(dashboard)/general/intercom.tsx
new file mode 100644
index 00000000..a6f17d62
--- /dev/null
+++ b/apps/admin/app/(all)/(dashboard)/general/intercom.tsx
@@ -0,0 +1,83 @@
+"use client";
+
+import type { FC } from "react";
+import { useState } from "react";
+import { observer } from "mobx-react";
+import useSWR from "swr";
+import { MessageSquare } from "lucide-react";
+import type { IFormattedInstanceConfiguration } from "@plane/types";
+import { ToggleSwitch } from "@plane/ui";
+// hooks
+import { useInstance } from "@/hooks/store";
+
+type TIntercomConfig = {
+ isTelemetryEnabled: boolean;
+};
+
+export const IntercomConfig: FC = observer((props) => {
+ const { isTelemetryEnabled } = props;
+ // hooks
+ const { instanceConfigurations, updateInstanceConfigurations, fetchInstanceConfigurations } = useInstance();
+ // states
+ const [isSubmitting, setIsSubmitting] = useState(false);
+
+ // derived values
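+  // Intercom is gated on telemetry: when telemetry is off the toggle is forced
+  // to false, and while configurations are still loading it stays undefined.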
+ const isIntercomEnabled = isTelemetryEnabled
+ ? instanceConfigurations
+ ? instanceConfigurations?.find((config) => config.key === "IS_INTERCOM_ENABLED")?.value === "1"
+ ? true
+ : false
+ : undefined
+ : false;
+
+ const { isLoading } = useSWR(isTelemetryEnabled ? "INSTANCE_CONFIGURATIONS" : null, () =>
+ isTelemetryEnabled ? fetchInstanceConfigurations() : null
+ );
+
+ const initialLoader = isLoading && isIntercomEnabled === undefined;
+
+ const submitInstanceConfigurations = async (payload: Partial) => {
+ try {
+ await updateInstanceConfigurations(payload);
+ } catch (error) {
+ console.error(error);
+ } finally {
+ setIsSubmitting(false);
+ }
+ };
+
+  const enableIntercomConfig = () => {
+    setIsSubmitting(true);
+    submitInstanceConfigurations({ IS_INTERCOM_ENABLED: isIntercomEnabled ? "0" : "1" });
+  };
+
+  return (
+    <>
+      {!initialLoader && (
+        <div className="flex items-center gap-14 rounded border border-custom-border-100 bg-custom-background-90 px-4 py-3">
+          <div className="flex grow items-center gap-4">
+            <div className="shrink-0">
+              <MessageSquare className="h-6 w-6 p-0.5 text-custom-text-200" />
+            </div>
+            <div className="grow">
+              <div className="text-sm font-medium text-custom-text-100">Chat with us</div>
+              <div className="text-xs font-normal text-custom-text-300">
+                Let your users chat with us via Intercom or another service. Toggling Telemetry off turns this off
+                automatically.
+              </div>
+            </div>
+          </div>
+          <div className="shrink-0">
+            <ToggleSwitch
+              value={Boolean(isIntercomEnabled)}
+              onChange={enableIntercomConfig}
+              size="sm"
+              disabled={!isTelemetryEnabled || isSubmitting}
+            />
+          </div>
+        </div>
+      )}
+    </>
+  );
+});
diff --git a/apps/admin/app/(all)/(dashboard)/general/layout.tsx b/apps/admin/app/(all)/(dashboard)/general/layout.tsx
new file mode 100644
index 00000000..f5167e75
--- /dev/null
+++ b/apps/admin/app/(all)/(dashboard)/general/layout.tsx
@@ -0,0 +1,10 @@
+import type { ReactNode } from "react";
+import type { Metadata } from "next";
+
+export const metadata: Metadata = {
+ title: "General Settings - God Mode",
+};
+
+export default function GeneralLayout({ children }: { children: ReactNode }) {
+ return <>{children}>;
+}
diff --git a/apps/admin/app/(all)/(dashboard)/general/page.tsx b/apps/admin/app/(all)/(dashboard)/general/page.tsx
new file mode 100644
index 00000000..f0d32f26
--- /dev/null
+++ b/apps/admin/app/(all)/(dashboard)/general/page.tsx
@@ -0,0 +1,31 @@
+"use client";
+import { observer } from "mobx-react";
+// hooks
+import { useInstance } from "@/hooks/store";
+// components
+import { GeneralConfigurationForm } from "./form";
+
+function GeneralPage() {
+ const { instance, instanceAdmins } = useInstance();
+
+  return (
+    <>
+      <div className="relative container mx-auto w-full h-full p-4 py-4 space-y-6 flex flex-col">
+        <div className="border-b border-custom-border-100 mx-4 py-4 space-y-1 flex-shrink-0">
+          <div className="text-xl font-medium text-custom-text-100">General settings</div>
+          <div className="text-sm font-normal text-custom-text-300">
+            Change the name of your instance and instance admin e-mail addresses. Enable or disable telemetry in your
+            instance.
+          </div>
+        </div>
+        <div className="flex-grow overflow-hidden overflow-y-scroll vertical-scrollbar scrollbar-md px-4">
+          {instance && instanceAdmins && (
+            <GeneralConfigurationForm instance={instance} instanceAdmins={instanceAdmins} />
+          )}
+        </div>
+      </div>
+    </>
+  );
+
+export default observer(GeneralPage);
diff --git a/apps/admin/app/(all)/(dashboard)/header.tsx b/apps/admin/app/(all)/(dashboard)/header.tsx
new file mode 100644
index 00000000..82d7241f
--- /dev/null
+++ b/apps/admin/app/(all)/(dashboard)/header.tsx
@@ -0,0 +1,105 @@
+"use client";
+
+import type { FC } from "react";
+import { observer } from "mobx-react";
+import { usePathname } from "next/navigation";
+import { Menu, Settings } from "lucide-react";
+// icons
+import { Breadcrumbs } from "@plane/ui";
+// components
+import { BreadcrumbLink } from "@/components/common/breadcrumb-link";
+// hooks
+import { useTheme } from "@/hooks/store";
+
+export const HamburgerToggle: FC = observer(() => {
+ const { isSidebarCollapsed, toggleSidebar } = useTheme();
+  return (
+    <button
+      type="button"
+      className="grid h-7 w-7 place-items-center rounded bg-custom-background-80 transition-all md:hidden"
+      onClick={() => toggleSidebar(!isSidebarCollapsed)}
+    >
+      <Menu size={14} strokeWidth={2} />
+    </button>
+  );
+});
+
+export const AdminHeader: FC = observer(() => {
+ const pathName = usePathname();
+
+ const getHeaderTitle = (pathName: string) => {
+ switch (pathName) {
+ case "general":
+ return "General";
+ case "ai":
+ return "Artificial Intelligence";
+ case "email":
+ return "Email";
+ case "authentication":
+ return "Authentication";
+ case "image":
+ return "Image";
+ case "google":
+ return "Google";
+ case "github":
+ return "GitHub";
+ case "gitlab":
+ return "GitLab";
+ case "workspace":
+ return "Workspace";
+ case "create":
+ return "Create";
+ default:
+ return pathName.toUpperCase();
+ }
+ };
+
+ // Function to dynamically generate breadcrumb items based on pathname
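+  // e.g. with a trailing slash, "/authentication/github/" pops the final empty
+  // segment and yields crumbs for "Authentication" and "GitHub".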
+ const generateBreadcrumbItems = (pathname: string) => {
+ const pathSegments = pathname.split("/").slice(1); // removing the first empty string.
+ pathSegments.pop();
+
+ let currentUrl = "";
+ const breadcrumbItems = pathSegments.map((segment) => {
+ currentUrl += "/" + segment;
+ return {
+ title: getHeaderTitle(segment),
+ href: currentUrl,
+ };
+ });
+ return breadcrumbItems;
+ };
+
+ const breadcrumbItems = generateBreadcrumbItems(pathName);
+
+  return (
+    <div className="relative z-10 flex h-[3.75rem] w-full flex-shrink-0 flex-row items-center gap-x-2 gap-y-4 border-b border-custom-sidebar-border-200 bg-custom-sidebar-background-100 p-4">
+      <HamburgerToggle />
+      {breadcrumbItems.length > 0 && (
+        <div className="flex w-full flex-grow items-center overflow-ellipsis whitespace-nowrap">
+          <Breadcrumbs>
+            <Breadcrumbs.BreadcrumbItem
+              type="text"
+              link={
+                <BreadcrumbLink
+                  href="/general/"
+                  label="Settings"
+                  icon={<Settings className="h-4 w-4 text-custom-text-300" />}
+                />
+              }
+            />
+            {breadcrumbItems.map(
+              (item) =>
+                item.title && (
+                  <Breadcrumbs.BreadcrumbItem
+                    key={item.title}
+                    type="text"
+                    link={<BreadcrumbLink href={item.href} label={item.title} />}
+                  />
+                )
+            )}
+          </Breadcrumbs>
+        </div>
+      )}
+    </div>
+  );
+});
diff --git a/apps/admin/app/(all)/(dashboard)/image/form.tsx b/apps/admin/app/(all)/(dashboard)/image/form.tsx
new file mode 100644
index 00000000..f6adcaee
--- /dev/null
+++ b/apps/admin/app/(all)/(dashboard)/image/form.tsx
@@ -0,0 +1,81 @@
+"use client";
+import type { FC } from "react";
+import { useForm } from "react-hook-form";
+import { Button } from "@plane/propel/button";
+import { TOAST_TYPE, setToast } from "@plane/propel/toast";
+import type { IFormattedInstanceConfiguration, TInstanceImageConfigurationKeys } from "@plane/types";
+// components
+import { ControllerInput } from "@/components/common/controller-input";
+// hooks
+import { useInstance } from "@/hooks/store";
+
+type IInstanceImageConfigForm = {
+ config: IFormattedInstanceConfiguration;
+};
+
+type ImageConfigFormValues = Record<TInstanceImageConfigurationKeys, string>;
+
+export const InstanceImageConfigForm: FC<IInstanceImageConfigForm> = (props) => {
+ const { config } = props;
+ // store hooks
+ const { updateInstanceConfigurations } = useInstance();
+ // form data
+ const {
+ handleSubmit,
+ control,
+ formState: { errors, isSubmitting },
+  } = useForm<ImageConfigFormValues>({
+ defaultValues: {
+ UNSPLASH_ACCESS_KEY: config["UNSPLASH_ACCESS_KEY"],
+ },
+ });
+
+ const onSubmit = async (formData: ImageConfigFormValues) => {
+    const payload: Partial<ImageConfigFormValues> = { ...formData };
+
+ await updateInstanceConfigurations(payload)
+ .then(() =>
+ setToast({
+ type: TOAST_TYPE.SUCCESS,
+ title: "Success",
+ message: "Image Configuration Settings updated successfully",
+ })
+ )
+ .catch((err) => console.error(err));
+ };
+
+  return (
+    <div className="space-y-8">
+      <div className="grid grid-cols-2 gap-x-8 gap-y-4">
+        <ControllerInput
+          control={control}
+          type="password"
+          name="UNSPLASH_ACCESS_KEY"
+          label="Access key from your Unsplash account"
+          description={
+            <>
+              You will find your access key in your Unsplash developer console.{" "}
+              <a
+                href="https://unsplash.com/developers"
+                target="_blank"
+                rel="noreferrer noopener"
+                className="text-custom-primary-100 hover:underline"
+              >
+                Learn more.
+              </a>
+            </>
+          }
+          placeholder="oXgq-sdfadsaeweqasdfasdf3234234rassd"
+          error={Boolean(errors.UNSPLASH_ACCESS_KEY)}
+          required
+        />
+      </div>
+      <div>
+        <Button variant="primary" onClick={handleSubmit(onSubmit)} loading={isSubmitting}>
+          {isSubmitting ? "Saving..." : "Save changes"}
+        </Button>
+      </div>
+    </div>
+  );
+};
diff --git a/apps/admin/app/(all)/(dashboard)/image/layout.tsx b/apps/admin/app/(all)/(dashboard)/image/layout.tsx
new file mode 100644
index 00000000..559a15f9
--- /dev/null
+++ b/apps/admin/app/(all)/(dashboard)/image/layout.tsx
@@ -0,0 +1,14 @@
+import type { ReactNode } from "react";
+import type { Metadata } from "next";
+
+interface ImageLayoutProps {
+ children: ReactNode;
+}
+
+export const metadata: Metadata = {
+ title: "Images Settings - God Mode",
+};
+
+export default function ImageLayout({ children }: ImageLayoutProps) {
+  return <>{children}</>;
+}
diff --git a/apps/admin/app/(all)/(dashboard)/image/page.tsx b/apps/admin/app/(all)/(dashboard)/image/page.tsx
new file mode 100644
index 00000000..ade9687d
--- /dev/null
+++ b/apps/admin/app/(all)/(dashboard)/image/page.tsx
@@ -0,0 +1,41 @@
+"use client";
+
+import { observer } from "mobx-react";
+import useSWR from "swr";
+import { Loader } from "@plane/ui";
+// hooks
+import { useInstance } from "@/hooks/store";
+// local
+import { InstanceImageConfigForm } from "./form";
+
+const InstanceImagePage = observer(() => {
+ // store
+ const { formattedConfig, fetchInstanceConfigurations } = useInstance();
+
+ useSWR("INSTANCE_CONFIGURATIONS", () => fetchInstanceConfigurations());
+
+  return (
+    <>
+      <div className="relative container mx-auto w-full h-full p-4 py-4 space-y-6 flex flex-col">
+        <div className="border-b border-custom-border-100 mx-4 py-4 space-y-1 flex-shrink-0">
+          <div className="text-xl font-medium text-custom-text-100">Third-party image libraries</div>
+          <div className="text-sm font-normal text-custom-text-300">
+            Let your users search and choose images from third-party libraries
+          </div>
+        </div>
+        <div className="flex-grow overflow-hidden overflow-y-scroll vertical-scrollbar scrollbar-md px-4">
+          {formattedConfig ? (
+            <InstanceImageConfigForm config={formattedConfig} />
+          ) : (
+            <Loader className="space-y-8">
+              <Loader.Item height="50px" width="50%" />
+              <Loader.Item height="50px" width="20%" />
+            </Loader>
+          )}
+        </div>
+      </div>
+    </>
+  );
+});
+
+export default InstanceImagePage;
diff --git a/apps/admin/app/(all)/(dashboard)/layout.tsx b/apps/admin/app/(all)/(dashboard)/layout.tsx
new file mode 100644
index 00000000..76d74f46
--- /dev/null
+++ b/apps/admin/app/(all)/(dashboard)/layout.tsx
@@ -0,0 +1,57 @@
+"use client";
+
+import type { FC, ReactNode } from "react";
+import { useEffect } from "react";
+import { observer } from "mobx-react";
+import { useRouter } from "next/navigation";
+// components
+import { LogoSpinner } from "@/components/common/logo-spinner";
+import { NewUserPopup } from "@/components/new-user-popup";
+// hooks
+import { useUser } from "@/hooks/store";
+// local components
+import { AdminHeader } from "./header";
+import { AdminSidebar } from "./sidebar";
+
+type TAdminLayout = {
+ children: ReactNode;
+};
+
+const AdminLayout: FC = (props) => {
+ const { children } = props;
+ // router
+ const router = useRouter();
+ // store hooks
+ const { isUserLoggedIn } = useUser();
+
+ useEffect(() => {
+ if (isUserLoggedIn === false) {
+ router.push("/");
+ }
+ }, [router, isUserLoggedIn]);
+
+ if (isUserLoggedIn === undefined) {
+    return (
+      <div className="flex h-screen w-screen items-center justify-center">
+        <LogoSpinner />
+      </div>
+    );
+ }
+
+  if (isUserLoggedIn) {
+    return (
+      <div className="relative flex h-screen w-screen overflow-hidden">
+        <AdminSidebar />
+        <main className="relative flex h-full w-full flex-col overflow-hidden bg-custom-background-100">
+          <AdminHeader />
+          <div className="h-full w-full overflow-hidden">{children}</div>
+        </main>
+        <NewUserPopup />
+      </div>
+    );
+  }
+
+  return <></>;
+};
+
+export default observer(AdminLayout);
diff --git a/apps/admin/app/(all)/(dashboard)/sidebar-dropdown.tsx b/apps/admin/app/(all)/(dashboard)/sidebar-dropdown.tsx
new file mode 100644
index 00000000..656d0531
--- /dev/null
+++ b/apps/admin/app/(all)/(dashboard)/sidebar-dropdown.tsx
@@ -0,0 +1,147 @@
+"use client";
+
+import { Fragment, useEffect, useState } from "react";
+import { observer } from "mobx-react";
+import { useTheme as useNextTheme } from "next-themes";
+import { LogOut, UserCog2, Palette } from "lucide-react";
+import { Menu, Transition } from "@headlessui/react";
+// plane internal packages
+import { API_BASE_URL } from "@plane/constants";
+import { AuthService } from "@plane/services";
+import { Avatar } from "@plane/ui";
+import { getFileURL, cn } from "@plane/utils";
+// hooks
+import { useTheme, useUser } from "@/hooks/store";
+
+// service initialization
+const authService = new AuthService();
+
+export const AdminSidebarDropdown = observer(() => {
+ // store hooks
+ const { isSidebarCollapsed } = useTheme();
+ const { currentUser, signOut } = useUser();
+ // hooks
+ const { resolvedTheme, setTheme } = useNextTheme();
+ // state
+  const [csrfToken, setCsrfToken] = useState<string | undefined>(undefined);
+
+ const handleThemeSwitch = () => {
+ const newTheme = resolvedTheme === "dark" ? "light" : "dark";
+ setTheme(newTheme);
+ };
+
+ const handleSignOut = () => signOut();
+
+  const getSidebarMenuItems = () => (
+    <Menu.Items
+      className={cn(
+        "absolute left-0 z-20 mt-1.5 flex w-52 flex-col divide-y divide-custom-sidebar-border-100 rounded-md border border-custom-sidebar-border-200 bg-custom-sidebar-background-100 px-1 py-2 text-xs shadow-lg outline-none",
+        { "left-4": isSidebarCollapsed }
+      )}
+    >
+      <div className="flex flex-col gap-2.5 pb-2">
+        <span className="px-2 font-medium text-custom-sidebar-text-200">{currentUser?.email}</span>
+      </div>
+      <div className="py-2">
+        <Menu.Item as="button" type="button" className="flex w-full items-center gap-2 rounded px-2 py-1 hover:bg-custom-sidebar-background-80" onClick={handleThemeSwitch}>
+          <Palette className="h-4 w-4 stroke-[1.5]" />
+          Switch to {resolvedTheme === "dark" ? "light" : "dark"} mode
+        </Menu.Item>
+      </div>
+      <form method="post" action={`${API_BASE_URL}/auth/sign-out/`} onSubmit={handleSignOut} className="pt-2">
+        <input type="hidden" name="csrfmiddlewaretoken" value={csrfToken} />
+        <Menu.Item as="button" type="submit" className="flex w-full items-center gap-2 rounded px-2 py-1 hover:bg-custom-sidebar-background-80">
+          <LogOut className="h-4 w-4 stroke-[1.5]" />
+          Sign out
+        </Menu.Item>
+      </form>
+    </Menu.Items>
+  );
+
+ useEffect(() => {
+ if (csrfToken === undefined)
+ authService.requestCSRFToken().then((data) => data?.csrf_token && setCsrfToken(data.csrf_token));
+ }, [csrfToken]);
+
+  return (
+    <div className="flex max-h-[3.75rem] items-center gap-x-5 gap-y-2 border-b border-custom-sidebar-border-200 px-4 py-3.5">
+      <div className="h-full w-full flex-grow truncate">
+        <Menu as="div" className="relative flex flex-grow items-center gap-x-2 truncate">
+          <Menu.Button className="grid flex-shrink-0 place-items-center outline-none">
+            <div className="grid h-7 w-7 flex-shrink-0 place-items-center rounded bg-custom-sidebar-background-80">
+              <UserCog2 className="h-5 w-5 text-custom-text-200" />
+            </div>
+          </Menu.Button>
+          {isSidebarCollapsed && (
+            <Transition
+              as={Fragment}
+              enter="transition ease-out duration-100"
+              enterFrom="transform opacity-0 scale-95"
+              enterTo="transform opacity-100 scale-100"
+              leave="transition ease-in duration-75"
+              leaveFrom="transform opacity-100 scale-100"
+              leaveTo="transform opacity-0 scale-95"
+            >
+              {getSidebarMenuItems()}
+            </Transition>
+          )}
+          {!isSidebarCollapsed && (
+            <div className="flex w-full gap-2 truncate">
+              <h4 className="grow truncate text-base font-medium text-custom-text-200">Instance admin</h4>
+            </div>
+          )}
+        </Menu>
+      </div>
+      {!isSidebarCollapsed && currentUser && (
+        <Menu as="div" className="relative flex-shrink-0">
+          <Menu.Button className="grid place-items-center outline-none">
+            <Avatar
+              name={currentUser?.display_name}
+              src={getFileURL(currentUser?.avatar_url ?? "")}
+              size={24}
+              shape="square"
+              className="!text-base"
+            />
+          </Menu.Button>
+          <Transition
+            as={Fragment}
+            enter="transition ease-out duration-100"
+            enterFrom="transform opacity-0 scale-95"
+            enterTo="transform opacity-100 scale-100"
+            leave="transition ease-in duration-75"
+            leaveFrom="transform opacity-100 scale-100"
+            leaveTo="transform opacity-0 scale-95"
+          >
+            {getSidebarMenuItems()}
+          </Transition>
+        </Menu>
+      )}
+    </div>
+  );
+});
diff --git a/apps/admin/app/(all)/(dashboard)/sidebar-help-section.tsx b/apps/admin/app/(all)/(dashboard)/sidebar-help-section.tsx
new file mode 100644
index 00000000..cf479119
--- /dev/null
+++ b/apps/admin/app/(all)/(dashboard)/sidebar-help-section.tsx
@@ -0,0 +1,141 @@
+"use client";
+
+import type { FC } from "react";
+import { useState, useRef } from "react";
+import { observer } from "mobx-react";
+import Link from "next/link";
+import { ExternalLink, HelpCircle, MoveLeft } from "lucide-react";
+import { Transition } from "@headlessui/react";
+// plane internal packages
+import { WEB_BASE_URL } from "@plane/constants";
+import { DiscordIcon, GithubIcon, PageIcon } from "@plane/propel/icons";
+import { Tooltip } from "@plane/propel/tooltip";
+import { cn } from "@plane/utils";
+// hooks
+import { useTheme } from "@/hooks/store";
+// assets
+// eslint-disable-next-line import/order
+import packageJson from "package.json";
+
+const helpOptions = [
+ {
+ name: "Documentation",
+ href: "https://docs.plane.so/",
+ Icon: PageIcon,
+ },
+ {
+ name: "Join our Discord",
+ href: "https://discord.com/invite/A92xrEGCge",
+ Icon: DiscordIcon,
+ },
+ {
+ name: "Report a bug",
+ href: "https://github.com/makeplane/plane/issues/new/choose",
+ Icon: GithubIcon,
+ },
+];
+
+export const AdminSidebarHelpSection: FC = observer(() => {
+ // states
+ const [isNeedHelpOpen, setIsNeedHelpOpen] = useState(false);
+ // store
+ const { isSidebarCollapsed, toggleSidebar } = useTheme();
+ // refs
+  const helpOptionsRef = useRef<HTMLDivElement | null>(null);
+
+ const redirectionLink = encodeURI(WEB_BASE_URL + "/");
+
+  return (
+    <div
+      className={cn(
+        "flex h-14 w-full flex-shrink-0 items-center justify-between gap-1 self-baseline border-t border-custom-border-200 bg-custom-sidebar-background-100 px-4",
+        { "h-auto flex-col py-1.5": isSidebarCollapsed }
+      )}
+    >
+      <Tooltip tooltipContent="Back to the app" disabled={!isSidebarCollapsed}>
+        <Link
+          href={redirectionLink}
+          className={cn(
+            "relative flex items-center gap-2 rounded px-2 py-1.5 text-xs font-medium text-custom-text-200 hover:bg-custom-sidebar-background-80 hover:text-custom-text-100",
+            { "w-full justify-center": isSidebarCollapsed }
+          )}
+        >
+          <MoveLeft className="h-3.5 w-3.5" />
+          {!isSidebarCollapsed && <span>Back to the app</span>}
+        </Link>
+      </Tooltip>
+      <div className={cn("relative flex items-center gap-1", { "flex-col": isSidebarCollapsed })}>
+        <Tooltip tooltipContent="Help" disabled={!isSidebarCollapsed}>
+          <button
+            type="button"
+            className="grid h-7 w-7 place-items-center rounded text-custom-text-200 hover:bg-custom-sidebar-background-80 hover:text-custom-text-100"
+            onClick={() => setIsNeedHelpOpen((prev) => !prev)}
+          >
+            <HelpCircle className="h-4 w-4" />
+          </button>
+        </Tooltip>
+        <Transition
+          show={isNeedHelpOpen}
+          enter="transition ease-out duration-100"
+          enterFrom="transform opacity-0 scale-95"
+          enterTo="transform opacity-100 scale-100"
+          leave="transition ease-in duration-75"
+          leaveFrom="transform opacity-100 scale-100"
+          leaveTo="transform opacity-0 scale-95"
+        >
+          <div
+            ref={helpOptionsRef}
+            className="absolute bottom-9 right-0 z-10 w-52 divide-y divide-custom-border-200 rounded-md border border-custom-border-200 bg-custom-background-100 p-1 shadow-custom-shadow-xs"
+          >
+            <div className="space-y-1 pb-2">
+              {helpOptions.map(({ name, Icon, href }) => {
+                if (href)
+                  return (
+                    <Link href={href} key={name} target="_blank" className="flex items-center gap-x-2 rounded px-2 py-1 text-xs hover:bg-custom-background-80">
+                      <Icon className="h-3.5 w-3.5 text-custom-text-200" />
+                      <span className="text-xs">{name}</span>
+                      <ExternalLink className="ml-auto h-3 w-3 text-custom-text-400" />
+                    </Link>
+                  );
+                else
+                  return (
+                    <button key={name} type="button" className="flex w-full items-center gap-x-2 rounded px-2 py-1 text-xs hover:bg-custom-background-80">
+                      <Icon className="h-3.5 w-3.5 text-custom-text-200" />
+                      <span className="text-xs">{name}</span>
+                    </button>
+                  );
+              })}
+            </div>
+            <div className="px-2 pt-1 text-[10px] text-custom-text-300">Version: v{packageJson.version}</div>
+          </div>
+        </Transition>
+      </div>
+    </div>
+  );
+});
diff --git a/apps/admin/app/(all)/(dashboard)/sidebar-menu.tsx b/apps/admin/app/(all)/(dashboard)/sidebar-menu.tsx
new file mode 100644
index 00000000..b33ccecf
--- /dev/null
+++ b/apps/admin/app/(all)/(dashboard)/sidebar-menu.tsx
@@ -0,0 +1,111 @@
+"use client";
+
+import { observer } from "mobx-react";
+import Link from "next/link";
+import { usePathname } from "next/navigation";
+import { Image, BrainCog, Cog, Lock, Mail } from "lucide-react";
+// plane internal packages
+import { WorkspaceIcon } from "@plane/propel/icons";
+import { Tooltip } from "@plane/propel/tooltip";
+import { cn } from "@plane/utils";
+// hooks
+import { useTheme } from "@/hooks/store";
+
+const INSTANCE_ADMIN_LINKS = [
+ {
+ Icon: Cog,
+ name: "General",
+ description: "Identify your instances and get key details.",
+ href: `/general/`,
+ },
+ {
+ Icon: WorkspaceIcon,
+ name: "Workspaces",
+ description: "Manage all workspaces on this instance.",
+ href: `/workspace/`,
+ },
+ {
+ Icon: Mail,
+ name: "Email",
+ description: "Configure your SMTP controls.",
+ href: `/email/`,
+ },
+ {
+ Icon: Lock,
+ name: "Authentication",
+ description: "Configure authentication modes.",
+ href: `/authentication/`,
+ },
+ {
+ Icon: BrainCog,
+ name: "Artificial intelligence",
+ description: "Configure your OpenAI creds.",
+ href: `/ai/`,
+ },
+ {
+ Icon: Image,
+ name: "Images in Plane",
+ description: "Allow third-party image libraries.",
+ href: `/image/`,
+ },
+];
+
+export const AdminSidebarMenu = observer(() => {
+ // store hooks
+ const { isSidebarCollapsed, toggleSidebar } = useTheme();
+ // router
+ const pathName = usePathname();
+
+ const handleItemClick = () => {
+ if (window.innerWidth < 768) {
+ toggleSidebar(!isSidebarCollapsed);
+ }
+ };
+
+  return (
+    <div className="flex h-full w-full flex-col gap-2.5 overflow-y-auto px-4 py-6">
+      {INSTANCE_ADMIN_LINKS.map((item, index) => {
+        const isActive = item.href === pathName || pathName.includes(item.href);
+        return (
+          <Link key={index} href={item.href} onClick={handleItemClick}>
+            <Tooltip tooltipContent={item.name} position="right" className="ml-2" disabled={!isSidebarCollapsed}>
+              <div
+                className={cn(
+                  "group flex w-full items-center gap-3 rounded-md px-3 py-2 outline-none",
+                  isActive
+                    ? "bg-custom-primary-100/10 text-custom-primary-100"
+                    : "text-custom-sidebar-text-200 hover:bg-custom-sidebar-background-80",
+                  { "justify-center": isSidebarCollapsed }
+                )}
+              >
+                {<item.Icon className="h-4 w-4 flex-shrink-0" />}
+                {!isSidebarCollapsed && (
+                  <div className="w-full">
+                    <div className={cn("text-sm font-medium", { "text-custom-primary-100": isActive })}>
+                      {item.name}
+                    </div>
+                    <div className="text-[10px] text-custom-sidebar-text-400 group-hover:text-custom-sidebar-text-300">
+                      {item.description}
+                    </div>
+                  </div>
+                )}
+              </div>
+            </Tooltip>
+          </Link>
+        );
+      })}
+    </div>
+  );
+});
diff --git a/apps/admin/app/(all)/(dashboard)/sidebar.tsx b/apps/admin/app/(all)/(dashboard)/sidebar.tsx
new file mode 100644
index 00000000..e37d6eb5
--- /dev/null
+++ b/apps/admin/app/(all)/(dashboard)/sidebar.tsx
@@ -0,0 +1,59 @@
+"use client";
+
+import type { FC } from "react";
+import { useEffect, useRef } from "react";
+import { observer } from "mobx-react";
+// plane helpers
+import { useOutsideClickDetector } from "@plane/hooks";
+// hooks
+import { useTheme } from "@/hooks/store";
+// components
+import { AdminSidebarDropdown } from "./sidebar-dropdown";
+import { AdminSidebarHelpSection } from "./sidebar-help-section";
+import { AdminSidebarMenu } from "./sidebar-menu";
+
+export const AdminSidebar: FC = observer(() => {
+ // store
+ const { isSidebarCollapsed, toggleSidebar } = useTheme();
+
+ const ref = useRef(null);
+
+ useOutsideClickDetector(ref, () => {
+ if (isSidebarCollapsed === false) {
+ if (window.innerWidth < 768) {
+ toggleSidebar(!isSidebarCollapsed);
+ }
+ }
+ });
+
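+  // Collapse the sidebar automatically on small viewports, both on first render
+  // and whenever the window is resized below the md breakpoint (768px).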
+ useEffect(() => {
+ const handleResize = () => {
+ if (window.innerWidth <= 768) {
+ toggleSidebar(true);
+ }
+ };
+ handleResize();
+ window.addEventListener("resize", handleResize);
+ return () => {
+ window.removeEventListener("resize", handleResize);
+ };
+ }, [toggleSidebar]);
+
+  return (
+    <div
+      ref={ref}
+      className={`inset-y-0 z-20 flex h-full w-[260px] flex-shrink-0 flex-grow-0 flex-col border-r border-custom-sidebar-border-200 bg-custom-sidebar-background-100 duration-300 md:relative md:ml-0 ${
+        isSidebarCollapsed ? "absolute -ml-[260px]" : ""
+      }`}
+    >
+      <div className="flex h-full w-full flex-1 flex-col">
+        <AdminSidebarDropdown />
+        <AdminSidebarMenu />
+        <AdminSidebarHelpSection />
+      </div>
+    </div>
+  );
+});
diff --git a/apps/admin/app/(all)/(dashboard)/workspace/create/form.tsx b/apps/admin/app/(all)/(dashboard)/workspace/create/form.tsx
new file mode 100644
index 00000000..6ec3fe4a
--- /dev/null
+++ b/apps/admin/app/(all)/(dashboard)/workspace/create/form.tsx
@@ -0,0 +1,212 @@
+import { useState, useEffect } from "react";
+import Link from "next/link";
+import { useRouter } from "next/navigation";
+import { Controller, useForm } from "react-hook-form";
+// plane imports
+import { WEB_BASE_URL, ORGANIZATION_SIZE, RESTRICTED_URLS } from "@plane/constants";
+import { Button, getButtonStyling } from "@plane/propel/button";
+import { TOAST_TYPE, setToast } from "@plane/propel/toast";
+import { InstanceWorkspaceService } from "@plane/services";
+import type { IWorkspace } from "@plane/types";
+// components
+import { CustomSelect, Input } from "@plane/ui";
+// hooks
+import { useWorkspace } from "@/hooks/store";
+
+const instanceWorkspaceService = new InstanceWorkspaceService();
+
+export const WorkspaceCreateForm = () => {
+ // router
+ const router = useRouter();
+ // states
+ const [slugError, setSlugError] = useState(false);
+ const [invalidSlug, setInvalidSlug] = useState(false);
+  const [defaultValues, setDefaultValues] = useState<Partial<IWorkspace>>({
+ name: "",
+ slug: "",
+ organization_size: "",
+ });
+ // store hooks
+ const { createWorkspace } = useWorkspace();
+ // form info
+ const {
+ handleSubmit,
+ control,
+ setValue,
+ getValues,
+ formState: { errors, isSubmitting, isValid },
+  } = useForm<IWorkspace>({ defaultValues, mode: "onChange" });
+ // derived values
+ const workspaceBaseURL = encodeURI(WEB_BASE_URL || window.location.origin + "/");
+
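+  // The slug is validated twice: the server confirms it is unused via slugCheck,
+  // and RESTRICTED_URLS guards reserved route names on the client.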
+ const handleCreateWorkspace = async (formData: IWorkspace) => {
+ await instanceWorkspaceService
+ .slugCheck(formData.slug)
+ .then(async (res) => {
+ if (res.status === true && !RESTRICTED_URLS.includes(formData.slug)) {
+ setSlugError(false);
+ await createWorkspace(formData)
+ .then(async () => {
+ setToast({
+ type: TOAST_TYPE.SUCCESS,
+ title: "Success!",
+ message: "Workspace created successfully.",
+ });
+ router.push(`/workspace`);
+ })
+ .catch(() => {
+ setToast({
+ type: TOAST_TYPE.ERROR,
+ title: "Error!",
+ message: "Workspace could not be created. Please try again.",
+ });
+ });
+ } else setSlugError(true);
+ })
+ .catch(() => {
+ setToast({
+ type: TOAST_TYPE.ERROR,
+ title: "Error!",
+ message: "Some error occurred while creating workspace. Please try again.",
+ });
+ });
+ };
+
+ useEffect(
+ () => () => {
+ // when the component unmounts set the default values to whatever user typed in
+ setDefaultValues(getValues());
+ },
+ [getValues, setDefaultValues]
+ );
+
+  return (
+    <form onSubmit={handleSubmit(handleCreateWorkspace)} className="space-y-6">
+      <div className="grid grid-cols-1 gap-6">
+        <div className="space-y-1">
+          <h4 className="text-sm text-custom-text-300">Name your workspace</h4>
+          <Controller
+            control={control}
+            name="name"
+            rules={{
+              required: "This is a required field.",
+              validate: (value) =>
+                /^[\w\s-]*$/.test(value) ||
+                `Workspace names can contain only (" "), ( - ), ( _ ) and alphanumeric characters.`,
+              maxLength: {
+                value: 80,
+                message: "Limit your name to 80 characters.",
+              },
+            }}
+            render={({ field: { value, ref, onChange } }) => (
+              <Input
+                id="name"
+                name="name"
+                type="text"
+                value={value}
+                onChange={(e) => {
+                  onChange(e.target.value);
+                  setValue("name", e.target.value);
+                  setValue("slug", e.target.value.toLocaleLowerCase().trim().replace(/ /g, "-"), {
+                    shouldValidate: true,
+                  });
+                }}
+                ref={ref}
+                hasError={Boolean(errors.name)}
+                placeholder="Something familiar and recognizable is always best."
+                className="w-full"
+              />
+            )}
+          />
+          <span className="text-xs text-red-500">{errors?.name?.message}</span>
+        </div>
+        <div className="space-y-1">
+          <h4 className="text-sm text-custom-text-300">Set your workspace's URL</h4>
+          <div className="flex w-full items-center rounded-md border-[0.5px] border-custom-border-200 px-3">
+            <span className="whitespace-nowrap text-sm text-custom-text-200">{workspaceBaseURL}</span>
+            <Controller
+              control={control}
+              name="slug"
+              rules={{
+                required: "This is a required field.",
+              }}
+              render={({ field: { value, ref, onChange } }) => (
+                <Input
+                  id="slug"
+                  name="slug"
+                  type="text"
+                  value={value}
+                  onChange={(e) => {
+                    if (/^[a-zA-Z0-9_-]+$/.test(e.target.value)) setInvalidSlug(false);
+                    else setInvalidSlug(true);
+                    onChange(e.target.value.toLowerCase());
+                  }}
+                  ref={ref}
+                  hasError={Boolean(errors.slug)}
+                  placeholder="workspace-name"
+                  className="block w-full rounded-md border-none bg-transparent !px-0 py-2 text-sm"
+                />
+              )}
+            />
+          </div>
+          {slugError && <p className="text-sm text-red-500">This URL is taken. Try something else.</p>}
+          {invalidSlug && (
+            <p className="text-sm text-red-500">{`URLs can contain only ( - ), ( _ ) and alphanumeric characters.`}</p>
+          )}
+          {errors.slug && <span className="text-xs text-red-500">{errors.slug.message}</span>}
+        </div>
+        <div className="space-y-1">
+          <h4 className="text-sm text-custom-text-300">How many people will use this workspace?</h4>
+          <div className="w-full">
+            <Controller
+              name="organization_size"
+              control={control}
+              rules={{ required: "This is a required field." }}
+              render={({ field: { value, onChange } }) => (
+                <CustomSelect
+                  value={value}
+                  onChange={onChange}
+                  label={
+                    ORGANIZATION_SIZE.find((c) => c === value) ?? (
+                      <span className="text-custom-text-400">Select a range</span>
+                    )
+                  }
+                  buttonClassName="!border-[0.5px] !border-custom-border-200 !shadow-none"
+                  input
+                  optionsClassName="w-full"
+                >
+                  {ORGANIZATION_SIZE.map((item) => (
+                    <CustomSelect.Option key={item} value={item}>
+                      {item}
+                    </CustomSelect.Option>
+                  ))}
+                </CustomSelect>
+              )}
+            />
+            {errors.organization_size && (
+              <span className="text-sm text-red-500">{errors.organization_size.message}</span>
+            )}
+          </div>
+        </div>
+      </div>
+      <div className="flex items-center gap-4">
+        <Button variant="primary" type="submit" size="lg" disabled={!isValid} loading={isSubmitting}>
+          {isSubmitting ? "Creating workspace" : "Create workspace"}
+        </Button>
+        <Link href="/workspace" className={getButtonStyling("neutral-primary", "lg")}>
+          Go back
+        </Link>
+      </div>
+    </form>
+  );
+};
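The form above derives the slug from the name on every keystroke and validates the slug field separately. A minimal sketch of that handling as pure helpers (the helper names are illustrative, not part of this commit):

```ts
// Sketch of the slug handling in WorkspaceCreateForm; the same regex gates
// the slug field's inline error state.
const SLUG_PATTERN = /^[a-zA-Z0-9_-]+$/;

// Mirror of the name -> slug transform applied in the name field's onChange.
export const deriveWorkspaceSlug = (name: string): string =>
  name.toLocaleLowerCase().trim().replace(/ /g, "-");

export const isValidSlug = (slug: string): boolean => SLUG_PATTERN.test(slug);

// deriveWorkspaceSlug("My Team") === "my-team"; isValidSlug("my-team") === true
```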
diff --git a/apps/admin/app/(all)/(dashboard)/workspace/create/page.tsx b/apps/admin/app/(all)/(dashboard)/workspace/create/page.tsx
new file mode 100644
index 00000000..0186286a
--- /dev/null
+++ b/apps/admin/app/(all)/(dashboard)/workspace/create/page.tsx
@@ -0,0 +1,21 @@
+"use client";
+
+import { observer } from "mobx-react";
+// components
+import { WorkspaceCreateForm } from "./form";
+
+const WorkspaceCreatePage = observer(() => (
+
+
+
Create a new workspace on this instance.
+
+ You will need to invite users from Workspace Settings after you create this workspace.
+
+
+
+
+
+
+));
+
+export default WorkspaceCreatePage;
diff --git a/apps/admin/app/(all)/(dashboard)/workspace/layout.tsx b/apps/admin/app/(all)/(dashboard)/workspace/layout.tsx
new file mode 100644
index 00000000..4749e2f7
--- /dev/null
+++ b/apps/admin/app/(all)/(dashboard)/workspace/layout.tsx
@@ -0,0 +1,10 @@
+import type { ReactNode } from "react";
+import type { Metadata } from "next";
+
+export const metadata: Metadata = {
+ title: "Workspace Management - God Mode",
+};
+
+export default function WorkspaceManagementLayout({ children }: { children: ReactNode }) {
+ return <>{children}</>;
+}
diff --git a/apps/admin/app/(all)/(dashboard)/workspace/page.tsx b/apps/admin/app/(all)/(dashboard)/workspace/page.tsx
new file mode 100644
index 00000000..a03c443d
--- /dev/null
+++ b/apps/admin/app/(all)/(dashboard)/workspace/page.tsx
@@ -0,0 +1,170 @@
+"use client";
+
+import { useState } from "react";
+import { observer } from "mobx-react";
+import Link from "next/link";
+import useSWR from "swr";
+import { Loader as LoaderIcon } from "lucide-react";
+// types
+import { Button, getButtonStyling } from "@plane/propel/button";
+import { setPromiseToast } from "@plane/propel/toast";
+import type { TInstanceConfigurationKeys } from "@plane/types";
+import { Loader, ToggleSwitch } from "@plane/ui";
+
+import { cn } from "@plane/utils";
+// components
+import { WorkspaceListItem } from "@/components/workspace/list-item";
+// hooks
+import { useInstance, useWorkspace } from "@/hooks/store";
+
+const WorkspaceManagementPage = observer(() => {
+ // states
+ const [isSubmitting, setIsSubmitting] = useState(false);
+ // store
+ const { formattedConfig, fetchInstanceConfigurations, updateInstanceConfigurations } = useInstance();
+ const {
+ workspaceIds,
+ loader: workspaceLoader,
+ paginationInfo,
+ fetchWorkspaces,
+ fetchNextWorkspaces,
+ } = useWorkspace();
+ // derived values
+ const disableWorkspaceCreation = formattedConfig?.DISABLE_WORKSPACE_CREATION ?? "";
+ const hasNextPage = paginationInfo?.next_page_results && paginationInfo?.next_cursor !== undefined;
+
+ // fetch data
+ useSWR("INSTANCE_CONFIGURATIONS", () => fetchInstanceConfigurations());
+ useSWR("INSTANCE_WORKSPACES", () => fetchWorkspaces());
+
+ const updateConfig = async (key: TInstanceConfigurationKeys, value: string) => {
+ setIsSubmitting(true);
+
+ const payload = {
+ [key]: value,
+ };
+
+ const updateConfigPromise = updateInstanceConfigurations(payload);
+
+ setPromiseToast(updateConfigPromise, {
+ loading: "Saving configuration",
+ success: {
+ title: "Success",
+ message: () => "Configuration saved successfully",
+ },
+ error: {
+ title: "Error",
+ message: () => "Failed to save configuration",
+ },
+ });
+
+ await updateConfigPromise
+ .then(() => {
+ setIsSubmitting(false);
+ })
+ .catch((err) => {
+ console.error(err);
+ setIsSubmitting(false);
+ });
+ };
+
+ return (
+
+
+
+
Workspaces on this instance
+
+ See all workspaces and control who can create them.
+
+
+
+
+
+ {formattedConfig ? (
+
+
+
+
Prevent anyone else from creating a workspace.
+
+ Toggling this on will let only you create workspaces. You will have to invite users to new
+ workspaces.
+
+
+
+
+
+ {
+ if (Boolean(parseInt(disableWorkspaceCreation)) === true) {
+ updateConfig("DISABLE_WORKSPACE_CREATION", "0");
+ } else {
+ updateConfig("DISABLE_WORKSPACE_CREATION", "1");
+ }
+ }}
+ size="sm"
+ disabled={isSubmitting}
+ />
+
+
+
+ ) : (
+
+
+
+ )}
+ {workspaceLoader !== "init-loader" ? (
+ <>
+
+
+
+ All workspaces on this instance{" "}
+ • {workspaceIds.length}
+ {workspaceLoader && ["mutation", "pagination"].includes(workspaceLoader) && (
+
+ )}
+
+
+ You can't delete workspaces yet, and you can only open a workspace if you are an Admin or a
+ Member.
+
+
+
+
+ Create workspace
+
+
+
+
+ {workspaceIds.map((workspaceId) => (
+
+ ))}
+
+ {hasNextPage && (
+
+ fetchNextWorkspaces()}
+ disabled={workspaceLoader === "pagination"}
+ >
+ Load more
+ {workspaceLoader === "pagination" && }
+
+
+ )}
+ </>
+ ) : (
+
+
+
+
+
+
+ )}
+
+
+
+ );
+});
+
+export default WorkspaceManagementPage;
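Instance configuration flags travel as the strings "0"/"1", so the page parses before flipping. A small sketch of that convention (helper names are mine, not from the diff):

```ts
// "1" -> enabled; "0" or "" (unset) -> disabled. parseInt("") is NaN, which
// Boolean() coerces to false, so an unset flag reads as disabled.
const isFlagEnabled = (flag: string): boolean => Boolean(parseInt(flag, 10));

const toggledFlag = (flag: string): "0" | "1" => (isFlagEnabled(flag) ? "0" : "1");

// toggledFlag("1") === "0"; toggledFlag("") === "1" -- toggling an unset flag turns it on.
```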
diff --git a/apps/admin/app/(all)/(home)/auth-banner.tsx b/apps/admin/app/(all)/(home)/auth-banner.tsx
new file mode 100644
index 00000000..c0a9a0e9
--- /dev/null
+++ b/apps/admin/app/(all)/(home)/auth-banner.tsx
@@ -0,0 +1,29 @@
+import type { FC } from "react";
+import { Info, X } from "lucide-react";
+// plane constants
+import type { TAdminAuthErrorInfo } from "@plane/constants";
+
+type TAuthBanner = {
+ bannerData: TAdminAuthErrorInfo | undefined;
+ handleBannerData?: (bannerData: TAdminAuthErrorInfo | undefined) => void;
+};
+
+export const AuthBanner: FC<TAuthBanner> = (props) => {
+ const { bannerData, handleBannerData } = props;
+
+ if (!bannerData) return <></>;
+ return (
+
+
+
+
+
{bannerData?.message}
+
handleBannerData && handleBannerData(undefined)}
+ >
+
+
+
+ );
+};
diff --git a/apps/admin/app/(all)/(home)/auth-header.tsx b/apps/admin/app/(all)/(home)/auth-header.tsx
new file mode 100644
index 00000000..115c8538
--- /dev/null
+++ b/apps/admin/app/(all)/(home)/auth-header.tsx
@@ -0,0 +1,12 @@
+"use client";
+
+import Link from "next/link";
+import { PlaneLockup } from "@plane/propel/icons";
+
+export const AuthHeader = () => (
+
+);
diff --git a/apps/admin/app/(all)/(home)/auth-helpers.tsx b/apps/admin/app/(all)/(home)/auth-helpers.tsx
new file mode 100644
index 00000000..4da6d7ec
--- /dev/null
+++ b/apps/admin/app/(all)/(home)/auth-helpers.tsx
@@ -0,0 +1,163 @@
+import type { ReactNode } from "react";
+import Image from "next/image";
+import Link from "next/link";
+import { KeyRound, Mails } from "lucide-react";
+// plane packages
+import type { TAdminAuthErrorInfo } from "@plane/constants";
+import { SUPPORT_EMAIL, EAdminAuthErrorCodes } from "@plane/constants";
+import type { TGetBaseAuthenticationModeProps, TInstanceAuthenticationModes } from "@plane/types";
+import { resolveGeneralTheme } from "@plane/utils";
+// components
+import { EmailCodesConfiguration } from "@/components/authentication/email-config-switch";
+import { GithubConfiguration } from "@/components/authentication/github-config";
+import { GitlabConfiguration } from "@/components/authentication/gitlab-config";
+import { GoogleConfiguration } from "@/components/authentication/google-config";
+import { PasswordLoginConfiguration } from "@/components/authentication/password-config-switch";
+// images
+import githubLightModeImage from "@/public/logos/github-black.png";
+import githubDarkModeImage from "@/public/logos/github-white.png";
+import GitlabLogo from "@/public/logos/gitlab-logo.svg";
+import GoogleLogo from "@/public/logos/google-logo.svg";
+
+export enum EErrorAlertType {
+ BANNER_ALERT = "BANNER_ALERT",
+ INLINE_FIRST_NAME = "INLINE_FIRST_NAME",
+ INLINE_EMAIL = "INLINE_EMAIL",
+ INLINE_PASSWORD = "INLINE_PASSWORD",
+ INLINE_EMAIL_CODE = "INLINE_EMAIL_CODE",
+}
+
+const errorCodeMessages: {
+ [key in EAdminAuthErrorCodes]: { title: string; message: (email?: string | undefined) => ReactNode };
+} = {
+ // admin
+ [EAdminAuthErrorCodes.ADMIN_ALREADY_EXIST]: {
+ title: `Admin already exists`,
+ message: () => `Admin already exists. Please try again.`,
+ },
+ [EAdminAuthErrorCodes.REQUIRED_ADMIN_EMAIL_PASSWORD_FIRST_NAME]: {
+ title: `Email, password and first name required`,
+ message: () => `Email, password and first name required. Please try again.`,
+ },
+ [EAdminAuthErrorCodes.INVALID_ADMIN_EMAIL]: {
+ title: `Invalid admin email`,
+ message: () => `Invalid admin email. Please try again.`,
+ },
+ [EAdminAuthErrorCodes.INVALID_ADMIN_PASSWORD]: {
+ title: `Invalid admin password`,
+ message: () => `Invalid admin password. Please try again.`,
+ },
+ [EAdminAuthErrorCodes.REQUIRED_ADMIN_EMAIL_PASSWORD]: {
+ title: `Email and password required`,
+ message: () => `Email and password required. Please try again.`,
+ },
+ [EAdminAuthErrorCodes.ADMIN_AUTHENTICATION_FAILED]: {
+ title: `Authentication failed`,
+ message: () => `Authentication failed. Please try again.`,
+ },
+ [EAdminAuthErrorCodes.ADMIN_USER_ALREADY_EXIST]: {
+ title: `Admin user already exists`,
+ message: () => (
+
+ Admin user already exists.
+
+ Sign In
+
+ now.
+
+ ),
+ },
+ [EAdminAuthErrorCodes.ADMIN_USER_DOES_NOT_EXIST]: {
+ title: `Admin user does not exist`,
+ message: () => (
+
+ Admin user does not exist.
+
+ Sign In
+
+ now.
+
+ ),
+ },
+ [EAdminAuthErrorCodes.ADMIN_USER_DEACTIVATED]: {
+ title: `User account deactivated`,
+ message: () => `User account deactivated. Please contact ${!!SUPPORT_EMAIL ? SUPPORT_EMAIL : "administrator"}.`,
+ },
+};
+
+export const authErrorHandler = (
+ errorCode: EAdminAuthErrorCodes,
+ email?: string | undefined
+): TAdminAuthErrorInfo | undefined => {
+ const bannerAlertErrorCodes = [
+ EAdminAuthErrorCodes.ADMIN_ALREADY_EXIST,
+ EAdminAuthErrorCodes.REQUIRED_ADMIN_EMAIL_PASSWORD_FIRST_NAME,
+ EAdminAuthErrorCodes.INVALID_ADMIN_EMAIL,
+ EAdminAuthErrorCodes.INVALID_ADMIN_PASSWORD,
+ EAdminAuthErrorCodes.REQUIRED_ADMIN_EMAIL_PASSWORD,
+ EAdminAuthErrorCodes.ADMIN_AUTHENTICATION_FAILED,
+ EAdminAuthErrorCodes.ADMIN_USER_ALREADY_EXIST,
+ EAdminAuthErrorCodes.ADMIN_USER_DOES_NOT_EXIST,
+ EAdminAuthErrorCodes.ADMIN_USER_DEACTIVATED,
+ ];
+
+ if (bannerAlertErrorCodes.includes(errorCode))
+ return {
+ type: EErrorAlertType.BANNER_ALERT,
+ code: errorCode,
+ title: errorCodeMessages[errorCode]?.title || "Error",
+ message: errorCodeMessages[errorCode]?.message(email) || "Something went wrong. Please try again.",
+ };
+
+ return undefined;
+};
+
+export const getBaseAuthenticationModes: (props: TGetBaseAuthenticationModeProps) => TInstanceAuthenticationModes[] = ({
+ disabled,
+ updateConfig,
+ resolvedTheme,
+}) => [
+ {
+ key: "unique-codes",
+ name: "Unique codes",
+ description:
+ "Log in or sign up for Plane using codes sent via email. You need to have set up SMTP to use this method.",
+ icon: ,
+ config: ,
+ },
+ {
+ key: "passwords-login",
+ name: "Passwords",
+ description: "Allow members to create accounts with passwords and use it with their email addresses to sign in.",
+ icon: ,
+ config: ,
+ },
+ {
+ key: "google",
+ name: "Google",
+ description: "Allow members to log in or sign up for Plane with their Google accounts.",
+ icon: ,
+ config: ,
+ },
+ {
+ key: "github",
+ name: "GitHub",
+ description: "Allow members to log in or sign up for Plane with their GitHub accounts.",
+ icon: (
+
+ ),
+ config: ,
+ },
+ {
+ key: "gitlab",
+ name: "GitLab",
+ description: "Allow members to log in or sign up to plane with their GitLab accounts.",
+ icon: ,
+ config: ,
+ },
+];
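A usage sketch for the error helper above, assuming the `@plane/constants` exports shown in this file's imports; only codes in the banner list produce an error object:

```ts
import { EAdminAuthErrorCodes } from "@plane/constants";
import { EErrorAlertType, authErrorHandler } from "./auth-helpers";

const info = authErrorHandler(EAdminAuthErrorCodes.ADMIN_AUTHENTICATION_FAILED);
if (info?.type === EErrorAlertType.BANNER_ALERT) {
  // hand `info` to <AuthBanner bannerData={info} /> for display
}
// Unknown codes fall through: authErrorHandler returns undefined.
```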
diff --git a/apps/admin/app/(all)/(home)/layout.tsx b/apps/admin/app/(all)/(home)/layout.tsx
new file mode 100644
index 00000000..25638c67
--- /dev/null
+++ b/apps/admin/app/(all)/(home)/layout.tsx
@@ -0,0 +1,9 @@
+"use client";
+
+export default function RootLayout({ children }: { children: React.ReactNode }) {
+ return (
+
+ {children}
+
+ );
+}
diff --git a/apps/admin/app/(all)/(home)/page.tsx b/apps/admin/app/(all)/(home)/page.tsx
new file mode 100644
index 00000000..e6ebdf45
--- /dev/null
+++ b/apps/admin/app/(all)/(home)/page.tsx
@@ -0,0 +1,40 @@
+"use client";
+
+import { observer } from "mobx-react";
+// components
+import { LogoSpinner } from "@/components/common/logo-spinner";
+import { InstanceFailureView } from "@/components/instance/failure";
+import { InstanceSetupForm } from "@/components/instance/setup-form";
+// hooks
+import { useInstance } from "@/hooks/store";
+// components
+import { InstanceSignInForm } from "./sign-in-form";
+
+const HomePage = () => {
+ // store hooks
+ const { instance, error } = useInstance();
+
+ // if instance is not fetched, show loading
+ if (!instance && !error) {
+ return (
+
+
+
+ );
+ }
+
+ // if instance fetch fails, show failure view
+ if (error) {
+ return ;
+ }
+
+ // if instance is fetched and setup is not done, show setup form
+ if (instance && !instance?.is_setup_done) {
+ return ;
+ }
+
+ // if instance is fetched and setup is done, show sign in form
+ return ;
+};
+
+export default observer(HomePage);
diff --git a/apps/admin/app/(all)/(home)/sign-in-form.tsx b/apps/admin/app/(all)/(home)/sign-in-form.tsx
new file mode 100644
index 00000000..2049bda6
--- /dev/null
+++ b/apps/admin/app/(all)/(home)/sign-in-form.tsx
@@ -0,0 +1,196 @@
+"use client";
+
+import type { FC } from "react";
+import { useEffect, useMemo, useState } from "react";
+import { useSearchParams } from "next/navigation";
+import { Eye, EyeOff } from "lucide-react";
+// plane internal packages
+import type { EAdminAuthErrorCodes, TAdminAuthErrorInfo } from "@plane/constants";
+import { API_BASE_URL } from "@plane/constants";
+import { Button } from "@plane/propel/button";
+import { AuthService } from "@plane/services";
+import { Input, Spinner } from "@plane/ui";
+// components
+import { Banner } from "@/components/common/banner";
+// local components
+import { FormHeader } from "../../../core/components/instance/form-header";
+import { AuthBanner } from "./auth-banner";
+import { AuthHeader } from "./auth-header";
+import { authErrorHandler } from "./auth-helpers";
+
+// service initialization
+const authService = new AuthService();
+
+// error codes
+enum EErrorCodes {
+ INSTANCE_NOT_CONFIGURED = "INSTANCE_NOT_CONFIGURED",
+ REQUIRED_EMAIL_PASSWORD = "REQUIRED_EMAIL_PASSWORD",
+ INVALID_EMAIL = "INVALID_EMAIL",
+ USER_DOES_NOT_EXIST = "USER_DOES_NOT_EXIST",
+ AUTHENTICATION_FAILED = "AUTHENTICATION_FAILED",
+}
+
+type TError = {
+ type: EErrorCodes | undefined;
+ message: string | undefined;
+};
+
+// form data
+type TFormData = {
+ email: string;
+ password: string;
+};
+
+const defaultFormData: TFormData = {
+ email: "",
+ password: "",
+};
+
+export const InstanceSignInForm: FC = () => {
+ // search params
+ const searchParams = useSearchParams();
+ const emailParam = searchParams.get("email") || undefined;
+ const errorCode = searchParams.get("error_code") || undefined;
+ const errorMessage = searchParams.get("error_message") || undefined;
+ // state
+ const [showPassword, setShowPassword] = useState(false);
+ const [csrfToken, setCsrfToken] = useState<string | undefined>(undefined);
+ const [formData, setFormData] = useState<TFormData>(defaultFormData);
+ const [isSubmitting, setIsSubmitting] = useState(false);
+ const [errorInfo, setErrorInfo] = useState<TAdminAuthErrorInfo | undefined>(undefined);
+
+ const handleFormChange = (key: keyof TFormData, value: string | boolean) =>
+ setFormData((prev) => ({ ...prev, [key]: value }));
+
+ useEffect(() => {
+ if (csrfToken === undefined)
+ authService.requestCSRFToken().then((data) => data?.csrf_token && setCsrfToken(data.csrf_token));
+ }, [csrfToken]);
+
+ useEffect(() => {
+ if (emailParam) setFormData((prev) => ({ ...prev, email: emailParam }));
+ }, [emailParam]);
+
+ // derived values
+ const errorData: TError = useMemo(() => {
+ if (errorCode && errorMessage) {
+ switch (errorCode) {
+ case EErrorCodes.INSTANCE_NOT_CONFIGURED:
+ return { type: EErrorCodes.INSTANCE_NOT_CONFIGURED, message: errorMessage };
+ case EErrorCodes.REQUIRED_EMAIL_PASSWORD:
+ return { type: EErrorCodes.REQUIRED_EMAIL_PASSWORD, message: errorMessage };
+ case EErrorCodes.INVALID_EMAIL:
+ return { type: EErrorCodes.INVALID_EMAIL, message: errorMessage };
+ case EErrorCodes.USER_DOES_NOT_EXIST:
+ return { type: EErrorCodes.USER_DOES_NOT_EXIST, message: errorMessage };
+ case EErrorCodes.AUTHENTICATION_FAILED:
+ return { type: EErrorCodes.AUTHENTICATION_FAILED, message: errorMessage };
+ default:
+ return { type: undefined, message: undefined };
+ }
+ } else return { type: undefined, message: undefined };
+ }, [errorCode, errorMessage]);
+
+ const isButtonDisabled = useMemo(
+ () => (!isSubmitting && formData.email && formData.password ? false : true),
+ [formData.email, formData.password, isSubmitting]
+ );
+
+ useEffect(() => {
+ if (errorCode) {
+ const errorDetail = authErrorHandler(errorCode?.toString() as EAdminAuthErrorCodes);
+ if (errorDetail) {
+ setErrorInfo(errorDetail);
+ }
+ }
+ }, [errorCode]);
+
+ return (
+ <>
+
+
+ </>
+ );
+};
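The sign-in form requests a CSRF token exactly once and stops once state holds a value. That fetch-once pattern, lifted into a standalone hook for illustration (`useCsrfToken` is a hypothetical name, not part of this commit):

```ts
import { useEffect, useState } from "react";
import { AuthService } from "@plane/services";

const authService = new AuthService();

export const useCsrfToken = (): string | undefined => {
  const [csrfToken, setCsrfToken] = useState<string | undefined>(undefined);
  useEffect(() => {
    // request only while the token is still unknown; the state update ends the loop
    if (csrfToken === undefined)
      authService.requestCSRFToken().then((data) => data?.csrf_token && setCsrfToken(data.csrf_token));
  }, [csrfToken]);
  return csrfToken;
};
```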
diff --git a/apps/admin/app/(all)/instance.provider.tsx b/apps/admin/app/(all)/instance.provider.tsx
new file mode 100644
index 00000000..19e15ec5
--- /dev/null
+++ b/apps/admin/app/(all)/instance.provider.tsx
@@ -0,0 +1,23 @@
+import type { FC, ReactNode } from "react";
+import { observer } from "mobx-react";
+import useSWR from "swr";
+// hooks
+import { useInstance } from "@/hooks/store";
+
+type InstanceProviderProps = {
+ children: ReactNode;
+};
+
+export const InstanceProvider: FC<InstanceProviderProps> = observer((props) => {
+ const { children } = props;
+ // store hooks
+ const { fetchInstanceInfo } = useInstance();
+ // fetching instance details
+ useSWR("INSTANCE_DETAILS", () => fetchInstanceInfo(), {
+ revalidateOnFocus: false,
+ revalidateIfStale: false,
+ errorRetryCount: 0,
+ });
+
+ return <>{children}</>;
+});
diff --git a/apps/admin/app/(all)/layout.tsx b/apps/admin/app/(all)/layout.tsx
new file mode 100644
index 00000000..ddfba732
--- /dev/null
+++ b/apps/admin/app/(all)/layout.tsx
@@ -0,0 +1,33 @@
+"use client";
+
+import { ThemeProvider } from "next-themes";
+import { SWRConfig } from "swr";
+// providers
+import { InstanceProvider } from "./instance.provider";
+import { StoreProvider } from "./store.provider";
+import { ToastWithTheme } from "./toast";
+import { UserProvider } from "./user.provider";
+
+const DEFAULT_SWR_CONFIG = {
+ refreshWhenHidden: false,
+ revalidateIfStale: false,
+ revalidateOnFocus: false,
+ revalidateOnMount: true,
+ refreshInterval: 600000,
+ errorRetryCount: 3,
+};
+
+export default function InstanceLayout({ children }: { children: React.ReactNode }) {
+ return (
+
+
+
+
+
+ {children}
+
+
+
+
+ );
+}
diff --git a/apps/admin/app/(all)/store.provider.tsx b/apps/admin/app/(all)/store.provider.tsx
new file mode 100644
index 00000000..648a3711
--- /dev/null
+++ b/apps/admin/app/(all)/store.provider.tsx
@@ -0,0 +1,35 @@
+"use client";
+
+import type { ReactNode } from "react";
+import { createContext } from "react";
+// plane admin store
+import { RootStore } from "@/plane-admin/store/root.store";
+
+let rootStore = new RootStore();
+
+export const StoreContext = createContext(rootStore);
+
+function initializeStore(initialData = {}) {
+ const singletonRootStore = rootStore ?? new RootStore();
+ // If your page has Next.js data fetching methods that use a Mobx store, it will
+ // get hydrated here, check `pages/ssg.js` and `pages/ssr.js` for more details
+ if (initialData) {
+ singletonRootStore.hydrate(initialData);
+ }
+ // For SSG and SSR always create a new store
+ if (typeof window === "undefined") return singletonRootStore;
+ // Create the store once in the client
+ if (!rootStore) rootStore = singletonRootStore;
+ return singletonRootStore;
+}
+
+export type StoreProviderProps = {
+ children: ReactNode;
+ // eslint-disable-next-line @typescript-eslint/no-explicit-any
+ initialState?: any;
+};
+
+export const StoreProvider = ({ children, initialState = {} }: StoreProviderProps) => {
+ const store = initializeStore(initialState);
+ return {children} ;
+};
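On the server `initializeStore` hands back a fresh-enough store per render; in the browser the module-level singleton is reused so MobX state survives navigation. Components reach it through `StoreContext`; the hooks under `core/hooks/store` later in this diff are thin wrappers of this shape (`useRootStore` is illustrative):

```ts
import { useContext } from "react";
import { StoreContext } from "@/app/(all)/store.provider";

export const useRootStore = () => {
  const context = useContext(StoreContext);
  if (context === undefined) throw new Error("useRootStore must be used within StoreProvider");
  return context; // the whole RootStore; real hooks return one slice, e.g. context.instance
};
```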
diff --git a/apps/admin/app/(all)/toast.tsx b/apps/admin/app/(all)/toast.tsx
new file mode 100644
index 00000000..9cd1c46a
--- /dev/null
+++ b/apps/admin/app/(all)/toast.tsx
@@ -0,0 +1,10 @@
+"use client";
+
+import { useTheme } from "next-themes";
+import { Toast } from "@plane/propel/toast";
+import { resolveGeneralTheme } from "@plane/utils";
+
+export const ToastWithTheme = () => {
+ const { resolvedTheme } = useTheme();
+ return ;
+};
diff --git a/apps/admin/app/(all)/user.provider.tsx b/apps/admin/app/(all)/user.provider.tsx
new file mode 100644
index 00000000..e026c31d
--- /dev/null
+++ b/apps/admin/app/(all)/user.provider.tsx
@@ -0,0 +1,33 @@
+"use client";
+
+import type { FC, ReactNode } from "react";
+import { useEffect } from "react";
+import { observer } from "mobx-react";
+import useSWR from "swr";
+// hooks
+import { useInstance, useTheme, useUser } from "@/hooks/store";
+
+interface IUserProvider {
+ children: ReactNode;
+}
+
+export const UserProvider: FC<IUserProvider> = observer(({ children }) => {
+ // hooks
+ const { isSidebarCollapsed, toggleSidebar } = useTheme();
+ const { currentUser, fetchCurrentUser } = useUser();
+ const { fetchInstanceAdmins } = useInstance();
+
+ useSWR("CURRENT_USER", () => fetchCurrentUser(), {
+ shouldRetryOnError: false,
+ });
+
+ useSWR("INSTANCE_ADMINS", () => fetchInstanceAdmins());
+
+ useEffect(() => {
+ const localValue = localStorage && localStorage.getItem("god_mode_sidebar_collapsed");
+ const localBoolValue = localValue === "true";
+ if (isSidebarCollapsed === undefined && localBoolValue !== isSidebarCollapsed) toggleSidebar(localBoolValue);
+ }, [isSidebarCollapsed, currentUser, toggleSidebar]);
+
+ return <>{children}</>;
+});
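The sidebar flag round-trips through `localStorage` as the string "true"/"false". A condensed sketch of the read side (helper name is mine):

```ts
// SSR-safe read: there is no localStorage on the server, so default to expanded.
const readSidebarCollapsed = (): boolean => {
  if (typeof window === "undefined") return false;
  return localStorage.getItem("god_mode_sidebar_collapsed") === "true";
};
```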
diff --git a/apps/admin/app/error.tsx b/apps/admin/app/error.tsx
new file mode 100644
index 00000000..76794e04
--- /dev/null
+++ b/apps/admin/app/error.tsx
@@ -0,0 +1,9 @@
+"use client";
+
+export default function RootErrorPage() {
+ return (
+
+
Something went wrong.
+
+ );
+}
diff --git a/apps/admin/app/layout.tsx b/apps/admin/app/layout.tsx
new file mode 100644
index 00000000..b9cdd17c
--- /dev/null
+++ b/apps/admin/app/layout.tsx
@@ -0,0 +1,39 @@
+import type { ReactNode } from "react";
+import type { Metadata } from "next";
+// plane imports
+import { ADMIN_BASE_PATH } from "@plane/constants";
+// styles
+import "@/styles/globals.css";
+
+export const metadata: Metadata = {
+ title: "Plane | Simple, extensible, open-source project management tool.",
+ description:
+ "Open-source project management tool to manage work items, sprints, and product roadmaps with peace of mind.",
+ openGraph: {
+ title: "Plane | Simple, extensible, open-source project management tool.",
+ description:
+ "Open-source project management tool to manage work items, sprints, and product roadmaps with peace of mind.",
+ url: "https://plane.so/",
+ },
+ keywords:
+ "software development, customer feedback, software, accelerate, code management, release management, project management, work items tracking, agile, scrum, kanban, collaboration",
+ twitter: {
+ site: "@planepowers",
+ },
+};
+
+export default function RootLayout({ children }: { children: ReactNode }) {
+ const ASSET_PREFIX = ADMIN_BASE_PATH;
+ return (
+
+
+
+
+
+
+
+
+ {children}
+
+ );
+}
diff --git a/apps/admin/ce/components/authentication/authentication-modes.tsx b/apps/admin/ce/components/authentication/authentication-modes.tsx
new file mode 100644
index 00000000..386e0c05
--- /dev/null
+++ b/apps/admin/ce/components/authentication/authentication-modes.tsx
@@ -0,0 +1,121 @@
+import { observer } from "mobx-react";
+import Image from "next/image";
+import { useTheme } from "next-themes";
+import { KeyRound, Mails } from "lucide-react";
+// types
+import type {
+ TGetBaseAuthenticationModeProps,
+ TInstanceAuthenticationMethodKeys,
+ TInstanceAuthenticationModes,
+} from "@plane/types";
+import { resolveGeneralTheme } from "@plane/utils";
+// components
+import { AuthenticationMethodCard } from "@/components/authentication/authentication-method-card";
+import { EmailCodesConfiguration } from "@/components/authentication/email-config-switch";
+import { GithubConfiguration } from "@/components/authentication/github-config";
+import { GitlabConfiguration } from "@/components/authentication/gitlab-config";
+import { GoogleConfiguration } from "@/components/authentication/google-config";
+import { PasswordLoginConfiguration } from "@/components/authentication/password-config-switch";
+// plane admin components
+import { UpgradeButton } from "@/plane-admin/components/common";
+// assets
+import githubLightModeImage from "@/public/logos/github-black.png";
+import githubDarkModeImage from "@/public/logos/github-white.png";
+import GitlabLogo from "@/public/logos/gitlab-logo.svg";
+import GoogleLogo from "@/public/logos/google-logo.svg";
+import OIDCLogo from "@/public/logos/oidc-logo.svg";
+import SAMLLogo from "@/public/logos/saml-logo.svg";
+
+export type TAuthenticationModeProps = {
+ disabled: boolean;
+ updateConfig: (key: TInstanceAuthenticationMethodKeys, value: string) => void;
+};
+
+// Authentication methods
+export const getAuthenticationModes: (props: TGetBaseAuthenticationModeProps) => TInstanceAuthenticationModes[] = ({
+ disabled,
+ updateConfig,
+ resolvedTheme,
+}) => [
+ {
+ key: "unique-codes",
+ name: "Unique codes",
+ description:
+ "Log in or sign up for Plane using codes sent via email. You need to have set up SMTP to use this method.",
+ icon: ,
+ config: ,
+ },
+ {
+ key: "passwords-login",
+ name: "Passwords",
+ description: "Allow members to create accounts with passwords and use it with their email addresses to sign in.",
+ icon: ,
+ config: ,
+ },
+ {
+ key: "google",
+ name: "Google",
+ description: "Allow members to log in or sign up for Plane with their Google accounts.",
+ icon: ,
+ config: ,
+ },
+ {
+ key: "github",
+ name: "GitHub",
+ description: "Allow members to log in or sign up for Plane with their GitHub accounts.",
+ icon: (
+
+ ),
+ config: ,
+ },
+ {
+ key: "gitlab",
+ name: "GitLab",
+ description: "Allow members to log in or sign up to plane with their GitLab accounts.",
+ icon: ,
+ config: ,
+ },
+ {
+ key: "oidc",
+ name: "OIDC",
+ description: "Authenticate your users via the OpenID Connect protocol.",
+ icon: ,
+ config: ,
+ unavailable: true,
+ },
+ {
+ key: "saml",
+ name: "SAML",
+ description: "Authenticate your users via the Security Assertion Markup Language protocol.",
+ icon: ,
+ config: ,
+ unavailable: true,
+ },
+];
+
+export const AuthenticationModes: React.FC<TAuthenticationModeProps> = observer((props) => {
+ const { disabled, updateConfig } = props;
+ // next-themes
+ const { resolvedTheme } = useTheme();
+
+ return (
+ <>
+ {getAuthenticationModes({ disabled, updateConfig, resolvedTheme }).map((method) => (
+
+ ))}
+ </>
+ );
+});
diff --git a/apps/admin/ce/components/authentication/index.ts b/apps/admin/ce/components/authentication/index.ts
new file mode 100644
index 00000000..d2aa7485
--- /dev/null
+++ b/apps/admin/ce/components/authentication/index.ts
@@ -0,0 +1 @@
+export * from "./authentication-modes";
diff --git a/apps/admin/ce/components/common/index.ts b/apps/admin/ce/components/common/index.ts
new file mode 100644
index 00000000..c6a1da8b
--- /dev/null
+++ b/apps/admin/ce/components/common/index.ts
@@ -0,0 +1 @@
+export * from "./upgrade-button";
diff --git a/apps/admin/ce/components/common/upgrade-button.tsx b/apps/admin/ce/components/common/upgrade-button.tsx
new file mode 100644
index 00000000..14a955f2
--- /dev/null
+++ b/apps/admin/ce/components/common/upgrade-button.tsx
@@ -0,0 +1,15 @@
+"use client";
+
+import React from "react";
+// icons
+import { SquareArrowOutUpRight } from "lucide-react";
+// plane internal packages
+import { getButtonStyling } from "@plane/propel/button";
+import { cn } from "@plane/utils";
+
+export const UpgradeButton: React.FC = () => (
+
+ Upgrade
+
+
+);
diff --git a/apps/admin/ce/store/root.store.ts b/apps/admin/ce/store/root.store.ts
new file mode 100644
index 00000000..1be816f7
--- /dev/null
+++ b/apps/admin/ce/store/root.store.ts
@@ -0,0 +1,19 @@
+import { enableStaticRendering } from "mobx-react";
+// stores
+import { CoreRootStore } from "@/store/root.store";
+
+enableStaticRendering(typeof window === "undefined");
+
+export class RootStore extends CoreRootStore {
+ constructor() {
+ super();
+ }
+
+ hydrate(initialData: any) {
+ super.hydrate(initialData);
+ }
+
+ resetOnSignOut() {
+ super.resetOnSignOut();
+ }
+}
diff --git a/apps/admin/core/components/authentication/authentication-method-card.tsx b/apps/admin/core/components/authentication/authentication-method-card.tsx
new file mode 100644
index 00000000..df8e6dba
--- /dev/null
+++ b/apps/admin/core/components/authentication/authentication-method-card.tsx
@@ -0,0 +1,56 @@
+"use client";
+
+import type { FC } from "react";
+// helpers
+import { cn } from "@plane/utils";
+
+type Props = {
+ name: string;
+ description: string;
+ icon: React.ReactNode;
+ config: React.ReactNode;
+ disabled?: boolean;
+ withBorder?: boolean;
+ unavailable?: boolean;
+};
+
+export const AuthenticationMethodCard: FC = (props) => {
+ const { name, description, icon, config, disabled = false, withBorder = true, unavailable = false } = props;
+
+ return (
+
+
+
+
+
+ {name}
+
+
+ {description}
+
+
+
+
{config}
+
+ );
+};
diff --git a/apps/admin/core/components/authentication/email-config-switch.tsx b/apps/admin/core/components/authentication/email-config-switch.tsx
new file mode 100644
index 00000000..3a2a5f54
--- /dev/null
+++ b/apps/admin/core/components/authentication/email-config-switch.tsx
@@ -0,0 +1,35 @@
+"use client";
+
+import React from "react";
+import { observer } from "mobx-react";
+// hooks
+import type { TInstanceAuthenticationMethodKeys } from "@plane/types";
+import { ToggleSwitch } from "@plane/ui";
+import { useInstance } from "@/hooks/store";
+// ui
+// types
+
+type Props = {
+ disabled: boolean;
+ updateConfig: (key: TInstanceAuthenticationMethodKeys, value: string) => void;
+};
+
+export const EmailCodesConfiguration: React.FC<Props> = observer((props) => {
+ const { disabled, updateConfig } = props;
+ // store
+ const { formattedConfig } = useInstance();
+ // derived values
+ const enableMagicLogin = formattedConfig?.ENABLE_MAGIC_LINK_LOGIN ?? "";
+
+ return (
+ <ToggleSwitch
+ value={Boolean(parseInt(enableMagicLogin))}
+ onChange={() => {
+ const newEnableMagicLogin = Boolean(parseInt(enableMagicLogin)) === true ? "0" : "1";
+ updateConfig("ENABLE_MAGIC_LINK_LOGIN", newEnableMagicLogin);
+ }}
+ size="sm"
+ disabled={disabled}
+ />
+ );
+});
diff --git a/apps/admin/core/components/authentication/github-config.tsx b/apps/admin/core/components/authentication/github-config.tsx
new file mode 100644
index 00000000..33219145
--- /dev/null
+++ b/apps/admin/core/components/authentication/github-config.tsx
@@ -0,0 +1,57 @@
+"use client";
+
+import React from "react";
+import { observer } from "mobx-react";
+import Link from "next/link";
+// icons
+import { Settings2 } from "lucide-react";
+// plane internal packages
+import { getButtonStyling } from "@plane/propel/button";
+import type { TInstanceAuthenticationMethodKeys } from "@plane/types";
+import { ToggleSwitch } from "@plane/ui";
+import { cn } from "@plane/utils";
+// hooks
+import { useInstance } from "@/hooks/store";
+
+type Props = {
+ disabled: boolean;
+ updateConfig: (key: TInstanceAuthenticationMethodKeys, value: string) => void;
+};
+
+export const GithubConfiguration: React.FC<Props> = observer((props) => {
+ const { disabled, updateConfig } = props;
+ // store
+ const { formattedConfig } = useInstance();
+ // derived values
+ const enableGithubConfig = formattedConfig?.IS_GITHUB_ENABLED ?? "";
+ const isGithubConfigured = !!formattedConfig?.GITHUB_CLIENT_ID && !!formattedConfig?.GITHUB_CLIENT_SECRET;
+
+ return (
+ <>
+ {isGithubConfigured ? (
+
+
+ Edit
+
+ <ToggleSwitch
+ value={Boolean(parseInt(enableGithubConfig))}
+ onChange={() => {
+ const newEnableGithubConfig = Boolean(parseInt(enableGithubConfig)) === true ? "0" : "1";
+ updateConfig("IS_GITHUB_ENABLED", newEnableGithubConfig);
+ }}
+ size="sm"
+ disabled={disabled}
+ />
+
+ ) : (
+
+
+ Configure
+
+ )}
+ </>
+ );
+});
diff --git a/apps/admin/core/components/authentication/gitlab-config.tsx b/apps/admin/core/components/authentication/gitlab-config.tsx
new file mode 100644
index 00000000..6f0294c3
--- /dev/null
+++ b/apps/admin/core/components/authentication/gitlab-config.tsx
@@ -0,0 +1,57 @@
+"use client";
+
+import React from "react";
+import { observer } from "mobx-react";
+import Link from "next/link";
+// icons
+import { Settings2 } from "lucide-react";
+// plane internal packages
+import { getButtonStyling } from "@plane/propel/button";
+import type { TInstanceAuthenticationMethodKeys } from "@plane/types";
+import { ToggleSwitch } from "@plane/ui";
+import { cn } from "@plane/utils";
+// hooks
+import { useInstance } from "@/hooks/store";
+
+type Props = {
+ disabled: boolean;
+ updateConfig: (key: TInstanceAuthenticationMethodKeys, value: string) => void;
+};
+
+export const GitlabConfiguration: React.FC<Props> = observer((props) => {
+ const { disabled, updateConfig } = props;
+ // store
+ const { formattedConfig } = useInstance();
+ // derived values
+ const enableGitlabConfig = formattedConfig?.IS_GITLAB_ENABLED ?? "";
+ const isGitlabConfigured = !!formattedConfig?.GITLAB_CLIENT_ID && !!formattedConfig?.GITLAB_CLIENT_SECRET;
+
+ return (
+ <>
+ {isGitlabConfigured ? (
+
+
+ Edit
+
+ <ToggleSwitch
+ value={Boolean(parseInt(enableGitlabConfig))}
+ onChange={() => {
+ const newEnableGitlabConfig = Boolean(parseInt(enableGitlabConfig)) === true ? "0" : "1";
+ updateConfig("IS_GITLAB_ENABLED", newEnableGitlabConfig);
+ }}
+ size="sm"
+ disabled={disabled}
+ />
+
+ ) : (
+
+
+ Configure
+
+ )}
+ </>
+ );
+});
diff --git a/apps/admin/core/components/authentication/google-config.tsx b/apps/admin/core/components/authentication/google-config.tsx
new file mode 100644
index 00000000..ae0cecf3
--- /dev/null
+++ b/apps/admin/core/components/authentication/google-config.tsx
@@ -0,0 +1,57 @@
+"use client";
+
+import React from "react";
+import { observer } from "mobx-react";
+import Link from "next/link";
+// icons
+import { Settings2 } from "lucide-react";
+// plane internal packages
+import { getButtonStyling } from "@plane/propel/button";
+import type { TInstanceAuthenticationMethodKeys } from "@plane/types";
+import { ToggleSwitch } from "@plane/ui";
+import { cn } from "@plane/utils";
+// hooks
+import { useInstance } from "@/hooks/store";
+
+type Props = {
+ disabled: boolean;
+ updateConfig: (key: TInstanceAuthenticationMethodKeys, value: string) => void;
+};
+
+export const GoogleConfiguration: React.FC<Props> = observer((props) => {
+ const { disabled, updateConfig } = props;
+ // store
+ const { formattedConfig } = useInstance();
+ // derived values
+ const enableGoogleConfig = formattedConfig?.IS_GOOGLE_ENABLED ?? "";
+ const isGoogleConfigured = !!formattedConfig?.GOOGLE_CLIENT_ID && !!formattedConfig?.GOOGLE_CLIENT_SECRET;
+
+ return (
+ <>
+ {isGoogleConfigured ? (
+
+
+ Edit
+
+ <ToggleSwitch
+ value={Boolean(parseInt(enableGoogleConfig))}
+ onChange={() => {
+ const newEnableGoogleConfig = Boolean(parseInt(enableGoogleConfig)) === true ? "0" : "1";
+ updateConfig("IS_GOOGLE_ENABLED", newEnableGoogleConfig);
+ }}
+ size="sm"
+ disabled={disabled}
+ />
+
+ ) : (
+
+
+ Configure
+
+ )}
+ </>
+ );
+});
diff --git a/apps/admin/core/components/authentication/password-config-switch.tsx b/apps/admin/core/components/authentication/password-config-switch.tsx
new file mode 100644
index 00000000..1126ff4f
--- /dev/null
+++ b/apps/admin/core/components/authentication/password-config-switch.tsx
@@ -0,0 +1,35 @@
+"use client";
+
+import React from "react";
+import { observer } from "mobx-react";
+// hooks
+import type { TInstanceAuthenticationMethodKeys } from "@plane/types";
+import { ToggleSwitch } from "@plane/ui";
+import { useInstance } from "@/hooks/store";
+// ui
+// types
+
+type Props = {
+ disabled: boolean;
+ updateConfig: (key: TInstanceAuthenticationMethodKeys, value: string) => void;
+};
+
+export const PasswordLoginConfiguration: React.FC<Props> = observer((props) => {
+ const { disabled, updateConfig } = props;
+ // store
+ const { formattedConfig } = useInstance();
+ // derived values
+ const enableEmailPassword = formattedConfig?.ENABLE_EMAIL_PASSWORD ?? "";
+
+ return (
+ <ToggleSwitch
+ value={Boolean(parseInt(enableEmailPassword))}
+ onChange={() => {
+ const newEnableEmailPassword = Boolean(parseInt(enableEmailPassword)) === true ? "0" : "1";
+ updateConfig("ENABLE_EMAIL_PASSWORD", newEnableEmailPassword);
+ }}
+ size="sm"
+ disabled={disabled}
+ />
+ );
+});
diff --git a/apps/admin/core/components/common/banner.tsx b/apps/admin/core/components/common/banner.tsx
new file mode 100644
index 00000000..32bc5bc7
--- /dev/null
+++ b/apps/admin/core/components/common/banner.tsx
@@ -0,0 +1,32 @@
+import type { FC } from "react";
+import { AlertCircle, CheckCircle2 } from "lucide-react";
+
+type TBanner = {
+ type: "success" | "error";
+ message: string;
+};
+
+export const Banner: FC<TBanner> = (props) => {
+ const { type, message } = props;
+
+ return (
+
+
+
+ {type === "error" ? (
+
+
+
+ ) : (
+
+ )}
+
+
+
+
+ );
+};
diff --git a/apps/admin/core/components/common/breadcrumb-link.tsx b/apps/admin/core/components/common/breadcrumb-link.tsx
new file mode 100644
index 00000000..567b88d9
--- /dev/null
+++ b/apps/admin/core/components/common/breadcrumb-link.tsx
@@ -0,0 +1,38 @@
+"use client";
+
+import Link from "next/link";
+import { Tooltip } from "@plane/propel/tooltip";
+
+type Props = {
+ label?: string;
+ href?: string;
+ icon?: React.ReactNode | undefined;
+};
+
+export const BreadcrumbLink: React.FC<Props> = (props) => {
+ const { href, label, icon } = props;
+ return (
+
+
+
+ {href ? (
+
+ {icon && (
+
{icon}
+ )}
+
{label}
+
+ ) : (
+
+ {icon &&
{icon}
}
+
{label}
+
+ )}
+
+
+
+ );
+};
diff --git a/apps/admin/core/components/common/code-block.tsx b/apps/admin/core/components/common/code-block.tsx
new file mode 100644
index 00000000..88ad78a1
--- /dev/null
+++ b/apps/admin/core/components/common/code-block.tsx
@@ -0,0 +1,21 @@
+import { cn } from "@plane/utils";
+
+type TProps = {
+ children: React.ReactNode;
+ className?: string;
+ darkerShade?: boolean;
+};
+
+export const CodeBlock = ({ children, className, darkerShade }: TProps) => (
+
+ {children}
+
+);
diff --git a/apps/admin/core/components/common/confirm-discard-modal.tsx b/apps/admin/core/components/common/confirm-discard-modal.tsx
new file mode 100644
index 00000000..d1931f06
--- /dev/null
+++ b/apps/admin/core/components/common/confirm-discard-modal.tsx
@@ -0,0 +1,74 @@
+"use client";
+
+import React from "react";
+import Link from "next/link";
+// headless ui
+import { Dialog, Transition } from "@headlessui/react";
+// ui
+import { Button, getButtonStyling } from "@plane/propel/button";
+
+type Props = {
+ isOpen: boolean;
+ handleClose: () => void;
+ onDiscardHref: string;
+};
+
+export const ConfirmDiscardModal: React.FC<Props> = (props) => {
+ const { isOpen, handleClose, onDiscardHref } = props;
+
+ return (
+
+
+
+
+
+
+
+
+
+
+
+
+
+ You have unsaved changes
+
+
+
+ Changes you made will be lost if you go back. Do you wish to go back?
+
+
+
+
+
+
+
+ Keep editing
+
+
+ Go back
+
+
+
+
+
+
+
+
+ );
+};
diff --git a/apps/admin/core/components/common/controller-input.tsx b/apps/admin/core/components/common/controller-input.tsx
new file mode 100644
index 00000000..4b16ffd0
--- /dev/null
+++ b/apps/admin/core/components/common/controller-input.tsx
@@ -0,0 +1,84 @@
+"use client";
+
+import React, { useState } from "react";
+import type { Control } from "react-hook-form";
+import { Controller } from "react-hook-form";
+// icons
+import { Eye, EyeOff } from "lucide-react";
+// plane internal packages
+import { Input } from "@plane/ui";
+import { cn } from "@plane/utils";
+
+type Props = {
+ control: Control;
+ type: "text" | "password";
+ name: string;
+ label: string;
+ description?: string | React.ReactNode;
+ placeholder: string;
+ error: boolean;
+ required: boolean;
+};
+
+export type TControllerInputFormField = {
+ key: string;
+ type: "text" | "password";
+ label: string;
+ description?: string | React.ReactNode;
+ placeholder: string;
+ error: boolean;
+ required: boolean;
+};
+
+export const ControllerInput: React.FC<Props> = (props) => {
+ const { name, control, type, label, description, placeholder, error, required } = props;
+ // states
+ const [showPassword, setShowPassword] = useState(false);
+
+ return (
+
+
{label}
+
+ (
+
+ )}
+ />
+ {type === "password" &&
+ (showPassword ? (
+ setShowPassword(false)}
+ >
+
+
+ ) : (
+ setShowPassword(true)}
+ >
+
+
+ ))}
+
+ {description &&
{description}
}
+
+ );
+};
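A wiring sketch for `ControllerInput` against a `react-hook-form` instance; the form shape, field key, and import path are illustrative, not from this commit:

```tsx
import { useForm } from "react-hook-form";
import { ControllerInput } from "@/components/common/controller-input";

type TGithubForm = { GITHUB_CLIENT_SECRET: string };

export const GithubSecretField = () => {
  const {
    control,
    formState: { errors },
  } = useForm<TGithubForm>();
  return (
    <ControllerInput
      control={control}
      type="password" // renders the Eye/EyeOff visibility toggle
      name="GITHUB_CLIENT_SECRET"
      label="Client secret"
      placeholder="Enter the client secret"
      error={Boolean(errors.GITHUB_CLIENT_SECRET)}
      required
    />
  );
};
```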
diff --git a/apps/admin/core/components/common/copy-field.tsx b/apps/admin/core/components/common/copy-field.tsx
new file mode 100644
index 00000000..4f4f7175
--- /dev/null
+++ b/apps/admin/core/components/common/copy-field.tsx
@@ -0,0 +1,46 @@
+"use client";
+
+import React from "react";
+// ui
+import { Copy } from "lucide-react";
+import { Button } from "@plane/propel/button";
+import { TOAST_TYPE, setToast } from "@plane/propel/toast";
+
+type Props = {
+ label: string;
+ url: string;
+ description: string | React.ReactNode;
+};
+
+export type TCopyField = {
+ key: string;
+ label: string;
+ url: string;
+ description: string | React.ReactNode;
+};
+
+export const CopyField: React.FC<Props> = (props) => {
+ const { label, url, description } = props;
+
+ return (
+
+
{label}
+
{
+ navigator.clipboard.writeText(url);
+ setToast({
+ type: TOAST_TYPE.INFO,
+ title: "Copied to clipboard",
+ message: `The ${label} has been successfully copied to your clipboard`,
+ });
+ }}
+ >
+ {url}
+
+
+
{description}
+
+ );
+};
diff --git a/apps/admin/core/components/common/empty-state.tsx b/apps/admin/core/components/common/empty-state.tsx
new file mode 100644
index 00000000..4bf291f5
--- /dev/null
+++ b/apps/admin/core/components/common/empty-state.tsx
@@ -0,0 +1,48 @@
+"use client";
+
+import React from "react";
+import Image from "next/image";
+import { Button } from "@plane/propel/button";
+
+type Props = {
+ title: string;
+ description?: React.ReactNode;
+ image?: any;
+ primaryButton?: {
+ icon?: any;
+ text: string;
+ onClick: () => void;
+ };
+ secondaryButton?: React.ReactNode;
+ disabled?: boolean;
+};
+
+export const EmptyState: React.FC<Props> = ({
+ title,
+ description,
+ image,
+ primaryButton,
+ secondaryButton,
+ disabled = false,
+}) => (
+
+
+ {image &&
}
+
{title}
+ {description &&
{description}
}
+
+ {primaryButton && (
+
+ {primaryButton.text}
+
+ )}
+ {secondaryButton}
+
+
+
+);
diff --git a/apps/admin/core/components/common/logo-spinner.tsx b/apps/admin/core/components/common/logo-spinner.tsx
new file mode 100644
index 00000000..fda44fca
--- /dev/null
+++ b/apps/admin/core/components/common/logo-spinner.tsx
@@ -0,0 +1,17 @@
+import Image from "next/image";
+import { useTheme } from "next-themes";
+// assets
+import LogoSpinnerDark from "@/public/images/logo-spinner-dark.gif";
+import LogoSpinnerLight from "@/public/images/logo-spinner-light.gif";
+
+export const LogoSpinner = () => {
+ const { resolvedTheme } = useTheme();
+
+ const logoSrc = resolvedTheme === "dark" ? LogoSpinnerLight : LogoSpinnerDark;
+
+ return (
+
+
+
+ );
+};
diff --git a/apps/admin/core/components/common/page-header.tsx b/apps/admin/core/components/common/page-header.tsx
new file mode 100644
index 00000000..a4b27b92
--- /dev/null
+++ b/apps/admin/core/components/common/page-header.tsx
@@ -0,0 +1,17 @@
+"use client";
+
+type TPageHeader = {
+ title?: string;
+ description?: string;
+};
+
+export const PageHeader: React.FC<TPageHeader> = (props) => {
+ const { title = "God Mode - Plane", description = "Plane god mode" } = props;
+
+ return (
+ <>
+ <title>{title}</title>
+ <meta name="description" content={description} />
+ </>
+ );
+};
diff --git a/apps/admin/core/components/instance/failure.tsx b/apps/admin/core/components/instance/failure.tsx
new file mode 100644
index 00000000..97ace834
--- /dev/null
+++ b/apps/admin/core/components/instance/failure.tsx
@@ -0,0 +1,42 @@
+"use client";
+import type { FC } from "react";
+import { observer } from "mobx-react";
+import Image from "next/image";
+import { useTheme } from "next-themes";
+import { Button } from "@plane/propel/button";
+// assets
+import { AuthHeader } from "@/app/(all)/(home)/auth-header";
+import InstanceFailureDarkImage from "@/public/instance/instance-failure-dark.svg";
+import InstanceFailureImage from "@/public/instance/instance-failure.svg";
+
+export const InstanceFailureView: FC = observer(() => {
+ const { resolvedTheme } = useTheme();
+
+ const instanceImage = resolvedTheme === "dark" ? InstanceFailureDarkImage : InstanceFailureImage;
+
+ const handleRetry = () => {
+ window.location.reload();
+ };
+
+ return (
+ <>
+
+
+
+
+
+
Unable to fetch instance details.
+
+ We were unable to fetch the details of the instance. Fret not, it might just be a connectivity issue.
+
+
+
+
+ Retry
+
+
+
+
+ </>
+ );
+});
diff --git a/apps/admin/core/components/instance/form-header.tsx b/apps/admin/core/components/instance/form-header.tsx
new file mode 100644
index 00000000..d915ad29
--- /dev/null
+++ b/apps/admin/core/components/instance/form-header.tsx
@@ -0,0 +1,8 @@
+"use client";
+
+export const FormHeader = ({ heading, subHeading }: { heading: string; subHeading: string }) => (
+
+ {heading}
+ {subHeading}
+
+);
diff --git a/apps/admin/core/components/instance/instance-not-ready.tsx b/apps/admin/core/components/instance/instance-not-ready.tsx
new file mode 100644
index 00000000..b01d938b
--- /dev/null
+++ b/apps/admin/core/components/instance/instance-not-ready.tsx
@@ -0,0 +1,30 @@
+"use client";
+
+import type { FC } from "react";
+import Image from "next/image";
+import Link from "next/link";
+import { Button } from "@plane/propel/button";
+// assets
+import PlaneTakeOffImage from "@/public/images/plane-takeoff.png";
+
+export const InstanceNotReady: FC = () => (
+
+
+
+
Welcome aboard Plane!
+
+
+ Get started by setting up your instance and workspace
+
+
+
+
+
+
+ Get started
+
+
+
+
+
+);
diff --git a/apps/admin/core/components/instance/loading.tsx b/apps/admin/core/components/instance/loading.tsx
new file mode 100644
index 00000000..27dc4ae6
--- /dev/null
+++ b/apps/admin/core/components/instance/loading.tsx
@@ -0,0 +1,17 @@
+import Image from "next/image";
+import { useTheme } from "next-themes";
+// assets
+import LogoSpinnerDark from "@/public/images/logo-spinner-dark.gif";
+import LogoSpinnerLight from "@/public/images/logo-spinner-light.gif";
+
+export const InstanceLoading = () => {
+ const { resolvedTheme } = useTheme();
+
+ const logoSrc = resolvedTheme === "dark" ? LogoSpinnerLight : LogoSpinnerDark;
+
+ return (
+
+
+
+ );
+};
diff --git a/apps/admin/core/components/instance/setup-form.tsx b/apps/admin/core/components/instance/setup-form.tsx
new file mode 100644
index 00000000..a4d59b68
--- /dev/null
+++ b/apps/admin/core/components/instance/setup-form.tsx
@@ -0,0 +1,355 @@
+"use client";
+
+import type { FC } from "react";
+import { useEffect, useMemo, useState } from "react";
+import { useSearchParams } from "next/navigation";
+// icons
+import { Eye, EyeOff } from "lucide-react";
+// plane internal packages
+import { API_BASE_URL, E_PASSWORD_STRENGTH } from "@plane/constants";
+import { Button } from "@plane/propel/button";
+import { AuthService } from "@plane/services";
+import { Checkbox, Input, PasswordStrengthIndicator, Spinner } from "@plane/ui";
+import { getPasswordStrength } from "@plane/utils";
+// components
+import { AuthHeader } from "@/app/(all)/(home)/auth-header";
+import { Banner } from "@/components/common/banner";
+import { FormHeader } from "@/components/instance/form-header";
+
+// service initialization
+const authService = new AuthService();
+
+// error codes
+enum EErrorCodes {
+ INSTANCE_NOT_CONFIGURED = "INSTANCE_NOT_CONFIGURED",
+ ADMIN_ALREADY_EXIST = "ADMIN_ALREADY_EXIST",
+ REQUIRED_EMAIL_PASSWORD_FIRST_NAME = "REQUIRED_EMAIL_PASSWORD_FIRST_NAME",
+ INVALID_EMAIL = "INVALID_EMAIL",
+ INVALID_PASSWORD = "INVALID_PASSWORD",
+ USER_ALREADY_EXISTS = "USER_ALREADY_EXISTS",
+}
+
+type TError = {
+ type: EErrorCodes | undefined;
+ message: string | undefined;
+};
+
+// form data
+type TFormData = {
+ first_name: string;
+ last_name: string;
+ email: string;
+ company_name: string;
+ password: string;
+ confirm_password?: string;
+ is_telemetry_enabled: boolean;
+};
+
+const defaultFormData: TFormData = {
+ first_name: "",
+ last_name: "",
+ email: "",
+ company_name: "",
+ password: "",
+ is_telemetry_enabled: true,
+};
+
+export const InstanceSetupForm: FC = (props) => {
+ const {} = props;
+ // search params
+ const searchParams = useSearchParams();
+ const firstNameParam = searchParams.get("first_name") || undefined;
+ const lastNameParam = searchParams.get("last_name") || undefined;
+ const companyParam = searchParams.get("company") || undefined;
+ const emailParam = searchParams.get("email") || undefined;
+ const isTelemetryEnabledParam = searchParams.get("is_telemetry_enabled") !== "False"; // defaults to enabled when the param is absent
+ const errorCode = searchParams.get("error_code") || undefined;
+ const errorMessage = searchParams.get("error_message") || undefined;
+ // state
+ const [showPassword, setShowPassword] = useState({
+ password: false,
+ retypePassword: false,
+ });
+ const [csrfToken, setCsrfToken] = useState<string | undefined>(undefined);
+ const [formData, setFormData] = useState<TFormData>(defaultFormData);
+ const [isPasswordInputFocused, setIsPasswordInputFocused] = useState(false);
+ const [isSubmitting, setIsSubmitting] = useState(false);
+ const [isRetryPasswordInputFocused, setIsRetryPasswordInputFocused] = useState(false);
+
+ const handleShowPassword = (key: keyof typeof showPassword) =>
+ setShowPassword((prev) => ({ ...prev, [key]: !prev[key] }));
+
+ const handleFormChange = (key: keyof TFormData, value: string | boolean) =>
+ setFormData((prev) => ({ ...prev, [key]: value }));
+
+ useEffect(() => {
+ if (csrfToken === undefined)
+ authService.requestCSRFToken().then((data) => data?.csrf_token && setCsrfToken(data.csrf_token));
+ }, [csrfToken]);
+
+ useEffect(() => {
+ if (firstNameParam) setFormData((prev) => ({ ...prev, first_name: firstNameParam }));
+ if (lastNameParam) setFormData((prev) => ({ ...prev, last_name: lastNameParam }));
+ if (companyParam) setFormData((prev) => ({ ...prev, company_name: companyParam }));
+ if (emailParam) setFormData((prev) => ({ ...prev, email: emailParam }));
+ setFormData((prev) => ({ ...prev, is_telemetry_enabled: isTelemetryEnabledParam }));
+ }, [firstNameParam, lastNameParam, companyParam, emailParam, isTelemetryEnabledParam]);
+
+ // derived values
+ const errorData: TError = useMemo(() => {
+ if (errorCode && errorMessage) {
+ switch (errorCode) {
+ case EErrorCodes.INSTANCE_NOT_CONFIGURED:
+ return { type: EErrorCodes.INSTANCE_NOT_CONFIGURED, message: errorMessage };
+ case EErrorCodes.ADMIN_ALREADY_EXIST:
+ return { type: EErrorCodes.ADMIN_ALREADY_EXIST, message: errorMessage };
+ case EErrorCodes.REQUIRED_EMAIL_PASSWORD_FIRST_NAME:
+ return { type: EErrorCodes.REQUIRED_EMAIL_PASSWORD_FIRST_NAME, message: errorMessage };
+ case EErrorCodes.INVALID_EMAIL:
+ return { type: EErrorCodes.INVALID_EMAIL, message: errorMessage };
+ case EErrorCodes.INVALID_PASSWORD:
+ return { type: EErrorCodes.INVALID_PASSWORD, message: errorMessage };
+ case EErrorCodes.USER_ALREADY_EXISTS:
+ return { type: EErrorCodes.USER_ALREADY_EXISTS, message: errorMessage };
+ default:
+ return { type: undefined, message: undefined };
+ }
+ } else return { type: undefined, message: undefined };
+ }, [errorCode, errorMessage]);
+
+ const isButtonDisabled = useMemo(
+ () =>
+ !isSubmitting &&
+ formData.first_name &&
+ formData.email &&
+ formData.password &&
+ getPasswordStrength(formData.password) === E_PASSWORD_STRENGTH.STRENGTH_VALID &&
+ formData.password === formData.confirm_password
+ ? false
+ : true,
+ [formData.confirm_password, formData.email, formData.first_name, formData.password, isSubmitting]
+ );
+
+ const password = formData?.password ?? "";
+ const confirmPassword = formData?.confirm_password ?? "";
+ const renderPasswordMatchError = !isRetryPasswordInputFocused || confirmPassword.length >= password.length;
+
+ return (
+ <>
+
+
+
+
+ {errorData.type &&
+ errorData?.message &&
+ ![EErrorCodes.INVALID_EMAIL, EErrorCodes.INVALID_PASSWORD].includes(errorData.type) && (
+
+ )}
+
+
+
+ </>
+ );
+};
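The setup form's submit gate reduces to one predicate: the password must clear the shared strength check and match its confirmation. A condensed sketch of `isButtonDisabled`'s core (the function name is mine):

```ts
import { E_PASSWORD_STRENGTH } from "@plane/constants";
import { getPasswordStrength } from "@plane/utils";

export const canSubmitSetupForm = (password: string, confirmPassword: string): boolean =>
  getPasswordStrength(password) === E_PASSWORD_STRENGTH.STRENGTH_VALID && password === confirmPassword;
```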
diff --git a/apps/admin/core/components/new-user-popup.tsx b/apps/admin/core/components/new-user-popup.tsx
new file mode 100644
index 00000000..4f0e0236
--- /dev/null
+++ b/apps/admin/core/components/new-user-popup.tsx
@@ -0,0 +1,53 @@
+"use client";
+
+import React from "react";
+import { observer } from "mobx-react";
+import Image from "next/image";
+import Link from "next/link";
+import { useTheme as nextUseTheme } from "next-themes";
+// ui
+import { Button, getButtonStyling } from "@plane/propel/button";
+import { resolveGeneralTheme } from "@plane/utils";
+// hooks
+import { useTheme } from "@/hooks/store";
+// icons
+import TakeoffIconLight from "/public/logos/takeoff-icon-light.svg";
+import TakeoffIconDark from "/public/logos/takeoff-icon-dark.svg";
+
+export const NewUserPopup: React.FC = observer(() => {
+ // hooks
+ const { isNewUserPopup, toggleNewUserPopup } = useTheme();
+ // theme
+ const { resolvedTheme } = nextUseTheme();
+
+ if (!isNewUserPopup) return <></>;
+ return (
+
+
+
+
Create workspace
+
+ Instance setup done! Welcome to the Plane instance portal. Start your journey by creating your first
+ workspace.
+
+
+
+ Create workspace
+
+
+ Close
+
+
+
+
+
+
+
+
+ );
+});
diff --git a/apps/admin/core/components/workspace/list-item.tsx b/apps/admin/core/components/workspace/list-item.tsx
new file mode 100644
index 00000000..85a2b3c6
--- /dev/null
+++ b/apps/admin/core/components/workspace/list-item.tsx
@@ -0,0 +1,81 @@
+import { observer } from "mobx-react";
+import { ExternalLink } from "lucide-react";
+// plane internal packages
+import { WEB_BASE_URL } from "@plane/constants";
+import { Tooltip } from "@plane/propel/tooltip";
+import { getFileURL } from "@plane/utils";
+// hooks
+import { useWorkspace } from "@/hooks/store";
+
+type TWorkspaceListItemProps = {
+ workspaceId: string;
+};
+
+export const WorkspaceListItem = observer(({ workspaceId }: TWorkspaceListItemProps) => {
+ // store hooks
+ const { getWorkspaceById } = useWorkspace();
+ // derived values
+ const workspace = getWorkspaceById(workspaceId);
+
+ if (!workspace) return null;
+ return (
+
+
+
+ {workspace?.logo_url && workspace.logo_url !== "" ? (
+
+ ) : (
+ (workspace?.name?.[0] ?? "...")
+ )}
+
+
+
+
{workspace.name} /
+
+ [{workspace.slug}]
+
+
+ {workspace.owner.email && (
+
+
Owned by:
+ {workspace.owner.email}
+
+ )}
+
+ {workspace.total_projects !== null && (
+
+ Total projects:
+ {workspace.total_projects}
+
+ )}
+ {workspace.total_members !== null && (
+ <>
+ •
+
+ Total members:
+ {workspace.total_members}
+
+ </>
+ )}
+
+
+
+
+
+
+
+ );
+});
diff --git a/apps/admin/core/hooks/store/index.ts b/apps/admin/core/hooks/store/index.ts
new file mode 100644
index 00000000..ed178129
--- /dev/null
+++ b/apps/admin/core/hooks/store/index.ts
@@ -0,0 +1,4 @@
+export * from "./use-theme";
+export * from "./use-instance";
+export * from "./use-user";
+export * from "./use-workspace";
diff --git a/apps/admin/core/hooks/store/use-instance.tsx b/apps/admin/core/hooks/store/use-instance.tsx
new file mode 100644
index 00000000..5917df3f
--- /dev/null
+++ b/apps/admin/core/hooks/store/use-instance.tsx
@@ -0,0 +1,10 @@
+import { useContext } from "react";
+// store
+import { StoreContext } from "@/app/(all)/store.provider";
+import type { IInstanceStore } from "@/store/instance.store";
+
+export const useInstance = (): IInstanceStore => {
+ const context = useContext(StoreContext);
+ if (context === undefined) throw new Error("useInstance must be used within StoreProvider");
+ return context.instance;
+};
diff --git a/apps/admin/core/hooks/store/use-theme.tsx b/apps/admin/core/hooks/store/use-theme.tsx
new file mode 100644
index 00000000..d5a1e820
--- /dev/null
+++ b/apps/admin/core/hooks/store/use-theme.tsx
@@ -0,0 +1,10 @@
+import { useContext } from "react";
+// store
+import { StoreContext } from "@/app/(all)/store.provider";
+import type { IThemeStore } from "@/store/theme.store";
+
+export const useTheme = (): IThemeStore => {
+ const context = useContext(StoreContext);
+ if (context === undefined) throw new Error("useTheme must be used within StoreProvider");
+ return context.theme;
+};
diff --git a/apps/admin/core/hooks/store/use-user.tsx b/apps/admin/core/hooks/store/use-user.tsx
new file mode 100644
index 00000000..56b988eb
--- /dev/null
+++ b/apps/admin/core/hooks/store/use-user.tsx
@@ -0,0 +1,10 @@
+import { useContext } from "react";
+// store
+import { StoreContext } from "@/app/(all)/store.provider";
+import type { IUserStore } from "@/store/user.store";
+
+export const useUser = (): IUserStore => {
+ const context = useContext(StoreContext);
+ if (context === undefined) throw new Error("useUser must be used within StoreProvider");
+ return context.user;
+};
diff --git a/apps/admin/core/hooks/store/use-workspace.tsx b/apps/admin/core/hooks/store/use-workspace.tsx
new file mode 100644
index 00000000..c4578c91
--- /dev/null
+++ b/apps/admin/core/hooks/store/use-workspace.tsx
@@ -0,0 +1,10 @@
+import { useContext } from "react";
+// store
+import { StoreContext } from "@/app/(all)/store.provider";
+import type { IWorkspaceStore } from "@/store/workspace.store";
+
+export const useWorkspace = (): IWorkspaceStore => {
+ const context = useContext(StoreContext);
+ if (context === undefined) throw new Error("useWorkspace must be used within StoreProvider");
+ return context.workspace;
+};
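+
+// Illustrative usage from a component (the component name below is hypothetical):
+//
+//   const WorkspaceCount = observer(() => {
+//     const { workspaceIds } = useWorkspace();
+//     return <span>{workspaceIds.length}</span>;
+//   });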
diff --git a/apps/admin/core/store/instance.store.ts b/apps/admin/core/store/instance.store.ts
new file mode 100644
index 00000000..ec892292
--- /dev/null
+++ b/apps/admin/core/store/instance.store.ts
@@ -0,0 +1,218 @@
+import { set } from "lodash-es";
+import { observable, action, computed, makeObservable, runInAction } from "mobx";
+// plane internal packages
+import type { TInstanceStatus } from "@plane/constants";
+import { EInstanceStatus } from "@plane/constants";
+import { InstanceService } from "@plane/services";
+import type {
+ IInstance,
+ IInstanceAdmin,
+ IInstanceConfiguration,
+ IFormattedInstanceConfiguration,
+ IInstanceInfo,
+ IInstanceConfig,
+} from "@plane/types";
+// root store
+import type { CoreRootStore } from "@/store/root.store";
+
+export interface IInstanceStore {
+  // observables
+ isLoading: boolean;
+ error: any;
+ instanceStatus: TInstanceStatus | undefined;
+ instance: IInstance | undefined;
+ config: IInstanceConfig | undefined;
+ instanceAdmins: IInstanceAdmin[] | undefined;
+ instanceConfigurations: IInstanceConfiguration[] | undefined;
+ // computed
+ formattedConfig: IFormattedInstanceConfiguration | undefined;
+ // action
+ hydrate: (data: IInstanceInfo) => void;
+  fetchInstanceInfo: () => Promise<IInstanceInfo>;
+  updateInstanceInfo: (data: Partial<IInstance>) => Promise<IInstance>;
+  fetchInstanceAdmins: () => Promise<IInstanceAdmin[]>;
+  fetchInstanceConfigurations: () => Promise<IInstanceConfiguration[]>;
+  updateInstanceConfigurations: (data: Partial<IFormattedInstanceConfiguration>) => Promise<IInstanceConfiguration[]>;
+  disableEmail: () => Promise<void>;
+}
+
+export class InstanceStore implements IInstanceStore {
+ isLoading: boolean = true;
+ error: any = undefined;
+ instanceStatus: TInstanceStatus | undefined = undefined;
+ instance: IInstance | undefined = undefined;
+ config: IInstanceConfig | undefined = undefined;
+ instanceAdmins: IInstanceAdmin[] | undefined = undefined;
+ instanceConfigurations: IInstanceConfiguration[] | undefined = undefined;
+ // service
+ instanceService;
+
+ constructor(private store: CoreRootStore) {
+ makeObservable(this, {
+ // observable
+ isLoading: observable.ref,
+ error: observable.ref,
+ instanceStatus: observable,
+      instance: observable,
+      config: observable,
+      instanceAdmins: observable,
+ instanceConfigurations: observable,
+ // computed
+ formattedConfig: computed,
+ // actions
+ hydrate: action,
+ fetchInstanceInfo: action,
+ fetchInstanceAdmins: action,
+ updateInstanceInfo: action,
+ fetchInstanceConfigurations: action,
+ updateInstanceConfigurations: action,
+ });
+
+ this.instanceService = new InstanceService();
+ }
+
+ hydrate = (data: IInstanceInfo) => {
+ if (data) {
+ this.instance = data.instance;
+ this.config = data.config;
+ }
+ };
+
+ /**
+   * Computed value of instance configurations, prepared for form consumption.
+   * @returns configurations as a { key: value } map.
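+   * @example (illustrative) [{ key: "ENABLE_SMTP", value: "1" }] becomes { ENABLE_SMTP: "1" }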
+ */
+ get formattedConfig() {
+ if (!this.instanceConfigurations) return undefined;
+ return this.instanceConfigurations?.reduce((formData: IFormattedInstanceConfiguration, config) => {
+ formData[config.key] = config.value;
+ return formData;
+ }, {} as IFormattedInstanceConfiguration);
+ }
+
+ /**
+ * @description fetching instance configuration
+ * @returns {IInstance} instance
+ */
+ fetchInstanceInfo = async () => {
+ try {
+ if (this.instance === undefined) this.isLoading = true;
+ this.error = undefined;
+ const instanceInfo = await this.instanceService.info();
+ // handling the new user popup toggle
+ if (this.instance === undefined && !instanceInfo?.instance?.workspaces_exist)
+ this.store.theme.toggleNewUserPopup();
+ runInAction(() => {
+ // console.log("instanceInfo: ", instanceInfo);
+ this.isLoading = false;
+ this.instance = instanceInfo.instance;
+ this.config = instanceInfo.config;
+ });
+ return instanceInfo;
+ } catch (error) {
+ console.error("Error fetching the instance info");
+ this.isLoading = false;
+ this.error = { message: "Failed to fetch the instance info" };
+ this.instanceStatus = {
+ status: EInstanceStatus.ERROR,
+ };
+ throw error;
+ }
+ };
+
+ /**
+ * @description updating instance information
+   * @param {Partial<IInstance>} data
+ * @returns void
+ */
+  updateInstanceInfo = async (data: Partial<IInstance>) => {
+ try {
+ const instanceResponse = await this.instanceService.update(data);
+ if (instanceResponse) {
+ runInAction(() => {
+ if (this.instance) set(this.instance, "instance", instanceResponse);
+ });
+ }
+ return instanceResponse;
+ } catch (error) {
+ console.error("Error updating the instance info");
+ throw error;
+ }
+ };
+
+ /**
+ * @description fetching instance admins
+ * @return {IInstanceAdmin[]} instanceAdmins
+ */
+ fetchInstanceAdmins = async () => {
+ try {
+ const instanceAdmins = await this.instanceService.admins();
+ if (instanceAdmins) runInAction(() => (this.instanceAdmins = instanceAdmins));
+ return instanceAdmins;
+ } catch (error) {
+ console.error("Error fetching the instance admins");
+ throw error;
+ }
+ };
+
+ /**
+ * @description fetching instance configurations
+   * @return {IInstanceConfiguration[]} instanceConfigurations
+ */
+ fetchInstanceConfigurations = async () => {
+ try {
+ const instanceConfigurations = await this.instanceService.configurations();
+ if (instanceConfigurations) runInAction(() => (this.instanceConfigurations = instanceConfigurations));
+ return instanceConfigurations;
+ } catch (error) {
+ console.error("Error fetching the instance configurations");
+ throw error;
+ }
+ };
+
+ /**
+ * @description updating instance configurations
+ * @param data
+ */
+  updateInstanceConfigurations = async (data: Partial<IFormattedInstanceConfiguration>) => {
+ try {
+ const response = await this.instanceService.updateConfigurations(data);
+ runInAction(() => {
+ this.instanceConfigurations = this.instanceConfigurations?.map((config) => {
+ const item = response.find((item) => item.key === config.key);
+ if (item) return item;
+ return config;
+ });
+ });
+ return response;
+ } catch (error) {
+ console.error("Error updating the instance configurations");
+ throw error;
+ }
+ };
+
+ disableEmail = async () => {
+ const instanceConfigurations = this.instanceConfigurations;
+ try {
+ runInAction(() => {
+ this.instanceConfigurations = this.instanceConfigurations?.map((config) => {
+ if (
+ [
+ "EMAIL_HOST",
+ "EMAIL_PORT",
+ "EMAIL_HOST_USER",
+ "EMAIL_HOST_PASSWORD",
+ "EMAIL_FROM",
+ "ENABLE_SMTP",
+ ].includes(config.key)
+ )
+ return { ...config, value: "" };
+ return config;
+ });
+ });
+ await this.instanceService.disableEmail();
+ } catch (_error) {
+ console.error("Error disabling the email");
+ this.instanceConfigurations = instanceConfigurations;
+ }
+ };
+}
diff --git a/apps/admin/core/store/root.store.ts b/apps/admin/core/store/root.store.ts
new file mode 100644
index 00000000..68d11885
--- /dev/null
+++ b/apps/admin/core/store/root.store.ts
@@ -0,0 +1,41 @@
+import { enableStaticRendering } from "mobx-react";
+// stores
+import type { IInstanceStore } from "./instance.store";
+import { InstanceStore } from "./instance.store";
+import type { IThemeStore } from "./theme.store";
+import { ThemeStore } from "./theme.store";
+import type { IUserStore } from "./user.store";
+import { UserStore } from "./user.store";
+import type { IWorkspaceStore } from "./workspace.store";
+import { WorkspaceStore } from "./workspace.store";
+
+enableStaticRendering(typeof window === "undefined");
+
+export abstract class CoreRootStore {
+ theme: IThemeStore;
+ instance: IInstanceStore;
+ user: IUserStore;
+ workspace: IWorkspaceStore;
+
+ constructor() {
+ this.theme = new ThemeStore(this);
+ this.instance = new InstanceStore(this);
+ this.user = new UserStore(this);
+ this.workspace = new WorkspaceStore(this);
+ }
+
+ hydrate(initialData: any) {
+ this.theme.hydrate(initialData.theme);
+ this.instance.hydrate(initialData.instance);
+ this.user.hydrate(initialData.user);
+ this.workspace.hydrate(initialData.workspace);
+ }
+
+ resetOnSignOut() {
+ localStorage.setItem("theme", "system");
+ this.instance = new InstanceStore(this);
+ this.user = new UserStore(this);
+ this.theme = new ThemeStore(this);
+ this.workspace = new WorkspaceStore(this);
+ }
+}
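+
+// Illustrative bootstrapping; the variable names and initial-data shape below are
+// assumptions, not part of this module:
+//
+//   class RootStore extends CoreRootStore {}
+//   const rootStore = new RootStore();
+//   rootStore.hydrate({
+//     theme: "dark",
+//     instance: instanceInfo, // IInstanceInfo
+//     user: currentUser, // IUser
+//     workspace: workspacesById, // Record<string, IWorkspace>
+//   });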
diff --git a/apps/admin/core/store/theme.store.ts b/apps/admin/core/store/theme.store.ts
new file mode 100644
index 00000000..4512facd
--- /dev/null
+++ b/apps/admin/core/store/theme.store.ts
@@ -0,0 +1,68 @@
+import { action, observable, makeObservable } from "mobx";
+// root store
+import type { CoreRootStore } from "@/store/root.store";
+
+type TTheme = "dark" | "light";
+export interface IThemeStore {
+ // observables
+ isNewUserPopup: boolean;
+ theme: string | undefined;
+ isSidebarCollapsed: boolean | undefined;
+ // actions
+ hydrate: (data: any) => void;
+ toggleNewUserPopup: () => void;
+  toggleSidebar: (collapsed?: boolean) => void;
+ setTheme: (currentTheme: TTheme) => void;
+}
+
+export class ThemeStore implements IThemeStore {
+ // observables
+ isNewUserPopup: boolean = false;
+ isSidebarCollapsed: boolean | undefined = undefined;
+ theme: string | undefined = undefined;
+
+ constructor(private store: CoreRootStore) {
+ makeObservable(this, {
+ // observables
+ isNewUserPopup: observable.ref,
+ isSidebarCollapsed: observable.ref,
+ theme: observable.ref,
+ // action
+ toggleNewUserPopup: action,
+ toggleSidebar: action,
+ setTheme: action,
+ });
+ }
+
+ hydrate = (data: any) => {
+ if (data) this.theme = data;
+ };
+
+ /**
+ * @description Toggle the new user popup modal
+ */
+ toggleNewUserPopup = () => (this.isNewUserPopup = !this.isNewUserPopup);
+
+ /**
+ * @description Toggle the sidebar collapsed state
+ * @param isCollapsed
+ */
+  toggleSidebar = (isCollapsed?: boolean) => {
+    if (isCollapsed === undefined) this.isSidebarCollapsed = !this.isSidebarCollapsed;
+    else this.isSidebarCollapsed = isCollapsed;
+    localStorage.setItem("god_mode_sidebar_collapsed", String(this.isSidebarCollapsed));
+  };
+
+ /**
+ * @description Sets the user theme and applies it to the platform
+ * @param currentTheme
+ */
+ setTheme = async (currentTheme: TTheme) => {
+ try {
+ localStorage.setItem("theme", currentTheme);
+ this.theme = currentTheme;
+ } catch (error) {
+ console.error("setting user theme error", error);
+ }
+ };
+}
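+
+// Illustrative usage via the useTheme() store hook (assumes a StoreProvider ancestor):
+//
+//   const { setTheme, toggleSidebar } = useTheme();
+//   setTheme("dark"); // persists the choice to localStorage and updates the store
+//   toggleSidebar(); // flips the collapsed state when called without an argument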
diff --git a/apps/admin/core/store/user.store.ts b/apps/admin/core/store/user.store.ts
new file mode 100644
index 00000000..1187355a
--- /dev/null
+++ b/apps/admin/core/store/user.store.ts
@@ -0,0 +1,103 @@
+import { action, observable, runInAction, makeObservable } from "mobx";
+// plane internal packages
+import type { TUserStatus } from "@plane/constants";
+import { EUserStatus } from "@plane/constants";
+import { AuthService, UserService } from "@plane/services";
+import type { IUser } from "@plane/types";
+// root store
+import type { CoreRootStore } from "@/store/root.store";
+
+export interface IUserStore {
+ // observables
+ isLoading: boolean;
+ userStatus: TUserStatus | undefined;
+ isUserLoggedIn: boolean | undefined;
+ currentUser: IUser | undefined;
+ // fetch actions
+ hydrate: (data: any) => void;
+  fetchCurrentUser: () => Promise<IUser | undefined>;
+ reset: () => void;
+ signOut: () => void;
+}
+
+export class UserStore implements IUserStore {
+ // observables
+ isLoading: boolean = true;
+ userStatus: TUserStatus | undefined = undefined;
+ isUserLoggedIn: boolean | undefined = undefined;
+ currentUser: IUser | undefined = undefined;
+ // services
+ userService;
+ authService;
+
+ constructor(private store: CoreRootStore) {
+ makeObservable(this, {
+ // observables
+ isLoading: observable.ref,
+ userStatus: observable,
+ isUserLoggedIn: observable.ref,
+ currentUser: observable,
+ // action
+ fetchCurrentUser: action,
+ reset: action,
+ signOut: action,
+ });
+ this.userService = new UserService();
+ this.authService = new AuthService();
+ }
+
+ hydrate = (data: any) => {
+ if (data) this.currentUser = data;
+ };
+
+ /**
+ * @description Fetches the current user
+   * @returns Promise<IUser | undefined>
+ */
+ fetchCurrentUser = async () => {
+ try {
+ if (this.currentUser === undefined) this.isLoading = true;
+ const currentUser = await this.userService.adminDetails();
+ if (currentUser) {
+ await this.store.instance.fetchInstanceAdmins();
+ runInAction(() => {
+ this.isUserLoggedIn = true;
+ this.currentUser = currentUser;
+ this.isLoading = false;
+ });
+ } else {
+ runInAction(() => {
+ this.isUserLoggedIn = false;
+ this.currentUser = undefined;
+ this.isLoading = false;
+ });
+ }
+ return currentUser;
+ } catch (error: any) {
+ this.isLoading = false;
+ this.isUserLoggedIn = false;
+ if (error.status === 403)
+ this.userStatus = {
+ status: EUserStatus.AUTHENTICATION_NOT_DONE,
+ message: error?.message || "",
+ };
+ else
+ this.userStatus = {
+ status: EUserStatus.ERROR,
+ message: error?.message || "",
+ };
+ throw error;
+ }
+ };
+
+ reset = async () => {
+ this.isUserLoggedIn = false;
+ this.currentUser = undefined;
+ this.isLoading = false;
+ this.userStatus = undefined;
+ };
+
+ signOut = async () => {
+ this.store.resetOnSignOut();
+ };
+}
diff --git a/apps/admin/core/store/workspace.store.ts b/apps/admin/core/store/workspace.store.ts
new file mode 100644
index 00000000..f9203ed4
--- /dev/null
+++ b/apps/admin/core/store/workspace.store.ts
@@ -0,0 +1,150 @@
+import { set } from "lodash-es";
+import { action, observable, runInAction, makeObservable, computed } from "mobx";
+// plane imports
+import { InstanceWorkspaceService } from "@plane/services";
+import type { IWorkspace, TLoader, TPaginationInfo } from "@plane/types";
+// root store
+import type { CoreRootStore } from "@/store/root.store";
+
+export interface IWorkspaceStore {
+ // observables
+ loader: TLoader;
+  workspaces: Record<string, IWorkspace>;
+ paginationInfo: TPaginationInfo | undefined;
+ // computed
+ workspaceIds: string[];
+ // helper actions
+  hydrate: (data: Record<string, IWorkspace>) => void;
+ getWorkspaceById: (workspaceId: string) => IWorkspace | undefined;
+ // fetch actions
+  fetchWorkspaces: () => Promise<IWorkspace[]>;
+  fetchNextWorkspaces: () => Promise<IWorkspace[]>;
+  // CRUD actions
+  createWorkspace: (data: IWorkspace) => Promise<IWorkspace>;
+}
+
+export class WorkspaceStore implements IWorkspaceStore {
+ // observables
+ loader: TLoader = "init-loader";
+  workspaces: Record<string, IWorkspace> = {};
+ paginationInfo: TPaginationInfo | undefined = undefined;
+ // services
+ instanceWorkspaceService;
+
+ constructor(private store: CoreRootStore) {
+ makeObservable(this, {
+ // observables
+ loader: observable,
+ workspaces: observable,
+ paginationInfo: observable,
+ // computed
+ workspaceIds: computed,
+ // helper actions
+ hydrate: action,
+ getWorkspaceById: action,
+ // fetch actions
+ fetchWorkspaces: action,
+ fetchNextWorkspaces: action,
+      // CRUD actions
+ createWorkspace: action,
+ });
+ this.instanceWorkspaceService = new InstanceWorkspaceService();
+ }
+
+ // computed
+ get workspaceIds() {
+ return Object.keys(this.workspaces);
+ }
+
+ // helper actions
+ /**
+ * @description Hydrates the workspaces
+   * @param data - Record<string, IWorkspace>
+ */
+  hydrate = (data: Record<string, IWorkspace>) => {
+ if (data) this.workspaces = data;
+ };
+
+ /**
+ * @description Gets a workspace by id
+ * @param workspaceId - string
+ * @returns IWorkspace | undefined
+ */
+ getWorkspaceById = (workspaceId: string) => this.workspaces[workspaceId];
+
+ // fetch actions
+ /**
+ * @description Fetches all workspaces
+   * @returns Promise<IWorkspace[]>
+ */
+  fetchWorkspaces = async (): Promise<IWorkspace[]> => {
+ try {
+ if (this.workspaceIds.length > 0) {
+ this.loader = "mutation";
+ } else {
+ this.loader = "init-loader";
+ }
+ const paginatedWorkspaceData = await this.instanceWorkspaceService.list();
+ runInAction(() => {
+ const { results, ...paginationInfo } = paginatedWorkspaceData;
+ results.forEach((workspace: IWorkspace) => {
+ set(this.workspaces, [workspace.id], workspace);
+ });
+ set(this, "paginationInfo", paginationInfo);
+ });
+ return paginatedWorkspaceData.results;
+ } catch (error) {
+ console.error("Error fetching workspaces", error);
+ throw error;
+ } finally {
+ this.loader = "loaded";
+ }
+ };
+
+ /**
+ * @description Fetches the next page of workspaces
+   * @returns Promise<IWorkspace[]>
+ */
+  fetchNextWorkspaces = async (): Promise<IWorkspace[]> => {
+ if (!this.paginationInfo || this.paginationInfo.next_page_results === false) return [];
+ try {
+ this.loader = "pagination";
+ const paginatedWorkspaceData = await this.instanceWorkspaceService.list(this.paginationInfo.next_cursor);
+ runInAction(() => {
+ const { results, ...paginationInfo } = paginatedWorkspaceData;
+ results.forEach((workspace: IWorkspace) => {
+ set(this.workspaces, [workspace.id], workspace);
+ });
+ set(this, "paginationInfo", paginationInfo);
+ });
+ return paginatedWorkspaceData.results;
+ } catch (error) {
+ console.error("Error fetching next workspaces", error);
+ throw error;
+ } finally {
+ this.loader = "loaded";
+ }
+ };
+
+  // CRUD actions
+ /**
+ * @description Creates a new workspace
+ * @param data - IWorkspace
+   * @returns Promise<IWorkspace>
+ */
+  createWorkspace = async (data: IWorkspace): Promise<IWorkspace> => {
+ try {
+ this.loader = "mutation";
+ const workspace = await this.instanceWorkspaceService.create(data);
+ runInAction(() => {
+ set(this.workspaces, [workspace.id], workspace);
+ });
+ return workspace;
+ } catch (error) {
+ console.error("Error creating workspace", error);
+ throw error;
+ } finally {
+ this.loader = "loaded";
+ }
+ };
+}
diff --git a/apps/admin/ee/components/authentication/authentication-modes.tsx b/apps/admin/ee/components/authentication/authentication-modes.tsx
new file mode 100644
index 00000000..4e3b05a5
--- /dev/null
+++ b/apps/admin/ee/components/authentication/authentication-modes.tsx
@@ -0,0 +1 @@
+export * from "ce/components/authentication/authentication-modes";
diff --git a/apps/admin/ee/components/authentication/index.ts b/apps/admin/ee/components/authentication/index.ts
new file mode 100644
index 00000000..d2aa7485
--- /dev/null
+++ b/apps/admin/ee/components/authentication/index.ts
@@ -0,0 +1 @@
+export * from "./authentication-modes";
diff --git a/apps/admin/ee/components/common/index.ts b/apps/admin/ee/components/common/index.ts
new file mode 100644
index 00000000..60441ee2
--- /dev/null
+++ b/apps/admin/ee/components/common/index.ts
@@ -0,0 +1 @@
+export * from "ce/components/common";
diff --git a/apps/admin/ee/store/root.store.ts b/apps/admin/ee/store/root.store.ts
new file mode 100644
index 00000000..c514c4c2
--- /dev/null
+++ b/apps/admin/ee/store/root.store.ts
@@ -0,0 +1 @@
+export * from "ce/store/root.store";
diff --git a/apps/admin/next-env.d.ts b/apps/admin/next-env.d.ts
new file mode 100644
index 00000000..40c3d680
--- /dev/null
+++ b/apps/admin/next-env.d.ts
@@ -0,0 +1,5 @@
+/// <reference types="next" />
+/// <reference types="next/image-types/global" />
+
+// NOTE: This file should not be edited
+// see https://nextjs.org/docs/app/building-your-application/configuring/typescript for more information.
diff --git a/apps/admin/next.config.js b/apps/admin/next.config.js
new file mode 100644
index 00000000..c848e0b9
--- /dev/null
+++ b/apps/admin/next.config.js
@@ -0,0 +1,29 @@
+/** @type {import('next').NextConfig} */
+
+const nextConfig = {
+ trailingSlash: true,
+ reactStrictMode: false,
+ swcMinify: true,
+ output: "standalone",
+ images: {
+ unoptimized: true,
+ },
+ basePath: process.env.NEXT_PUBLIC_ADMIN_BASE_PATH || "",
+ experimental: {
+ optimizePackageImports: [
+ "@plane/constants",
+ "@plane/editor",
+ "@plane/hooks",
+ "@plane/i18n",
+ "@plane/logger",
+ "@plane/propel",
+ "@plane/services",
+ "@plane/shared-state",
+ "@plane/types",
+ "@plane/ui",
+ "@plane/utils",
+ ],
+ },
+};
+
+module.exports = nextConfig;
diff --git a/apps/admin/package.json b/apps/admin/package.json
new file mode 100644
index 00000000..dfa57a7c
--- /dev/null
+++ b/apps/admin/package.json
@@ -0,0 +1,54 @@
+{
+ "name": "admin",
+ "description": "Admin UI for Plane",
+ "version": "1.1.0",
+ "license": "AGPL-3.0",
+ "private": true,
+ "scripts": {
+ "dev": "next dev --port 3001",
+ "build": "next build",
+ "preview": "next build && next start",
+ "start": "next start",
+ "clean": "rm -rf .turbo && rm -rf .next && rm -rf node_modules && rm -rf dist",
+ "check:lint": "eslint . --max-warnings 19",
+ "check:types": "tsc --noEmit",
+ "check:format": "prettier --check \"**/*.{ts,tsx,md,json,css,scss}\"",
+ "fix:lint": "eslint . --fix",
+ "fix:format": "prettier --write \"**/*.{ts,tsx,md,json,css,scss}\""
+ },
+ "dependencies": {
+ "@headlessui/react": "^1.7.19",
+ "@plane/constants": "workspace:*",
+ "@plane/hooks": "workspace:*",
+ "@plane/propel": "workspace:*",
+ "@plane/services": "workspace:*",
+ "@plane/types": "workspace:*",
+ "@plane/ui": "workspace:*",
+ "@plane/utils": "workspace:*",
+ "autoprefixer": "10.4.14",
+ "axios": "catalog:",
+ "lodash-es": "catalog:",
+ "lucide-react": "catalog:",
+ "mobx": "catalog:",
+ "mobx-react": "catalog:",
+ "next": "catalog:",
+ "next-themes": "^0.2.1",
+ "postcss": "^8.4.49",
+ "react": "catalog:",
+ "react-dom": "catalog:",
+ "react-hook-form": "7.51.5",
+ "sharp": "catalog:",
+ "swr": "catalog:",
+ "uuid": "catalog:"
+ },
+ "devDependencies": {
+ "@plane/eslint-config": "workspace:*",
+ "@plane/tailwind-config": "workspace:*",
+ "@plane/typescript-config": "workspace:*",
+ "@types/lodash-es": "catalog:",
+ "@types/node": "catalog:",
+ "@types/react": "catalog:",
+ "@types/react-dom": "catalog:",
+ "typescript": "catalog:"
+ }
+}
diff --git a/apps/admin/postcss.config.js b/apps/admin/postcss.config.js
new file mode 100644
index 00000000..9b1e55fc
--- /dev/null
+++ b/apps/admin/postcss.config.js
@@ -0,0 +1,2 @@
+// eslint-disable-next-line @typescript-eslint/no-require-imports
+module.exports = require("@plane/tailwind-config/postcss.config.js");
diff --git a/apps/admin/public/auth/background-pattern-dark.svg b/apps/admin/public/auth/background-pattern-dark.svg
new file mode 100644
index 00000000..c258cbab
--- /dev/null
+++ b/apps/admin/public/auth/background-pattern-dark.svg
@@ -0,0 +1,68 @@
+<!-- SVG markup omitted -->
diff --git a/apps/admin/public/auth/background-pattern.svg b/apps/admin/public/auth/background-pattern.svg
new file mode 100644
index 00000000..5fcbeec2
--- /dev/null
+++ b/apps/admin/public/auth/background-pattern.svg
@@ -0,0 +1,68 @@
+<!-- SVG markup omitted -->
diff --git a/apps/admin/public/favicon/android-chrome-192x192.png b/apps/admin/public/favicon/android-chrome-192x192.png
new file mode 100644
index 00000000..4a005e54
Binary files /dev/null and b/apps/admin/public/favicon/android-chrome-192x192.png differ
diff --git a/apps/admin/public/favicon/android-chrome-512x512.png b/apps/admin/public/favicon/android-chrome-512x512.png
new file mode 100644
index 00000000..27fafe82
Binary files /dev/null and b/apps/admin/public/favicon/android-chrome-512x512.png differ
diff --git a/apps/admin/public/favicon/apple-touch-icon.png b/apps/admin/public/favicon/apple-touch-icon.png
new file mode 100644
index 00000000..a6312678
Binary files /dev/null and b/apps/admin/public/favicon/apple-touch-icon.png differ
diff --git a/apps/admin/public/favicon/favicon-16x16.png b/apps/admin/public/favicon/favicon-16x16.png
new file mode 100644
index 00000000..af59ef01
Binary files /dev/null and b/apps/admin/public/favicon/favicon-16x16.png differ
diff --git a/apps/admin/public/favicon/favicon-32x32.png b/apps/admin/public/favicon/favicon-32x32.png
new file mode 100644
index 00000000..16a1271a
Binary files /dev/null and b/apps/admin/public/favicon/favicon-32x32.png differ
diff --git a/apps/admin/public/favicon/favicon.ico b/apps/admin/public/favicon/favicon.ico
new file mode 100644
index 00000000..613b1a31
Binary files /dev/null and b/apps/admin/public/favicon/favicon.ico differ
diff --git a/apps/admin/public/favicon/site.webmanifest b/apps/admin/public/favicon/site.webmanifest
new file mode 100644
index 00000000..1d410578
--- /dev/null
+++ b/apps/admin/public/favicon/site.webmanifest
@@ -0,0 +1,11 @@
+{
+ "name": "",
+ "short_name": "",
+ "icons": [
+ { "src": "/favicon/android-chrome-192x192.png", "sizes": "192x192", "type": "image/png" },
+ { "src": "/favicon/android-chrome-512x512.png", "sizes": "512x512", "type": "image/png" }
+ ],
+ "theme_color": "#ffffff",
+ "background_color": "#ffffff",
+ "display": "standalone"
+}
diff --git a/apps/admin/public/images/logo-spinner-dark.gif b/apps/admin/public/images/logo-spinner-dark.gif
new file mode 100644
index 00000000..8bd08325
Binary files /dev/null and b/apps/admin/public/images/logo-spinner-dark.gif differ
diff --git a/apps/admin/public/images/logo-spinner-light.gif b/apps/admin/public/images/logo-spinner-light.gif
new file mode 100644
index 00000000..8b571031
Binary files /dev/null and b/apps/admin/public/images/logo-spinner-light.gif differ
diff --git a/apps/admin/public/images/plane-takeoff.png b/apps/admin/public/images/plane-takeoff.png
new file mode 100644
index 00000000..417ff829
Binary files /dev/null and b/apps/admin/public/images/plane-takeoff.png differ
diff --git a/apps/admin/public/instance/instance-failure-dark.svg b/apps/admin/public/instance/instance-failure-dark.svg
new file mode 100644
index 00000000..58d69170
--- /dev/null
+++ b/apps/admin/public/instance/instance-failure-dark.svg
@@ -0,0 +1,40 @@
+<!-- SVG markup omitted -->
diff --git a/apps/admin/public/instance/instance-failure.svg b/apps/admin/public/instance/instance-failure.svg
new file mode 100644
index 00000000..a5986228
--- /dev/null
+++ b/apps/admin/public/instance/instance-failure.svg
@@ -0,0 +1,40 @@
+<!-- SVG markup omitted -->
diff --git a/apps/admin/public/instance/plane-takeoff.png b/apps/admin/public/instance/plane-takeoff.png
new file mode 100644
index 00000000..417ff829
Binary files /dev/null and b/apps/admin/public/instance/plane-takeoff.png differ
diff --git a/apps/admin/public/logos/github-black.png b/apps/admin/public/logos/github-black.png
new file mode 100644
index 00000000..7a7a8247
Binary files /dev/null and b/apps/admin/public/logos/github-black.png differ
diff --git a/apps/admin/public/logos/github-white.png b/apps/admin/public/logos/github-white.png
new file mode 100644
index 00000000..dbb2b578
Binary files /dev/null and b/apps/admin/public/logos/github-white.png differ
diff --git a/apps/admin/public/logos/gitlab-logo.svg b/apps/admin/public/logos/gitlab-logo.svg
new file mode 100644
index 00000000..dab4d8b7
--- /dev/null
+++ b/apps/admin/public/logos/gitlab-logo.svg
@@ -0,0 +1 @@
+<!-- SVG markup omitted -->
diff --git a/apps/admin/public/logos/google-logo.svg b/apps/admin/public/logos/google-logo.svg
new file mode 100644
index 00000000..088288fa
--- /dev/null
+++ b/apps/admin/public/logos/google-logo.svg
@@ -0,0 +1 @@
+<!-- SVG markup omitted -->
\ No newline at end of file
diff --git a/apps/admin/public/logos/oidc-logo.svg b/apps/admin/public/logos/oidc-logo.svg
new file mode 100644
index 00000000..68bc72d0
--- /dev/null
+++ b/apps/admin/public/logos/oidc-logo.svg
@@ -0,0 +1,11 @@
+<!-- SVG markup omitted -->
diff --git a/apps/admin/public/logos/saml-logo.svg b/apps/admin/public/logos/saml-logo.svg
new file mode 100644
index 00000000..4cbb4f81
--- /dev/null
+++ b/apps/admin/public/logos/saml-logo.svg
@@ -0,0 +1,17 @@
+<!-- SVG markup omitted -->
diff --git a/apps/admin/public/logos/takeoff-icon-dark.svg b/apps/admin/public/logos/takeoff-icon-dark.svg
new file mode 100644
index 00000000..d3ef1911
--- /dev/null
+++ b/apps/admin/public/logos/takeoff-icon-dark.svg
@@ -0,0 +1,35 @@
+<!-- SVG markup omitted -->
diff --git a/apps/admin/public/logos/takeoff-icon-light.svg b/apps/admin/public/logos/takeoff-icon-light.svg
new file mode 100644
index 00000000..97cf43fe
--- /dev/null
+++ b/apps/admin/public/logos/takeoff-icon-light.svg
@@ -0,0 +1,40 @@
+<!-- SVG markup omitted -->
diff --git a/apps/admin/public/site.webmanifest.json b/apps/admin/public/site.webmanifest.json
new file mode 100644
index 00000000..6e5e438f
--- /dev/null
+++ b/apps/admin/public/site.webmanifest.json
@@ -0,0 +1,13 @@
+{
+ "name": "Plane God Mode",
+ "short_name": "Plane God Mode",
+ "description": "Plane helps you plan your issues, cycles, and product modules.",
+ "start_url": ".",
+ "display": "standalone",
+ "background_color": "#f9fafb",
+ "theme_color": "#3f76ff",
+ "icons": [
+ { "src": "/favicon/android-chrome-192x192.png", "sizes": "192x192", "type": "image/png" },
+ { "src": "/favicon/android-chrome-512x512.png", "sizes": "512x512", "type": "image/png" }
+ ]
+}
diff --git a/apps/admin/styles/globals.css b/apps/admin/styles/globals.css
new file mode 100644
index 00000000..86a0b851
--- /dev/null
+++ b/apps/admin/styles/globals.css
@@ -0,0 +1,396 @@
+@import "@plane/propel/styles/fonts";
+
+@tailwind base;
+@tailwind components;
+@tailwind utilities;
+
+@layer components {
+ .text-1\.5xl {
+ font-size: 1.375rem;
+ line-height: 1.875rem;
+ }
+
+ .text-2\.5xl {
+ font-size: 1.75rem;
+ line-height: 2.25rem;
+ }
+}
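+
+/* Usage note: the escaped selectors above are consumed in markup as
+   class="text-1.5xl" and class="text-2.5xl". */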
+
+@layer base {
+ html {
+ font-family: "Inter", sans-serif;
+ }
+
+ :root {
+ color-scheme: light !important;
+
+ --color-primary-10: 229, 243, 250;
+ --color-primary-20: 216, 237, 248;
+ --color-primary-30: 199, 229, 244;
+ --color-primary-40: 169, 214, 239;
+ --color-primary-50: 144, 202, 234;
+ --color-primary-60: 109, 186, 227;
+ --color-primary-70: 75, 170, 221;
+ --color-primary-80: 41, 154, 214;
+ --color-primary-90: 34, 129, 180;
+ --color-primary-100: 0, 99, 153;
+ --color-primary-200: 0, 92, 143;
+ --color-primary-300: 0, 86, 133;
+ --color-primary-400: 0, 77, 117;
+ --color-primary-500: 0, 66, 102;
+ --color-primary-600: 0, 53, 82;
+ --color-primary-700: 0, 43, 66;
+ --color-primary-800: 0, 33, 51;
+ --color-primary-900: 0, 23, 36;
+
+ --color-background-100: 255, 255, 255; /* primary bg */
+ --color-background-90: 247, 247, 247; /* secondary bg */
+ --color-background-80: 232, 232, 232; /* tertiary bg */
+
+ --color-text-100: 23, 23, 23; /* primary text */
+ --color-text-200: 58, 58, 58; /* secondary text */
+ --color-text-300: 82, 82, 82; /* tertiary text */
+ --color-text-400: 163, 163, 163; /* placeholder text */
+
+ --color-scrollbar: 163, 163, 163; /* scrollbar thumb */
+
+    --color-border-100: 245, 245, 245; /* subtle border-1 */
+    --color-border-200: 229, 229, 229; /* subtle border-2 */
+    --color-border-300: 212, 212, 212; /* strong border-1 */
+    --color-border-400: 185, 185, 185; /* strong border-2 */
+
+ --color-shadow-2xs:
+ 0px 0px 1px 0px rgba(23, 23, 23, 0.06), 0px 1px 2px 0px rgba(23, 23, 23, 0.06),
+ 0px 1px 2px 0px rgba(23, 23, 23, 0.14);
+ --color-shadow-xs:
+ 0px 1px 2px 0px rgba(0, 0, 0, 0.16), 0px 2px 4px 0px rgba(16, 24, 40, 0.12),
+ 0px 1px 8px -1px rgba(16, 24, 40, 0.1);
+ --color-shadow-sm:
+ 0px 1px 4px 0px rgba(0, 0, 0, 0.01), 0px 4px 8px 0px rgba(0, 0, 0, 0.02), 0px 1px 12px 0px rgba(0, 0, 0, 0.12);
+ --color-shadow-rg:
+ 0px 3px 6px 0px rgba(0, 0, 0, 0.1), 0px 4px 4px 0px rgba(16, 24, 40, 0.08),
+ 0px 1px 12px 0px rgba(16, 24, 40, 0.04);
+ --color-shadow-md:
+ 0px 4px 8px 0px rgba(0, 0, 0, 0.12), 0px 6px 12px 0px rgba(16, 24, 40, 0.12),
+ 0px 1px 16px 0px rgba(16, 24, 40, 0.12);
+ --color-shadow-lg:
+ 0px 6px 12px 0px rgba(0, 0, 0, 0.12), 0px 8px 16px 0px rgba(0, 0, 0, 0.12),
+ 0px 1px 24px 0px rgba(16, 24, 40, 0.12);
+ --color-shadow-xl:
+ 0px 0px 18px 0px rgba(0, 0, 0, 0.16), 0px 0px 24px 0px rgba(16, 24, 40, 0.16),
+ 0px 0px 52px 0px rgba(16, 24, 40, 0.16);
+ --color-shadow-2xl:
+ 0px 8px 16px 0px rgba(0, 0, 0, 0.12), 0px 12px 24px 0px rgba(16, 24, 40, 0.12),
+ 0px 1px 32px 0px rgba(16, 24, 40, 0.12);
+ --color-shadow-3xl:
+ 0px 12px 24px 0px rgba(0, 0, 0, 0.12), 0px 16px 32px 0px rgba(0, 0, 0, 0.12),
+ 0px 1px 48px 0px rgba(16, 24, 40, 0.12);
+ --color-shadow-4xl: 0px 8px 40px 0px rgba(0, 0, 61, 0.05), 0px 12px 32px -16px rgba(0, 0, 0, 0.05);
+
+ --color-sidebar-background-100: var(--color-background-100); /* primary sidebar bg */
+ --color-sidebar-background-90: var(--color-background-90); /* secondary sidebar bg */
+ --color-sidebar-background-80: var(--color-background-80); /* tertiary sidebar bg */
+
+ --color-sidebar-text-100: var(--color-text-100); /* primary sidebar text */
+ --color-sidebar-text-200: var(--color-text-200); /* secondary sidebar text */
+ --color-sidebar-text-300: var(--color-text-300); /* tertiary sidebar text */
+ --color-sidebar-text-400: var(--color-text-400); /* sidebar placeholder text */
+
+    --color-sidebar-border-100: var(--color-border-100); /* subtle sidebar border-1 */
+    --color-sidebar-border-200: var(--color-border-100); /* subtle sidebar border-2 */
+    --color-sidebar-border-300: var(--color-border-100); /* strong sidebar border-1 */
+    --color-sidebar-border-400: var(--color-border-100); /* strong sidebar border-2 */
+
+ --color-sidebar-shadow-2xs: var(--color-shadow-2xs);
+ --color-sidebar-shadow-xs: var(--color-shadow-xs);
+ --color-sidebar-shadow-sm: var(--color-shadow-sm);
+ --color-sidebar-shadow-rg: var(--color-shadow-rg);
+ --color-sidebar-shadow-md: var(--color-shadow-md);
+ --color-sidebar-shadow-lg: var(--color-shadow-lg);
+ --color-sidebar-shadow-xl: var(--color-shadow-xl);
+ --color-sidebar-shadow-2xl: var(--color-shadow-2xl);
+ --color-sidebar-shadow-3xl: var(--color-shadow-3xl);
+ --color-sidebar-shadow-4xl: var(--color-shadow-4xl);
+
+ /* toast theme */
+ --color-toast-success-text: 178, 221, 181;
+ --color-toast-error-text: 206, 44, 49;
+ --color-toast-warning-text: 255, 186, 24;
+ --color-toast-info-text: 141, 164, 239;
+ --color-toast-loading-text: 255, 255, 255;
+ --color-toast-secondary-text: 185, 187, 198;
+ --color-toast-tertiary-text: 139, 141, 152;
+
+ --color-toast-success-background: 46, 46, 46;
+ --color-toast-error-background: 46, 46, 46;
+ --color-toast-warning-background: 46, 46, 46;
+ --color-toast-info-background: 46, 46, 46;
+ --color-toast-loading-background: 46, 46, 46;
+
+ --color-toast-success-border: 42, 126, 59;
+ --color-toast-error-border: 100, 23, 35;
+ --color-toast-warning-border: 79, 52, 34;
+ --color-toast-info-border: 58, 91, 199;
+ --color-toast-loading-border: 96, 100, 108;
+ }
+
+ [data-theme="light"],
+ [data-theme="light-contrast"] {
+ color-scheme: light !important;
+
+ --color-background-100: 255, 255, 255; /* primary bg */
+ --color-background-90: 247, 247, 247; /* secondary bg */
+ --color-background-80: 232, 232, 232; /* tertiary bg */
+ }
+
+ [data-theme="light"] {
+ --color-text-100: 23, 23, 23; /* primary text */
+ --color-text-200: 58, 58, 58; /* secondary text */
+ --color-text-300: 82, 82, 82; /* tertiary text */
+ --color-text-400: 163, 163, 163; /* placeholder text */
+
+ --color-scrollbar: 163, 163, 163; /* scrollbar thumb */
+
+    --color-border-100: 245, 245, 245; /* subtle border-1 */
+    --color-border-200: 229, 229, 229; /* subtle border-2 */
+    --color-border-300: 212, 212, 212; /* strong border-1 */
+    --color-border-400: 185, 185, 185; /* strong border-2 */
+
+ /* toast theme */
+ --color-toast-success-text: 62, 155, 79;
+ --color-toast-error-text: 220, 62, 66;
+ --color-toast-warning-text: 255, 186, 24;
+ --color-toast-info-text: 51, 88, 212;
+ --color-toast-loading-text: 28, 32, 36;
+ --color-toast-secondary-text: 128, 131, 141;
+ --color-toast-tertiary-text: 96, 100, 108;
+
+ --color-toast-success-background: 253, 253, 254;
+ --color-toast-error-background: 255, 252, 252;
+ --color-toast-warning-background: 254, 253, 251;
+ --color-toast-info-background: 253, 253, 254;
+ --color-toast-loading-background: 253, 253, 254;
+
+ --color-toast-success-border: 218, 241, 219;
+ --color-toast-error-border: 255, 219, 220;
+ --color-toast-warning-border: 255, 247, 194;
+ --color-toast-info-border: 210, 222, 255;
+ --color-toast-loading-border: 224, 225, 230;
+ }
+
+ [data-theme="light-contrast"] {
+ --color-text-100: 11, 11, 11; /* primary text */
+ --color-text-200: 38, 38, 38; /* secondary text */
+ --color-text-300: 58, 58, 58; /* tertiary text */
+ --color-text-400: 115, 115, 115; /* placeholder text */
+
+ --color-scrollbar: 115, 115, 115; /* scrollbar thumb */
+
+    --color-border-100: 34, 34, 34; /* subtle border-1 */
+    --color-border-200: 38, 38, 38; /* subtle border-2 */
+    --color-border-300: 46, 46, 46; /* strong border-1 */
+    --color-border-400: 58, 58, 58; /* strong border-2 */
+ }
+
+ [data-theme="dark"],
+ [data-theme="dark-contrast"] {
+ color-scheme: dark !important;
+
+ --color-primary-10: 8, 31, 43;
+ --color-primary-20: 10, 37, 51;
+ --color-primary-30: 13, 49, 69;
+ --color-primary-40: 16, 58, 81;
+ --color-primary-50: 18, 68, 94;
+ --color-primary-60: 23, 86, 120;
+ --color-primary-70: 28, 104, 146;
+ --color-primary-80: 31, 116, 163;
+ --color-primary-90: 34, 129, 180;
+ --color-primary-100: 40, 146, 204;
+ --color-primary-200: 41, 154, 214;
+ --color-primary-300: 75, 170, 221;
+ --color-primary-400: 109, 186, 227;
+ --color-primary-500: 144, 202, 234;
+ --color-primary-600: 169, 214, 239;
+ --color-primary-700: 199, 229, 244;
+ --color-primary-800: 216, 237, 248;
+ --color-primary-900: 229, 243, 250;
+
+ --color-background-100: 25, 25, 25; /* primary bg */
+ --color-background-90: 32, 32, 32; /* secondary bg */
+ --color-background-80: 44, 44, 44; /* tertiary bg */
+
+ --color-shadow-2xs: 0px 0px 1px 0px rgba(0, 0, 0, 0.15), 0px 1px 3px 0px rgba(0, 0, 0, 0.5);
+ --color-shadow-xs: 0px 0px 2px 0px rgba(0, 0, 0, 0.2), 0px 2px 4px 0px rgba(0, 0, 0, 0.5);
+ --color-shadow-sm: 0px 0px 4px 0px rgba(0, 0, 0, 0.2), 0px 2px 6px 0px rgba(0, 0, 0, 0.5);
+ --color-shadow-rg: 0px 0px 6px 0px rgba(0, 0, 0, 0.2), 0px 4px 6px 0px rgba(0, 0, 0, 0.5);
+ --color-shadow-md: 0px 2px 8px 0px rgba(0, 0, 0, 0.2), 0px 4px 8px 0px rgba(0, 0, 0, 0.5);
+ --color-shadow-lg: 0px 4px 12px 0px rgba(0, 0, 0, 0.25), 0px 4px 10px 0px rgba(0, 0, 0, 0.55);
+ --color-shadow-xl: 0px 0px 14px 0px rgba(0, 0, 0, 0.25), 0px 6px 10px 0px rgba(0, 0, 0, 0.55);
+ --color-shadow-2xl: 0px 0px 18px 0px rgba(0, 0, 0, 0.25), 0px 8px 12px 0px rgba(0, 0, 0, 0.6);
+ --color-shadow-3xl: 0px 4px 24px 0px rgba(0, 0, 0, 0.3), 0px 12px 40px 0px rgba(0, 0, 0, 0.65);
+ }
+
+ [data-theme="dark"] {
+ --color-text-100: 229, 229, 229; /* primary text */
+ --color-text-200: 163, 163, 163; /* secondary text */
+ --color-text-300: 115, 115, 115; /* tertiary text */
+ --color-text-400: 82, 82, 82; /* placeholder text */
+
+ --color-scrollbar: 82, 82, 82; /* scrollbar thumb */
+
+    --color-border-100: 34, 34, 34; /* subtle border-1 */
+    --color-border-200: 38, 38, 38; /* subtle border-2 */
+    --color-border-300: 46, 46, 46; /* strong border-1 */
+    --color-border-400: 58, 58, 58; /* strong border-2 */
+ }
+
+ [data-theme="dark-contrast"] {
+ --color-text-100: 250, 250, 250; /* primary text */
+ --color-text-200: 241, 241, 241; /* secondary text */
+ --color-text-300: 212, 212, 212; /* tertiary text */
+ --color-text-400: 115, 115, 115; /* placeholder text */
+
+ --color-scrollbar: 115, 115, 115; /* scrollbar thumb */
+
+    --color-border-100: 245, 245, 245; /* subtle border-1 */
+    --color-border-200: 229, 229, 229; /* subtle border-2 */
+    --color-border-300: 212, 212, 212; /* strong border-1 */
+    --color-border-400: 185, 185, 185; /* strong border-2 */
+ }
+
+ [data-theme="light"],
+ [data-theme="dark"],
+ [data-theme="light-contrast"],
+ [data-theme="dark-contrast"] {
+ --color-sidebar-background-100: var(--color-background-100); /* primary sidebar bg */
+ --color-sidebar-background-90: var(--color-background-90); /* secondary sidebar bg */
+ --color-sidebar-background-80: var(--color-background-80); /* tertiary sidebar bg */
+
+ --color-sidebar-text-100: var(--color-text-100); /* primary sidebar text */
+ --color-sidebar-text-200: var(--color-text-200); /* secondary sidebar text */
+ --color-sidebar-text-300: var(--color-text-300); /* tertiary sidebar text */
+ --color-sidebar-text-400: var(--color-text-400); /* sidebar placeholder text */
+
+    --color-sidebar-border-100: var(--color-border-100); /* subtle sidebar border-1 */
+    --color-sidebar-border-200: var(--color-border-200); /* subtle sidebar border-2 */
+    --color-sidebar-border-300: var(--color-border-300); /* strong sidebar border-1 */
+    --color-sidebar-border-400: var(--color-border-400); /* strong sidebar border-2 */
+ }
+}
+
+* {
+ margin: 0;
+ padding: 0;
+ box-sizing: border-box;
+ -webkit-text-size-adjust: 100%;
+ -ms-text-size-adjust: 100%;
+ font-variant-ligatures: none;
+ -webkit-font-variant-ligatures: none;
+ text-rendering: optimizeLegibility;
+ -moz-osx-font-smoothing: grayscale;
+ -webkit-font-smoothing: antialiased;
+}
+
+body {
+ color: rgba(var(--color-text-100));
+}
+
+/* scrollbar style */
+@-moz-document url-prefix() {
+ * {
+ scrollbar-width: none;
+ }
+ .vertical-scrollbar,
+ .horizontal-scrollbar {
+ scrollbar-width: initial;
+ scrollbar-color: rgba(96, 100, 108, 0.1) transparent;
+ }
+ .vertical-scrollbar:hover,
+ .horizontal-scrollbar:hover {
+ scrollbar-color: rgba(96, 100, 108, 0.25) transparent;
+ }
+ .vertical-scrollbar:active,
+ .horizontal-scrollbar:active {
+ scrollbar-color: rgba(96, 100, 108, 0.7) transparent;
+ }
+}
+
+.vertical-scrollbar {
+ overflow-y: auto;
+}
+.horizontal-scrollbar {
+ overflow-x: auto;
+}
+.vertical-scrollbar::-webkit-scrollbar,
+.horizontal-scrollbar::-webkit-scrollbar {
+ display: block;
+}
+.vertical-scrollbar::-webkit-scrollbar-track,
+.horizontal-scrollbar::-webkit-scrollbar-track {
+ background-color: transparent;
+ border-radius: 9999px;
+}
+.vertical-scrollbar::-webkit-scrollbar-thumb,
+.horizontal-scrollbar::-webkit-scrollbar-thumb {
+ background-clip: padding-box;
+ background-color: rgba(96, 100, 108, 0.1);
+ border-radius: 9999px;
+}
+.vertical-scrollbar:hover::-webkit-scrollbar-thumb,
+.horizontal-scrollbar:hover::-webkit-scrollbar-thumb {
+ background-color: rgba(96, 100, 108, 0.25);
+}
+.vertical-scrollbar::-webkit-scrollbar-thumb:hover,
+.horizontal-scrollbar::-webkit-scrollbar-thumb:hover {
+ background-color: rgba(96, 100, 108, 0.5);
+}
+.vertical-scrollbar::-webkit-scrollbar-thumb:active,
+.horizontal-scrollbar::-webkit-scrollbar-thumb:active {
+ background-color: rgba(96, 100, 108, 0.7);
+}
+.vertical-scrollbar::-webkit-scrollbar-corner,
+.horizontal-scrollbar::-webkit-scrollbar-corner {
+ background-color: transparent;
+}
+.vertical-scrollbar-margin-top-md::-webkit-scrollbar-track {
+ margin-top: 44px;
+}
+
+/* scrollbar sm size */
+.scrollbar-sm::-webkit-scrollbar {
+ height: 12px;
+ width: 12px;
+}
+.scrollbar-sm::-webkit-scrollbar-thumb {
+ border: 3px solid rgba(0, 0, 0, 0);
+}
+/* scrollbar md size */
+.scrollbar-md::-webkit-scrollbar {
+ height: 14px;
+ width: 14px;
+}
+.scrollbar-md::-webkit-scrollbar-thumb {
+ border: 3px solid rgba(0, 0, 0, 0);
+}
+/* scrollbar lg size */
+
+.scrollbar-lg::-webkit-scrollbar {
+ height: 16px;
+ width: 16px;
+}
+.scrollbar-lg::-webkit-scrollbar-thumb {
+ border: 4px solid rgba(0, 0, 0, 0);
+}
+/* end scrollbar style */
+
+/* progress bar */
+.progress-bar {
+ fill: currentColor;
+ color: rgba(var(--color-sidebar-background-100));
+}
+
+::-webkit-input-placeholder {
+  color: rgb(var(--color-text-400));
+}
+
+:-ms-input-placeholder {
+  color: rgb(var(--color-text-400));
+}
+
+::placeholder {
+  color: rgb(var(--color-text-400));
+}
diff --git a/apps/admin/tailwind.config.js b/apps/admin/tailwind.config.js
new file mode 100644
index 00000000..a05d9dcd
--- /dev/null
+++ b/apps/admin/tailwind.config.js
@@ -0,0 +1,6 @@
+/* eslint-disable @typescript-eslint/no-require-imports */
+const sharedConfig = require("@plane/tailwind-config/tailwind.config.js");
+
+module.exports = {
+ presets: [sharedConfig],
+};
diff --git a/apps/admin/tsconfig.json b/apps/admin/tsconfig.json
new file mode 100644
index 00000000..d85abf2c
--- /dev/null
+++ b/apps/admin/tsconfig.json
@@ -0,0 +1,21 @@
+{
+ "extends": "@plane/typescript-config/nextjs.json",
+ "compilerOptions": {
+ "plugins": [
+ {
+ "name": "next"
+ }
+ ],
+ "baseUrl": ".",
+ "paths": {
+ "@/app/*": ["app/*"],
+ "@/*": ["core/*"],
+ "@/public/*": ["public/*"],
+ "@/plane-admin/*": ["ce/*"],
+ "@/styles/*": ["styles/*"]
+ },
+ "strictNullChecks": true
+ },
+ "include": ["next-env.d.ts", "next.config.js", "**/*.ts", "**/*.tsx", ".next/types/**/*.ts"],
+ "exclude": ["node_modules"]
+}
diff --git a/apps/api/.coveragerc b/apps/api/.coveragerc
new file mode 100644
index 00000000..bd829d14
--- /dev/null
+++ b/apps/api/.coveragerc
@@ -0,0 +1,25 @@
+[run]
+source = plane
+omit =
+ */tests/*
+ */migrations/*
+ */settings/*
+ */wsgi.py
+ */asgi.py
+ */urls.py
+ manage.py
+ */admin.py
+ */apps.py
+
+[report]
+exclude_lines =
+ pragma: no cover
+ def __repr__
+ if self.debug:
+ raise NotImplementedError
+ if __name__ == .__main__.
+ pass
+ raise ImportError
+
+[html]
+directory = htmlcov
\ No newline at end of file
diff --git a/apps/api/.env.example b/apps/api/.env.example
new file mode 100644
index 00000000..f158e3d7
--- /dev/null
+++ b/apps/api/.env.example
@@ -0,0 +1,72 @@
+# Backend
+# Debug value for the API server; set it to 0 for production use
+DEBUG=0
+CORS_ALLOWED_ORIGINS="http://localhost:3000,http://localhost:3001,http://localhost:3002,http://localhost:3100"
+
+# Database Settings
+POSTGRES_USER="plane"
+POSTGRES_PASSWORD="plane"
+POSTGRES_HOST="plane-db"
+POSTGRES_DB="plane"
+POSTGRES_PORT=5432
+DATABASE_URL=postgresql://${POSTGRES_USER}:${POSTGRES_PASSWORD}@${POSTGRES_HOST}:${POSTGRES_PORT}/${POSTGRES_DB}
+
+# Redis Settings
+REDIS_HOST="plane-redis"
+REDIS_PORT="6379"
+REDIS_URL="redis://${REDIS_HOST}:${REDIS_PORT}/"
+
+# RabbitMQ Settings
+RABBITMQ_HOST="plane-mq"
+RABBITMQ_PORT="5672"
+RABBITMQ_USER="plane"
+RABBITMQ_PASSWORD="plane"
+RABBITMQ_VHOST="plane"
+
+# AWS Settings
+AWS_REGION=""
+AWS_ACCESS_KEY_ID="access-key"
+AWS_SECRET_ACCESS_KEY="secret-key"
+AWS_S3_ENDPOINT_URL="http://localhost:9000"
+# Changing this requires change in the proxy config for uploads if using minio setup
+AWS_S3_BUCKET_NAME="uploads"
+# Maximum file upload limit
+FILE_SIZE_LIMIT=5242880
+
+# Settings related to Docker
+DOCKERIZED=1 # deprecated
+
+# set to 1 If using the pre-configured minio setup
+USE_MINIO=0
+
+# Email redirections and minio domain settings
+WEB_URL="http://localhost:8000"
+
+# Gunicorn Workers
+GUNICORN_WORKERS=2
+
+# Base URLs
+ADMIN_BASE_URL="http://localhost:3001"
+ADMIN_BASE_PATH="/god-mode"
+
+SPACE_BASE_URL="http://localhost:3002"
+SPACE_BASE_PATH="/spaces"
+
+APP_BASE_URL="http://localhost:3000"
+APP_BASE_PATH=""
+
+LIVE_BASE_URL="http://localhost:3100"
+LIVE_BASE_PATH="/live"
+
+LIVE_SERVER_SECRET_KEY="secret-key"
+
+# Hard delete files after days
+HARD_DELETE_AFTER_DAYS=60
+
+# Force HTTPS for handling SSL Termination
+MINIO_ENDPOINT_SSL=0
+
+# API key rate limit
+API_KEY_RATE_LIMIT="60/minute"
diff --git a/apps/api/Dockerfile.api b/apps/api/Dockerfile.api
new file mode 100644
index 00000000..13251481
--- /dev/null
+++ b/apps/api/Dockerfile.api
@@ -0,0 +1,58 @@
+FROM python:3.12.10-alpine
+
+# set environment variables
+ENV PYTHONDONTWRITEBYTECODE=1
+ENV PYTHONUNBUFFERED=1
+ENV PIP_DISABLE_PIP_VERSION_CHECK=1
+ENV INSTANCE_CHANGELOG_URL=https://sites.plane.so/pages/691ef037bcfe416a902e48cb55f59891/
+
+# Update system packages for security
+RUN apk update && apk upgrade
+
+WORKDIR /code
+
+RUN apk add --no-cache --upgrade \
+ "libpq" \
+ "libxslt" \
+ "xmlsec" \
+ "ca-certificates" \
+ "openssl"
+
+COPY requirements.txt ./
+COPY requirements ./requirements
+RUN apk add --no-cache libffi-dev
+RUN apk add --no-cache --virtual .build-deps \
+ "bash~=5.2" \
+ "g++" \
+ "gcc" \
+ "cargo" \
+ "git" \
+ "make" \
+ "postgresql-dev" \
+ "libc-dev" \
+ "linux-headers" \
+ && \
+ pip install -r requirements.txt --compile --no-cache-dir \
+ && \
+ apk del .build-deps \
+ && \
+ rm -rf /var/cache/apk/*
+
+
+# Add in Django deps and generate Django's static files
+COPY manage.py manage.py
+COPY plane plane/
+COPY templates templates/
+COPY package.json package.json
+
+RUN apk --no-cache add "bash~=5.2"
+COPY ./bin ./bin/
+
+RUN mkdir -p /code/plane/logs
+RUN chmod +x ./bin/*
+RUN chmod -R 777 /code
+
+# Expose container port and run entry point script
+EXPOSE 8000
+
+CMD ["./bin/docker-entrypoint-api.sh"]
\ No newline at end of file
diff --git a/apps/api/Dockerfile.dev b/apps/api/Dockerfile.dev
new file mode 100644
index 00000000..3ec8c634
--- /dev/null
+++ b/apps/api/Dockerfile.dev
@@ -0,0 +1,46 @@
+FROM python:3.12.5-alpine AS backend
+
+# set environment variables
+ENV PYTHONDONTWRITEBYTECODE=1
+ENV PYTHONUNBUFFERED=1
+ENV PIP_DISABLE_PIP_VERSION_CHECK=1
+ENV INSTANCE_CHANGELOG_URL=https://sites.plane.so/pages/691ef037bcfe416a902e48cb55f59891/
+
+RUN apk --no-cache add \
+ "bash~=5.2" \
+ "libpq" \
+ "libxslt" \
+ "nodejs-current" \
+ "xmlsec" \
+ "libffi-dev" \
+ "bash~=5.2" \
+ "g++" \
+ "gcc" \
+ "cargo" \
+ "git" \
+ "make" \
+ "postgresql-dev" \
+ "libc-dev" \
+ "linux-headers"
+
+WORKDIR /code
+
+COPY requirements.txt ./requirements.txt
+ADD requirements ./requirements
+
+# Install the local development settings
+RUN pip install -r requirements/local.txt --compile --no-cache-dir
+
+
+COPY . .
+
+RUN mkdir -p /code/plane/logs
+RUN chmod -R +x /code/bin
+RUN chmod -R 777 /code
+
+
+# Expose container port and run entry point script
+EXPOSE 8000
+
+CMD [ "./bin/docker-entrypoint-api-local.sh" ]
+
diff --git a/apps/api/bin/docker-entrypoint-api-local.sh b/apps/api/bin/docker-entrypoint-api-local.sh
new file mode 100755
index 00000000..b5489b46
--- /dev/null
+++ b/apps/api/bin/docker-entrypoint-api-local.sh
@@ -0,0 +1,34 @@
+#!/bin/bash
+set -e
+python manage.py wait_for_db
+# Wait for migrations
+python manage.py wait_for_migrations
+
+
+# Collect system information
+HOSTNAME=$(hostname)
+MAC_ADDRESS=$(ip link show | awk '/ether/ {print $2}' | head -n 1)
+CPU_INFO=$(cat /proc/cpuinfo)
+MEMORY_INFO=$(free -h)
+DISK_INFO=$(df -h)
+
+# Concatenate information and compute SHA-256 hash
+SIGNATURE=$(echo "$HOSTNAME$MAC_ADDRESS$CPU_INFO$MEMORY_INFO$DISK_INFO" | sha256sum | awk '{print $1}')
+
+# Export the variables
+export MACHINE_SIGNATURE=$SIGNATURE
+
+# Register instance
+python manage.py register_instance "$MACHINE_SIGNATURE"
+# Load the configuration variable
+python manage.py configure_instance
+
+# Create the default bucket
+python manage.py create_bucket
+
+# Clear Cache before starting to remove stale values
+python manage.py clear_cache
+
+python manage.py runserver 0.0.0.0:8000 --settings=plane.settings.local
diff --git a/apps/api/bin/docker-entrypoint-api.sh b/apps/api/bin/docker-entrypoint-api.sh
new file mode 100755
index 00000000..5a1da157
--- /dev/null
+++ b/apps/api/bin/docker-entrypoint-api.sh
@@ -0,0 +1,35 @@
+#!/bin/bash
+set -e
+python manage.py wait_for_db
+# Wait for migrations
+python manage.py wait_for_migrations
+
+
+# Collect system information
+HOSTNAME=$(hostname)
+MAC_ADDRESS=$(ip link show | awk '/ether/ {print $2}' | head -n 1)
+CPU_INFO=$(cat /proc/cpuinfo)
+MEMORY_INFO=$(free -h)
+DISK_INFO=$(df -h)
+
+# Concatenate information and compute SHA-256 hash
+SIGNATURE=$(echo "$HOSTNAME$MAC_ADDRESS$CPU_INFO$MEMORY_INFO$DISK_INFO" | sha256sum | awk '{print $1}')
+
+# Export the variables
+export MACHINE_SIGNATURE=$SIGNATURE
+
+# Register instance
+python manage.py register_instance "$MACHINE_SIGNATURE"
+
+# Load the configuration variable
+python manage.py configure_instance
+
+# Create the default bucket
+python manage.py create_bucket
+
+# Clear Cache before starting to remove stale values
+python manage.py clear_cache
+
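+# Serve the ASGI app with gunicorn using uvicorn workers; --max-requests with jitter
+# recycles workers periodically so that slow memory growth cannot accumulate.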
+exec gunicorn -w "$GUNICORN_WORKERS" -k uvicorn.workers.UvicornWorker plane.asgi:application --bind 0.0.0.0:"${PORT:-8000}" --max-requests 1200 --max-requests-jitter 1000 --access-logfile -
diff --git a/apps/api/bin/docker-entrypoint-beat.sh b/apps/api/bin/docker-entrypoint-beat.sh
new file mode 100755
index 00000000..3a9602a9
--- /dev/null
+++ b/apps/api/bin/docker-entrypoint-beat.sh
@@ -0,0 +1,8 @@
+#!/bin/bash
+set -e
+
+python manage.py wait_for_db
+# Wait for migrations
+python manage.py wait_for_migrations
+# Run the processes
+celery -A plane beat -l info
\ No newline at end of file
diff --git a/apps/api/bin/docker-entrypoint-migrator.sh b/apps/api/bin/docker-entrypoint-migrator.sh
new file mode 100755
index 00000000..104b3902
--- /dev/null
+++ b/apps/api/bin/docker-entrypoint-migrator.sh
@@ -0,0 +1,6 @@
+#!/bin/bash
+set -e
+
+python manage.py wait_for_db $1
+
+python manage.py migrate $1
\ No newline at end of file
diff --git a/apps/api/bin/docker-entrypoint-worker.sh b/apps/api/bin/docker-entrypoint-worker.sh
new file mode 100755
index 00000000..a70b5f77
--- /dev/null
+++ b/apps/api/bin/docker-entrypoint-worker.sh
@@ -0,0 +1,8 @@
+#!/bin/bash
+set -e
+
+python manage.py wait_for_db
+# Wait for migrations
+python manage.py wait_for_migrations
+# Run the processes
+celery -A plane worker -l info
\ No newline at end of file
diff --git a/apps/api/manage.py b/apps/api/manage.py
new file mode 100644
index 00000000..97286946
--- /dev/null
+++ b/apps/api/manage.py
@@ -0,0 +1,15 @@
+#!/usr/bin/env python
+import os
+import sys
+
+if __name__ == "__main__":
+ os.environ.setdefault("DJANGO_SETTINGS_MODULE", "plane.settings.production")
+ try:
+ from django.core.management import execute_from_command_line
+ except ImportError as exc:
+ raise ImportError(
+ "Couldn't import Django. Are you sure it's installed and "
+ "available on your PYTHONPATH environment variable? Did you "
+ "forget to activate a virtual environment?"
+ ) from exc
+ execute_from_command_line(sys.argv)
diff --git a/apps/api/package.json b/apps/api/package.json
new file mode 100644
index 00000000..ffecb3a7
--- /dev/null
+++ b/apps/api/package.json
@@ -0,0 +1,7 @@
+{
+ "name": "plane-api",
+ "version": "1.1.0",
+ "license": "AGPL-3.0",
+ "private": true,
+ "description": "API server powering Plane's backend"
+}
diff --git a/apps/api/plane/__init__.py b/apps/api/plane/__init__.py
new file mode 100644
index 00000000..53f4ccb1
--- /dev/null
+++ b/apps/api/plane/__init__.py
@@ -0,0 +1,3 @@
+from .celery import app as celery_app
+
+__all__ = ("celery_app",)
diff --git a/apps/api/plane/analytics/__init__.py b/apps/api/plane/analytics/__init__.py
new file mode 100644
index 00000000..e69de29b
diff --git a/apps/api/plane/analytics/apps.py b/apps/api/plane/analytics/apps.py
new file mode 100644
index 00000000..52a59f31
--- /dev/null
+++ b/apps/api/plane/analytics/apps.py
@@ -0,0 +1,5 @@
+from django.apps import AppConfig
+
+
+class AnalyticsConfig(AppConfig):
+ name = "plane.analytics"
diff --git a/apps/api/plane/api/__init__.py b/apps/api/plane/api/__init__.py
new file mode 100644
index 00000000..e69de29b
diff --git a/apps/api/plane/api/apps.py b/apps/api/plane/api/apps.py
new file mode 100644
index 00000000..f1f53111
--- /dev/null
+++ b/apps/api/plane/api/apps.py
@@ -0,0 +1,12 @@
+from django.apps import AppConfig
+
+
+class ApiConfig(AppConfig):
+ name = "plane.api"
+
+ def ready(self):
+ # Import authentication extensions to register them with drf-spectacular
+ try:
+ import plane.utils.openapi.auth # noqa
+ except ImportError:
+ pass
diff --git a/apps/api/plane/api/middleware/__init__.py b/apps/api/plane/api/middleware/__init__.py
new file mode 100644
index 00000000..e69de29b
diff --git a/apps/api/plane/api/middleware/api_authentication.py b/apps/api/plane/api/middleware/api_authentication.py
new file mode 100644
index 00000000..ddabb413
--- /dev/null
+++ b/apps/api/plane/api/middleware/api_authentication.py
@@ -0,0 +1,47 @@
+# Django imports
+from django.utils import timezone
+from django.db.models import Q
+
+# Third party imports
+from rest_framework import authentication
+from rest_framework.exceptions import AuthenticationFailed
+
+# Module imports
+from plane.db.models import APIToken
+
+
+class APIKeyAuthentication(authentication.BaseAuthentication):
+ """
+ Authentication with an API Key
+ """
+
+ www_authenticate_realm = "api"
+ media_type = "application/json"
+ auth_header_name = "X-Api-Key"
+
+ def get_api_token(self, request):
+ return request.headers.get(self.auth_header_name)
+
+ def validate_api_token(self, token):
+ try:
+ api_token = APIToken.objects.get(
+ Q(Q(expired_at__gt=timezone.now()) | Q(expired_at__isnull=True)),
+ token=token,
+ is_active=True,
+ )
+ except APIToken.DoesNotExist:
+ raise AuthenticationFailed("Given API token is not valid")
+
+ # Record the token's last-used timestamp before returning
+ api_token.last_used = timezone.now()
+ api_token.save(update_fields=["last_used"])
+ return (api_token.user, api_token.token)
+
+ def authenticate(self, request):
+ token = self.get_api_token(request=request)
+ if not token:
+ return None
+
+ # Validate the API token
+ user, token = self.validate_api_token(token)
+ return user, token
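How a client exercises this scheme, sketched with placeholder values — the base URL, path, and token are illustrative, not part of this commit:

    import requests  # third-party HTTP client, assumed available

    # APIKeyAuthentication reads only the X-Api-Key header (auth_header_name above).
    resp = requests.get(
        "https://plane.example.com/api/v1/workspaces/acme/projects/",
        headers={"X-Api-Key": "plane_api_xxxxxxxx"},  # placeholder token
    )
    # A missing header makes authenticate() return None so other authenticators can run;
    # an inactive or expired token raises AuthenticationFailed (HTTP 401).
    print(resp.status_code)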
diff --git a/apps/api/plane/api/rate_limit.py b/apps/api/plane/api/rate_limit.py
new file mode 100644
index 00000000..0d266e98
--- /dev/null
+++ b/apps/api/plane/api/rate_limit.py
@@ -0,0 +1,53 @@
+# python imports
+import os
+
+# Third party imports
+from rest_framework.throttling import SimpleRateThrottle
+
+
+class ApiKeyRateThrottle(SimpleRateThrottle):
+ scope = "api_key"
+ rate = os.environ.get("API_KEY_RATE_LIMIT", "60/minute")
+
+ def get_cache_key(self, request, view):
+ # Retrieve the API key from the request header
+ api_key = request.headers.get("X-Api-Key")
+ if not api_key:
+ return None # Allow the request if there's no API key
+
+ # Use the API key as part of the cache key
+ return f"{self.scope}:{api_key}"
+
+ def allow_request(self, request, view):
+ allowed = super().allow_request(request, view)
+
+ if allowed:
+ now = self.timer()
+ # Calculate the remaining limit and reset time
+ history = self.cache.get(self.key, [])
+
+ # Drop entries that have aged out of the window
+ while history and history[-1] <= now - self.duration:
+ history.pop()
+
+ # Count the requests remaining in the window
+ num_requests = len(history)
+
+ # Work out how many requests are still available
+ available = self.num_requests - num_requests
+
+ # Unix timestamp for when the rate limit will reset
+ reset_time = int(now + self.duration)
+
+ # Stash the quota details on request.META for downstream handlers
+ request.META["X-RateLimit-Remaining"] = max(0, available)
+ request.META["X-RateLimit-Reset"] = reset_time
+
+ return allowed
+
+
+class ServiceTokenRateThrottle(ApiKeyRateThrottle):
+ """Same keying and header bookkeeping as ApiKeyRateThrottle, with a higher service quota."""
+
+ scope = "service_token"
+ rate = "300/minute"
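A sketch of how either throttle plugs into a view via DRF's standard throttle_classes hook; the view itself is hypothetical:

    from rest_framework.response import Response
    from rest_framework.views import APIView

    from plane.api.rate_limit import ApiKeyRateThrottle


    class ExampleView(APIView):  # illustrative view, not part of this commit
        throttle_classes = [ApiKeyRateThrottle]  # 60/minute per X-Api-Key by default

        def get(self, request):
            # allow_request() stashed the quota bookkeeping on request.META
            return Response({"remaining": request.META.get("X-RateLimit-Remaining")})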
diff --git a/apps/api/plane/api/serializers/__init__.py b/apps/api/plane/api/serializers/__init__.py
new file mode 100644
index 00000000..7596915e
--- /dev/null
+++ b/apps/api/plane/api/serializers/__init__.py
@@ -0,0 +1,55 @@
+from .user import UserLiteSerializer
+from .workspace import WorkspaceLiteSerializer
+from .project import (
+ ProjectSerializer,
+ ProjectLiteSerializer,
+ ProjectCreateSerializer,
+ ProjectUpdateSerializer,
+)
+from .issue import (
+ IssueSerializer,
+ LabelCreateUpdateSerializer,
+ LabelSerializer,
+ IssueLinkSerializer,
+ IssueCommentSerializer,
+ IssueAttachmentSerializer,
+ IssueActivitySerializer,
+ IssueExpandSerializer,
+ IssueLiteSerializer,
+ IssueAttachmentUploadSerializer,
+ IssueSearchSerializer,
+ IssueCommentCreateSerializer,
+ IssueLinkCreateSerializer,
+ IssueLinkUpdateSerializer,
+)
+from .state import StateLiteSerializer, StateSerializer
+from .cycle import (
+ CycleSerializer,
+ CycleIssueSerializer,
+ CycleLiteSerializer,
+ CycleIssueRequestSerializer,
+ TransferCycleIssueRequestSerializer,
+ CycleCreateSerializer,
+ CycleUpdateSerializer,
+)
+from .module import (
+ ModuleSerializer,
+ ModuleIssueSerializer,
+ ModuleLiteSerializer,
+ ModuleIssueRequestSerializer,
+ ModuleCreateSerializer,
+ ModuleUpdateSerializer,
+)
+from .intake import (
+ IntakeIssueSerializer,
+ IntakeIssueCreateSerializer,
+ IntakeIssueUpdateSerializer,
+)
+from .estimate import EstimatePointSerializer
+from .asset import (
+ UserAssetUploadSerializer,
+ AssetUpdateSerializer,
+ GenericAssetUploadSerializer,
+ GenericAssetUpdateSerializer,
+ FileAssetSerializer,
+)
diff --git a/apps/api/plane/api/serializers/asset.py b/apps/api/plane/api/serializers/asset.py
new file mode 100644
index 00000000..6b74b375
--- /dev/null
+++ b/apps/api/plane/api/serializers/asset.py
@@ -0,0 +1,119 @@
+# Third party imports
+from rest_framework import serializers
+
+# Module imports
+from .base import BaseSerializer
+from plane.db.models import FileAsset
+
+
+class UserAssetUploadSerializer(serializers.Serializer):
+ """
+ Serializer for user asset upload requests.
+
+ This serializer validates the metadata required to generate a presigned URL
+ for uploading user profile assets (avatar or cover image) directly to S3 storage.
+ Supports JPEG, PNG, WebP, JPG, and GIF image formats with size validation.
+ """
+
+ name = serializers.CharField(help_text="Original filename of the asset")
+ type = serializers.ChoiceField(
+ choices=[
+ ("image/jpeg", "JPEG"),
+ ("image/png", "PNG"),
+ ("image/webp", "WebP"),
+ ("image/jpg", "JPG"),
+ ("image/gif", "GIF"),
+ ],
+ default="image/jpeg",
+ help_text="MIME type of the file",
+ style={"placeholder": "image/jpeg"},
+ )
+ size = serializers.IntegerField(help_text="File size in bytes")
+ entity_type = serializers.ChoiceField(
+ choices=[
+ (FileAsset.EntityTypeContext.USER_AVATAR, "User Avatar"),
+ (FileAsset.EntityTypeContext.USER_COVER, "User Cover"),
+ ],
+ help_text="Type of user asset",
+ )
+
+
+class AssetUpdateSerializer(serializers.Serializer):
+ """
+ Serializer for asset status updates after successful upload completion.
+
+ Handles post-upload asset metadata updates including attribute modifications
+ and upload confirmation for S3-based file storage workflows.
+ """
+
+ attributes = serializers.JSONField(required=False, help_text="Additional attributes to update for the asset")
+
+
+class GenericAssetUploadSerializer(serializers.Serializer):
+ """
+ Serializer for generic asset upload requests with project association.
+
+ Validates metadata for generating presigned URLs for workspace assets including
+ project association, external system tracking, and file validation for
+ document management and content storage workflows.
+ """
+
+ name = serializers.CharField(help_text="Original filename of the asset")
+ type = serializers.CharField(required=False, help_text="MIME type of the file")
+ size = serializers.IntegerField(help_text="File size in bytes")
+ project_id = serializers.UUIDField(
+ required=False,
+ help_text="UUID of the project to associate with the asset",
+ style={"placeholder": "123e4567-e89b-12d3-a456-426614174000"},
+ )
+ external_id = serializers.CharField(
+ required=False,
+ help_text="External identifier for the asset (for integration tracking)",
+ )
+ external_source = serializers.CharField(
+ required=False, help_text="External source system (for integration tracking)"
+ )
+
+
+class GenericAssetUpdateSerializer(serializers.Serializer):
+ """
+ Serializer for generic asset upload confirmation and status management.
+
+ Handles post-upload status updates for workspace assets including
+ upload completion marking and metadata finalization.
+ """
+
+ is_uploaded = serializers.BooleanField(default=True, help_text="Whether the asset has been successfully uploaded")
+
+
+class FileAssetSerializer(BaseSerializer):
+ """
+ Comprehensive file asset serializer with complete metadata and URL generation.
+
+ Provides full file asset information including storage metadata, access URLs,
+ relationship data, and upload status for complete asset management workflows.
+ """
+
+ asset_url = serializers.CharField(read_only=True)
+
+ class Meta:
+ model = FileAsset
+ fields = "__all__"
+ read_only_fields = [
+ "id",
+ "created_by",
+ "updated_by",
+ "created_at",
+ "updated_at",
+ "workspace",
+ "project",
+ "issue",
+ "comment",
+ "page",
+ "draft_issue",
+ "user",
+ "is_deleted",
+ "deleted_at",
+ "storage_metadata",
+ "asset_url",
+ ]
diff --git a/apps/api/plane/api/serializers/base.py b/apps/api/plane/api/serializers/base.py
new file mode 100644
index 00000000..bc790f2c
--- /dev/null
+++ b/apps/api/plane/api/serializers/base.py
@@ -0,0 +1,114 @@
+# Third party imports
+from rest_framework import serializers
+
+
+class BaseSerializer(serializers.ModelSerializer):
+ """
+ Base serializer providing common functionality for all model serializers.
+
+ Features field filtering, dynamic expansion of related fields, and standardized
+ primary key handling for consistent API responses across the application.
+ """
+
+ id = serializers.PrimaryKeyRelatedField(read_only=True)
+
+ def __init__(self, *args, **kwargs):
+ # If 'fields' is provided in the arguments, remove it and store it separately.
+ # This is done so as not to pass this custom argument up to the superclass.
+ fields = kwargs.pop("fields", [])
+ self.expand = kwargs.pop("expand", []) or []
+
+ # Call the initialization of the superclass.
+ super().__init__(*args, **kwargs)
+
+ # If 'fields' was provided, filter the fields of the serializer accordingly.
+ if fields:
+ self.fields = self._filter_fields(fields=fields)
+
+ def _filter_fields(self, fields):
+ """
+ Adjust the serializer's fields based on the provided 'fields' list.
+
+ :param fields: List or dictionary specifying which
+ fields to include in the serializer.
+ :return: The updated fields for the serializer.
+ """
+ # Check each field_name in the provided fields.
+ for field_name in fields:
+ # If the field is a dictionary (indicating nested fields),
+ # loop through its keys and values.
+ if isinstance(field_name, dict):
+ for key, value in field_name.items():
+ # If the value of this nested field is a list, recurse into the
+ # nested serializer and filter its fields the same way.
+ if isinstance(value, list) and hasattr(self.fields[key], "_filter_fields"):
+ self.fields[key]._filter_fields(fields=value)
+
+ # Create a list to store allowed fields.
+ allowed = []
+ for item in fields:
+ # If the item is a string, it directly represents a field's name.
+ if isinstance(item, str):
+ allowed.append(item)
+ # If the item is a dictionary, it represents a nested field.
+ # Add the key of this dictionary to the allowed list.
+ elif isinstance(item, dict):
+ allowed.append(list(item.keys())[0])
+
+ # Convert the current serializer's fields and the allowed fields to sets.
+ existing = set(self.fields)
+ allowed = set(allowed)
+
+ # Remove fields from the serializer that aren't in the 'allowed' list.
+ for field_name in existing - allowed:
+ self.fields.pop(field_name)
+
+ return self.fields
+
+ def to_representation(self, instance):
+ response = super().to_representation(instance)
+
+ # Only attempt expansion when expand fields were requested
+ if self.expand:
+ for expand in self.expand:
+ if expand in self.fields:
+ # Import all the expandable serializers
+ from . import (
+ IssueSerializer,
+ IssueLiteSerializer,
+ ProjectLiteSerializer,
+ StateLiteSerializer,
+ UserLiteSerializer,
+ WorkspaceLiteSerializer,
+ EstimatePointSerializer,
+ )
+
+ # Expansion mapper
+ expansion = {
+ "user": UserLiteSerializer,
+ "workspace": WorkspaceLiteSerializer,
+ "project": ProjectLiteSerializer,
+ "default_assignee": UserLiteSerializer,
+ "project_lead": UserLiteSerializer,
+ "state": StateLiteSerializer,
+ "created_by": UserLiteSerializer,
+ "updated_by": UserLiteSerializer,
+ "issue": IssueSerializer,
+ "actor": UserLiteSerializer,
+ "owned_by": UserLiteSerializer,
+ "members": UserLiteSerializer,
+ "parent": IssueLiteSerializer,
+ "estimate_point": EstimatePointSerializer,
+ }
+ # Check if field in expansion then expand the field
+ if expand in expansion:
+ if isinstance(response.get(expand), list):
+ exp_serializer = expansion[expand](getattr(instance, expand), many=True)
+ else:
+ exp_serializer = expansion[expand](getattr(instance, expand))
+ response[expand] = exp_serializer.data
+ else:
+ # Fall back to the raw foreign key id when no expansion serializer is registered
+ response[expand] = getattr(instance, f"{expand}_id", None)
+
+ return response
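The two custom kwargs in action, as a sketch — project stands in for any model instance handled by a BaseSerializer subclass:

    from plane.api.serializers import ProjectSerializer

    # Trim the payload down to the listed fields:
    slim = ProjectSerializer(project, fields=["id", "name", "identifier"]).data

    # Replace the "workspace" id with a nested WorkspaceLiteSerializer payload:
    expanded = ProjectSerializer(project, expand=["workspace"]).data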
diff --git a/apps/api/plane/api/serializers/cycle.py b/apps/api/plane/api/serializers/cycle.py
new file mode 100644
index 00000000..6b7bfa44
--- /dev/null
+++ b/apps/api/plane/api/serializers/cycle.py
@@ -0,0 +1,186 @@
+# Third party imports
+import pytz
+from rest_framework import serializers
+
+# Module imports
+from .base import BaseSerializer
+from plane.db.models import Cycle, CycleIssue, User
+from plane.utils.timezone_converter import convert_to_utc
+
+
+class CycleCreateSerializer(BaseSerializer):
+ """
+ Serializer for creating cycles with timezone handling and date validation.
+
+ Manages cycle creation including project timezone conversion, date range validation,
+ and UTC normalization for time-bound iteration planning and sprint management.
+ """
+
+ owned_by = serializers.PrimaryKeyRelatedField(
+ queryset=User.objects.all(),
+ required=False,
+ allow_null=True,
+ help_text="User who owns the cycle. If not provided, defaults to the current user.",
+ )
+
+ def __init__(self, *args, **kwargs):
+ super().__init__(*args, **kwargs)
+ project = self.context.get("project")
+ if project and project.timezone:
+ project_timezone = pytz.timezone(project.timezone)
+ self.fields["start_date"].timezone = project_timezone
+ self.fields["end_date"].timezone = project_timezone
+
+ class Meta:
+ model = Cycle
+ fields = [
+ "name",
+ "description",
+ "start_date",
+ "end_date",
+ "owned_by",
+ "external_source",
+ "external_id",
+ "timezone",
+ ]
+ read_only_fields = [
+ "id",
+ "workspace",
+ "project",
+ "created_by",
+ "updated_by",
+ "created_at",
+ "updated_at",
+ "deleted_at",
+ ]
+
+ def validate(self, data):
+ if (
+ data.get("start_date", None) is not None
+ and data.get("end_date", None) is not None
+ and data.get("start_date", None) > data.get("end_date", None)
+ ):
+ raise serializers.ValidationError("Start date cannot exceed end date")
+
+ if data.get("start_date", None) is not None and data.get("end_date", None) is not None:
+ project_id = self.initial_data.get("project_id") or (
+ self.instance.project_id if self.instance and hasattr(self.instance, "project_id") else None
+ )
+
+ if not project_id:
+ raise serializers.ValidationError("Project ID is required")
+
+ data["start_date"] = convert_to_utc(
+ date=str(data.get("start_date").date()),
+ project_id=project_id,
+ is_start_date=True,
+ )
+ data["end_date"] = convert_to_utc(
+ date=str(data.get("end_date", None).date()),
+ project_id=project_id,
+ )
+
+ if not data.get("owned_by"):
+ data["owned_by"] = self.context["request"].user
+
+ return data
+
+
+class CycleUpdateSerializer(CycleCreateSerializer):
+ """
+ Serializer for updating cycles with enhanced ownership management.
+
+ Extends cycle creation with update-specific features including ownership
+ assignment and modification tracking for cycle lifecycle management.
+ """
+
+ class Meta(CycleCreateSerializer.Meta):
+ model = Cycle
+ fields = CycleCreateSerializer.Meta.fields + [
+ "owned_by",
+ ]
+
+
+class CycleSerializer(BaseSerializer):
+ """
+ Cycle serializer with comprehensive project metrics and time tracking.
+
+ Provides cycle details including work item counts by status, progress estimates,
+ and time-bound iteration data for project management and sprint planning.
+ """
+
+ total_issues = serializers.IntegerField(read_only=True)
+ cancelled_issues = serializers.IntegerField(read_only=True)
+ completed_issues = serializers.IntegerField(read_only=True)
+ started_issues = serializers.IntegerField(read_only=True)
+ unstarted_issues = serializers.IntegerField(read_only=True)
+ backlog_issues = serializers.IntegerField(read_only=True)
+ total_estimates = serializers.FloatField(read_only=True)
+ completed_estimates = serializers.FloatField(read_only=True)
+ started_estimates = serializers.FloatField(read_only=True)
+
+ class Meta:
+ model = Cycle
+ fields = "__all__"
+ read_only_fields = [
+ "id",
+ "created_at",
+ "updated_at",
+ "created_by",
+ "updated_by",
+ "workspace",
+ "project",
+ "owned_by",
+ "deleted_at",
+ ]
+
+
+class CycleIssueSerializer(BaseSerializer):
+ """
+ Serializer for cycle-issue relationships with sub-issue counting.
+
+ Manages the association between cycles and work items, including
+ hierarchical issue tracking for nested work item structures.
+ """
+
+ sub_issues_count = serializers.IntegerField(read_only=True)
+
+ class Meta:
+ model = CycleIssue
+ fields = "__all__"
+ read_only_fields = ["workspace", "project", "cycle"]
+
+
+class CycleLiteSerializer(BaseSerializer):
+ """
+ Lightweight cycle serializer for minimal data transfer.
+
+ Provides essential cycle information without computed metrics,
+ optimized for list views and reference lookups.
+ """
+
+ class Meta:
+ model = Cycle
+ fields = "__all__"
+
+
+class CycleIssueRequestSerializer(serializers.Serializer):
+ """
+ Serializer for bulk work item assignment to cycles.
+
+ Validates work item ID lists for batch operations including
+ cycle assignment and sprint planning workflows.
+ """
+
+ issues = serializers.ListField(child=serializers.UUIDField(), help_text="List of issue IDs to add to the cycle")
+
+
+class TransferCycleIssueRequestSerializer(serializers.Serializer):
+ """
+ Serializer for transferring work items between cycles.
+
+ Handles work item migration between cycles including validation
+ and relationship updates for sprint reallocation workflows.
+ """
+
+ new_cycle_id = serializers.UUIDField(help_text="ID of the target cycle to transfer issues to")
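Putting the create serializer together, as a sketch; project and request are assumed to come from the calling view, and the dates are placeholders:

    from plane.api.serializers import CycleCreateSerializer

    serializer = CycleCreateSerializer(
        data={
            "name": "Sprint 1",
            "start_date": "2025-01-01",
            "end_date": "2025-01-14",
            "project_id": str(project.id),  # read from initial_data in validate()
        },
        context={"project": project, "project_id": project.id, "request": request},
    )
    serializer.is_valid(raise_exception=True)  # also normalizes both dates to UTC
    cycle = serializer.save(project_id=project.id)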
diff --git a/apps/api/plane/api/serializers/estimate.py b/apps/api/plane/api/serializers/estimate.py
new file mode 100644
index 00000000..b670006d
--- /dev/null
+++ b/apps/api/plane/api/serializers/estimate.py
@@ -0,0 +1,17 @@
+# Module imports
+from plane.db.models import EstimatePoint
+from .base import BaseSerializer
+
+
+class EstimatePointSerializer(BaseSerializer):
+ """
+ Serializer for project estimation points and story point values.
+
+ Handles numeric estimation data for work item sizing and sprint planning,
+ providing standardized point values for project velocity calculations.
+ """
+
+ class Meta:
+ model = EstimatePoint
+ fields = ["id", "value"]
+ read_only_fields = fields
diff --git a/apps/api/plane/api/serializers/intake.py b/apps/api/plane/api/serializers/intake.py
new file mode 100644
index 00000000..fcfedcbd
--- /dev/null
+++ b/apps/api/plane/api/serializers/intake.py
@@ -0,0 +1,134 @@
+# Module imports
+from .base import BaseSerializer
+from .issue import IssueExpandSerializer
+from plane.db.models import IntakeIssue, Issue
+from rest_framework import serializers
+
+
+class IssueForIntakeSerializer(BaseSerializer):
+ """
+ Serializer for work item data within intake submissions.
+
+ Handles essential work item fields for intake processing including
+ content validation and priority assignment for triage workflows.
+ """
+
+ class Meta:
+ model = Issue
+ fields = [
+ "name",
+ "description",
+ "description_html",
+ "priority",
+ ]
+ read_only_fields = [
+ "id",
+ "workspace",
+ "project",
+ "created_by",
+ "updated_by",
+ "created_at",
+ "updated_at",
+ ]
+
+
+class IntakeIssueCreateSerializer(BaseSerializer):
+ """
+ Serializer for creating intake work items with embedded issue data.
+
+ Manages intake work item creation including nested issue creation,
+ status assignment, and source tracking for issue queue management.
+ """
+
+ issue = IssueForIntakeSerializer(help_text="Issue data for the intake issue")
+
+ class Meta:
+ model = IntakeIssue
+ fields = [
+ "issue",
+ "intake",
+ "status",
+ "snoozed_till",
+ "duplicate_to",
+ "source",
+ "source_email",
+ ]
+ read_only_fields = [
+ "id",
+ "workspace",
+ "project",
+ "created_by",
+ "updated_by",
+ "created_at",
+ "updated_at",
+ ]
+
+
+class IntakeIssueSerializer(BaseSerializer):
+ """
+ Comprehensive serializer for intake work items with expanded issue details.
+
+ Provides full intake work item data including embedded issue information,
+ status tracking, and triage metadata for issue queue management.
+ """
+
+ issue_detail = IssueExpandSerializer(read_only=True, source="issue")
+ inbox = serializers.UUIDField(source="intake.id", read_only=True)
+
+ class Meta:
+ model = IntakeIssue
+ fields = "__all__"
+ read_only_fields = [
+ "id",
+ "workspace",
+ "project",
+ "issue",
+ "created_by",
+ "updated_by",
+ "created_at",
+ "updated_at",
+ ]
+
+
+class IntakeIssueUpdateSerializer(BaseSerializer):
+ """
+ Serializer for updating intake work items and their associated issues.
+
+ Handles intake work item modifications including status changes, triage decisions,
+ and embedded issue updates for issue queue processing workflows.
+ """
+
+ issue = IssueForIntakeSerializer(required=False, help_text="Issue data to update in the intake issue")
+
+ class Meta:
+ model = IntakeIssue
+ fields = [
+ "status",
+ "snoozed_till",
+ "duplicate_to",
+ "source",
+ "source_email",
+ "issue",
+ ]
+ read_only_fields = [
+ "id",
+ "workspace",
+ "project",
+ "created_by",
+ "updated_by",
+ "created_at",
+ "updated_at",
+ ]
+
+
+class IssueDataSerializer(serializers.Serializer):
+ """
+ Serializer for nested work item data in intake request payloads.
+
+ Validates core work item fields within intake requests including
+ content formatting, priority levels, and metadata for issue creation.
+ """
+
+ name = serializers.CharField(max_length=255, help_text="Issue name")
+ description_html = serializers.CharField(required=False, allow_null=True, help_text="Issue description HTML")
+ priority = serializers.ChoiceField(choices=Issue.PRIORITY_CHOICES, default="none", help_text="Issue priority")
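A payload the create serializer accepts, sketched with placeholders; the numeric status value is an assumption about the IntakeIssue model (e.g. pending triage):

    from plane.api.serializers import IntakeIssueCreateSerializer

    payload = {
        "issue": {
            "name": "Exports time out on large workspaces",
            "description_html": "<p>Steps to reproduce</p>",
            "priority": "high",
        },
        "intake": "123e4567-e89b-12d3-a456-426614174000",  # placeholder intake UUID
        "status": -2,  # assumed "pending" value
        "source": "EMAIL",
        "source_email": "reporter@example.com",
    }
    serializer = IntakeIssueCreateSerializer(data=payload)
    serializer.is_valid(raise_exception=True)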
diff --git a/apps/api/plane/api/serializers/issue.py b/apps/api/plane/api/serializers/issue.py
new file mode 100644
index 00000000..d7fc3e91
--- /dev/null
+++ b/apps/api/plane/api/serializers/issue.py
@@ -0,0 +1,699 @@
+# Django imports
+from django.utils import timezone
+from lxml import html
+from django.db import IntegrityError
+
+# Third party imports
+from rest_framework import serializers
+
+# Module imports
+from plane.db.models import (
+ Issue,
+ IssueType,
+ IssueActivity,
+ IssueAssignee,
+ FileAsset,
+ IssueComment,
+ IssueLabel,
+ IssueLink,
+ Label,
+ ProjectMember,
+ State,
+ User,
+ EstimatePoint,
+ CycleIssue,
+ ModuleIssue,
+)
+from plane.utils.content_validator import (
+ validate_html_content,
+ validate_binary_data,
+)
+
+from .base import BaseSerializer
+from .cycle import CycleLiteSerializer, CycleSerializer
+from .module import ModuleLiteSerializer, ModuleSerializer
+from .state import StateLiteSerializer
+from .user import UserLiteSerializer
+
+# Django imports
+from django.core.exceptions import ValidationError
+from django.core.validators import URLValidator
+
+
+class IssueSerializer(BaseSerializer):
+ """
+ Comprehensive work item serializer with full relationship management.
+
+ Handles complete work item lifecycle including assignees, labels, validation,
+ and related model updates. Supports dynamic field expansion and HTML content
+ processing.
+ """
+
+ assignees = serializers.ListField(
+ child=serializers.PrimaryKeyRelatedField(queryset=User.objects.values_list("id", flat=True)),
+ write_only=True,
+ required=False,
+ )
+
+ labels = serializers.ListField(
+ child=serializers.PrimaryKeyRelatedField(queryset=Label.objects.values_list("id", flat=True)),
+ write_only=True,
+ required=False,
+ )
+ type_id = serializers.PrimaryKeyRelatedField(
+ source="type", queryset=IssueType.objects.all(), required=False, allow_null=True
+ )
+
+ class Meta:
+ model = Issue
+ read_only_fields = ["id", "workspace", "project", "updated_by", "updated_at"]
+ exclude = ["description", "description_stripped"]
+
+ def validate(self, data):
+ if (
+ data.get("start_date", None) is not None
+ and data.get("target_date", None) is not None
+ and data.get("start_date", None) > data.get("target_date", None)
+ ):
+ raise serializers.ValidationError("Start date cannot exceed target date")
+
+ try:
+ if data.get("description_html", None) is not None:
+ parsed = html.fromstring(data["description_html"])
+ parsed_str = html.tostring(parsed, encoding="unicode")
+ data["description_html"] = parsed_str
+
+ except Exception:
+ raise serializers.ValidationError("Invalid HTML passed")
+
+ # Validate description content for security
+ if data.get("description_html"):
+ is_valid, error_msg, sanitized_html = validate_html_content(data["description_html"])
+ if not is_valid:
+ raise serializers.ValidationError({"error": "html content is not valid"})
+ # Update the data with sanitized HTML if available
+ if sanitized_html is not None:
+ data["description_html"] = sanitized_html
+
+ if data.get("description_binary"):
+ is_valid, error_msg = validate_binary_data(data["description_binary"])
+ if not is_valid:
+ raise serializers.ValidationError({"description_binary": "Invalid binary data"})
+
+ # Validate assignees are from project
+ if data.get("assignees", []):
+ data["assignees"] = ProjectMember.objects.filter(
+ project_id=self.context.get("project_id"),
+ is_active=True,
+ role__gte=15,
+ member_id__in=data["assignees"],
+ ).values_list("member_id", flat=True)
+
+ # Validate labels are from project
+ if data.get("labels", []):
+ data["labels"] = Label.objects.filter(
+ project_id=self.context.get("project_id"), id__in=data["labels"]
+ ).values_list("id", flat=True)
+
+ # Check state is from the project only else raise validation error
+ if (
+ data.get("state")
+ and not State.objects.filter(project_id=self.context.get("project_id"), pk=data.get("state").id).exists()
+ ):
+ raise serializers.ValidationError("State is not valid. Please pass a valid state_id")
+
+ # Check parent issue is from workspace as it can be cross workspace
+ if (
+ data.get("parent")
+ and not Issue.objects.filter(
+ workspace_id=self.context.get("workspace_id"),
+ project_id=self.context.get("project_id"),
+ pk=data.get("parent").id,
+ ).exists()
+ ):
+ raise serializers.ValidationError("Parent is not a valid issue_id. Please pass a valid issue_id")
+
+ if (
+ data.get("estimate_point")
+ and not EstimatePoint.objects.filter(
+ workspace_id=self.context.get("workspace_id"),
+ project_id=self.context.get("project_id"),
+ pk=data.get("estimate_point").id,
+ ).exists()
+ ):
+ raise serializers.ValidationError("Estimate point is not valid. Please pass a valid estimate_point_id")
+
+ return data
+
+ def create(self, validated_data):
+ assignees = validated_data.pop("assignees", None)
+ labels = validated_data.pop("labels", None)
+
+ project_id = self.context["project_id"]
+ workspace_id = self.context["workspace_id"]
+ default_assignee_id = self.context["default_assignee_id"]
+
+ issue_type = validated_data.pop("type", None)
+
+ if not issue_type:
+ # Fall back to the project's default issue type
+ issue_type = IssueType.objects.filter(project_issue_types__project_id=project_id, is_default=True).first()
+
+ issue = Issue.objects.create(**validated_data, project_id=project_id, type=issue_type)
+
+ # Issue Audit Users
+ created_by_id = issue.created_by_id
+ updated_by_id = issue.updated_by_id
+
+ if assignees is not None and len(assignees):
+ try:
+ IssueAssignee.objects.bulk_create(
+ [
+ IssueAssignee(
+ assignee_id=assignee_id,
+ issue=issue,
+ project_id=project_id,
+ workspace_id=workspace_id,
+ created_by_id=created_by_id,
+ updated_by_id=updated_by_id,
+ )
+ for assignee_id in assignees
+ ],
+ batch_size=10,
+ )
+ except IntegrityError:
+ pass
+ else:
+ try:
+ # Then assign it to default assignee, if it is a valid assignee
+ if (
+ default_assignee_id is not None
+ and ProjectMember.objects.filter(
+ member_id=default_assignee_id,
+ project_id=project_id,
+ role__gte=15,
+ is_active=True,
+ ).exists()
+ ):
+ IssueAssignee.objects.create(
+ assignee_id=default_assignee_id,
+ issue=issue,
+ project_id=project_id,
+ workspace_id=workspace_id,
+ created_by_id=created_by_id,
+ updated_by_id=updated_by_id,
+ )
+ except IntegrityError:
+ pass
+
+ if labels is not None and len(labels):
+ try:
+ IssueLabel.objects.bulk_create(
+ [
+ IssueLabel(
+ label_id=label_id,
+ issue=issue,
+ project_id=project_id,
+ workspace_id=workspace_id,
+ created_by_id=created_by_id,
+ updated_by_id=updated_by_id,
+ )
+ for label_id in labels
+ ],
+ batch_size=10,
+ )
+ except IntegrityError:
+ pass
+
+ return issue
+
+ def update(self, instance, validated_data):
+ assignees = validated_data.pop("assignees", None)
+ labels = validated_data.pop("labels", None)
+
+ # Related models
+ project_id = instance.project_id
+ workspace_id = instance.workspace_id
+ created_by_id = instance.created_by_id
+ updated_by_id = instance.updated_by_id
+
+ if assignees is not None:
+ IssueAssignee.objects.filter(issue=instance).delete()
+ try:
+ IssueAssignee.objects.bulk_create(
+ [
+ IssueAssignee(
+ assignee_id=assignee_id,
+ issue=instance,
+ project_id=project_id,
+ workspace_id=workspace_id,
+ created_by_id=created_by_id,
+ updated_by_id=updated_by_id,
+ )
+ for assignee_id in assignees
+ ],
+ batch_size=10,
+ ignore_conflicts=True,
+ )
+ except IntegrityError:
+ pass
+
+ if labels is not None:
+ IssueLabel.objects.filter(issue=instance).delete()
+ try:
+ IssueLabel.objects.bulk_create(
+ [
+ IssueLabel(
+ label_id=label_id,
+ issue=instance,
+ project_id=project_id,
+ workspace_id=workspace_id,
+ created_by_id=created_by_id,
+ updated_by_id=updated_by_id,
+ )
+ for label_id in labels
+ ],
+ batch_size=10,
+ ignore_conflicts=True,
+ )
+ except IntegrityError:
+ pass
+
+ # Touch updated_at even when only related models are updated
+ instance.updated_at = timezone.now()
+ return super().update(instance, validated_data)
+
+ def to_representation(self, instance):
+ data = super().to_representation(instance)
+ if "assignees" in self.fields:
+ if "assignees" in self.expand:
+ from .user import UserLiteSerializer
+
+ data["assignees"] = UserLiteSerializer(
+ User.objects.filter(
+ pk__in=IssueAssignee.objects.filter(issue=instance).values_list("assignee_id", flat=True)
+ ),
+ many=True,
+ ).data
+ else:
+ data["assignees"] = [
+ str(assignee)
+ for assignee in IssueAssignee.objects.filter(issue=instance).values_list("assignee_id", flat=True)
+ ]
+ if "labels" in self.fields:
+ if "labels" in self.expand:
+ data["labels"] = LabelSerializer(
+ Label.objects.filter(
+ pk__in=IssueLabel.objects.filter(issue=instance).values_list("label_id", flat=True)
+ ),
+ many=True,
+ ).data
+ else:
+ data["labels"] = [
+ str(label) for label in IssueLabel.objects.filter(issue=instance).values_list("label_id", flat=True)
+ ]
+
+ return data
+
+
+class IssueLiteSerializer(BaseSerializer):
+ """
+ Lightweight work item serializer for minimal data transfer.
+
+ Provides essential work item identifiers optimized for list views,
+ references, and performance-critical operations.
+ """
+
+ class Meta:
+ model = Issue
+ fields = ["id", "sequence_id", "project_id"]
+ read_only_fields = fields
+
+
+class LabelCreateUpdateSerializer(BaseSerializer):
+ """
+ Serializer for creating and updating work item labels.
+
+ Manages label metadata including colors, descriptions, hierarchy,
+ and sorting for work item categorization and filtering.
+ """
+
+ class Meta:
+ model = Label
+ fields = [
+ "name",
+ "color",
+ "description",
+ "external_source",
+ "external_id",
+ "parent",
+ "sort_order",
+ ]
+ read_only_fields = [
+ "id",
+ "workspace",
+ "project",
+ "created_by",
+ "updated_by",
+ "created_at",
+ "updated_at",
+ "deleted_at",
+ ]
+
+
+class LabelSerializer(BaseSerializer):
+ """
+ Full serializer for work item labels with complete metadata.
+
+ Provides comprehensive label information including hierarchical relationships,
+ visual properties, and organizational data for work item tagging.
+ """
+
+ class Meta:
+ model = Label
+ fields = "__all__"
+ read_only_fields = [
+ "id",
+ "workspace",
+ "project",
+ "created_by",
+ "updated_by",
+ "created_at",
+ "updated_at",
+ "deleted_at",
+ ]
+
+
+class IssueLinkCreateSerializer(BaseSerializer):
+ """
+ Serializer for creating work item external links with validation.
+
+ Handles URL validation, format checking, and duplicate prevention
+ for attaching external resources to work items.
+ """
+
+ class Meta:
+ model = IssueLink
+ fields = ["url", "issue_id"]
+ read_only_fields = [
+ "id",
+ "workspace",
+ "project",
+ "issue",
+ "created_by",
+ "updated_by",
+ "created_at",
+ "updated_at",
+ ]
+
+ def validate_url(self, value):
+ # Check URL format
+ validate_url = URLValidator()
+ try:
+ validate_url(value)
+ except ValidationError:
+ raise serializers.ValidationError("Invalid URL format.")
+
+ # Check URL scheme
+ if not value.startswith(("http://", "https://")):
+ raise serializers.ValidationError("Invalid URL scheme.")
+
+ return value
+
+ # Reject creation if the URL already exists for this issue
+ def create(self, validated_data):
+ if IssueLink.objects.filter(url=validated_data.get("url"), issue_id=validated_data.get("issue_id")).exists():
+ raise serializers.ValidationError({"error": "URL already exists for this Issue"})
+ return IssueLink.objects.create(**validated_data)
+
+
+class IssueLinkUpdateSerializer(IssueLinkCreateSerializer):
+ """
+ Serializer for updating work item external links.
+
+ Extends link creation with update-specific validation to prevent
+ URL conflicts and maintain link integrity during modifications.
+ """
+
+ class Meta(IssueLinkCreateSerializer.Meta):
+ model = IssueLink
+ # "issue_id" is already included in the parent's fields, so inherit them as-is
+ fields = IssueLinkCreateSerializer.Meta.fields
+ read_only_fields = IssueLinkCreateSerializer.Meta.read_only_fields
+
+ def update(self, instance, validated_data):
+ if (
+ IssueLink.objects.filter(url=validated_data.get("url"), issue_id=instance.issue_id)
+ .exclude(pk=instance.id)
+ .exists()
+ ):
+ raise serializers.ValidationError({"error": "URL already exists for this Issue"})
+
+ return super().update(instance, validated_data)
+
+
+class IssueLinkSerializer(BaseSerializer):
+ """
+ Full serializer for work item external links.
+
+ Provides complete link information including metadata and timestamps
+ for managing external resource associations with work items.
+ """
+
+ class Meta:
+ model = IssueLink
+ fields = "__all__"
+ read_only_fields = [
+ "id",
+ "workspace",
+ "project",
+ "issue",
+ "created_by",
+ "updated_by",
+ "created_at",
+ "updated_at",
+ ]
+
+
+class IssueAttachmentSerializer(BaseSerializer):
+ """
+ Serializer for work item file attachments.
+
+ Manages file asset associations with work items including metadata,
+ storage information, and access control for document management.
+ """
+
+ class Meta:
+ model = FileAsset
+ fields = "__all__"
+ read_only_fields = [
+ "id",
+ "workspace",
+ "project",
+ "issue",
+ "updated_by",
+ "updated_at",
+ ]
+
+
+class IssueCommentCreateSerializer(BaseSerializer):
+ """
+ Serializer for creating work item comments.
+
+ Handles comment creation with JSON and HTML content support,
+ access control, and external integration tracking.
+ """
+
+ class Meta:
+ model = IssueComment
+ fields = [
+ "comment_json",
+ "comment_html",
+ "access",
+ "external_source",
+ "external_id",
+ ]
+ read_only_fields = [
+ "id",
+ "workspace",
+ "project",
+ "issue",
+ "created_by",
+ "updated_by",
+ "created_at",
+ "updated_at",
+ "deleted_at",
+ "actor",
+ "comment_stripped",
+ "edited_at",
+ ]
+
+
+class IssueCommentSerializer(BaseSerializer):
+ """
+ Full serializer for work item comments with membership context.
+
+ Provides complete comment data including member status, content formatting,
+ and edit tracking for collaborative work item discussions.
+ """
+
+ is_member = serializers.BooleanField(read_only=True)
+
+ class Meta:
+ model = IssueComment
+ read_only_fields = [
+ "id",
+ "workspace",
+ "project",
+ "issue",
+ "created_by",
+ "updated_by",
+ "created_at",
+ "updated_at",
+ ]
+ exclude = ["comment_stripped", "comment_json"]
+
+ def validate(self, data):
+ try:
+ if data.get("comment_html", None) is not None:
+ parsed = html.fromstring(data["comment_html"])
+ parsed_str = html.tostring(parsed, encoding="unicode")
+ data["comment_html"] = parsed_str
+
+ except Exception:
+ raise serializers.ValidationError("Invalid HTML passed")
+ return data
+
+
+class IssueActivitySerializer(BaseSerializer):
+ """
+ Serializer for work item activity and change history.
+
+ Tracks and represents work item modifications, state changes,
+ and user interactions for audit trails and activity feeds.
+ """
+
+ class Meta:
+ model = IssueActivity
+ exclude = ["created_by", "updated_by"]
+
+
+class CycleIssueSerializer(BaseSerializer):
+ """
+ Serializer for work items within cycles.
+
+ Provides cycle context for work items including cycle metadata
+ and timing information for sprint and iteration management.
+ """
+
+ cycle = CycleSerializer(read_only=True)
+
+ class Meta:
+ model = CycleIssue
+ fields = ["cycle"]
+
+
+class ModuleIssueSerializer(BaseSerializer):
+ """
+ Serializer for work items within modules.
+
+ Provides module context for work items including module metadata
+ and organizational information for feature-based work grouping.
+ """
+
+ module = ModuleSerializer(read_only=True)
+
+ class Meta:
+ model = ModuleIssue
+ fields = ["module"]
+
+
+class LabelLiteSerializer(BaseSerializer):
+ """
+ Lightweight label serializer for minimal data transfer.
+
+ Provides essential label information with visual properties,
+ optimized for UI display and performance-critical operations.
+ """
+
+ class Meta:
+ model = Label
+ fields = ["id", "name", "color"]
+
+
+class IssueExpandSerializer(BaseSerializer):
+ """
+ Extended work item serializer with full relationship expansion.
+
+ Provides work items with expanded related data including cycles, modules,
+ labels, assignees, and states for comprehensive data representation.
+ """
+
+ cycle = CycleLiteSerializer(source="issue_cycle.cycle", read_only=True)
+ module = ModuleLiteSerializer(source="issue_module.module", read_only=True)
+
+ labels = serializers.SerializerMethodField()
+ assignees = serializers.SerializerMethodField()
+ state = StateLiteSerializer(read_only=True)
+
+ def get_labels(self, obj):
+ expand = self.context.get("expand", [])
+ if "labels" in expand:
+ # Use prefetched data
+ return LabelLiteSerializer([il.label for il in obj.label_issue.all()], many=True).data
+ return [il.label_id for il in obj.label_issue.all()]
+
+ def get_assignees(self, obj):
+ expand = self.context.get("expand", [])
+ if "assignees" in expand:
+ return UserLiteSerializer([ia.assignee for ia in obj.issue_assignee.all()], many=True).data
+ return [ia.assignee_id for ia in obj.issue_assignee.all()]
+
+ class Meta:
+ model = Issue
+ fields = "__all__"
+ read_only_fields = [
+ "id",
+ "workspace",
+ "project",
+ "created_by",
+ "updated_by",
+ "created_at",
+ "updated_at",
+ ]
+
+
+class IssueAttachmentUploadSerializer(serializers.Serializer):
+ """
+ Serializer for work item attachment upload request validation.
+
+ Handles file upload metadata validation including size, type, and external
+ integration tracking for secure work item document attachment workflows.
+ """
+
+ name = serializers.CharField(help_text="Original filename of the asset")
+ type = serializers.CharField(required=False, help_text="MIME type of the file")
+ size = serializers.IntegerField(help_text="File size in bytes")
+ external_id = serializers.CharField(
+ required=False,
+ help_text="External identifier for the asset (for integration tracking)",
+ )
+ external_source = serializers.CharField(
+ required=False, help_text="External source system (for integration tracking)"
+ )
+
+
+class IssueSearchSerializer(serializers.Serializer):
+ """
+ Serializer for work item search result data formatting.
+
+ Provides standardized search result structure including work item identifiers,
+ project context, and workspace information for search API responses.
+ """
+
+ id = serializers.CharField(required=True, help_text="Issue ID")
+ name = serializers.CharField(required=True, help_text="Issue name")
+ sequence_id = serializers.CharField(required=True, help_text="Issue sequence ID")
+ project__identifier = serializers.CharField(required=True, help_text="Project identifier")
+ project_id = serializers.CharField(required=True, help_text="Project ID")
+ workspace__slug = serializers.CharField(required=True, help_text="Workspace slug")
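The context keys create() depends on, sketched; every id below is a placeholder supplied by the calling view in the real flow:

    from plane.api.serializers import IssueSerializer

    member_id = "123e4567-e89b-12d3-a456-426614174000"  # placeholder
    label_id = "123e4567-e89b-12d3-a456-426614174001"  # placeholder

    serializer = IssueSerializer(
        data={
            "name": "Implement workspace search",
            "assignees": [member_id],  # narrowed to active project members (role >= 15)
            "labels": [label_id],  # narrowed to labels owned by the project
        },
        context={
            "project_id": project_id,
            "workspace_id": workspace_id,
            "default_assignee_id": None,  # consulted only when no assignees are passed
        },
    )
    if serializer.is_valid():
        issue = serializer.save()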
diff --git a/apps/api/plane/api/serializers/module.py b/apps/api/plane/api/serializers/module.py
new file mode 100644
index 00000000..77be453c
--- /dev/null
+++ b/apps/api/plane/api/serializers/module.py
@@ -0,0 +1,272 @@
+# Third party imports
+from rest_framework import serializers
+
+# Module imports
+from .base import BaseSerializer
+from plane.db.models import (
+ User,
+ Module,
+ ModuleLink,
+ ModuleMember,
+ ModuleIssue,
+ ProjectMember,
+)
+
+
+class ModuleCreateSerializer(BaseSerializer):
+ """
+ Serializer for creating modules with member validation and date checking.
+
+ Handles module creation including member assignment validation, date range
+ verification, and duplicate name prevention for feature-based
+ project organization setup.
+ """
+
+ members = serializers.ListField(
+ child=serializers.PrimaryKeyRelatedField(queryset=User.objects.all()),
+ write_only=True,
+ required=False,
+ )
+
+ class Meta:
+ model = Module
+ fields = [
+ "name",
+ "description",
+ "start_date",
+ "target_date",
+ "status",
+ "lead",
+ "members",
+ "external_source",
+ "external_id",
+ ]
+ read_only_fields = [
+ "id",
+ "workspace",
+ "project",
+ "created_by",
+ "updated_by",
+ "created_at",
+ "updated_at",
+ "deleted_at",
+ ]
+
+ def validate(self, data):
+ if (
+ data.get("start_date", None) is not None
+ and data.get("target_date", None) is not None
+ and data.get("start_date", None) > data.get("target_date", None)
+ ):
+ raise serializers.ValidationError("Start date cannot exceed target date")
+
+ if data.get("members", []):
+ data["members"] = ProjectMember.objects.filter(
+ project_id=self.context.get("project_id"), member_id__in=data["members"]
+ ).values_list("member_id", flat=True)
+
+ return data
+
+ def create(self, validated_data):
+ members = validated_data.pop("members", None)
+
+ project_id = self.context["project_id"]
+ workspace_id = self.context["workspace_id"]
+
+ module_name = validated_data.get("name")
+ if module_name:
+ # Lookup for the module name in the module table for that project
+ module = Module.objects.filter(name=module_name, project_id=project_id).first()
+ if module:
+ raise serializers.ValidationError(
+ {
+ "id": str(module.id),
+ "code": "MODULE_NAME_ALREADY_EXISTS",
+ "error": "Module with this name already exists",
+ "message": "Module with this name already exists",
+ }
+ )
+
+ module = Module.objects.create(**validated_data, project_id=project_id)
+ if members is not None:
+ ModuleMember.objects.bulk_create(
+ [
+ ModuleMember(
+ module=module,
+ member_id=str(member),
+ project_id=project_id,
+ workspace_id=workspace_id,
+ created_by=module.created_by,
+ updated_by=module.updated_by,
+ )
+ for member in members
+ ],
+ batch_size=10,
+ ignore_conflicts=True,
+ )
+
+ return module
+
+
+class ModuleUpdateSerializer(ModuleCreateSerializer):
+ """
+ Serializer for updating modules with enhanced validation and member management.
+
+ Extends module creation with update-specific validations including
+ member reassignment, name conflict checking,
+ and relationship management for module modifications.
+ """
+
+ class Meta(ModuleCreateSerializer.Meta):
+ model = Module
+ fields = ModuleCreateSerializer.Meta.fields + [
+ "members",
+ ]
+ read_only_fields = ModuleCreateSerializer.Meta.read_only_fields
+
+ def update(self, instance, validated_data):
+ members = validated_data.pop("members", None)
+ module_name = validated_data.get("name")
+ if module_name:
+ # Lookup for the module name in the module table for that project
+ if Module.objects.filter(name=module_name, project=instance.project).exclude(id=instance.id).exists():
+ raise serializers.ValidationError({"error": "Module with this name already exists"})
+
+ if members is not None:
+ ModuleMember.objects.filter(module=instance).delete()
+ ModuleMember.objects.bulk_create(
+ [
+ ModuleMember(
+ module=instance,
+ member_id=str(member),
+ project=instance.project,
+ workspace=instance.project.workspace,
+ created_by=instance.created_by,
+ updated_by=instance.updated_by,
+ )
+ for member in members
+ ],
+ batch_size=10,
+ ignore_conflicts=True,
+ )
+
+ return super().update(instance, validated_data)
+
+
+class ModuleSerializer(BaseSerializer):
+ """
+ Comprehensive module serializer with work item metrics and member management.
+
+ Provides complete module data including work item counts by status, member
+ relationships, and progress tracking for feature-based project organization.
+ """
+
+ members = serializers.ListField(
+ child=serializers.PrimaryKeyRelatedField(queryset=User.objects.all()),
+ write_only=True,
+ required=False,
+ )
+ total_issues = serializers.IntegerField(read_only=True)
+ cancelled_issues = serializers.IntegerField(read_only=True)
+ completed_issues = serializers.IntegerField(read_only=True)
+ started_issues = serializers.IntegerField(read_only=True)
+ unstarted_issues = serializers.IntegerField(read_only=True)
+ backlog_issues = serializers.IntegerField(read_only=True)
+
+ class Meta:
+ model = Module
+ fields = "__all__"
+ read_only_fields = [
+ "id",
+ "workspace",
+ "project",
+ "created_by",
+ "updated_by",
+ "created_at",
+ "updated_at",
+ "deleted_at",
+ ]
+
+ def to_representation(self, instance):
+ data = super().to_representation(instance)
+ data["members"] = [str(member.id) for member in instance.members.all()]
+ return data
+
+
+class ModuleIssueSerializer(BaseSerializer):
+ """
+ Serializer for module-work item relationships with sub-item counting.
+
+ Manages the association between modules and work items, including
+ hierarchical issue tracking for nested work item structures.
+ """
+
+ sub_issues_count = serializers.IntegerField(read_only=True)
+
+ class Meta:
+ model = ModuleIssue
+ fields = "__all__"
+ read_only_fields = [
+ "workspace",
+ "project",
+ "created_by",
+ "updated_by",
+ "created_at",
+ "updated_at",
+ "module",
+ ]
+
+
+class ModuleLinkSerializer(BaseSerializer):
+ """
+ Serializer for module external links with URL validation.
+
+ Handles external resource associations with modules including
+ URL validation and duplicate prevention for reference management.
+ """
+
+ class Meta:
+ model = ModuleLink
+ fields = "__all__"
+ read_only_fields = [
+ "workspace",
+ "project",
+ "created_by",
+ "updated_by",
+ "created_at",
+ "updated_at",
+ "module",
+ ]
+
+ # Reject creation if the URL already exists for this module
+ def create(self, validated_data):
+ if ModuleLink.objects.filter(url=validated_data.get("url"), module_id=validated_data.get("module_id")).exists():
+ raise serializers.ValidationError({"error": "URL already exists for this Module"})
+ return ModuleLink.objects.create(**validated_data)
+
+
+class ModuleLiteSerializer(BaseSerializer):
+ """
+ Lightweight module serializer for minimal data transfer.
+
+ Provides essential module information without computed metrics,
+ optimized for list views and reference lookups.
+ """
+
+ class Meta:
+ model = Module
+ fields = "__all__"
+
+
+class ModuleIssueRequestSerializer(serializers.Serializer):
+ """
+ Serializer for bulk work item assignment to modules.
+
+ Validates work item ID lists for batch operations including
+ module assignment and work item organization workflows.
+ """
+
+ issues = serializers.ListField(
+ child=serializers.UUIDField(),
+ help_text="List of issue IDs to add to the module",
+ )
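Validating a bulk-assignment payload, as a sketch (the UUID is a placeholder):

    from plane.api.serializers import ModuleIssueRequestSerializer

    serializer = ModuleIssueRequestSerializer(
        data={"issues": ["123e4567-e89b-12d3-a456-426614174000"]}
    )
    serializer.is_valid(raise_exception=True)
    issue_ids = serializer.validated_data["issues"]  # parsed into uuid.UUID objects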
diff --git a/apps/api/plane/api/serializers/project.py b/apps/api/plane/api/serializers/project.py
new file mode 100644
index 00000000..3228c5ad
--- /dev/null
+++ b/apps/api/plane/api/serializers/project.py
@@ -0,0 +1,232 @@
+# Third party imports
+from rest_framework import serializers
+
+# Module imports
+from plane.db.models import (
+ Project,
+ ProjectIdentifier,
+ WorkspaceMember,
+ State,
+ Estimate,
+)
+
+from plane.utils.content_validator import (
+ validate_html_content,
+)
+from .base import BaseSerializer
+
+
+class ProjectCreateSerializer(BaseSerializer):
+ """
+ Serializer for creating projects with workspace validation.
+
+ Handles project creation including identifier validation, member verification,
+ and workspace association for new project initialization.
+ """
+
+ class Meta:
+ model = Project
+ fields = [
+ "name",
+ "description",
+ "project_lead",
+ "default_assignee",
+ "identifier",
+ "icon_prop",
+ "emoji",
+ "cover_image",
+ "module_view",
+ "cycle_view",
+ "issue_views_view",
+ "page_view",
+ "intake_view",
+ "guest_view_all_features",
+ "archive_in",
+ "close_in",
+ "timezone",
+ "logo_props",
+ "external_source",
+ "external_id",
+ "is_issue_type_enabled",
+ ]
+
+ read_only_fields = [
+ "id",
+ "workspace",
+ "created_at",
+ "updated_at",
+ "created_by",
+ "updated_by",
+ ]
+
+ def validate(self, data):
+ if data.get("project_lead", None) is not None:
+ # Check if the project lead is a member of the workspace
+ if not WorkspaceMember.objects.filter(
+ workspace_id=self.context["workspace_id"],
+ member_id=data.get("project_lead"),
+ ).exists():
+ raise serializers.ValidationError("Project lead should be a user in the workspace")
+
+ if data.get("default_assignee", None) is not None:
+ # Check if the default assignee is a member of the workspace
+ if not WorkspaceMember.objects.filter(
+ workspace_id=self.context["workspace_id"],
+ member_id=data.get("default_assignee"),
+ ).exists():
+ raise serializers.ValidationError("Default assignee should be a user in the workspace")
+
+ return data
+
+ def create(self, validated_data):
+ identifier = validated_data.get("identifier", "").strip().upper()
+ if identifier == "":
+ raise serializers.ValidationError(detail="Project Identifier is required")
+
+ if ProjectIdentifier.objects.filter(name=identifier, workspace_id=self.context["workspace_id"]).exists():
+ raise serializers.ValidationError(detail="Project Identifier is taken")
+
+ project = Project.objects.create(**validated_data, workspace_id=self.context["workspace_id"])
+ return project
+
+
+class ProjectUpdateSerializer(ProjectCreateSerializer):
+ """
+ Serializer for updating projects with enhanced state and estimation management.
+
+ Extends project creation with update-specific validations including default state
+ assignment, estimation configuration, and project setting modifications.
+ """
+
+ class Meta(ProjectCreateSerializer.Meta):
+ model = Project
+ fields = ProjectCreateSerializer.Meta.fields + [
+ "default_state",
+ "estimate",
+ ]
+
+ read_only_fields = ProjectCreateSerializer.Meta.read_only_fields
+
+ def update(self, instance, validated_data):
+ """Update a project"""
+ if (
+ validated_data.get("default_state", None) is not None
+ and not State.objects.filter(project=instance, id=validated_data.get("default_state")).exists()
+ ):
+ # Check if the default state is a state in the project
+ raise serializers.ValidationError("Default state should be a state in the project")
+
+ if (
+ validated_data.get("estimate", None) is not None
+ and not Estimate.objects.filter(project=instance, id=validated_data.get("estimate")).exists()
+ ):
+ # Check if the estimate is an estimate configured in the project
+ raise serializers.ValidationError("Estimate should be an estimate in the project")
+ return super().update(instance, validated_data)
+
+
+class ProjectSerializer(BaseSerializer):
+ """
+ Comprehensive project serializer with metrics and member context.
+
+ Provides complete project data including member counts, cycle/module totals,
+ deployment status, and user-specific context for project management.
+ """
+
+ total_members = serializers.IntegerField(read_only=True)
+ total_cycles = serializers.IntegerField(read_only=True)
+ total_modules = serializers.IntegerField(read_only=True)
+ is_member = serializers.BooleanField(read_only=True)
+ sort_order = serializers.FloatField(read_only=True)
+ member_role = serializers.IntegerField(read_only=True)
+ is_deployed = serializers.BooleanField(read_only=True)
+ cover_image_url = serializers.CharField(read_only=True)
+
+ class Meta:
+ model = Project
+ fields = "__all__"
+ read_only_fields = [
+ "id",
+ "emoji",
+ "workspace",
+ "created_at",
+ "updated_at",
+ "created_by",
+ "updated_by",
+ "deleted_at",
+ "cover_image_url",
+ ]
+
+ def validate(self, data):
+ # Check project lead should be a member of the workspace
+ if (
+ data.get("project_lead", None) is not None
+ and not WorkspaceMember.objects.filter(
+ workspace_id=self.context["workspace_id"],
+ member_id=data.get("project_lead"),
+ ).exists()
+ ):
+ raise serializers.ValidationError("Project lead should be a user in the workspace")
+
+ # Check default assignee should be a member of the workspace
+ if (
+ data.get("default_assignee", None) is not None
+ and not WorkspaceMember.objects.filter(
+ workspace_id=self.context["workspace_id"],
+ member_id=data.get("default_assignee"),
+ ).exists()
+ ):
+ raise serializers.ValidationError("Default assignee should be a user in the workspace")
+
+ # Validate description content for security
+ if "description_html" in data and data["description_html"]:
+ if isinstance(data["description_html"], dict):
+ is_valid, error_msg, sanitized_html = validate_html_content(str(data["description_html"]))
+ # Update the data with sanitized HTML if available
+ if sanitized_html is not None:
+ data["description_html"] = sanitized_html
+ if not is_valid:
+ raise serializers.ValidationError({"error": "html content is not valid"})
+
+ return data
+
+ def create(self, validated_data):
+ identifier = validated_data.get("identifier", "").strip().upper()
+ if identifier == "":
+ raise serializers.ValidationError(detail="Project Identifier is required")
+
+ if ProjectIdentifier.objects.filter(name=identifier, workspace_id=self.context["workspace_id"]).exists():
+ raise serializers.ValidationError(detail="Project Identifier is taken")
+
+ project = Project.objects.create(**validated_data, workspace_id=self.context["workspace_id"])
+ _ = ProjectIdentifier.objects.create(
+ name=project.identifier,
+ project=project,
+ workspace_id=self.context["workspace_id"],
+ )
+ return project
+
+
+class ProjectLiteSerializer(BaseSerializer):
+ """
+ Lightweight project serializer for minimal data transfer.
+
+ Provides essential project information including identifiers, visual properties,
+ and basic metadata optimized for list views and references.
+ """
+
+ cover_image_url = serializers.CharField(read_only=True)
+
+ class Meta:
+ model = Project
+ fields = [
+ "id",
+ "identifier",
+ "name",
+ "cover_image",
+ "icon_prop",
+ "emoji",
+ "description",
+ "cover_image_url",
+ ]
+ read_only_fields = fields
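How the identifier check behaves, as a sketch; workspace_id is a placeholder for the value the view pulls from the URL:

    from plane.api.serializers import ProjectCreateSerializer

    workspace_id = "123e4567-e89b-12d3-a456-426614174000"  # placeholder

    serializer = ProjectCreateSerializer(
        data={"name": "Mobile App", "identifier": "mob"},
        context={"workspace_id": workspace_id},
    )
    serializer.is_valid(raise_exception=True)
    # create() compares the stripped, upper-cased identifier ("MOB") against
    # existing ProjectIdentifier rows before inserting the project.
    project = serializer.save()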
diff --git a/apps/api/plane/api/serializers/state.py b/apps/api/plane/api/serializers/state.py
new file mode 100644
index 00000000..fc6aac15
--- /dev/null
+++ b/apps/api/plane/api/serializers/state.py
@@ -0,0 +1,47 @@
+# Module imports
+from .base import BaseSerializer
+from plane.db.models import State
+
+
+class StateSerializer(BaseSerializer):
+ """
+ Serializer for work item states with default state management.
+
+ Handles state creation and updates including default state validation
+ and automatic default state switching for workflow management.
+ """
+
+ def validate(self, data):
+ # If the default is being provided then make all other states default False
+ if data.get("default", False):
+ State.objects.filter(project_id=self.context.get("project_id")).update(default=False)
+ return data
+
+ class Meta:
+ model = State
+ fields = "__all__"
+ read_only_fields = [
+ "id",
+ "created_by",
+ "updated_by",
+ "created_at",
+ "updated_at",
+ "workspace",
+ "project",
+ "deleted_at",
+ "slug",
+ ]
+
+
+class StateLiteSerializer(BaseSerializer):
+ """
+ Lightweight state serializer for minimal data transfer.
+
+ Provides essential state information including visual properties
+ and grouping data optimized for UI display and filtering.
+ """
+
+ class Meta:
+ model = State
+ fields = ["id", "name", "color", "group"]
+ read_only_fields = fields
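The default-state flip, sketched; the group value assumes the standard State groups, and project_id is a placeholder:

    from plane.api.serializers import StateSerializer

    project_id = "123e4567-e89b-12d3-a456-426614174000"  # placeholder

    serializer = StateSerializer(
        data={"name": "In Review", "color": "#f59e0b", "group": "started", "default": True},
        context={"project_id": project_id},
    )
    if serializer.is_valid():
        # validate() has already demoted every other default state in the project
        state = serializer.save(project_id=project_id)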
diff --git a/apps/api/plane/api/serializers/user.py b/apps/api/plane/api/serializers/user.py
new file mode 100644
index 00000000..805eb9fe
--- /dev/null
+++ b/apps/api/plane/api/serializers/user.py
@@ -0,0 +1,34 @@
+from rest_framework import serializers
+
+# Module imports
+from plane.db.models import User
+
+from .base import BaseSerializer
+
+
+class UserLiteSerializer(BaseSerializer):
+ """
+ Lightweight user serializer for minimal data transfer.
+
+ Provides essential user information including names, avatar, and contact details
+ optimized for member lists, assignee displays, and user references.
+ """
+
+ avatar_url = serializers.CharField(
+ help_text="Avatar URL",
+ read_only=True,
+ )
+
+ class Meta:
+ model = User
+ fields = [
+ "id",
+ "first_name",
+ "last_name",
+ "email",
+ "avatar",
+ "avatar_url",
+ "display_name",
+ ]
+ read_only_fields = fields
diff --git a/apps/api/plane/api/serializers/workspace.py b/apps/api/plane/api/serializers/workspace.py
new file mode 100644
index 00000000..e98683c2
--- /dev/null
+++ b/apps/api/plane/api/serializers/workspace.py
@@ -0,0 +1,17 @@
+# Module imports
+from plane.db.models import Workspace
+from .base import BaseSerializer
+
+
+class WorkspaceLiteSerializer(BaseSerializer):
+ """
+ Lightweight workspace serializer for minimal data transfer.
+
+ Provides essential workspace identifiers including name, slug, and ID
+ optimized for navigation, references, and performance-critical operations.
+ """
+
+ class Meta:
+ model = Workspace
+ fields = ["name", "slug", "id"]
+ read_only_fields = fields
diff --git a/apps/api/plane/api/urls/__init__.py b/apps/api/plane/api/urls/__init__.py
new file mode 100644
index 00000000..10cad206
--- /dev/null
+++ b/apps/api/plane/api/urls/__init__.py
@@ -0,0 +1,23 @@
+from .asset import urlpatterns as asset_patterns
+from .cycle import urlpatterns as cycle_patterns
+from .intake import urlpatterns as intake_patterns
+from .label import urlpatterns as label_patterns
+from .member import urlpatterns as member_patterns
+from .module import urlpatterns as module_patterns
+from .project import urlpatterns as project_patterns
+from .state import urlpatterns as state_patterns
+from .user import urlpatterns as user_patterns
+from .work_item import urlpatterns as work_item_patterns
+
+urlpatterns = [
+ *asset_patterns,
+ *cycle_patterns,
+ *intake_patterns,
+ *label_patterns,
+ *member_patterns,
+ *module_patterns,
+ *project_patterns,
+ *state_patterns,
+ *user_patterns,
+ *work_item_patterns,
+]
diff --git a/apps/api/plane/api/urls/asset.py b/apps/api/plane/api/urls/asset.py
new file mode 100644
index 00000000..5bdd4d91
--- /dev/null
+++ b/apps/api/plane/api/urls/asset.py
@@ -0,0 +1,40 @@
+from django.urls import path
+
+from plane.api.views import (
+ UserAssetEndpoint,
+ UserServerAssetEndpoint,
+ GenericAssetEndpoint,
+)
+
+urlpatterns = [
+ path(
+ "assets/user-assets/",
+ UserAssetEndpoint.as_view(http_method_names=["post"]),
+ name="user-assets",
+ ),
+ path(
+ "assets/user-assets//",
+ UserAssetEndpoint.as_view(http_method_names=["patch", "delete"]),
+ name="user-assets-detail",
+ ),
+ path(
+ "assets/user-assets/server/",
+ UserServerAssetEndpoint.as_view(http_method_names=["post"]),
+ name="user-server-assets",
+ ),
+ path(
+ "assets/user-assets//server/",
+ UserServerAssetEndpoint.as_view(http_method_names=["patch", "delete"]),
+ name="user-server-assets-detail",
+ ),
+ path(
+ "workspaces//assets/",
+ GenericAssetEndpoint.as_view(http_method_names=["post"]),
+ name="generic-asset",
+ ),
+ path(
+ "workspaces//assets//",
+ GenericAssetEndpoint.as_view(http_method_names=["get", "patch"]),
+ name="generic-asset-detail",
+ ),
+]
diff --git a/apps/api/plane/api/urls/cycle.py b/apps/api/plane/api/urls/cycle.py
new file mode 100644
index 00000000..bd7136aa
--- /dev/null
+++ b/apps/api/plane/api/urls/cycle.py
@@ -0,0 +1,53 @@
+from django.urls import path
+
+from plane.api.views.cycle import (
+ CycleListCreateAPIEndpoint,
+ CycleDetailAPIEndpoint,
+ CycleIssueListCreateAPIEndpoint,
+ CycleIssueDetailAPIEndpoint,
+ TransferCycleIssueAPIEndpoint,
+ CycleArchiveUnarchiveAPIEndpoint,
+)
+
+urlpatterns = [
+ path(
+ "workspaces//projects//cycles/",
+ CycleListCreateAPIEndpoint.as_view(http_method_names=["get", "post"]),
+ name="cycles",
+ ),
+ path(
+ "workspaces//projects//cycles//",
+ CycleDetailAPIEndpoint.as_view(http_method_names=["get", "patch", "delete"]),
+ name="cycles",
+ ),
+ path(
+ "workspaces//projects//cycles//cycle-issues/",
+ CycleIssueListCreateAPIEndpoint.as_view(http_method_names=["get", "post"]),
+ name="cycle-issues",
+ ),
+ path(
+ "workspaces//projects//cycles//cycle-issues//",
+ CycleIssueDetailAPIEndpoint.as_view(http_method_names=["get", "delete"]),
+ name="cycle-issues",
+ ),
+ path(
+ "workspaces//projects//cycles//transfer-issues/",
+ TransferCycleIssueAPIEndpoint.as_view(http_method_names=["post"]),
+ name="transfer-issues",
+ ),
+ path(
+ "workspaces//projects//cycles//archive/",
+ CycleArchiveUnarchiveAPIEndpoint.as_view(http_method_names=["post"]),
+ name="cycle-archive-unarchive",
+ ),
+ path(
+ "workspaces//projects//archived-cycles/",
+ CycleArchiveUnarchiveAPIEndpoint.as_view(http_method_names=["get"]),
+ name="cycle-archive-unarchive",
+ ),
+ path(
+ "workspaces//projects//archived-cycles//unarchive/",
+ CycleArchiveUnarchiveAPIEndpoint.as_view(http_method_names=["delete"]),
+ name="cycle-archive-unarchive",
+ ),
+]
diff --git a/apps/api/plane/api/urls/intake.py b/apps/api/plane/api/urls/intake.py
new file mode 100644
index 00000000..5538467a
--- /dev/null
+++ b/apps/api/plane/api/urls/intake.py
@@ -0,0 +1,20 @@
+from django.urls import path
+
+from plane.api.views import (
+ IntakeIssueListCreateAPIEndpoint,
+ IntakeIssueDetailAPIEndpoint,
+)
+
+
+urlpatterns = [
+ path(
+ "workspaces//projects//intake-issues/",
+ IntakeIssueListCreateAPIEndpoint.as_view(http_method_names=["get", "post"]),
+ name="intake-issue",
+ ),
+ path(
+ "workspaces//projects//intake-issues//",
+ IntakeIssueDetailAPIEndpoint.as_view(http_method_names=["get", "patch", "delete"]),
+ name="intake-issue",
+ ),
+]
diff --git a/apps/api/plane/api/urls/label.py b/apps/api/plane/api/urls/label.py
new file mode 100644
index 00000000..f7ee57b1
--- /dev/null
+++ b/apps/api/plane/api/urls/label.py
@@ -0,0 +1,17 @@
+from django.urls import path
+
+from plane.api.views import LabelListCreateAPIEndpoint, LabelDetailAPIEndpoint
+
+
+urlpatterns = [
+ path(
+ "workspaces//projects//labels/",
+ LabelListCreateAPIEndpoint.as_view(http_method_names=["get", "post"]),
+ name="label",
+ ),
+ path(
+ "workspaces//projects//labels//",
+ LabelDetailAPIEndpoint.as_view(http_method_names=["get", "patch", "delete"]),
+ name="label",
+ ),
+]
diff --git a/apps/api/plane/api/urls/member.py b/apps/api/plane/api/urls/member.py
new file mode 100644
index 00000000..a2b331ea
--- /dev/null
+++ b/apps/api/plane/api/urls/member.py
@@ -0,0 +1,16 @@
+from django.urls import path
+
+from plane.api.views import ProjectMemberAPIEndpoint, WorkspaceMemberAPIEndpoint
+
+urlpatterns = [
+ path(
+ "workspaces//projects//members/",
+ ProjectMemberAPIEndpoint.as_view(http_method_names=["get"]),
+ name="project-members",
+ ),
+ path(
+ "workspaces//members/",
+ WorkspaceMemberAPIEndpoint.as_view(http_method_names=["get"]),
+ name="workspace-members",
+ ),
+]
diff --git a/apps/api/plane/api/urls/module.py b/apps/api/plane/api/urls/module.py
new file mode 100644
index 00000000..578f5c86
--- /dev/null
+++ b/apps/api/plane/api/urls/module.py
@@ -0,0 +1,47 @@
+from django.urls import path
+
+from plane.api.views import (
+ ModuleListCreateAPIEndpoint,
+ ModuleDetailAPIEndpoint,
+ ModuleIssueListCreateAPIEndpoint,
+ ModuleIssueDetailAPIEndpoint,
+ ModuleArchiveUnarchiveAPIEndpoint,
+)
+
+urlpatterns = [
+ path(
+ "workspaces//projects//modules/",
+ ModuleListCreateAPIEndpoint.as_view(http_method_names=["get", "post"]),
+ name="modules",
+ ),
+ path(
+ "workspaces//projects//modules//",
+ ModuleDetailAPIEndpoint.as_view(http_method_names=["get", "patch", "delete"]),
+ name="modules-detail",
+ ),
+ path(
+ "workspaces//projects//modules//module-issues/",
+ ModuleIssueListCreateAPIEndpoint.as_view(http_method_names=["get", "post"]),
+ name="module-issues",
+ ),
+ path(
+ "workspaces//projects//modules//module-issues//",
+ ModuleIssueDetailAPIEndpoint.as_view(http_method_names=["delete"]),
+ name="module-issues-detail",
+ ),
+ path(
+ "workspaces//projects//modules//archive/",
+ ModuleArchiveUnarchiveAPIEndpoint.as_view(http_method_names=["post"]),
+ name="module-archive",
+ ),
+ path(
+ "workspaces//projects//archived-modules/",
+ ModuleArchiveUnarchiveAPIEndpoint.as_view(http_method_names=["get"]),
+ name="module-archive-list",
+ ),
+ path(
+ "workspaces//projects//archived-modules//unarchive/",
+ ModuleArchiveUnarchiveAPIEndpoint.as_view(http_method_names=["delete"]),
+ name="module-unarchive",
+ ),
+]
diff --git a/apps/api/plane/api/urls/project.py b/apps/api/plane/api/urls/project.py
new file mode 100644
index 00000000..9cf9291a
--- /dev/null
+++ b/apps/api/plane/api/urls/project.py
@@ -0,0 +1,25 @@
+from django.urls import path
+
+from plane.api.views import (
+ ProjectListCreateAPIEndpoint,
+ ProjectDetailAPIEndpoint,
+ ProjectArchiveUnarchiveAPIEndpoint,
+)
+
+urlpatterns = [
+ path(
+ "workspaces//projects/",
+ ProjectListCreateAPIEndpoint.as_view(http_method_names=["get", "post"]),
+ name="project",
+ ),
+ path(
+ "workspaces//projects//",
+ ProjectDetailAPIEndpoint.as_view(http_method_names=["get", "patch", "delete"]),
+ name="project",
+ ),
+ path(
+ "workspaces//projects//archive/",
+ ProjectArchiveUnarchiveAPIEndpoint.as_view(http_method_names=["post", "delete"]),
+ name="project-archive-unarchive",
+ ),
+]
diff --git a/apps/api/plane/api/urls/schema.py b/apps/api/plane/api/urls/schema.py
new file mode 100644
index 00000000..781dbe9d
--- /dev/null
+++ b/apps/api/plane/api/urls/schema.py
@@ -0,0 +1,20 @@
+from drf_spectacular.views import (
+ SpectacularAPIView,
+ SpectacularRedocView,
+ SpectacularSwaggerView,
+)
+from django.urls import path
+
+urlpatterns = [
+ path("schema/", SpectacularAPIView.as_view(), name="schema"),
+ path(
+ "schema/swagger-ui/",
+ SpectacularSwaggerView.as_view(url_name="schema"),
+ name="swagger-ui",
+ ),
+ path(
+ "schema/redoc/",
+ SpectacularRedocView.as_view(url_name="schema"),
+ name="redoc",
+ ),
+]
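
These routes expose the machine-readable OpenAPI schema plus Swagger and ReDoc UIs. A quick way to pull the schema, assuming the patterns are mounted under a hypothetical /api/v1/ prefix on a hypothetical host:

    import requests

    resp = requests.get("https://plane.example.com/api/v1/schema/")
    resp.raise_for_status()
    with open("schema.yaml", "wb") as fp:
        fp.write(resp.content)   # drf-spectacular serves the schema as YAML by default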
diff --git a/apps/api/plane/api/urls/state.py b/apps/api/plane/api/urls/state.py
new file mode 100644
index 00000000..e35012a2
--- /dev/null
+++ b/apps/api/plane/api/urls/state.py
@@ -0,0 +1,19 @@
+from django.urls import path
+
+from plane.api.views import (
+ StateListCreateAPIEndpoint,
+ StateDetailAPIEndpoint,
+)
+
+urlpatterns = [
+ path(
+ "workspaces//projects//states/",
+ StateListCreateAPIEndpoint.as_view(http_method_names=["get", "post"]),
+ name="states",
+ ),
+ path(
+ "workspaces//projects//states//",
+ StateDetailAPIEndpoint.as_view(http_method_names=["get", "patch", "delete"]),
+ name="states",
+ ),
+]
diff --git a/apps/api/plane/api/urls/user.py b/apps/api/plane/api/urls/user.py
new file mode 100644
index 00000000..461b0833
--- /dev/null
+++ b/apps/api/plane/api/urls/user.py
@@ -0,0 +1,11 @@
+from django.urls import path
+
+from plane.api.views import UserEndpoint
+
+urlpatterns = [
+ path(
+ "users/me/",
+ UserEndpoint.as_view(http_method_names=["get"]),
+ name="users",
+ ),
+]
diff --git a/apps/api/plane/api/urls/work_item.py b/apps/api/plane/api/urls/work_item.py
new file mode 100644
index 00000000..7207df95
--- /dev/null
+++ b/apps/api/plane/api/urls/work_item.py
@@ -0,0 +1,146 @@
+from django.urls import path
+
+from plane.api.views import (
+ IssueListCreateAPIEndpoint,
+ IssueDetailAPIEndpoint,
+ IssueLinkListCreateAPIEndpoint,
+ IssueLinkDetailAPIEndpoint,
+ IssueCommentListCreateAPIEndpoint,
+ IssueCommentDetailAPIEndpoint,
+ IssueActivityListAPIEndpoint,
+ IssueActivityDetailAPIEndpoint,
+ IssueAttachmentListCreateAPIEndpoint,
+ IssueAttachmentDetailAPIEndpoint,
+ WorkspaceIssueAPIEndpoint,
+ IssueSearchEndpoint,
+)
+
+# Deprecated url patterns
+old_url_patterns = [
+ path(
+ "workspaces//issues/search/",
+ IssueSearchEndpoint.as_view(http_method_names=["get"]),
+ name="issue-search",
+ ),
+ path(
+ "workspaces//issues/-/",
+ WorkspaceIssueAPIEndpoint.as_view(http_method_names=["get"]),
+ name="issue-by-identifier",
+ ),
+ path(
+ "workspaces//projects//issues/",
+ IssueListCreateAPIEndpoint.as_view(http_method_names=["get", "post"]),
+ name="issue",
+ ),
+ path(
+ "workspaces//projects//issues//",
+ IssueDetailAPIEndpoint.as_view(http_method_names=["get", "patch", "delete"]),
+ name="issue",
+ ),
+ path(
+ "workspaces//projects//issues//links/",
+ IssueLinkListCreateAPIEndpoint.as_view(http_method_names=["get", "post"]),
+ name="link",
+ ),
+ path(
+ "workspaces//projects//issues//links//",
+ IssueLinkDetailAPIEndpoint.as_view(http_method_names=["get", "patch", "delete"]),
+ name="link",
+ ),
+ path(
+ "workspaces//projects//issues//comments/",
+ IssueCommentListCreateAPIEndpoint.as_view(http_method_names=["get", "post"]),
+ name="comment",
+ ),
+ path(
+ "workspaces//projects//issues//comments//",
+ IssueCommentDetailAPIEndpoint.as_view(http_method_names=["get", "patch", "delete"]),
+ name="comment",
+ ),
+ path(
+ "workspaces//projects//issues//activities/",
+ IssueActivityListAPIEndpoint.as_view(http_method_names=["get"]),
+ name="activity",
+ ),
+ path(
+ "workspaces//projects//issues//activities//",
+ IssueActivityDetailAPIEndpoint.as_view(http_method_names=["get"]),
+ name="activity",
+ ),
+ path(
+ "workspaces//projects//issues//issue-attachments/",
+ IssueAttachmentListCreateAPIEndpoint.as_view(http_method_names=["get", "post"]),
+ name="attachment",
+ ),
+ path(
+ "workspaces//projects//issues//issue-attachments//",
+ IssueAttachmentDetailAPIEndpoint.as_view(http_method_names=["get", "patch", "delete"]),
+ name="issue-attachment",
+ ),
+]
+
+# New url patterns with work-items as the prefix
+new_url_patterns = [
+ path(
+ "workspaces//work-items/search/",
+ IssueSearchEndpoint.as_view(http_method_names=["get"]),
+ name="work-item-search",
+ ),
+ path(
+ "workspaces//work-items/-/",
+ WorkspaceIssueAPIEndpoint.as_view(http_method_names=["get"]),
+ name="work-item-by-identifier",
+ ),
+ path(
+ "workspaces//projects//work-items/",
+ IssueListCreateAPIEndpoint.as_view(http_method_names=["get", "post"]),
+ name="work-item-list",
+ ),
+ path(
+ "workspaces//projects//work-items//",
+ IssueDetailAPIEndpoint.as_view(http_method_names=["get", "patch", "delete"]),
+ name="work-item-detail",
+ ),
+ path(
+ "workspaces//projects//work-items//links/",
+ IssueLinkListCreateAPIEndpoint.as_view(http_method_names=["get", "post"]),
+ name="work-item-link-list",
+ ),
+ path(
+ "workspaces//projects//work-items//links//",
+ IssueLinkDetailAPIEndpoint.as_view(http_method_names=["get", "patch", "delete"]),
+ name="work-item-link-detail",
+ ),
+ path(
+ "workspaces//projects//work-items//comments/",
+ IssueCommentListCreateAPIEndpoint.as_view(http_method_names=["get", "post"]),
+ name="work-item-comment-list",
+ ),
+ path(
+ "workspaces//projects//work-items//comments//",
+ IssueCommentDetailAPIEndpoint.as_view(http_method_names=["get", "patch", "delete"]),
+ name="work-item-comment-detail",
+ ),
+ path(
+ "workspaces//projects//work-items//activities/",
+ IssueActivityListAPIEndpoint.as_view(http_method_names=["get"]),
+ name="work-item-activity-list",
+ ),
+ path(
+ "workspaces//projects//work-items//activities//",
+ IssueActivityDetailAPIEndpoint.as_view(http_method_names=["get"]),
+ name="work-item-activity-detail",
+ ),
+ path(
+ "workspaces//projects//work-items//attachments/",
+ IssueAttachmentListCreateAPIEndpoint.as_view(http_method_names=["get", "post"]),
+ name="work-item-attachment-list",
+ ),
+ path(
+ "workspaces//projects//work-items//attachments//",
+ IssueAttachmentDetailAPIEndpoint.as_view(http_method_names=["get", "patch", "delete"]),
+ name="work-item-attachment-detail",
+ ),
+]
+
+urlpatterns = old_url_patterns + new_url_patterns
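
Since both lists bind the same view classes, the deprecated /issues/ routes and the new /work-items/ routes stay behavior-identical, and clients can migrate path by path. A sketch checking that with Django's resolver, assuming these patterns are included at the URLconf root (the slug and UUID are made up):

    from django.urls import resolve

    pid = "0a1b2c3d-0000-0000-0000-000000000000"
    old = resolve(f"/workspaces/acme/projects/{pid}/issues/")
    new = resolve(f"/workspaces/acme/projects/{pid}/work-items/")
    assert old.func.view_class is new.func.view_class   # both hit IssueListCreateAPIEndpoint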
diff --git a/apps/api/plane/api/views/__init__.py b/apps/api/plane/api/views/__init__.py
new file mode 100644
index 00000000..8535d485
--- /dev/null
+++ b/apps/api/plane/api/views/__init__.py
@@ -0,0 +1,55 @@
+from .project import (
+ ProjectListCreateAPIEndpoint,
+ ProjectDetailAPIEndpoint,
+ ProjectArchiveUnarchiveAPIEndpoint,
+)
+
+from .state import (
+ StateListCreateAPIEndpoint,
+ StateDetailAPIEndpoint,
+)
+
+from .issue import (
+ WorkspaceIssueAPIEndpoint,
+ IssueListCreateAPIEndpoint,
+ IssueDetailAPIEndpoint,
+ LabelListCreateAPIEndpoint,
+ LabelDetailAPIEndpoint,
+ IssueLinkListCreateAPIEndpoint,
+ IssueLinkDetailAPIEndpoint,
+ IssueCommentListCreateAPIEndpoint,
+ IssueCommentDetailAPIEndpoint,
+ IssueActivityListAPIEndpoint,
+ IssueActivityDetailAPIEndpoint,
+ IssueAttachmentListCreateAPIEndpoint,
+ IssueAttachmentDetailAPIEndpoint,
+ IssueSearchEndpoint,
+)
+
+from .cycle import (
+ CycleListCreateAPIEndpoint,
+ CycleDetailAPIEndpoint,
+ CycleIssueListCreateAPIEndpoint,
+ CycleIssueDetailAPIEndpoint,
+ TransferCycleIssueAPIEndpoint,
+ CycleArchiveUnarchiveAPIEndpoint,
+)
+
+from .module import (
+ ModuleListCreateAPIEndpoint,
+ ModuleDetailAPIEndpoint,
+ ModuleIssueListCreateAPIEndpoint,
+ ModuleIssueDetailAPIEndpoint,
+ ModuleArchiveUnarchiveAPIEndpoint,
+)
+
+from .member import ProjectMemberAPIEndpoint, WorkspaceMemberAPIEndpoint
+
+from .intake import (
+ IntakeIssueListCreateAPIEndpoint,
+ IntakeIssueDetailAPIEndpoint,
+)
+
+from .asset import UserAssetEndpoint, UserServerAssetEndpoint, GenericAssetEndpoint
+
+from .user import UserEndpoint
diff --git a/apps/api/plane/api/views/asset.py b/apps/api/plane/api/views/asset.py
new file mode 100644
index 00000000..a91ebc88
--- /dev/null
+++ b/apps/api/plane/api/views/asset.py
@@ -0,0 +1,613 @@
+# Python Imports
+import uuid
+
+# Django Imports
+from django.utils import timezone
+from django.conf import settings
+
+# Third party imports
+from rest_framework import status
+from rest_framework.response import Response
+from drf_spectacular.utils import OpenApiExample, OpenApiRequest
+
+# Module Imports
+from plane.bgtasks.storage_metadata_task import get_asset_object_metadata
+from plane.settings.storage import S3Storage
+from plane.db.models import FileAsset, User, Workspace
+from plane.api.views.base import BaseAPIView
+from plane.api.serializers import (
+ UserAssetUploadSerializer,
+ AssetUpdateSerializer,
+ GenericAssetUploadSerializer,
+ GenericAssetUpdateSerializer,
+)
+from plane.utils.openapi import (
+ ASSET_ID_PARAMETER,
+ WORKSPACE_SLUG_PARAMETER,
+ PRESIGNED_URL_SUCCESS_RESPONSE,
+ GENERIC_ASSET_UPLOAD_SUCCESS_RESPONSE,
+ GENERIC_ASSET_VALIDATION_ERROR_RESPONSE,
+ ASSET_CONFLICT_RESPONSE,
+ ASSET_DOWNLOAD_SUCCESS_RESPONSE,
+ ASSET_DOWNLOAD_ERROR_RESPONSE,
+ ASSET_UPDATED_RESPONSE,
+ ASSET_DELETED_RESPONSE,
+ VALIDATION_ERROR_RESPONSE,
+ ASSET_NOT_FOUND_RESPONSE,
+ NOT_FOUND_RESPONSE,
+ UNAUTHORIZED_RESPONSE,
+ asset_docs,
+)
+from plane.utils.exception_logger import log_exception
+
+
+class UserAssetEndpoint(BaseAPIView):
+ """This endpoint is used to upload user profile images."""
+
+ def asset_delete(self, asset_id):
+ asset = FileAsset.objects.filter(id=asset_id).first()
+ if asset is None:
+ return
+ asset.is_deleted = True
+ asset.deleted_at = timezone.now()
+ asset.save(update_fields=["is_deleted", "deleted_at"])
+ return
+
+ def entity_asset_delete(self, entity_type, asset, request):
+ # User Avatar
+ if entity_type == FileAsset.EntityTypeContext.USER_AVATAR:
+ user = User.objects.get(id=asset.user_id)
+ user.avatar_asset_id = None
+ user.save()
+ return
+ # User Cover
+ if entity_type == FileAsset.EntityTypeContext.USER_COVER:
+ user = User.objects.get(id=asset.user_id)
+ user.cover_image_asset_id = None
+ user.save()
+ return
+ return
+
+ @asset_docs(
+ operation_id="create_user_asset_upload",
+ summary="Generate presigned URL for user asset upload",
+ description="Generate presigned URL for user asset upload",
+ request=OpenApiRequest(
+ request=UserAssetUploadSerializer,
+ examples=[
+ OpenApiExample(
+ "User Avatar Upload",
+ value={
+ "name": "profile.jpg",
+ "type": "image/jpeg",
+ "size": 1024000,
+ "entity_type": "USER_AVATAR",
+ },
+ description="Example request for uploading a user avatar",
+ ),
+ OpenApiExample(
+ "User Cover Upload",
+ value={
+ "name": "cover.jpg",
+ "type": "image/jpeg",
+ "size": 1024000,
+ "entity_type": "USER_COVER",
+ },
+ description="Example request for uploading a user cover",
+ ),
+ ],
+ ),
+ responses={
+ 200: PRESIGNED_URL_SUCCESS_RESPONSE,
+ 400: VALIDATION_ERROR_RESPONSE,
+ 401: UNAUTHORIZED_RESPONSE,
+ },
+ )
+ def post(self, request):
+ """Generate presigned URL for user asset upload.
+
+ Create a presigned URL for uploading user profile assets (avatar or cover image).
+ This endpoint generates the necessary credentials for direct S3 upload.
+ """
+ # get the asset key
+ name = request.data.get("name")
+ type = request.data.get("type", "image/jpeg")
+ size = int(request.data.get("size", settings.FILE_SIZE_LIMIT))
+ entity_type = request.data.get("entity_type", False)
+
+ # Clamp the file size to the configured limit
+ size_limit = min(size, settings.FILE_SIZE_LIMIT)
+
+ # Check if the entity type is allowed
+ if not entity_type or entity_type not in ["USER_AVATAR", "USER_COVER"]:
+ return Response(
+ {"error": "Invalid entity type.", "status": False},
+ status=status.HTTP_400_BAD_REQUEST,
+ )
+
+ # Check if the file type is allowed
+ allowed_types = [
+ "image/jpeg",
+ "image/png",
+ "image/webp",
+ "image/jpg",
+ "image/gif",
+ ]
+ if type not in allowed_types:
+ return Response(
+ {
+ "error": "Invalid file type. Only JPEG and PNG files are allowed.",
+ "status": False,
+ },
+ status=status.HTTP_400_BAD_REQUEST,
+ )
+
+ # asset key
+ asset_key = f"{uuid.uuid4().hex}-{name}"
+
+ # Create a File Asset
+ asset = FileAsset.objects.create(
+ attributes={"name": name, "type": type, "size": size_limit},
+ asset=asset_key,
+ size=size_limit,
+ user=request.user,
+ created_by=request.user,
+ entity_type=entity_type,
+ )
+
+ # Get the presigned URL
+ storage = S3Storage(request=request)
+ # Generate a presigned URL to share an S3 object
+ presigned_url = storage.generate_presigned_post(object_name=asset_key, file_type=type, file_size=size_limit)
+ # Return the presigned URL
+ return Response(
+ {
+ "upload_data": presigned_url,
+ "asset_id": str(asset.id),
+ "asset_url": asset.asset_url,
+ },
+ status=status.HTTP_200_OK,
+ )
+
+ @asset_docs(
+ operation_id="update_user_asset",
+ summary="Mark user asset as uploaded",
+ description="Mark user asset as uploaded",
+ parameters=[ASSET_ID_PARAMETER],
+ request=OpenApiRequest(
+ request=AssetUpdateSerializer,
+ examples=[
+ OpenApiExample(
+ "Update Asset Attributes",
+ value={
+ "attributes": {
+ "name": "updated_profile.jpg",
+ "type": "image/jpeg",
+ "size": 1024000,
+ },
+ "entity_type": "USER_AVATAR",
+ },
+ description="Example request for updating asset attributes",
+ ),
+ ],
+ ),
+ responses={
+ 204: ASSET_UPDATED_RESPONSE,
+ 404: NOT_FOUND_RESPONSE,
+ },
+ )
+ def patch(self, request, asset_id):
+ """Update user asset after upload completion.
+
+ Update the asset status and attributes after the file has been uploaded to S3.
+ This endpoint should be called after completing the S3 upload to mark the asset as uploaded.
+ """
+ # get the asset
+ asset = FileAsset.objects.get(id=asset_id, user_id=request.user.id)
+ # mark the asset as uploaded
+ asset.is_uploaded = True
+ # fetch the storage metadata if it is missing
+ if not asset.storage_metadata:
+ get_asset_object_metadata.delay(asset_id=str(asset_id))
+ # update the attributes
+ asset.attributes = request.data.get("attributes", asset.attributes)
+ # save the asset
+ asset.save(update_fields=["is_uploaded", "attributes"])
+ return Response(status=status.HTTP_204_NO_CONTENT)
+
+ @asset_docs(
+ operation_id="delete_user_asset",
+ summary="Delete user asset",
+ parameters=[ASSET_ID_PARAMETER],
+ responses={
+ 204: ASSET_DELETED_RESPONSE,
+ 404: NOT_FOUND_RESPONSE,
+ },
+ )
+ def delete(self, request, asset_id):
+ """Delete user asset.
+
+ Delete a user profile asset (avatar or cover image) and remove its reference from the user profile.
+ This performs a soft delete by marking the asset as deleted and updating the user's profile.
+ """
+ asset = FileAsset.objects.get(id=asset_id, user_id=request.user.id)
+ asset.is_deleted = True
+ asset.deleted_at = timezone.now()
+ # get the entity and save the asset id for the request field
+ self.entity_asset_delete(entity_type=asset.entity_type, asset=asset, request=request)
+ asset.save(update_fields=["is_deleted", "deleted_at"])
+ return Response(status=status.HTTP_204_NO_CONTENT)
+
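
The flow above is a three-step contract: request a presigned POST, upload the file straight to object storage, then PATCH to confirm. A client-side sketch using requests, with a hypothetical base URL and API key, and assuming upload_data carries the boto3-style {url, fields} shape:

    import requests

    API = "https://plane.example.com/api/v1"   # hypothetical base URL
    HEADERS = {"X-Api-Key": "plane_api_..."}   # hypothetical API key

    # 1. Ask the POST handler above for a presigned upload.
    meta = requests.post(
        f"{API}/assets/user-assets/",
        headers=HEADERS,
        json={"name": "profile.jpg", "type": "image/jpeg",
              "size": 1024000, "entity_type": "USER_AVATAR"},
    ).json()

    # 2. Send the file to S3/MinIO using the returned form fields.
    upload = meta["upload_data"]
    with open("profile.jpg", "rb") as fp:
        requests.post(upload["url"], data=upload["fields"], files={"file": fp})

    # 3. Confirm completion so the PATCH handler sets is_uploaded=True.
    requests.patch(
        f"{API}/assets/user-assets/{meta['asset_id']}/",
        headers=HEADERS,
        json={"attributes": {"name": "profile.jpg", "type": "image/jpeg", "size": 1024000}},
    )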
+
+class UserServerAssetEndpoint(BaseAPIView):
+ """This endpoint is used to upload user profile images."""
+
+ def asset_delete(self, asset_id):
+ asset = FileAsset.objects.filter(id=asset_id).first()
+ if asset is None:
+ return
+ asset.is_deleted = True
+ asset.deleted_at = timezone.now()
+ asset.save(update_fields=["is_deleted", "deleted_at"])
+ return
+
+ def entity_asset_delete(self, entity_type, asset, request):
+ # User Avatar
+ if entity_type == FileAsset.EntityTypeContext.USER_AVATAR:
+ user = User.objects.get(id=asset.user_id)
+ user.avatar_asset_id = None
+ user.save()
+ return
+ # User Cover
+ if entity_type == FileAsset.EntityTypeContext.USER_COVER:
+ user = User.objects.get(id=asset.user_id)
+ user.cover_image_asset_id = None
+ user.save()
+ return
+ return
+
+ @asset_docs(
+ operation_id="create_user_server_asset_upload",
+ summary="Generate presigned URL for user server asset upload",
+ request=UserAssetUploadSerializer,
+ responses={
+ 200: PRESIGNED_URL_SUCCESS_RESPONSE,
+ 400: VALIDATION_ERROR_RESPONSE,
+ },
+ )
+ def post(self, request):
+ """Generate presigned URL for user server asset upload.
+
+ Create a presigned URL for uploading user profile assets
+ (avatar or cover image) using server credentials. This endpoint generates the
+ necessary credentials for direct S3 upload with server-side authentication.
+ """
+ # get the asset key
+ name = request.data.get("name")
+ type = request.data.get("type", "image/jpeg")
+ size = int(request.data.get("size", settings.FILE_SIZE_LIMIT))
+ entity_type = request.data.get("entity_type", False)
+
+ # Clamp the file size to the configured limit
+ size_limit = min(size, settings.FILE_SIZE_LIMIT)
+
+ # Check if the entity type is allowed
+ if not entity_type or entity_type not in ["USER_AVATAR", "USER_COVER"]:
+ return Response(
+ {"error": "Invalid entity type.", "status": False},
+ status=status.HTTP_400_BAD_REQUEST,
+ )
+
+ # Check if the file type is allowed
+ allowed_types = [
+ "image/jpeg",
+ "image/png",
+ "image/webp",
+ "image/jpg",
+ "image/gif",
+ ]
+ if type not in allowed_types:
+ return Response(
+ {
+ "error": "Invalid file type. Only JPEG and PNG files are allowed.",
+ "status": False,
+ },
+ status=status.HTTP_400_BAD_REQUEST,
+ )
+
+ # asset key
+ asset_key = f"{uuid.uuid4().hex}-{name}"
+
+ # Create a File Asset
+ asset = FileAsset.objects.create(
+ attributes={"name": name, "type": type, "size": size_limit},
+ asset=asset_key,
+ size=size_limit,
+ user=request.user,
+ created_by=request.user,
+ entity_type=entity_type,
+ )
+
+ # Get the presigned URL
+ storage = S3Storage(request=request, is_server=True)
+ # Generate a presigned URL to share an S3 object
+ presigned_url = storage.generate_presigned_post(object_name=asset_key, file_type=type, file_size=size_limit)
+ # Return the presigned URL
+ return Response(
+ {
+ "upload_data": presigned_url,
+ "asset_id": str(asset.id),
+ "asset_url": asset.asset_url,
+ },
+ status=status.HTTP_200_OK,
+ )
+
+ @asset_docs(
+ operation_id="update_user_server_asset",
+ summary="Mark user server asset as uploaded",
+ parameters=[ASSET_ID_PARAMETER],
+ request=AssetUpdateSerializer,
+ responses={
+ 204: ASSET_UPDATED_RESPONSE,
+ 404: NOT_FOUND_RESPONSE,
+ },
+ )
+ def patch(self, request, asset_id):
+ """Update user server asset after upload completion.
+
+ Update the asset status and attributes after the file has been uploaded to S3 using server credentials.
+ This endpoint should be called after completing the S3 upload to mark the asset as uploaded.
+ """
+ # get the asset
+ asset = FileAsset.objects.get(id=asset_id, user_id=request.user.id)
+ # mark the asset as uploaded
+ asset.is_uploaded = True
+ # fetch the storage metadata if it is missing
+ if not asset.storage_metadata:
+ get_asset_object_metadata.delay(asset_id=str(asset_id))
+ # update the attributes
+ asset.attributes = request.data.get("attributes", asset.attributes)
+ # save the asset
+ asset.save(update_fields=["is_uploaded", "attributes"])
+ return Response(status=status.HTTP_204_NO_CONTENT)
+
+ @asset_docs(
+ operation_id="delete_user_server_asset",
+ summary="Delete user server asset",
+ parameters=[ASSET_ID_PARAMETER],
+ responses={
+ 204: ASSET_DELETED_RESPONSE,
+ 404: NOT_FOUND_RESPONSE,
+ },
+ )
+ def delete(self, request, asset_id):
+ """Delete user server asset.
+
+ Delete a user profile asset (avatar or cover image) using server credentials and
+ remove its reference from the user profile. This performs a soft delete by marking the
+ asset as deleted and updating the user's profile.
+ """
+ asset = FileAsset.objects.get(id=asset_id, user_id=request.user.id)
+ asset.is_deleted = True
+ asset.deleted_at = timezone.now()
+ # get the entity and save the asset id for the request field
+ self.entity_asset_delete(entity_type=asset.entity_type, asset=asset, request=request)
+ asset.save(update_fields=["is_deleted", "deleted_at"])
+ return Response(status=status.HTTP_204_NO_CONTENT)
+
+
+class GenericAssetEndpoint(BaseAPIView):
+ """This endpoint is used to upload generic assets that can be later bound to entities."""
+
+ use_read_replica = True
+
+ @asset_docs(
+ operation_id="get_generic_asset",
+ summary="Get presigned URL for asset download",
+ description="Get presigned URL for asset download",
+ parameters=[WORKSPACE_SLUG_PARAMETER],
+ responses={
+ 200: ASSET_DOWNLOAD_SUCCESS_RESPONSE,
+ 400: ASSET_DOWNLOAD_ERROR_RESPONSE,
+ 404: ASSET_NOT_FOUND_RESPONSE,
+ },
+ )
+ def get(self, request, slug, asset_id):
+ """Get presigned URL for asset download.
+
+ Generate a presigned URL for downloading a generic asset.
+ The asset must be uploaded and associated with the specified workspace.
+ """
+ try:
+ # Get the workspace
+ workspace = Workspace.objects.get(slug=slug)
+
+ # Get the asset
+ asset = FileAsset.objects.get(id=asset_id, workspace_id=workspace.id, is_deleted=False)
+
+ # Check if the asset exists and is uploaded
+ if not asset.is_uploaded:
+ return Response(
+ {"error": "Asset not yet uploaded"},
+ status=status.HTTP_400_BAD_REQUEST,
+ )
+
+ # Generate presigned URL for GET
+ storage = S3Storage(request=request, is_server=True)
+ presigned_url = storage.generate_presigned_url(
+ object_name=asset.asset.name, filename=asset.attributes.get("name")
+ )
+
+ return Response(
+ {
+ "asset_id": str(asset.id),
+ "asset_url": presigned_url,
+ "asset_name": asset.attributes.get("name", ""),
+ "asset_type": asset.attributes.get("type", ""),
+ },
+ status=status.HTTP_200_OK,
+ )
+
+ except Workspace.DoesNotExist:
+ return Response({"error": "Workspace not found"}, status=status.HTTP_404_NOT_FOUND)
+ except FileAsset.DoesNotExist:
+ return Response({"error": "Asset not found"}, status=status.HTTP_404_NOT_FOUND)
+ except Exception as e:
+ log_exception(e)
+ return Response(
+ {"error": "Internal server error"},
+ status=status.HTTP_500_INTERNAL_SERVER_ERROR,
+ )
+
+ @asset_docs(
+ operation_id="create_generic_asset_upload",
+ summary="Generate presigned URL for generic asset upload",
+ description="Generate presigned URL for generic asset upload",
+ parameters=[WORKSPACE_SLUG_PARAMETER],
+ request=OpenApiRequest(
+ request=GenericAssetUploadSerializer,
+ examples=[
+ OpenApiExample(
+ "GenericAssetUploadSerializer",
+ value={
+ "name": "image.jpg",
+ "type": "image/jpeg",
+ "size": 1024000,
+ "project_id": "123e4567-e89b-12d3-a456-426614174000",
+ "external_id": "1234567890",
+ "external_source": "github",
+ },
+ description="Example request for uploading a generic asset",
+ ),
+ ],
+ ),
+ responses={
+ 200: GENERIC_ASSET_UPLOAD_SUCCESS_RESPONSE,
+ 400: GENERIC_ASSET_VALIDATION_ERROR_RESPONSE,
+ 404: NOT_FOUND_RESPONSE,
+ 409: ASSET_CONFLICT_RESPONSE,
+ },
+ )
+ def post(self, request, slug):
+ """Generate presigned URL for generic asset upload.
+
+ Create a presigned URL for uploading generic assets that can be bound to entities like work items.
+ Supports various file types and includes external source tracking for integrations.
+ """
+ name = request.data.get("name")
+ type = request.data.get("type")
+ size = int(request.data.get("size", settings.FILE_SIZE_LIMIT))
+ project_id = request.data.get("project_id")
+ external_id = request.data.get("external_id")
+ external_source = request.data.get("external_source")
+
+ # Check if the request is valid
+ if not name or not size:
+ return Response(
+ {"error": "Name and size are required fields.", "status": False},
+ status=status.HTTP_400_BAD_REQUEST,
+ )
+
+ # Clamp the file size to the configured limit
+ size_limit = min(size, settings.FILE_SIZE_LIMIT)
+
+ # Check if the file type is allowed
+ if not type or type not in settings.ATTACHMENT_MIME_TYPES:
+ return Response(
+ {"error": "Invalid file type.", "status": False},
+ status=status.HTTP_400_BAD_REQUEST,
+ )
+
+ # Get the workspace
+ workspace = Workspace.objects.get(slug=slug)
+
+ # asset key
+ asset_key = f"{workspace.id}/{uuid.uuid4().hex}-{name}"
+
+ # Check for existing asset with same external details if provided
+ if external_id and external_source:
+ existing_asset = FileAsset.objects.filter(
+ workspace__slug=slug,
+ external_source=external_source,
+ external_id=external_id,
+ is_deleted=False,
+ ).first()
+
+ if existing_asset:
+ return Response(
+ {
+ "message": "Asset with same external id and source already exists",
+ "asset_id": str(existing_asset.id),
+ "asset_url": existing_asset.asset_url,
+ },
+ status=status.HTTP_409_CONFLICT,
+ )
+
+ # Create a File Asset
+ asset = FileAsset.objects.create(
+ attributes={"name": name, "type": type, "size": size_limit},
+ asset=asset_key,
+ size=size_limit,
+ workspace_id=workspace.id,
+ project_id=project_id,
+ created_by=request.user,
+ external_id=external_id,
+ external_source=external_source,
+ entity_type=FileAsset.EntityTypeContext.ISSUE_ATTACHMENT, # Using ISSUE_ATTACHMENT since we'll bind it to issues # noqa: E501
+ )
+
+ # Get the presigned URL
+ storage = S3Storage(request=request, is_server=True)
+ presigned_url = storage.generate_presigned_post(object_name=asset_key, file_type=type, file_size=size_limit)
+
+ return Response(
+ {
+ "upload_data": presigned_url,
+ "asset_id": str(asset.id),
+ "asset_url": asset.asset_url,
+ },
+ status=status.HTTP_200_OK,
+ )
+
+ @asset_docs(
+ operation_id="update_generic_asset",
+ summary="Update generic asset after upload completion",
+ description="Update generic asset after upload completion",
+ parameters=[WORKSPACE_SLUG_PARAMETER, ASSET_ID_PARAMETER],
+ request=OpenApiRequest(
+ request=GenericAssetUpdateSerializer,
+ examples=[
+ OpenApiExample(
+ "GenericAssetUpdateSerializer",
+ value={"is_uploaded": True},
+ description="Example request for updating a generic asset",
+ )
+ ],
+ ),
+ responses={
+ 204: ASSET_UPDATED_RESPONSE,
+ 404: ASSET_NOT_FOUND_RESPONSE,
+ },
+ )
+ def patch(self, request, slug, asset_id):
+ """Update generic asset after upload completion.
+
+ Update the asset status after the file has been uploaded to S3.
+ This endpoint should be called after completing the S3 upload to mark the asset as uploaded
+ and trigger metadata extraction.
+ """
+ try:
+ asset = FileAsset.objects.get(id=asset_id, workspace__slug=slug, is_deleted=False)
+
+ # Update is_uploaded status
+ asset.is_uploaded = request.data.get("is_uploaded", asset.is_uploaded)
+
+ # Update storage metadata if not present
+ if not asset.storage_metadata:
+ get_asset_object_metadata.delay(asset_id=str(asset_id))
+
+ asset.save(update_fields=["is_uploaded"])
+
+ return Response(status=status.HTTP_204_NO_CONTENT)
+ except FileAsset.DoesNotExist:
+ return Response({"error": "Asset not found"}, status=status.HTTP_404_NOT_FOUND)
diff --git a/apps/api/plane/api/views/base.py b/apps/api/plane/api/views/base.py
new file mode 100644
index 00000000..b3acbab3
--- /dev/null
+++ b/apps/api/plane/api/views/base.py
@@ -0,0 +1,154 @@
+# Python imports
+import zoneinfo
+
+# Django imports
+from django.conf import settings
+from django.core.exceptions import ObjectDoesNotExist, ValidationError
+from django.db import IntegrityError
+from django.urls import resolve
+from django.utils import timezone
+from plane.db.models.api import APIToken
+from rest_framework import status
+from rest_framework.permissions import IsAuthenticated
+from rest_framework.response import Response
+
+# Third party imports
+from rest_framework.generics import GenericAPIView
+
+# Module imports
+from plane.api.middleware.api_authentication import APIKeyAuthentication
+from plane.api.rate_limit import ApiKeyRateThrottle, ServiceTokenRateThrottle
+from plane.utils.exception_logger import log_exception
+from plane.utils.paginator import BasePaginator
+from plane.utils.core.mixins import ReadReplicaControlMixin
+
+
+class TimezoneMixin:
+ """
+ Enables timezone conversion according to the user's configured timezone.
+ """
+
+ def initial(self, request, *args, **kwargs):
+ super().initial(request, *args, **kwargs)
+ if request.user.is_authenticated:
+ timezone.activate(zoneinfo.ZoneInfo(request.user.user_timezone))
+ else:
+ timezone.deactivate()
+
+
+class BaseAPIView(TimezoneMixin, GenericAPIView, ReadReplicaControlMixin, BasePaginator):
+ authentication_classes = [APIKeyAuthentication]
+
+ permission_classes = [IsAuthenticated]
+
+ use_read_replica = False
+
+ def filter_queryset(self, queryset):
+ for backend in list(self.filter_backends):
+ queryset = backend().filter_queryset(self.request, queryset, self)
+ return queryset
+
+ def get_throttles(self):
+ throttle_classes = []
+ api_key = self.request.headers.get("X-Api-Key")
+
+ if api_key:
+ service_token = APIToken.objects.filter(token=api_key, is_service=True).first()
+
+ if service_token:
+ throttle_classes.append(ServiceTokenRateThrottle())
+ return throttle_classes
+
+ throttle_classes.append(ApiKeyRateThrottle())
+
+ return throttle_classes
+
+ def handle_exception(self, exc):
+ """
+ Handle any exception that occurs, by returning an appropriate response,
+ or re-raising the error.
+ """
+ try:
+ response = super().handle_exception(exc)
+ return response
+ except Exception as e:
+ if isinstance(e, IntegrityError):
+ return Response(
+ {"error": "The payload is not valid"},
+ status=status.HTTP_400_BAD_REQUEST,
+ )
+
+ if isinstance(e, ValidationError):
+ return Response(
+ {"error": "Please provide valid detail"},
+ status=status.HTTP_400_BAD_REQUEST,
+ )
+
+ if isinstance(e, ObjectDoesNotExist):
+ return Response(
+ {"error": "The requested resource does not exist."},
+ status=status.HTTP_404_NOT_FOUND,
+ )
+
+ if isinstance(e, KeyError):
+ return Response(
+ {"error": "The required key does not exist."},
+ status=status.HTTP_400_BAD_REQUEST,
+ )
+
+ log_exception(e)
+ return Response(
+ {"error": "Something went wrong please try again later"},
+ status=status.HTTP_500_INTERNAL_SERVER_ERROR,
+ )
+
+ def dispatch(self, request, *args, **kwargs):
+ try:
+ response = super().dispatch(request, *args, **kwargs)
+ if settings.DEBUG:
+ from django.db import connection
+
+ print(f"{request.method} - {request.get_full_path()} of Queries: {len(connection.queries)}")
+ return response
+ except Exception as exc:
+ response = self.handle_exception(exc)
+ return response
+
+ def finalize_response(self, request, response, *args, **kwargs):
+ # Call super to get the default response
+ response = super().finalize_response(request, response, *args, **kwargs)
+
+ # Add custom headers if they exist in the request META
+ ratelimit_remaining = request.META.get("X-RateLimit-Remaining")
+ if ratelimit_remaining is not None:
+ response["X-RateLimit-Remaining"] = ratelimit_remaining
+
+ ratelimit_reset = request.META.get("X-RateLimit-Reset")
+ if ratelimit_reset is not None:
+ response["X-RateLimit-Reset"] = ratelimit_reset
+
+ return response
+
+ @property
+ def workspace_slug(self):
+ return self.kwargs.get("slug", None)
+
+ @property
+ def project_id(self):
+ project_id = self.kwargs.get("project_id", None)
+ if project_id:
+ return project_id
+
+ if resolve(self.request.path_info).url_name == "project":
+ return self.kwargs.get("pk", None)
+
+ @property
+ def fields(self):
+ fields = [field for field in self.request.GET.get("fields", "").split(",") if field]
+ return fields if fields else None
+
+ @property
+ def expand(self):
+ expand = [expand for expand in self.request.GET.get("expand", "").split(",") if expand]
+ return expand if expand else None
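
The fields and expand properties above turn ?fields=a,b and ?expand=c query strings into lists (or None) for dynamic serializers. A standalone sketch of the same parsing, runnable outside Django:

    from urllib.parse import parse_qs

    def parse_csv_param(query_string: str, name: str):
        raw = parse_qs(query_string).get(name, [""])[0]
        values = [v for v in raw.split(",") if v]
        return values or None

    assert parse_csv_param("fields=id,name&expand=members", "fields") == ["id", "name"]
    assert parse_csv_param("fields=id,name&expand=members", "expand") == ["members"]
    assert parse_csv_param("fields=", "fields") is None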
diff --git a/apps/api/plane/api/views/cycle.py b/apps/api/plane/api/views/cycle.py
new file mode 100644
index 00000000..849dab34
--- /dev/null
+++ b/apps/api/plane/api/views/cycle.py
@@ -0,0 +1,1247 @@
+# Python imports
+import json
+
+# Django imports
+from django.core import serializers
+from django.utils import timezone
+from django.core.serializers.json import DjangoJSONEncoder
+from django.db.models import (
+ Count,
+ F,
+ Func,
+ OuterRef,
+ Q,
+ Sum,
+)
+
+# Third party imports
+from rest_framework import status
+from rest_framework.response import Response
+from drf_spectacular.utils import OpenApiRequest, OpenApiResponse
+
+# Module imports
+from plane.api.serializers import (
+ CycleIssueSerializer,
+ CycleSerializer,
+ CycleIssueRequestSerializer,
+ TransferCycleIssueRequestSerializer,
+ CycleCreateSerializer,
+ CycleUpdateSerializer,
+ IssueSerializer,
+)
+from plane.app.permissions import ProjectEntityPermission
+from plane.bgtasks.issue_activities_task import issue_activity
+from plane.db.models import (
+ Cycle,
+ CycleIssue,
+ Issue,
+ Project,
+ FileAsset,
+ IssueLink,
+ ProjectMember,
+ UserFavorite,
+)
+from plane.utils.cycle_transfer_issues import transfer_cycle_issues
+from plane.utils.host import base_host
+from .base import BaseAPIView
+from plane.bgtasks.webhook_task import model_activity
+from plane.utils.openapi.decorators import cycle_docs
+from plane.utils.openapi import (
+ CURSOR_PARAMETER,
+ PER_PAGE_PARAMETER,
+ CYCLE_VIEW_PARAMETER,
+ ORDER_BY_PARAMETER,
+ FIELDS_PARAMETER,
+ EXPAND_PARAMETER,
+ create_paginated_response,
+ # Request Examples
+ CYCLE_CREATE_EXAMPLE,
+ CYCLE_UPDATE_EXAMPLE,
+ CYCLE_ISSUE_REQUEST_EXAMPLE,
+ TRANSFER_CYCLE_ISSUE_EXAMPLE,
+ # Response Examples
+ CYCLE_EXAMPLE,
+ CYCLE_ISSUE_EXAMPLE,
+ TRANSFER_CYCLE_ISSUE_SUCCESS_EXAMPLE,
+ TRANSFER_CYCLE_ISSUE_ERROR_EXAMPLE,
+ TRANSFER_CYCLE_COMPLETED_ERROR_EXAMPLE,
+ DELETED_RESPONSE,
+ ARCHIVED_RESPONSE,
+ CYCLE_CANNOT_ARCHIVE_RESPONSE,
+ UNARCHIVED_RESPONSE,
+ REQUIRED_FIELDS_RESPONSE,
+)
+
+
+class CycleListCreateAPIEndpoint(BaseAPIView):
+ """Cycle List and Create Endpoint"""
+
+ serializer_class = CycleSerializer
+ model = Cycle
+ webhook_event = "cycle"
+ permission_classes = [ProjectEntityPermission]
+ use_read_replica = True
+
+ def get_queryset(self):
+ return (
+ Cycle.objects.filter(workspace__slug=self.kwargs.get("slug"))
+ .filter(project_id=self.kwargs.get("project_id"))
+ .filter(
+ project__project_projectmember__member=self.request.user,
+ project__project_projectmember__is_active=True,
+ )
+ .select_related("project")
+ .select_related("workspace")
+ .select_related("owned_by")
+ .annotate(
+ total_issues=Count(
+ "issue_cycle",
+ filter=Q(
+ issue_cycle__issue__archived_at__isnull=True,
+ issue_cycle__issue__is_draft=False,
+ issue_cycle__deleted_at__isnull=True,
+ ),
+ )
+ )
+ .annotate(
+ completed_issues=Count(
+ "issue_cycle__issue__state__group",
+ filter=Q(
+ issue_cycle__issue__state__group="completed",
+ issue_cycle__issue__archived_at__isnull=True,
+ issue_cycle__issue__is_draft=False,
+ issue_cycle__deleted_at__isnull=True,
+ ),
+ )
+ )
+ .annotate(
+ cancelled_issues=Count(
+ "issue_cycle__issue__state__group",
+ filter=Q(
+ issue_cycle__issue__state__group="cancelled",
+ issue_cycle__issue__archived_at__isnull=True,
+ issue_cycle__issue__is_draft=False,
+ issue_cycle__deleted_at__isnull=True,
+ ),
+ )
+ )
+ .annotate(
+ started_issues=Count(
+ "issue_cycle__issue__state__group",
+ filter=Q(
+ issue_cycle__issue__state__group="started",
+ issue_cycle__issue__archived_at__isnull=True,
+ issue_cycle__issue__is_draft=False,
+ issue_cycle__deleted_at__isnull=True,
+ ),
+ )
+ )
+ .annotate(
+ unstarted_issues=Count(
+ "issue_cycle__issue__state__group",
+ filter=Q(
+ issue_cycle__issue__state__group="unstarted",
+ issue_cycle__issue__archived_at__isnull=True,
+ issue_cycle__issue__is_draft=False,
+ issue_cycle__deleted_at__isnull=True,
+ ),
+ )
+ )
+ .annotate(
+ backlog_issues=Count(
+ "issue_cycle__issue__state__group",
+ filter=Q(
+ issue_cycle__issue__state__group="backlog",
+ issue_cycle__issue__archived_at__isnull=True,
+ issue_cycle__issue__is_draft=False,
+ issue_cycle__deleted_at__isnull=True,
+ ),
+ )
+ )
+ .order_by(self.kwargs.get("order_by", "-created_at"))
+ .distinct()
+ )
+
+ @cycle_docs(
+ operation_id="list_cycles",
+ summary="List cycles",
+ description="Retrieve all cycles in a project. Supports filtering by cycle status like current, upcoming, completed, or draft.", # noqa: E501
+ parameters=[
+ CURSOR_PARAMETER,
+ PER_PAGE_PARAMETER,
+ CYCLE_VIEW_PARAMETER,
+ ORDER_BY_PARAMETER,
+ FIELDS_PARAMETER,
+ EXPAND_PARAMETER,
+ ],
+ responses={
+ 200: create_paginated_response(
+ CycleSerializer,
+ "PaginatedCycleResponse",
+ "Paginated list of cycles",
+ "Paginated Cycles",
+ ),
+ },
+ )
+ def get(self, request, slug, project_id):
+ """List cycles
+
+ Retrieve all cycles in a project.
+ Supports filtering by cycle status like current, upcoming, completed, or draft.
+ """
+ project = Project.objects.get(workspace__slug=slug, pk=project_id)
+ queryset = self.get_queryset().filter(archived_at__isnull=True)
+ cycle_view = request.GET.get("cycle_view", "all")
+
+ # Current Cycle
+ if cycle_view == "current":
+ queryset = queryset.filter(
+ start_date__lte=timezone.now(), end_date__gte=timezone.now()
+ )
+ data = CycleSerializer(
+ queryset,
+ many=True,
+ fields=self.fields,
+ expand=self.expand,
+ context={"project": project},
+ ).data
+ return Response(data, status=status.HTTP_200_OK)
+
+ # Upcoming Cycles
+ if cycle_view == "upcoming":
+ queryset = queryset.filter(start_date__gt=timezone.now())
+ return self.paginate(
+ request=request,
+ queryset=(queryset),
+ on_results=lambda cycles: CycleSerializer(
+ cycles,
+ many=True,
+ fields=self.fields,
+ expand=self.expand,
+ context={"project": project},
+ ).data,
+ )
+
+ # Completed Cycles
+ if cycle_view == "completed":
+ queryset = queryset.filter(end_date__lt=timezone.now())
+ return self.paginate(
+ request=request,
+ queryset=(queryset),
+ on_results=lambda cycles: CycleSerializer(
+ cycles,
+ many=True,
+ fields=self.fields,
+ expand=self.expand,
+ context={"project": project},
+ ).data,
+ )
+
+ # Draft Cycles
+ if cycle_view == "draft":
+ queryset = queryset.filter(end_date=None, start_date=None)
+ return self.paginate(
+ request=request,
+ queryset=(queryset),
+ on_results=lambda cycles: CycleSerializer(
+ cycles,
+ many=True,
+ fields=self.fields,
+ expand=self.expand,
+ context={"project": project},
+ ).data,
+ )
+
+ # Incomplete Cycles
+ if cycle_view == "incomplete":
+ queryset = queryset.filter(
+ Q(end_date__gte=timezone.now()) | Q(end_date__isnull=True)
+ )
+ return self.paginate(
+ request=request,
+ queryset=(queryset),
+ on_results=lambda cycles: CycleSerializer(
+ cycles,
+ many=True,
+ fields=self.fields,
+ expand=self.expand,
+ context={"project": project},
+ ).data,
+ )
+ return self.paginate(
+ request=request,
+ queryset=(queryset),
+ on_results=lambda cycles: CycleSerializer(
+ cycles,
+ many=True,
+ fields=self.fields,
+ expand=self.expand,
+ context={"project": project},
+ ).data,
+ )
+
+ @cycle_docs(
+ operation_id="create_cycle",
+ summary="Create cycle",
+ description="Create a new development cycle with specified name, description, and date range. Supports external ID tracking for integration purposes.", # noqa: E501
+ request=OpenApiRequest(
+ request=CycleCreateSerializer,
+ examples=[CYCLE_CREATE_EXAMPLE],
+ ),
+ responses={
+ 201: OpenApiResponse(
+ description="Cycle created",
+ response=CycleSerializer,
+ examples=[CYCLE_EXAMPLE],
+ ),
+ },
+ )
+ def post(self, request, slug, project_id):
+ """Create cycle
+
+ Create a new development cycle with specified name, description, and date range.
+ Supports external ID tracking for integration purposes.
+ """
+ if (
+ request.data.get("start_date", None) is None
+ and request.data.get("end_date", None) is None
+ ) or (
+ request.data.get("start_date", None) is not None
+ and request.data.get("end_date", None) is not None
+ ):
+
+ serializer = CycleCreateSerializer(
+ data=request.data, context={"request": request}
+ )
+ if serializer.is_valid():
+ if (
+ request.data.get("external_id")
+ and request.data.get("external_source")
+ and Cycle.objects.filter(
+ project_id=project_id,
+ workspace__slug=slug,
+ external_source=request.data.get("external_source"),
+ external_id=request.data.get("external_id"),
+ ).exists()
+ ):
+ cycle = Cycle.objects.filter(
+ workspace__slug=slug,
+ project_id=project_id,
+ external_source=request.data.get("external_source"),
+ external_id=request.data.get("external_id"),
+ ).first()
+ return Response(
+ {
+ "error": "Cycle with the same external id and external source already exists",
+ "id": str(cycle.id),
+ },
+ status=status.HTTP_409_CONFLICT,
+ )
+ serializer.save(project_id=project_id)
+ # Send the model activity
+ model_activity.delay(
+ model_name="cycle",
+ model_id=str(serializer.instance.id),
+ requested_data=request.data,
+ current_instance=None,
+ actor_id=request.user.id,
+ slug=slug,
+ origin=base_host(request=request, is_app=True),
+ )
+
+ cycle = Cycle.objects.get(pk=serializer.instance.id)
+ serializer = CycleSerializer(cycle)
+ return Response(serializer.data, status=status.HTTP_201_CREATED)
+ return Response(serializer.errors, status=status.HTTP_400_BAD_REQUEST)
+ else:
+ return Response(
+ {
+ "error": "Both start date and end date are either required or are to be null"
+ },
+ status=status.HTTP_400_BAD_REQUEST,
+ )
+
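
Two behaviors of the list/create endpoint above are easy to trip over: start_date and end_date must be supplied together (or both omitted), and cycle_view=current returns a plain list while every other view is paginated. A request-level sketch (host, key, slug, and project UUID are hypothetical):

    import requests

    API = "https://plane.example.com/api/v1"   # hypothetical base URL
    HEADERS = {"X-Api-Key": "plane_api_..."}   # hypothetical API key
    CYCLES = f"{API}/workspaces/acme/projects/0a1b2c3d-0000-0000-0000-000000000000/cycles/"

    # Valid: both dates present (or omit both to create a draft cycle).
    requests.post(CYCLES, headers=HEADERS, json={
        "name": "Sprint 12", "start_date": "2025-01-06", "end_date": "2025-01-17",
    })

    # Invalid: only one date supplied -> 400 from the guard at the top of post().
    requests.post(CYCLES, headers=HEADERS, json={"name": "Broken", "start_date": "2025-01-06"})

    # cycle_view=current returns a plain list; other views return paginated results.
    requests.get(CYCLES, headers=HEADERS, params={"cycle_view": "current"})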
+
+class CycleDetailAPIEndpoint(BaseAPIView):
+ """
+ This view provides the `retrieve`, `update`, and `destroy` actions for cycles.
+ """
+
+ serializer_class = CycleSerializer
+ model = Cycle
+ webhook_event = "cycle"
+ permission_classes = [ProjectEntityPermission]
+ use_read_replica = True
+
+ def get_queryset(self):
+ return (
+ Cycle.objects.filter(workspace__slug=self.kwargs.get("slug"))
+ .filter(project_id=self.kwargs.get("project_id"))
+ .filter(
+ project__project_projectmember__member=self.request.user,
+ project__project_projectmember__is_active=True,
+ )
+ .select_related("project")
+ .select_related("workspace")
+ .select_related("owned_by")
+ .annotate(
+ total_issues=Count(
+ "issue_cycle",
+ filter=Q(
+ issue_cycle__issue__archived_at__isnull=True,
+ issue_cycle__issue__is_draft=False,
+ issue_cycle__deleted_at__isnull=True,
+ ),
+ )
+ )
+ .annotate(
+ completed_issues=Count(
+ "issue_cycle__issue__state__group",
+ filter=Q(
+ issue_cycle__issue__state__group="completed",
+ issue_cycle__issue__archived_at__isnull=True,
+ issue_cycle__issue__is_draft=False,
+ issue_cycle__deleted_at__isnull=True,
+ ),
+ )
+ )
+ .annotate(
+ cancelled_issues=Count(
+ "issue_cycle__issue__state__group",
+ filter=Q(
+ issue_cycle__issue__state__group="cancelled",
+ issue_cycle__issue__archived_at__isnull=True,
+ issue_cycle__issue__is_draft=False,
+ issue_cycle__deleted_at__isnull=True,
+ ),
+ )
+ )
+ .annotate(
+ started_issues=Count(
+ "issue_cycle__issue__state__group",
+ filter=Q(
+ issue_cycle__issue__state__group="started",
+ issue_cycle__issue__archived_at__isnull=True,
+ issue_cycle__issue__is_draft=False,
+ issue_cycle__deleted_at__isnull=True,
+ ),
+ )
+ )
+ .annotate(
+ unstarted_issues=Count(
+ "issue_cycle__issue__state__group",
+ filter=Q(
+ issue_cycle__issue__state__group="unstarted",
+ issue_cycle__issue__archived_at__isnull=True,
+ issue_cycle__issue__is_draft=False,
+ issue_cycle__deleted_at__isnull=True,
+ ),
+ )
+ )
+ .annotate(
+ backlog_issues=Count(
+ "issue_cycle__issue__state__group",
+ filter=Q(
+ issue_cycle__issue__state__group="backlog",
+ issue_cycle__issue__archived_at__isnull=True,
+ issue_cycle__issue__is_draft=False,
+ issue_cycle__deleted_at__isnull=True,
+ ),
+ )
+ )
+ .order_by(self.kwargs.get("order_by", "-created_at"))
+ .distinct()
+ )
+
+ @cycle_docs(
+ operation_id="retrieve_cycle",
+ summary="Retrieve cycle",
+ description="Retrieve details of a specific cycle by its ID. Supports cycle status filtering.",
+ responses={
+ 200: OpenApiResponse(
+ description="Cycles",
+ response=CycleSerializer,
+ examples=[CYCLE_EXAMPLE],
+ ),
+ },
+ )
+ def get(self, request, slug, project_id, pk):
+ """List or retrieve cycles
+
+ Retrieve all cycles in a project or get details of a specific cycle.
+ Supports filtering by cycle status like current, upcoming, completed, or draft.
+ """
+ project = Project.objects.get(workspace__slug=slug, pk=project_id)
+ queryset = self.get_queryset().filter(archived_at__isnull=True).get(pk=pk)
+ data = CycleSerializer(
+ queryset,
+ fields=self.fields,
+ expand=self.expand,
+ context={"project": project},
+ ).data
+ return Response(data, status=status.HTTP_200_OK)
+
+ @cycle_docs(
+ operation_id="update_cycle",
+ summary="Update cycle",
+ description="Modify an existing cycle's properties like name, description, or date range. Completed cycles can only have their sort order changed.", # noqa: E501
+ request=OpenApiRequest(
+ request=CycleUpdateSerializer,
+ examples=[CYCLE_UPDATE_EXAMPLE],
+ ),
+ responses={
+ 200: OpenApiResponse(
+ description="Cycle updated",
+ response=CycleSerializer,
+ examples=[CYCLE_EXAMPLE],
+ ),
+ },
+ )
+ def patch(self, request, slug, project_id, pk):
+ """Update cycle
+
+ Modify an existing cycle's properties like name, description, or date range.
+ Completed cycles can only have their sort order changed.
+ """
+ cycle = Cycle.objects.get(workspace__slug=slug, project_id=project_id, pk=pk)
+
+ current_instance = json.dumps(
+ CycleSerializer(cycle).data, cls=DjangoJSONEncoder
+ )
+
+ if cycle.archived_at:
+ return Response(
+ {"error": "Archived cycle cannot be edited"},
+ status=status.HTTP_400_BAD_REQUEST,
+ )
+
+ request_data = request.data
+
+ if cycle.end_date is not None and cycle.end_date < timezone.now():
+ if "sort_order" in request_data:
+ # Can only change sort order
+ request_data = {
+ "sort_order": request_data.get("sort_order", cycle.sort_order)
+ }
+ else:
+ return Response(
+ {
+ "error": "The Cycle has already been completed so it cannot be edited"
+ },
+ status=status.HTTP_400_BAD_REQUEST,
+ )
+
+ serializer = CycleUpdateSerializer(
+ cycle, data=request.data, partial=True, context={"request": request}
+ )
+ if serializer.is_valid():
+ if (
+ request.data.get("external_id")
+ and (cycle.external_id != request.data.get("external_id"))
+ and Cycle.objects.filter(
+ project_id=project_id,
+ workspace__slug=slug,
+ external_source=request.data.get(
+ "external_source", cycle.external_source
+ ),
+ external_id=request.data.get("external_id"),
+ ).exists()
+ ):
+ return Response(
+ {
+ "error": "Cycle with the same external id and external source already exists",
+ "id": str(cycle.id),
+ },
+ status=status.HTTP_409_CONFLICT,
+ )
+ serializer.save()
+
+ # Send the model activity
+ model_activity.delay(
+ model_name="cycle",
+ model_id=str(serializer.instance.id),
+ requested_data=request.data,
+ current_instance=current_instance,
+ actor_id=request.user.id,
+ slug=slug,
+ origin=base_host(request=request, is_app=True),
+ )
+ cycle = Cycle.objects.get(pk=serializer.instance.id)
+ serializer = CycleSerializer(cycle)
+ return Response(serializer.data, status=status.HTTP_200_OK)
+ return Response(serializer.errors, status=status.HTTP_400_BAD_REQUEST)
+
+ @cycle_docs(
+ operation_id="delete_cycle",
+ summary="Delete cycle",
+ description="Permanently remove a cycle and all its associated issue relationships",
+ responses={
+ 204: DELETED_RESPONSE,
+ },
+ )
+ def delete(self, request, slug, project_id, pk):
+ """Delete cycle
+
+ Permanently remove a cycle and all its associated issue relationships.
+ Only admins or the cycle creator can perform this action.
+ """
+ cycle = Cycle.objects.get(workspace__slug=slug, project_id=project_id, pk=pk)
+ if cycle.owned_by_id != request.user.id and (
+ not ProjectMember.objects.filter(
+ workspace__slug=slug,
+ member=request.user,
+ role=20,
+ project_id=project_id,
+ is_active=True,
+ ).exists()
+ ):
+ return Response(
+ {"error": "Only admin or creator can delete the cycle"},
+ status=status.HTTP_403_FORBIDDEN,
+ )
+
+ cycle_issues = list(
+ CycleIssue.objects.filter(cycle_id=self.kwargs.get("pk")).values_list(
+ "issue", flat=True
+ )
+ )
+
+ issue_activity.delay(
+ type="cycle.activity.deleted",
+ requested_data=json.dumps(
+ {
+ "cycle_id": str(pk),
+ "cycle_name": str(cycle.name),
+ "issues": [str(issue_id) for issue_id in cycle_issues],
+ }
+ ),
+ actor_id=str(request.user.id),
+ issue_id=None,
+ project_id=str(project_id),
+ current_instance=None,
+ epoch=int(timezone.now().timestamp()),
+ )
+ # Delete the cycle
+ cycle.delete()
+ # Delete the user favorite cycle
+ UserFavorite.objects.filter(
+ entity_type="cycle", entity_identifier=pk, project_id=project_id
+ ).delete()
+ return Response(status=status.HTTP_204_NO_CONTENT)
+
+
+class CycleArchiveUnarchiveAPIEndpoint(BaseAPIView):
+ """Cycle Archive and Unarchive Endpoint"""
+
+ permission_classes = [ProjectEntityPermission]
+ use_read_replica = True
+
+ def get_queryset(self):
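+        # Aggregates per-state issue counts and estimate sums for archived
+        # cycles; every aggregate excludes archived, draft, and soft-deleted
+        # issues via the shared Q filters below.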
+ return (
+ Cycle.objects.filter(workspace__slug=self.kwargs.get("slug"))
+ .filter(project_id=self.kwargs.get("project_id"))
+ .filter(
+ project__project_projectmember__member=self.request.user,
+ project__project_projectmember__is_active=True,
+ )
+ .filter(archived_at__isnull=False)
+ .select_related("project")
+ .select_related("workspace")
+ .select_related("owned_by")
+ .annotate(
+ total_issues=Count(
+ "issue_cycle",
+ filter=Q(
+ issue_cycle__issue__archived_at__isnull=True,
+ issue_cycle__issue__is_draft=False,
+ issue_cycle__deleted_at__isnull=True,
+ ),
+ )
+ )
+ .annotate(
+ completed_issues=Count(
+ "issue_cycle__issue__state__group",
+ filter=Q(
+ issue_cycle__issue__state__group="completed",
+ issue_cycle__issue__archived_at__isnull=True,
+ issue_cycle__issue__is_draft=False,
+ issue_cycle__deleted_at__isnull=True,
+ ),
+ )
+ )
+ .annotate(
+ cancelled_issues=Count(
+ "issue_cycle__issue__state__group",
+ filter=Q(
+ issue_cycle__issue__state__group="cancelled",
+ issue_cycle__issue__archived_at__isnull=True,
+ issue_cycle__issue__is_draft=False,
+ issue_cycle__deleted_at__isnull=True,
+ ),
+ )
+ )
+ .annotate(
+ started_issues=Count(
+ "issue_cycle__issue__state__group",
+ filter=Q(
+ issue_cycle__issue__state__group="started",
+ issue_cycle__issue__archived_at__isnull=True,
+ issue_cycle__issue__is_draft=False,
+ issue_cycle__deleted_at__isnull=True,
+ ),
+ )
+ )
+ .annotate(
+ unstarted_issues=Count(
+ "issue_cycle__issue__state__group",
+ filter=Q(
+ issue_cycle__issue__state__group="unstarted",
+ issue_cycle__issue__archived_at__isnull=True,
+ issue_cycle__issue__is_draft=False,
+ issue_cycle__deleted_at__isnull=True,
+ ),
+ )
+ )
+ .annotate(
+ backlog_issues=Count(
+ "issue_cycle__issue__state__group",
+ filter=Q(
+ issue_cycle__issue__state__group="backlog",
+ issue_cycle__issue__archived_at__isnull=True,
+ issue_cycle__issue__is_draft=False,
+ issue_cycle__deleted_at__isnull=True,
+ ),
+ )
+ )
+ .annotate(total_estimates=Sum("issue_cycle__issue__estimate_point"))
+ .annotate(
+ completed_estimates=Sum(
+ "issue_cycle__issue__estimate_point",
+ filter=Q(
+ issue_cycle__issue__state__group="completed",
+ issue_cycle__issue__archived_at__isnull=True,
+ issue_cycle__issue__is_draft=False,
+ issue_cycle__deleted_at__isnull=True,
+ ),
+ )
+ )
+ .annotate(
+ started_estimates=Sum(
+ "issue_cycle__issue__estimate_point",
+ filter=Q(
+ issue_cycle__issue__state__group="started",
+ issue_cycle__issue__archived_at__isnull=True,
+ issue_cycle__issue__is_draft=False,
+ issue_cycle__deleted_at__isnull=True,
+ ),
+ )
+ )
+ .order_by(self.kwargs.get("order_by", "-created_at"))
+ .distinct()
+ )
+
+ @cycle_docs(
+ operation_id="list_archived_cycles",
+ description="Retrieve all cycles that have been archived in the project.",
+ summary="List archived cycles",
+ parameters=[CURSOR_PARAMETER, PER_PAGE_PARAMETER],
+ request={},
+ responses={
+ 200: create_paginated_response(
+ CycleSerializer,
+ "PaginatedArchivedCycleResponse",
+ "Paginated list of archived cycles",
+ "Paginated Archived Cycles",
+ ),
+ },
+ )
+ def get(self, request, slug, project_id):
+ """List archived cycles
+
+ Retrieve all cycles that have been archived in the project.
+ Returns paginated results with cycle statistics and completion data.
+ """
+ return self.paginate(
+ request=request,
+ queryset=(self.get_queryset()),
+ on_results=lambda cycles: CycleSerializer(
+ cycles, many=True, fields=self.fields, expand=self.expand
+ ).data,
+ )
+
+ @cycle_docs(
+ operation_id="archive_cycle",
+ summary="Archive cycle",
+ description="Move a completed cycle to archived status for historical tracking. Only cycles that have ended can be archived.", # noqa: E501
+ request={},
+ responses={
+ 204: ARCHIVED_RESPONSE,
+ 400: CYCLE_CANNOT_ARCHIVE_RESPONSE,
+ },
+ )
+ def post(self, request, slug, project_id, cycle_id):
+ """Archive cycle
+
+ Move a completed cycle to archived status for historical tracking.
+ Only cycles that have ended can be archived.
+ """
+ cycle = Cycle.objects.get(
+ pk=cycle_id, project_id=project_id, workspace__slug=slug
+ )
+        if cycle.end_date is None or cycle.end_date >= timezone.now():
+ return Response(
+ {"error": "Only completed cycles can be archived"},
+ status=status.HTTP_400_BAD_REQUEST,
+ )
+ cycle.archived_at = timezone.now()
+ cycle.save()
+ UserFavorite.objects.filter(
+ entity_type="cycle",
+ entity_identifier=cycle_id,
+ project_id=project_id,
+ workspace__slug=slug,
+ ).delete()
+ return Response(status=status.HTTP_204_NO_CONTENT)
+
+ @cycle_docs(
+ operation_id="unarchive_cycle",
+ summary="Unarchive cycle",
+ description="Restore an archived cycle to active status, making it available for regular use.",
+ request={},
+ responses={
+ 204: UNARCHIVED_RESPONSE,
+ },
+ )
+ def delete(self, request, slug, project_id, cycle_id):
+ """Unarchive cycle
+
+ Restore an archived cycle to active status, making it available for regular use.
+ The cycle will reappear in active cycle lists.
+ """
+ cycle = Cycle.objects.get(
+ pk=cycle_id, project_id=project_id, workspace__slug=slug
+ )
+ cycle.archived_at = None
+ cycle.save()
+ return Response(status=status.HTTP_204_NO_CONTENT)
+
+
+class CycleIssueListCreateAPIEndpoint(BaseAPIView):
+ """Cycle Issue List and Create Endpoint"""
+
+ serializer_class = CycleIssueSerializer
+ model = CycleIssue
+ webhook_event = "cycle_issue"
+ permission_classes = [ProjectEntityPermission]
+ use_read_replica = True
+
+ def get_queryset(self):
+ return (
+ CycleIssue.objects.annotate(
+ sub_issues_count=Issue.issue_objects.filter(parent=OuterRef("issue_id"))
+ .order_by()
+ .annotate(count=Func(F("id"), function="Count"))
+ .values("count")
+ )
+ .filter(workspace__slug=self.kwargs.get("slug"))
+ .filter(project_id=self.kwargs.get("project_id"))
+ .filter(
+ project__project_projectmember__member=self.request.user,
+ project__project_projectmember__is_active=True,
+ )
+ .filter(cycle_id=self.kwargs.get("cycle_id"))
+ .select_related("project")
+ .select_related("workspace")
+ .select_related("cycle")
+ .select_related("issue", "issue__state", "issue__project")
+ .prefetch_related("issue__assignees", "issue__labels")
+ .order_by(self.kwargs.get("order_by", "-created_at"))
+ .distinct()
+ )
+
+ @cycle_docs(
+ operation_id="list_cycle_work_items",
+ summary="List cycle work items",
+ description="Retrieve all work items assigned to a cycle.",
+ parameters=[CURSOR_PARAMETER, PER_PAGE_PARAMETER],
+ request={},
+ responses={
+ 200: create_paginated_response(
+ CycleIssueSerializer,
+ "PaginatedCycleIssueResponse",
+ "Paginated list of cycle work items",
+ "Paginated Cycle Work Items",
+ ),
+ },
+ )
+ def get(self, request, slug, project_id, cycle_id):
+ """List or retrieve cycle work items
+
+ Retrieve all work items assigned to a cycle or get details of a specific cycle work item.
+ Returns paginated results with work item details, assignees, and labels.
+ """
+ # List
+ order_by = request.GET.get("order_by", "created_at")
+ issues = (
+ Issue.issue_objects.filter(
+ issue_cycle__cycle_id=cycle_id, issue_cycle__deleted_at__isnull=True
+ )
+ .annotate(
+ sub_issues_count=Issue.issue_objects.filter(parent=OuterRef("id"))
+ .order_by()
+ .annotate(count=Func(F("id"), function="Count"))
+ .values("count")
+ )
+ .annotate(bridge_id=F("issue_cycle__id"))
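+            # bridge_id surfaces the CycleIssue join-row id for each issue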
+ .filter(project_id=project_id)
+ .filter(workspace__slug=slug)
+ .select_related("project")
+ .select_related("workspace")
+ .select_related("state")
+ .select_related("parent")
+ .prefetch_related("assignees")
+ .prefetch_related("labels")
+ .order_by(order_by)
+ .annotate(
+ link_count=IssueLink.objects.filter(issue=OuterRef("id"))
+ .order_by()
+ .annotate(count=Func(F("id"), function="Count"))
+ .values("count")
+ )
+ .annotate(
+ attachment_count=FileAsset.objects.filter(
+ issue_id=OuterRef("id"),
+ entity_type=FileAsset.EntityTypeContext.ISSUE_ATTACHMENT,
+ )
+ .order_by()
+ .annotate(count=Func(F("id"), function="Count"))
+ .values("count")
+ )
+ )
+
+ return self.paginate(
+ request=request,
+ queryset=(issues),
+ on_results=lambda issues: IssueSerializer(
+ issues, many=True, fields=self.fields, expand=self.expand
+ ).data,
+ )
+
+ @cycle_docs(
+ operation_id="add_cycle_work_items",
+ summary="Add Work Items to Cycle",
+ description="Assign multiple work items to a cycle. Automatically handles bulk creation and updates with activity tracking.", # noqa: E501
+ request=OpenApiRequest(
+ request=CycleIssueRequestSerializer,
+ examples=[CYCLE_ISSUE_REQUEST_EXAMPLE],
+ ),
+ responses={
+ 200: OpenApiResponse(
+ description="Cycle work items added",
+ response=CycleIssueSerializer,
+ examples=[CYCLE_ISSUE_EXAMPLE],
+ ),
+ 400: REQUIRED_FIELDS_RESPONSE,
+ },
+ )
+ def post(self, request, slug, project_id, cycle_id):
+ """Add cycle issues
+
+ Assign multiple work items to a cycle or move them from another cycle.
+ Automatically handles bulk creation and updates with activity tracking.
+ """
+ issues = request.data.get("issues", [])
+
+ if not issues:
+ return Response(
+ {"error": "Work items are required", "code": "MISSING_WORK_ITEMS"},
+ status=status.HTTP_400_BAD_REQUEST,
+ )
+
+ cycle = Cycle.objects.get(
+ workspace__slug=slug, project_id=project_id, pk=cycle_id
+ )
+
+ if cycle.end_date is not None and cycle.end_date < timezone.now():
+ return Response(
+ {
+ "code": "CYCLE_COMPLETED",
+ "message": "The Cycle has already been completed so no new issues can be added",
+ },
+ status=status.HTTP_400_BAD_REQUEST,
+ )
+
+        # Get existing CycleIssue rows that link these issues to other cycles
+ cycle_issues = list(
+ CycleIssue.objects.filter(~Q(cycle_id=cycle_id), issue_id__in=issues)
+ )
+ existing_issues = [
+ str(cycle_issue.issue_id)
+ for cycle_issue in cycle_issues
+ if str(cycle_issue.issue_id) in issues
+ ]
+ new_issues = list(set(issues) - set(existing_issues))
+
+ # New issues to create
+ created_records = CycleIssue.objects.bulk_create(
+ [
+ CycleIssue(
+ project_id=project_id,
+ workspace_id=cycle.workspace_id,
+ cycle_id=cycle_id,
+ issue_id=issue,
+ )
+ for issue in new_issues
+ ],
+ ignore_conflicts=True,
+ batch_size=10,
+ )
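+        # With ignore_conflicts=True, bulk_create silently skips duplicate rows;
+        # on some database backends the returned objects may lack
+        # database-generated fields such as primary keys.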
+
+ # Updated Issues
+ updated_records = []
+ update_cycle_issue_activity = []
+ # Iterate over each cycle_issue in cycle_issues
+ for cycle_issue in cycle_issues:
+ old_cycle_id = cycle_issue.cycle_id
+ # Update the cycle_issue's cycle_id
+ cycle_issue.cycle_id = cycle_id
+ # Add the modified cycle_issue to the records_to_update list
+ updated_records.append(cycle_issue)
+ # Record the update activity
+ update_cycle_issue_activity.append(
+ {
+ "old_cycle_id": str(old_cycle_id),
+ "new_cycle_id": str(cycle_id),
+ "issue_id": str(cycle_issue.issue_id),
+ }
+ )
+
+ # Update the cycle issues
+ CycleIssue.objects.bulk_update(updated_records, ["cycle_id"], batch_size=100)
+
+ # Capture Issue Activity
+ issue_activity.delay(
+ type="cycle.activity.created",
+ requested_data=json.dumps({"cycles_list": issues}),
+ actor_id=str(self.request.user.id),
+ issue_id=None,
+ project_id=str(self.kwargs.get("project_id", None)),
+ current_instance=json.dumps(
+ {
+ "updated_cycle_issues": update_cycle_issue_activity,
+ "created_cycle_issues": serializers.serialize(
+ "json", created_records
+ ),
+ }
+ ),
+ epoch=int(timezone.now().timestamp()),
+ notification=True,
+ origin=base_host(request=request, is_app=True),
+ )
+ # Return all Cycle Issues
+ return Response(
+ CycleIssueSerializer(self.get_queryset(), many=True).data,
+ status=status.HTTP_200_OK,
+ )
+
+
+class CycleIssueDetailAPIEndpoint(BaseAPIView):
+ """
+ This viewset automatically provides `list`, `create`,
+ and `destroy` actions related to cycle issues.
+
+ """
+
+ serializer_class = CycleIssueSerializer
+ model = CycleIssue
+ webhook_event = "cycle_issue"
+ bulk = True
+ permission_classes = [ProjectEntityPermission]
+ use_read_replica = True
+
+ def get_queryset(self):
+ return (
+ CycleIssue.objects.annotate(
+ sub_issues_count=Issue.issue_objects.filter(parent=OuterRef("issue_id"))
+ .order_by()
+ .annotate(count=Func(F("id"), function="Count"))
+ .values("count")
+ )
+ .filter(workspace__slug=self.kwargs.get("slug"))
+ .filter(project_id=self.kwargs.get("project_id"))
+ .filter(
+ project__project_projectmember__member=self.request.user,
+ project__project_projectmember__is_active=True,
+ )
+ .filter(cycle_id=self.kwargs.get("cycle_id"))
+ .select_related("project")
+ .select_related("workspace")
+ .select_related("cycle")
+ .select_related("issue", "issue__state", "issue__project")
+ .prefetch_related("issue__assignees", "issue__labels")
+ .order_by(self.kwargs.get("order_by", "-created_at"))
+ .distinct()
+ )
+
+ @cycle_docs(
+ operation_id="retrieve_cycle_work_item",
+ summary="Retrieve cycle work item",
+ description="Retrieve details of a specific cycle work item.",
+ responses={
+ 200: OpenApiResponse(
+ description="Cycle work items",
+ response=CycleIssueSerializer,
+ examples=[CYCLE_ISSUE_EXAMPLE],
+ ),
+ },
+ )
+ def get(self, request, slug, project_id, cycle_id, issue_id):
+ """Retrieve cycle work item
+
+ Retrieve details of a specific cycle work item.
+        Returns the work item with its details, assignees, and labels.
+ """
+ cycle_issue = CycleIssue.objects.get(
+ workspace__slug=slug,
+ project_id=project_id,
+ cycle_id=cycle_id,
+ issue_id=issue_id,
+ )
+ serializer = CycleIssueSerializer(
+ cycle_issue, fields=self.fields, expand=self.expand
+ )
+ return Response(serializer.data, status=status.HTTP_200_OK)
+
+ @cycle_docs(
+ operation_id="delete_cycle_work_item",
+ summary="Delete cycle work item",
+ description="Remove a work item from a cycle while keeping the work item in the project.",
+ responses={
+ 204: DELETED_RESPONSE,
+ },
+ )
+ def delete(self, request, slug, project_id, cycle_id, issue_id):
+ """Remove cycle work item
+
+ Remove a work item from a cycle while keeping the work item in the project.
+ Records the removal activity for tracking purposes.
+ """
+ cycle_issue = CycleIssue.objects.get(
+ issue_id=issue_id,
+ workspace__slug=slug,
+ project_id=project_id,
+ cycle_id=cycle_id,
+ )
+ issue_id = cycle_issue.issue_id
+ cycle_issue.delete()
+ issue_activity.delay(
+ type="cycle.activity.deleted",
+ requested_data=json.dumps(
+ {
+ "cycle_id": str(self.kwargs.get("cycle_id")),
+ "issues": [str(issue_id)],
+ }
+ ),
+ actor_id=str(self.request.user.id),
+ issue_id=str(issue_id),
+ project_id=str(self.kwargs.get("project_id", None)),
+ current_instance=None,
+ epoch=int(timezone.now().timestamp()),
+ )
+ return Response(status=status.HTTP_204_NO_CONTENT)
+
+
+class TransferCycleIssueAPIEndpoint(BaseAPIView):
+ """
+    This viewset provides a `create` action for transferring issues into a particular cycle.
+
+ """
+
+ permission_classes = [ProjectEntityPermission]
+
+ @cycle_docs(
+ operation_id="transfer_cycle_work_items",
+ summary="Transfer cycle work items",
+ description="Move incomplete work items from the current cycle to a new target cycle. Captures progress snapshot and transfers only unfinished work items.", # noqa: E501
+ request=OpenApiRequest(
+ request=TransferCycleIssueRequestSerializer,
+ examples=[TRANSFER_CYCLE_ISSUE_EXAMPLE],
+ ),
+ responses={
+ 200: OpenApiResponse(
+ description="Work items transferred successfully",
+ response={
+ "type": "object",
+ "properties": {
+ "message": {
+ "type": "string",
+ "description": "Success message",
+ "example": "Success",
+ },
+ },
+ },
+ examples=[TRANSFER_CYCLE_ISSUE_SUCCESS_EXAMPLE],
+ ),
+ 400: OpenApiResponse(
+ description="Bad request",
+ response={
+ "type": "object",
+ "properties": {
+ "error": {
+ "type": "string",
+ "description": "Error message",
+ "example": "New Cycle Id is required",
+ },
+ },
+ },
+ examples=[
+ TRANSFER_CYCLE_ISSUE_ERROR_EXAMPLE,
+ TRANSFER_CYCLE_COMPLETED_ERROR_EXAMPLE,
+ ],
+ ),
+ },
+ )
+ def post(self, request, slug, project_id, cycle_id):
+ """Transfer cycle issues
+
+ Move incomplete issues from the current cycle to a new target cycle.
+ Captures progress snapshot and transfers only unfinished work items.
+ """
+ new_cycle_id = request.data.get("new_cycle_id", False)
+
+ if not new_cycle_id:
+ return Response(
+ {"error": "New Cycle Id is required"},
+ status=status.HTTP_400_BAD_REQUEST,
+ )
+
+ old_cycle = Cycle.objects.get(
+ workspace__slug=slug,
+ project_id=project_id,
+ pk=cycle_id,
+ )
+        # Transfer work items only when the cycle is completed (past its end date)
+        if old_cycle.end_date is None or old_cycle.end_date >= timezone.now():
+ return Response(
+ {"error": "The old cycle is not completed yet"},
+ status=status.HTTP_400_BAD_REQUEST,
+ )
+
+ # Call the utility function to handle the transfer
+ result = transfer_cycle_issues(
+ slug=slug,
+ project_id=project_id,
+ cycle_id=cycle_id,
+ new_cycle_id=new_cycle_id,
+ request=request,
+ user_id=self.request.user.id,
+ )
+
+ # Handle the result
+ if result.get("success"):
+ return Response({"message": "Success"}, status=status.HTTP_200_OK)
+ else:
+ return Response(
+ {"error": result.get("error")},
+ status=status.HTTP_400_BAD_REQUEST,
+ )
diff --git a/apps/api/plane/api/views/intake.py b/apps/api/plane/api/views/intake.py
new file mode 100644
index 00000000..7a00fa43
--- /dev/null
+++ b/apps/api/plane/api/views/intake.py
@@ -0,0 +1,483 @@
+# Python imports
+import json
+
+# Django imports
+from django.core.serializers.json import DjangoJSONEncoder
+from django.utils import timezone
+from django.db.models import Q, Value, UUIDField
+from django.db.models.functions import Coalesce
+from django.contrib.postgres.aggregates import ArrayAgg
+from django.contrib.postgres.fields import ArrayField
+
+# Third party imports
+from rest_framework import status
+from rest_framework.response import Response
+from drf_spectacular.utils import OpenApiResponse, OpenApiRequest
+
+# Module imports
+from plane.api.serializers import (
+ IntakeIssueSerializer,
+ IssueSerializer,
+ IntakeIssueCreateSerializer,
+ IntakeIssueUpdateSerializer,
+)
+from plane.app.permissions import ProjectLitePermission
+from plane.bgtasks.issue_activities_task import issue_activity
+from plane.db.models import Intake, IntakeIssue, Issue, Project, ProjectMember, State
+from plane.utils.host import base_host
+from .base import BaseAPIView
+from plane.db.models.intake import SourceType
+from plane.utils.openapi import (
+ intake_docs,
+ WORKSPACE_SLUG_PARAMETER,
+ PROJECT_ID_PARAMETER,
+ ISSUE_ID_PARAMETER,
+ CURSOR_PARAMETER,
+ PER_PAGE_PARAMETER,
+ FIELDS_PARAMETER,
+ EXPAND_PARAMETER,
+ create_paginated_response,
+ # Request Examples
+ INTAKE_ISSUE_CREATE_EXAMPLE,
+ INTAKE_ISSUE_UPDATE_EXAMPLE,
+ # Response Examples
+ INTAKE_ISSUE_EXAMPLE,
+ INVALID_REQUEST_RESPONSE,
+ DELETED_RESPONSE,
+)
+
+
+class IntakeIssueListCreateAPIEndpoint(BaseAPIView):
+ """Intake Work Item List and Create Endpoint"""
+
+ serializer_class = IntakeIssueSerializer
+
+ model = Intake
+ permission_classes = [ProjectLitePermission]
+ use_read_replica = True
+
+ def get_queryset(self):
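+        # Only surface intake items that are still snoozed or were never
+        # snoozed; items whose snooze window has lapsed are filtered out below.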
+ intake = Intake.objects.filter(
+ workspace__slug=self.kwargs.get("slug"),
+ project_id=self.kwargs.get("project_id"),
+ ).first()
+
+ project = Project.objects.get(workspace__slug=self.kwargs.get("slug"), pk=self.kwargs.get("project_id"))
+
+ if intake is None or not project.intake_view:
+ return IntakeIssue.objects.none()
+
+ return (
+ IntakeIssue.objects.filter(
+ Q(snoozed_till__gte=timezone.now()) | Q(snoozed_till__isnull=True),
+ workspace__slug=self.kwargs.get("slug"),
+ project_id=self.kwargs.get("project_id"),
+ intake_id=intake.id,
+ )
+ .select_related("issue", "workspace", "project")
+ .order_by(self.kwargs.get("order_by", "-created_at"))
+ )
+
+ @intake_docs(
+ operation_id="get_intake_work_items_list",
+ summary="List intake work items",
+ description="Retrieve all work items in the project's intake queue. Returns paginated results when listing all intake work items.", # noqa: E501
+ parameters=[
+ WORKSPACE_SLUG_PARAMETER,
+ PROJECT_ID_PARAMETER,
+ CURSOR_PARAMETER,
+ PER_PAGE_PARAMETER,
+ FIELDS_PARAMETER,
+ EXPAND_PARAMETER,
+ ],
+ responses={
+ 200: create_paginated_response(
+ IntakeIssueSerializer,
+ "PaginatedIntakeIssueResponse",
+ "Paginated list of intake work items",
+ "Paginated Intake Work Items",
+ ),
+ },
+ )
+ def get(self, request, slug, project_id):
+ """List intake work items
+
+ Retrieve all work items in the project's intake queue.
+ Returns paginated results when listing all intake work items.
+ """
+ issue_queryset = self.get_queryset()
+ return self.paginate(
+ request=request,
+ queryset=(issue_queryset),
+ on_results=lambda intake_issues: IntakeIssueSerializer(
+ intake_issues, many=True, fields=self.fields, expand=self.expand
+ ).data,
+ )
+
+ @intake_docs(
+ operation_id="create_intake_work_item",
+ summary="Create intake work item",
+ description="Submit a new work item to the project's intake queue for review and triage. Automatically creates the work item with default triage state and tracks activity.", # noqa: E501
+ parameters=[
+ WORKSPACE_SLUG_PARAMETER,
+ PROJECT_ID_PARAMETER,
+ ],
+ request=OpenApiRequest(
+ request=IntakeIssueCreateSerializer,
+ examples=[INTAKE_ISSUE_CREATE_EXAMPLE],
+ ),
+ responses={
+ 201: OpenApiResponse(
+ description="Intake work item created",
+ response=IntakeIssueSerializer,
+ examples=[INTAKE_ISSUE_EXAMPLE],
+ ),
+ 400: INVALID_REQUEST_RESPONSE,
+ },
+ )
+ def post(self, request, slug, project_id):
+ """Create intake work item
+
+ Submit a new work item to the project's intake queue for review and triage.
+ Automatically creates the work item with default triage state and tracks activity.
+ """
+ if not request.data.get("issue", {}).get("name", False):
+ return Response({"error": "Name is required"}, status=status.HTTP_400_BAD_REQUEST)
+
+ intake = Intake.objects.filter(workspace__slug=slug, project_id=project_id).first()
+
+ project = Project.objects.get(workspace__slug=slug, pk=project_id)
+
+        # Intake must exist and the project must have intake enabled
+        if intake is None or not project.intake_view:
+            return Response(
+                {"error": "Intake is not enabled for this project. Enable it through the project's API."},
+                status=status.HTTP_400_BAD_REQUEST,
+            )
+
+ # Check for valid priority
+ if request.data.get("issue", {}).get("priority", "none") not in [
+ "low",
+ "medium",
+ "high",
+ "urgent",
+ "none",
+ ]:
+ return Response({"error": "Invalid priority"}, status=status.HTTP_400_BAD_REQUEST)
+
+ # create an issue
+ issue = Issue.objects.create(
+ name=request.data.get("issue", {}).get("name"),
+ description=request.data.get("issue", {}).get("description", {}),
+            description_html=request.data.get("issue", {}).get("description_html", "<p></p>"),
+ priority=request.data.get("issue", {}).get("priority", "none"),
+ project_id=project_id,
+ )
+
+ # create an intake issue
+ intake_issue = IntakeIssue.objects.create(
+ intake_id=intake.id,
+ project_id=project_id,
+ issue=issue,
+ source=SourceType.IN_APP,
+ )
+ # Create an Issue Activity
+ issue_activity.delay(
+ type="issue.activity.created",
+ requested_data=json.dumps(request.data, cls=DjangoJSONEncoder),
+ actor_id=str(request.user.id),
+ issue_id=str(issue.id),
+ project_id=str(project_id),
+ current_instance=None,
+ epoch=int(timezone.now().timestamp()),
+ intake=str(intake_issue.id),
+ )
+
+ serializer = IntakeIssueSerializer(intake_issue)
+ return Response(serializer.data, status=status.HTTP_201_CREATED)
+
+
+class IntakeIssueDetailAPIEndpoint(BaseAPIView):
+ """Intake Issue API Endpoint"""
+
+ permission_classes = [ProjectLitePermission]
+
+ serializer_class = IntakeIssueSerializer
+ model = IntakeIssue
+ use_read_replica = True
+
+ filterset_fields = ["status"]
+
+ def get_queryset(self):
+ intake = Intake.objects.filter(
+ workspace__slug=self.kwargs.get("slug"),
+ project_id=self.kwargs.get("project_id"),
+ ).first()
+
+ project = Project.objects.get(workspace__slug=self.kwargs.get("slug"), pk=self.kwargs.get("project_id"))
+
+ if intake is None or not project.intake_view:
+ return IntakeIssue.objects.none()
+
+ return (
+ IntakeIssue.objects.filter(
+ Q(snoozed_till__gte=timezone.now()) | Q(snoozed_till__isnull=True),
+ workspace__slug=self.kwargs.get("slug"),
+ project_id=self.kwargs.get("project_id"),
+ intake_id=intake.id,
+ )
+ .select_related("issue", "workspace", "project")
+ .order_by(self.kwargs.get("order_by", "-created_at"))
+ )
+
+ @intake_docs(
+ operation_id="retrieve_intake_work_item",
+ summary="Retrieve intake work item",
+ description="Retrieve details of a specific intake work item.",
+ parameters=[
+ WORKSPACE_SLUG_PARAMETER,
+ PROJECT_ID_PARAMETER,
+ ISSUE_ID_PARAMETER,
+ ],
+ responses={
+ 200: OpenApiResponse(
+ description="Intake work item",
+ response=IntakeIssueSerializer,
+ examples=[INTAKE_ISSUE_EXAMPLE],
+ ),
+ },
+ )
+ def get(self, request, slug, project_id, issue_id):
+ """Retrieve intake work item
+
+ Retrieve details of a specific intake work item.
+ """
+        intake_issue = self.get_queryset().get(issue_id=issue_id)
+        data = IntakeIssueSerializer(intake_issue, fields=self.fields, expand=self.expand).data
+        return Response(data, status=status.HTTP_200_OK)
+
+ @intake_docs(
+ operation_id="update_intake_work_item",
+ summary="Update intake work item",
+ description="Modify an existing intake work item's properties or status for triage processing. Supports status changes like accept, reject, or mark as duplicate.", # noqa: E501
+ parameters=[
+ WORKSPACE_SLUG_PARAMETER,
+ PROJECT_ID_PARAMETER,
+ ISSUE_ID_PARAMETER,
+ ],
+ request=OpenApiRequest(
+ request=IntakeIssueUpdateSerializer,
+ examples=[INTAKE_ISSUE_UPDATE_EXAMPLE],
+ ),
+ responses={
+ 200: OpenApiResponse(
+ description="Intake work item updated",
+ response=IntakeIssueSerializer,
+ examples=[INTAKE_ISSUE_EXAMPLE],
+ ),
+ 400: INVALID_REQUEST_RESPONSE,
+ },
+ )
+ def patch(self, request, slug, project_id, issue_id):
+ """Update intake work item
+
+ Modify an existing intake work item's properties or status for triage processing.
+ Supports status changes like accept, reject, or mark as duplicate.
+ """
+ intake = Intake.objects.filter(workspace__slug=slug, project_id=project_id).first()
+
+ project = Project.objects.get(workspace__slug=slug, pk=project_id)
+
+        # Intake must exist and the project must have intake enabled
+        if intake is None or not project.intake_view:
+            return Response(
+                {"error": "Intake is not enabled for this project. Enable it through the project's API."},
+                status=status.HTTP_400_BAD_REQUEST,
+            )
+
+ # Get the intake issue
+ intake_issue = IntakeIssue.objects.get(
+ issue_id=issue_id,
+ workspace__slug=slug,
+ project_id=project_id,
+ intake_id=intake.id,
+ )
+
+ # Get the project member
+ project_member = ProjectMember.objects.get(
+ workspace__slug=slug,
+ project_id=project_id,
+ member=request.user,
+ is_active=True,
+ )
+
+        # Guests (role <= 5) may only edit intake work items they created
+ if project_member.role <= 5 and str(intake_issue.created_by_id) != str(request.user.id):
+ return Response(
+ {"error": "You cannot edit intake work items"},
+ status=status.HTTP_400_BAD_REQUEST,
+ )
+
+ # Get issue data
+ issue_data = request.data.pop("issue", False)
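+        # The nested "issue" payload updates the underlying Issue; the
+        # remaining request data updates intake-specific attributes below.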
+
+ if bool(issue_data):
+ issue = Issue.objects.annotate(
+ label_ids=Coalesce(
+ ArrayAgg(
+ "labels__id",
+ distinct=True,
+ filter=Q(~Q(labels__id__isnull=True) & Q(label_issue__deleted_at__isnull=True)),
+ ),
+ Value([], output_field=ArrayField(UUIDField())),
+ ),
+ assignee_ids=Coalesce(
+ ArrayAgg(
+ "assignees__id",
+ distinct=True,
+ filter=Q(
+ ~Q(assignees__id__isnull=True)
+ & Q(assignees__member_project__is_active=True)
+ & Q(issue_assignee__deleted_at__isnull=True)
+ ),
+ ),
+ Value([], output_field=ArrayField(UUIDField())),
+ ),
+ ).get(pk=issue_id, workspace__slug=slug, project_id=project_id)
+ # Only allow guests to edit name and description
+ if project_member.role <= 5:
+ issue_data = {
+ "name": issue_data.get("name", issue.name),
+ "description_html": issue_data.get("description_html", issue.description_html),
+ "description": issue_data.get("description", issue.description),
+ }
+
+ issue_serializer = IssueSerializer(issue, data=issue_data, partial=True)
+
+ if issue_serializer.is_valid():
+ current_instance = issue
+ # Log all the updates
+ requested_data = json.dumps(issue_data, cls=DjangoJSONEncoder)
+ if issue is not None:
+ issue_activity.delay(
+ type="issue.activity.updated",
+ requested_data=requested_data,
+ actor_id=str(request.user.id),
+ issue_id=str(issue_id),
+ project_id=str(project_id),
+ current_instance=json.dumps(
+ IssueSerializer(current_instance).data,
+ cls=DjangoJSONEncoder,
+ ),
+ epoch=int(timezone.now().timestamp()),
+                        intake=str(intake_issue.id),
+ )
+ issue_serializer.save()
+ else:
+ return Response(issue_serializer.errors, status=status.HTTP_400_BAD_REQUEST)
+
+        # Only project admins (role > 15) can edit intake work item attributes
+ if project_member.role > 15:
+ serializer = IntakeIssueUpdateSerializer(intake_issue, data=request.data, partial=True)
+ current_instance = json.dumps(IntakeIssueSerializer(intake_issue).data, cls=DjangoJSONEncoder)
+
+ if serializer.is_valid():
+ serializer.save()
+ # Update the issue state if the issue is rejected or marked as duplicate
+ if serializer.data["status"] in [-1, 2]:
+ issue = Issue.objects.get(pk=issue_id, workspace__slug=slug, project_id=project_id)
+ state = State.objects.filter(group="cancelled", workspace__slug=slug, project_id=project_id).first()
+ if state is not None:
+ issue.state = state
+ issue.save()
+
+ # Update the issue state if it is accepted
+ if serializer.data["status"] in [1]:
+ issue = Issue.objects.get(pk=issue_id, workspace__slug=slug, project_id=project_id)
+
+ # Update the issue state only if it is in triage state
+ if issue.state.is_triage:
+ # Move to default state
+ state = State.objects.filter(workspace__slug=slug, project_id=project_id, default=True).first()
+ if state is not None:
+ issue.state = state
+ issue.save()
+
+                # Create an activity for the status change
+ issue_activity.delay(
+ type="intake.activity.created",
+ requested_data=json.dumps(request.data, cls=DjangoJSONEncoder),
+ actor_id=str(request.user.id),
+ issue_id=str(issue_id),
+ project_id=str(project_id),
+ current_instance=current_instance,
+ epoch=int(timezone.now().timestamp()),
+ notification=False,
+ origin=base_host(request=request, is_app=True),
+ intake=str(intake_issue.id),
+ )
+ serializer = IntakeIssueSerializer(intake_issue)
+ return Response(serializer.data, status=status.HTTP_200_OK)
+ return Response(serializer.errors, status=status.HTTP_400_BAD_REQUEST)
+ else:
+ return Response(IntakeIssueSerializer(intake_issue).data, status=status.HTTP_200_OK)
+
+ @intake_docs(
+ operation_id="delete_intake_work_item",
+ summary="Delete intake work item",
+ description="Permanently remove an intake work item from the triage queue. Also deletes the underlying work item if it hasn't been accepted yet.", # noqa: E501
+ parameters=[
+ WORKSPACE_SLUG_PARAMETER,
+ PROJECT_ID_PARAMETER,
+ ISSUE_ID_PARAMETER,
+ ],
+ responses={
+ 204: DELETED_RESPONSE,
+ },
+ )
+ def delete(self, request, slug, project_id, issue_id):
+ """Delete intake work item
+
+ Permanently remove an intake work item from the triage queue.
+ Also deletes the underlying work item if it hasn't been accepted yet.
+ """
+ intake = Intake.objects.filter(workspace__slug=slug, project_id=project_id).first()
+
+ project = Project.objects.get(workspace__slug=slug, pk=project_id)
+
+        # Intake must exist and the project must have intake enabled
+        if intake is None or not project.intake_view:
+            return Response(
+                {"error": "Intake is not enabled for this project. Enable it through the project's API."},
+                status=status.HTTP_400_BAD_REQUEST,
+            )
+
+ # Get the intake issue
+ intake_issue = IntakeIssue.objects.get(
+ issue_id=issue_id,
+ workspace__slug=slug,
+ project_id=project_id,
+ intake_id=intake.id,
+ )
+
+ # Check the issue status
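+        # Status codes here are assumed from the IntakeIssue model:
+        # -2 pending, -1 rejected, 0 snoozed, 1 accepted, 2 duplicate.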
+ if intake_issue.status in [-2, -1, 0, 2]:
+ # Delete the issue also
+ issue = Issue.objects.filter(workspace__slug=slug, project_id=project_id, pk=issue_id).first()
+ if issue.created_by_id != request.user.id and (
+ not ProjectMember.objects.filter(
+ workspace__slug=slug,
+ member=request.user,
+ role=20,
+ project_id=project_id,
+ is_active=True,
+ ).exists()
+ ):
+ return Response(
+ {"error": "Only admin or creator can delete the work item"},
+ status=status.HTTP_403_FORBIDDEN,
+ )
+ issue.delete()
+
+ intake_issue.delete()
+ return Response(status=status.HTTP_204_NO_CONTENT)
diff --git a/apps/api/plane/api/views/issue.py b/apps/api/plane/api/views/issue.py
new file mode 100644
index 00000000..d3686cee
--- /dev/null
+++ b/apps/api/plane/api/views/issue.py
@@ -0,0 +1,2214 @@
+# Python imports
+import json
+import uuid
+import re
+
+# Django imports
+from django.core.serializers.json import DjangoJSONEncoder
+from django.http import HttpResponseRedirect
+from django.db import IntegrityError
+from django.db.models import (
+ Case,
+ CharField,
+ Exists,
+ F,
+ Func,
+ Max,
+ OuterRef,
+ Q,
+ Value,
+ When,
+ Subquery,
+)
+from django.utils import timezone
+from django.conf import settings
+
+# Third party imports
+from rest_framework import status
+from rest_framework.response import Response
+
+# drf-spectacular imports
+from drf_spectacular.utils import (
+ extend_schema,
+ OpenApiResponse,
+ OpenApiExample,
+ OpenApiRequest,
+)
+
+# Module imports
+from plane.api.serializers import (
+ IssueAttachmentSerializer,
+ IssueActivitySerializer,
+ IssueCommentSerializer,
+ IssueLinkSerializer,
+ IssueSerializer,
+ LabelSerializer,
+ IssueAttachmentUploadSerializer,
+ IssueSearchSerializer,
+ IssueCommentCreateSerializer,
+ IssueLinkCreateSerializer,
+ IssueLinkUpdateSerializer,
+ LabelCreateUpdateSerializer,
+)
+from plane.app.permissions import (
+ ProjectEntityPermission,
+ ProjectLitePermission,
+ ProjectMemberPermission,
+)
+from plane.bgtasks.issue_activities_task import issue_activity
+from plane.db.models import (
+ Issue,
+ IssueActivity,
+ FileAsset,
+ IssueComment,
+ IssueLink,
+ Label,
+ Project,
+ ProjectMember,
+ CycleIssue,
+ Workspace,
+)
+from plane.settings.storage import S3Storage
+from plane.bgtasks.storage_metadata_task import get_asset_object_metadata
+from .base import BaseAPIView
+from plane.utils.host import base_host
+from plane.bgtasks.webhook_task import model_activity
+from plane.app.permissions import ROLE
+from plane.utils.openapi import (
+ work_item_docs,
+ label_docs,
+ issue_link_docs,
+ issue_comment_docs,
+ issue_activity_docs,
+ issue_attachment_docs,
+ WORKSPACE_SLUG_PARAMETER,
+ PROJECT_IDENTIFIER_PARAMETER,
+ ISSUE_IDENTIFIER_PARAMETER,
+ PROJECT_ID_PARAMETER,
+ ISSUE_ID_PARAMETER,
+ LABEL_ID_PARAMETER,
+ COMMENT_ID_PARAMETER,
+ LINK_ID_PARAMETER,
+ ATTACHMENT_ID_PARAMETER,
+ ACTIVITY_ID_PARAMETER,
+ PROJECT_ID_QUERY_PARAMETER,
+ CURSOR_PARAMETER,
+ PER_PAGE_PARAMETER,
+ EXTERNAL_ID_PARAMETER,
+ EXTERNAL_SOURCE_PARAMETER,
+ ORDER_BY_PARAMETER,
+ SEARCH_PARAMETER_REQUIRED,
+ LIMIT_PARAMETER,
+ WORKSPACE_SEARCH_PARAMETER,
+ FIELDS_PARAMETER,
+ EXPAND_PARAMETER,
+ create_paginated_response,
+ # Request Examples
+ ISSUE_CREATE_EXAMPLE,
+ ISSUE_UPDATE_EXAMPLE,
+ ISSUE_UPSERT_EXAMPLE,
+ LABEL_CREATE_EXAMPLE,
+ LABEL_UPDATE_EXAMPLE,
+ ISSUE_LINK_CREATE_EXAMPLE,
+ ISSUE_LINK_UPDATE_EXAMPLE,
+ ISSUE_COMMENT_CREATE_EXAMPLE,
+ ISSUE_COMMENT_UPDATE_EXAMPLE,
+ ISSUE_ATTACHMENT_UPLOAD_EXAMPLE,
+ ATTACHMENT_UPLOAD_CONFIRM_EXAMPLE,
+ # Response Examples
+ ISSUE_EXAMPLE,
+ LABEL_EXAMPLE,
+ ISSUE_LINK_EXAMPLE,
+ ISSUE_COMMENT_EXAMPLE,
+ ISSUE_ATTACHMENT_EXAMPLE,
+ ISSUE_ATTACHMENT_NOT_UPLOADED_EXAMPLE,
+ ISSUE_SEARCH_EXAMPLE,
+ WORK_ITEM_NOT_FOUND_RESPONSE,
+ ISSUE_NOT_FOUND_RESPONSE,
+ PROJECT_NOT_FOUND_RESPONSE,
+ EXTERNAL_ID_EXISTS_RESPONSE,
+ DELETED_RESPONSE,
+ ADMIN_ONLY_RESPONSE,
+ LABEL_NOT_FOUND_RESPONSE,
+ LABEL_NAME_EXISTS_RESPONSE,
+ INVALID_REQUEST_RESPONSE,
+ LINK_NOT_FOUND_RESPONSE,
+ COMMENT_NOT_FOUND_RESPONSE,
+ ATTACHMENT_NOT_FOUND_RESPONSE,
+ BAD_SEARCH_REQUEST_RESPONSE,
+ UNAUTHORIZED_RESPONSE,
+ FORBIDDEN_RESPONSE,
+ WORKSPACE_NOT_FOUND_RESPONSE,
+)
+from plane.bgtasks.work_item_link_task import crawl_work_item_link_title
+
+
+def user_has_issue_permission(user_id, project_id, issue=None, allowed_roles=None, allow_creator=True):
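+    """Return True if the user may act on the issue: the issue's creator
+    (when allow_creator is True) or an active project member, optionally
+    restricted to allowed_roles."""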
+ if allow_creator and issue is not None and user_id == issue.created_by_id:
+ return True
+
+ qs = ProjectMember.objects.filter(
+ project_id=project_id,
+ member_id=user_id,
+ is_active=True,
+ )
+ if allowed_roles is not None:
+ qs = qs.filter(role__in=allowed_roles)
+
+ return qs.exists()
+
+
+class WorkspaceIssueAPIEndpoint(BaseAPIView):
+ """
+ This viewset provides `retrieveByIssueId` on workspace level
+
+ """
+
+ model = Issue
+ webhook_event = "issue"
+ permission_classes = [ProjectEntityPermission]
+ serializer_class = IssueSerializer
+ use_read_replica = True
+
+ @property
+ def project_identifier(self):
+ return self.kwargs.get("project_identifier", None)
+
+ def get_queryset(self):
+ return (
+ Issue.issue_objects.annotate(
+ sub_issues_count=Issue.issue_objects.filter(parent=OuterRef("id"))
+ .order_by()
+ .annotate(count=Func(F("id"), function="Count"))
+ .values("count")
+ )
+ .filter(workspace__slug=self.kwargs.get("slug"))
+ .filter(project__identifier=self.kwargs.get("project_identifier"))
+ .select_related("project")
+ .select_related("workspace")
+ .select_related("state")
+ .select_related("parent")
+ .prefetch_related("assignees")
+ .prefetch_related("labels")
+ .order_by(self.kwargs.get("order_by", "-created_at"))
+ ).distinct()
+
+ @extend_schema(
+ operation_id="get_workspace_work_item",
+ summary="Retrieve work item by identifiers",
+ description="Retrieve a specific work item using workspace slug, project identifier, and issue identifier.",
+ tags=["Work Items"],
+ parameters=[
+ WORKSPACE_SLUG_PARAMETER,
+ PROJECT_IDENTIFIER_PARAMETER,
+ ISSUE_IDENTIFIER_PARAMETER,
+ ],
+ responses={
+ 200: OpenApiResponse(
+ description="Work item details",
+ response=IssueSerializer,
+ examples=[ISSUE_EXAMPLE],
+ ),
+ 404: WORK_ITEM_NOT_FOUND_RESPONSE,
+ },
+ )
+ def get(self, request, slug, project_identifier=None, issue_identifier=None):
+ """Retrieve work item by identifiers
+
+ Retrieve a specific work item using workspace slug, project identifier, and issue identifier.
+ This endpoint provides workspace-level access to work items.
+ """
+ if issue_identifier and project_identifier:
+ issue = Issue.issue_objects.annotate(
+ sub_issues_count=Issue.issue_objects.filter(parent=OuterRef("id"))
+ .order_by()
+ .annotate(count=Func(F("id"), function="Count"))
+ .values("count")
+ ).get(
+ workspace__slug=slug,
+ project__identifier=project_identifier,
+ sequence_id=issue_identifier,
+ )
+ return Response(
+ IssueSerializer(issue, fields=self.fields, expand=self.expand).data,
+ status=status.HTTP_200_OK,
+ )
+
+
+class IssueListCreateAPIEndpoint(BaseAPIView):
+ """
+    This viewset provides `list` and `create` actions at the issue level
+ """
+
+ model = Issue
+ webhook_event = "issue"
+ permission_classes = [ProjectEntityPermission]
+ serializer_class = IssueSerializer
+ use_read_replica = True
+
+ def get_queryset(self):
+ return (
+ Issue.issue_objects.annotate(
+ sub_issues_count=Issue.issue_objects.filter(parent=OuterRef("id"))
+ .order_by()
+ .annotate(count=Func(F("id"), function="Count"))
+ .values("count")
+ )
+ .filter(project_id=self.kwargs.get("project_id"))
+ .filter(workspace__slug=self.kwargs.get("slug"))
+ .select_related("project")
+ .select_related("workspace")
+ .select_related("state")
+ .select_related("parent")
+ .prefetch_related("assignees")
+ .prefetch_related("labels")
+ .order_by(self.kwargs.get("order_by", "-created_at"))
+ ).distinct()
+
+ @work_item_docs(
+ operation_id="list_work_items",
+ summary="List work items",
+ description="Retrieve a paginated list of all work items in a project. Supports filtering, ordering, and field selection through query parameters.", # noqa: E501
+ parameters=[
+ CURSOR_PARAMETER,
+ PER_PAGE_PARAMETER,
+ EXTERNAL_ID_PARAMETER,
+ EXTERNAL_SOURCE_PARAMETER,
+ ORDER_BY_PARAMETER,
+ FIELDS_PARAMETER,
+ EXPAND_PARAMETER,
+ ],
+ responses={
+ 200: create_paginated_response(
+ IssueSerializer,
+ "PaginatedWorkItemResponse",
+ "Paginated list of work items",
+ "Paginated Work Items",
+ ),
+ 400: INVALID_REQUEST_RESPONSE,
+ 404: PROJECT_NOT_FOUND_RESPONSE,
+ },
+ )
+ def get(self, request, slug, project_id):
+ """List work items
+
+ Retrieve a paginated list of all work items in a project.
+ Supports filtering, ordering, and field selection through query parameters.
+ """
+
+ external_id = request.GET.get("external_id")
+ external_source = request.GET.get("external_source")
+
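+        # Fast path: when both external identifiers are supplied, return the
+        # single matching work item instead of a paginated list.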
+ if external_id and external_source:
+ issue = Issue.objects.get(
+ external_id=external_id,
+ external_source=external_source,
+ workspace__slug=slug,
+ project_id=project_id,
+ )
+ return Response(
+ IssueSerializer(issue, fields=self.fields, expand=self.expand).data,
+ status=status.HTTP_200_OK,
+ )
+
+ # Custom ordering for priority and state
+ priority_order = ["urgent", "high", "medium", "low", "none"]
+ state_order = ["backlog", "unstarted", "started", "completed", "cancelled"]
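+        # Case/When maps each value to its index in these lists so results sort
+        # in semantic order rather than alphabetically.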
+
+ order_by_param = request.GET.get("order_by", "-created_at")
+
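+        # Correlated subqueries annotate the active cycle id plus link and
+        # attachment counts without issuing extra queries per row.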
+ issue_queryset = (
+ self.get_queryset()
+ .annotate(
+ cycle_id=Subquery(
+ CycleIssue.objects.filter(issue=OuterRef("id"), deleted_at__isnull=True).values("cycle_id")[:1]
+ )
+ )
+ .annotate(
+ link_count=IssueLink.objects.filter(issue=OuterRef("id"))
+ .order_by()
+ .annotate(count=Func(F("id"), function="Count"))
+ .values("count")
+ )
+ .annotate(
+ attachment_count=FileAsset.objects.filter(
+ issue_id=OuterRef("id"),
+ entity_type=FileAsset.EntityTypeContext.ISSUE_ATTACHMENT,
+ )
+ .order_by()
+ .annotate(count=Func(F("id"), function="Count"))
+ .values("count")
+ )
+ )
+
+ total_issue_queryset = Issue.issue_objects.filter(project_id=project_id, workspace__slug=slug)
+
+ # Priority Ordering
+ if order_by_param == "priority" or order_by_param == "-priority":
+ priority_order = priority_order if order_by_param == "priority" else priority_order[::-1]
+ issue_queryset = issue_queryset.annotate(
+ priority_order=Case(
+ *[When(priority=p, then=Value(i)) for i, p in enumerate(priority_order)],
+ output_field=CharField(),
+ )
+ ).order_by("priority_order")
+
+ # State Ordering
+ elif order_by_param in [
+ "state__name",
+ "state__group",
+ "-state__name",
+ "-state__group",
+ ]:
+ state_order = state_order if order_by_param in ["state__name", "state__group"] else state_order[::-1]
+ issue_queryset = issue_queryset.annotate(
+ state_order=Case(
+ *[When(state__group=state_group, then=Value(i)) for i, state_group in enumerate(state_order)],
+ default=Value(len(state_order)),
+ output_field=CharField(),
+ )
+ ).order_by("state_order")
+ # assignee and label ordering
+ elif order_by_param in [
+ "labels__name",
+ "-labels__name",
+ "assignees__first_name",
+ "-assignees__first_name",
+ ]:
+ issue_queryset = issue_queryset.annotate(
+ max_values=Max(order_by_param[1::] if order_by_param.startswith("-") else order_by_param)
+ ).order_by("-max_values" if order_by_param.startswith("-") else "max_values")
+ else:
+ issue_queryset = issue_queryset.order_by(order_by_param)
+
+ return self.paginate(
+ request=request,
+ queryset=(issue_queryset),
+ total_count_queryset=total_issue_queryset,
+ on_results=lambda issues: IssueSerializer(issues, many=True, fields=self.fields, expand=self.expand).data,
+ )
+
+ @work_item_docs(
+ operation_id="create_work_item",
+ summary="Create work item",
+ description="Create a new work item in the specified project with the provided details.",
+ request=OpenApiRequest(
+ request=IssueSerializer,
+ examples=[ISSUE_CREATE_EXAMPLE],
+ ),
+ responses={
+ 201: OpenApiResponse(
+ description="Work Item created successfully",
+ response=IssueSerializer,
+ examples=[ISSUE_EXAMPLE],
+ ),
+ 400: INVALID_REQUEST_RESPONSE,
+ 404: PROJECT_NOT_FOUND_RESPONSE,
+ 409: EXTERNAL_ID_EXISTS_RESPONSE,
+ },
+ )
+ def post(self, request, slug, project_id):
+ """Create work item
+
+ Create a new work item in the specified project with the provided details.
+ Supports external ID tracking for integration purposes.
+ """
+ project = Project.objects.get(pk=project_id)
+
+ serializer = IssueSerializer(
+ data=request.data,
+ context={
+ "project_id": project_id,
+ "workspace_id": project.workspace_id,
+ "default_assignee_id": project.default_assignee_id,
+ },
+ )
+
+ if serializer.is_valid():
+ if (
+ request.data.get("external_id")
+ and request.data.get("external_source")
+ and Issue.objects.filter(
+ project_id=project_id,
+ workspace__slug=slug,
+ external_source=request.data.get("external_source"),
+ external_id=request.data.get("external_id"),
+ ).exists()
+ ):
+ issue = Issue.objects.filter(
+ workspace__slug=slug,
+ project_id=project_id,
+ external_id=request.data.get("external_id"),
+ external_source=request.data.get("external_source"),
+ ).first()
+ return Response(
+ {
+ "error": "Issue with the same external id and external source already exists",
+ "id": str(issue.id),
+ },
+ status=status.HTTP_409_CONFLICT,
+ )
+
+ serializer.save()
+ # Refetch the issue
+ issue = Issue.objects.filter(workspace__slug=slug, project_id=project_id, pk=serializer.data["id"]).first()
+ issue.created_at = request.data.get("created_at", timezone.now())
+ issue.created_by_id = request.data.get("created_by", request.user.id)
+ issue.save(update_fields=["created_at", "created_by"])
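+            # created_at/created_by may be supplied by external importers;
+            # otherwise they default to "now" and the requesting user.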
+
+ # Track the issue
+ issue_activity.delay(
+ type="issue.activity.created",
+ requested_data=json.dumps(self.request.data, cls=DjangoJSONEncoder),
+ actor_id=str(request.user.id),
+ issue_id=str(serializer.data.get("id", None)),
+ project_id=str(project_id),
+ current_instance=None,
+ epoch=int(timezone.now().timestamp()),
+ )
+
+ # Send the model activity
+ model_activity.delay(
+ model_name="issue",
+ model_id=str(serializer.data["id"]),
+ requested_data=request.data,
+ current_instance=None,
+ actor_id=request.user.id,
+ slug=slug,
+ origin=base_host(request=request, is_app=True),
+ )
+ return Response(serializer.data, status=status.HTTP_201_CREATED)
+ return Response(serializer.errors, status=status.HTTP_400_BAD_REQUEST)
+
+
+class IssueDetailAPIEndpoint(BaseAPIView):
+ """Issue Detail Endpoint"""
+
+ model = Issue
+ webhook_event = "issue"
+ permission_classes = [ProjectEntityPermission]
+ serializer_class = IssueSerializer
+ use_read_replica = True
+
+ def get_queryset(self):
+ return (
+ Issue.issue_objects.annotate(
+ sub_issues_count=Issue.issue_objects.filter(parent=OuterRef("id"))
+ .order_by()
+ .annotate(count=Func(F("id"), function="Count"))
+ .values("count")
+ )
+ .filter(project_id=self.kwargs.get("project_id"))
+ .filter(workspace__slug=self.kwargs.get("slug"))
+ .select_related("project")
+ .select_related("workspace")
+ .select_related("state")
+ .select_related("parent")
+ .prefetch_related("assignees")
+ .prefetch_related("labels")
+ .order_by(self.kwargs.get("order_by", "-created_at"))
+ ).distinct()
+
+ @work_item_docs(
+ operation_id="retrieve_work_item",
+ summary="Retrieve work item",
+ description="Retrieve details of a specific work item.",
+ parameters=[
+ PROJECT_ID_PARAMETER,
+ EXTERNAL_ID_PARAMETER,
+ EXTERNAL_SOURCE_PARAMETER,
+ ORDER_BY_PARAMETER,
+ FIELDS_PARAMETER,
+ EXPAND_PARAMETER,
+ ],
+ responses={
+ 200: OpenApiResponse(
+ description="List of issues or issue details",
+ response=IssueSerializer,
+ examples=[ISSUE_EXAMPLE],
+ ),
+ 400: INVALID_REQUEST_RESPONSE,
+ 404: WORK_ITEM_NOT_FOUND_RESPONSE,
+ },
+ )
+ def get(self, request, slug, project_id, pk):
+ """Retrieve work item
+
+ Retrieve details of a specific work item.
+ Supports filtering, ordering, and field selection through query parameters.
+ """
+
+ issue = Issue.issue_objects.annotate(
+ sub_issues_count=Issue.issue_objects.filter(parent=OuterRef("id"))
+ .order_by()
+ .annotate(count=Func(F("id"), function="Count"))
+ .values("count")
+ ).get(workspace__slug=slug, project_id=project_id, pk=pk)
+ return Response(
+ IssueSerializer(issue, fields=self.fields, expand=self.expand).data,
+ status=status.HTTP_200_OK,
+ )
+
+ @work_item_docs(
+ operation_id="put_work_item",
+ summary="Update or create work item",
+ description="Update an existing work item identified by external ID and source, or create a new one if it doesn't exist. Requires external_id and external_source parameters for identification.", # noqa: E501
+ request=OpenApiRequest(
+ request=IssueSerializer,
+ examples=[ISSUE_UPSERT_EXAMPLE],
+ ),
+ responses={
+ 200: OpenApiResponse(
+ description="Work Item updated successfully",
+ response=IssueSerializer,
+ examples=[ISSUE_EXAMPLE],
+ ),
+ 201: OpenApiResponse(
+ description="Work Item created successfully",
+ response=IssueSerializer,
+ examples=[ISSUE_EXAMPLE],
+ ),
+ 400: INVALID_REQUEST_RESPONSE,
+ 404: WORK_ITEM_NOT_FOUND_RESPONSE,
+ },
+ )
+ def put(self, request, slug, project_id):
+ """Update or create work item
+
+ Update an existing work item identified by external ID and source, or create a new one if it doesn't exist.
+ Requires external_id and external_source parameters for identification.
+ """
+        # Get the entities required for putting the issue; external_id and
+        # external_source are required to identify the issue here
+ project = Project.objects.get(pk=project_id)
+ external_id = request.data.get("external_id")
+ external_source = request.data.get("external_source")
+
+ # If the external_id and source are present, we need to find the exact
+ # issue that needs to be updated with the provided external_id and
+ # external_source
+ if external_id and external_source:
+ try:
+ issue = Issue.objects.get(
+ project_id=project_id,
+ workspace__slug=slug,
+ external_id=external_id,
+ external_source=external_source,
+ )
+
+ # Get the current instance of the issue in order to track
+ # changes and dispatch the issue activity
+ current_instance = json.dumps(IssueSerializer(issue).data, cls=DjangoJSONEncoder)
+
+                # Serialize the requested data for the activity log before
+                # validating and saving the update
+ requested_data = json.dumps(self.request.data, cls=DjangoJSONEncoder)
+ serializer = IssueSerializer(
+ issue,
+ data=request.data,
+ context={
+ "project_id": project_id,
+ "workspace_id": project.workspace_id,
+ },
+ partial=True,
+ )
+ if serializer.is_valid():
+ # If the serializer is valid, save the issue and dispatch
+ # the update issue activity worker event.
+ serializer.save()
+ issue_activity.delay(
+ type="issue.activity.updated",
+ requested_data=requested_data,
+ actor_id=str(request.user.id),
+ issue_id=str(issue.id),
+ project_id=str(project_id),
+ current_instance=current_instance,
+ epoch=int(timezone.now().timestamp()),
+ )
+ return Response(serializer.data, status=status.HTTP_200_OK)
+ return Response(
+ # If the serializer is not valid, respond with 400 bad
+ # request
+ serializer.errors,
+ status=status.HTTP_400_BAD_REQUEST,
+ )
+ except Issue.DoesNotExist:
+ # If the issue does not exist, a new record needs to be created
+ # for the requested data.
+ # Serialize the data with the context of the project and
+ # workspace
+ serializer = IssueSerializer(
+ data=request.data,
+ context={
+ "project_id": project_id,
+ "workspace_id": project.workspace_id,
+ "default_assignee_id": project.default_assignee_id,
+ },
+ )
+
+ # If the serializer is valid, save the issue and dispatch the
+ # issue activity worker event as created
+ if serializer.is_valid():
+ serializer.save()
+ # Refetch the issue
+ issue = Issue.objects.filter(
+ workspace__slug=slug,
+ project_id=project_id,
+ pk=serializer.data["id"],
+ ).first()
+
+                    # If created_at or created_by were supplied (e.g. by an
+                    # importer), apply them; otherwise default to "now" and the
+                    # requesting user.
+ issue.created_at = request.data.get("created_at", timezone.now())
+ issue.created_by_id = request.data.get("created_by", request.user.id)
+ issue.save(update_fields=["created_at", "created_by"])
+
+ issue_activity.delay(
+ type="issue.activity.created",
+ requested_data=json.dumps(self.request.data, cls=DjangoJSONEncoder),
+ actor_id=str(request.user.id),
+ issue_id=str(serializer.data.get("id", None)),
+ project_id=str(project_id),
+ current_instance=None,
+ epoch=int(timezone.now().timestamp()),
+ )
+ return Response(serializer.data, status=status.HTTP_201_CREATED)
+ return Response(serializer.errors, status=status.HTTP_400_BAD_REQUEST)
+ else:
+ return Response(
+ {"error": "external_id and external_source are required"},
+ status=status.HTTP_400_BAD_REQUEST,
+ )
+
+ @work_item_docs(
+ operation_id="update_work_item",
+ summary="Partially update work item",
+ description="Partially update an existing work item with the provided fields. Supports external ID validation to prevent conflicts.", # noqa: E501
+ parameters=[
+ PROJECT_ID_PARAMETER,
+ ],
+ request=OpenApiRequest(
+ request=IssueSerializer,
+ examples=[ISSUE_UPDATE_EXAMPLE],
+ ),
+ responses={
+ 200: OpenApiResponse(
+ description="Work Item patched successfully",
+ response=IssueSerializer,
+ examples=[ISSUE_EXAMPLE],
+ ),
+ 400: INVALID_REQUEST_RESPONSE,
+ 404: WORK_ITEM_NOT_FOUND_RESPONSE,
+ 409: EXTERNAL_ID_EXISTS_RESPONSE,
+ },
+ )
+ def patch(self, request, slug, project_id, pk):
+ """Update work item
+
+ Partially update an existing work item with the provided fields.
+ Supports external ID validation to prevent conflicts.
+ """
+ issue = Issue.objects.get(workspace__slug=slug, project_id=project_id, pk=pk)
+ project = Project.objects.get(pk=project_id)
+ current_instance = json.dumps(IssueSerializer(issue).data, cls=DjangoJSONEncoder)
+ requested_data = json.dumps(self.request.data, cls=DjangoJSONEncoder)
+ serializer = IssueSerializer(
+ issue,
+ data=request.data,
+ context={"project_id": project_id, "workspace_id": project.workspace_id},
+ partial=True,
+ )
+ if serializer.is_valid():
+ if (
+ request.data.get("external_id")
+ and (issue.external_id != str(request.data.get("external_id")))
+ and Issue.objects.filter(
+ project_id=project_id,
+ workspace__slug=slug,
+ external_source=request.data.get("external_source", issue.external_source),
+ external_id=request.data.get("external_id"),
+ ).exists()
+ ):
+ return Response(
+ {
+ "error": "Issue with the same external id and external source already exists",
+ "id": str(issue.id),
+ },
+ status=status.HTTP_409_CONFLICT,
+ )
+
+ serializer.save()
+ issue_activity.delay(
+ type="issue.activity.updated",
+ requested_data=requested_data,
+ actor_id=str(request.user.id),
+ issue_id=str(pk),
+ project_id=str(project_id),
+ current_instance=current_instance,
+ epoch=int(timezone.now().timestamp()),
+ )
+ return Response(serializer.data, status=status.HTTP_200_OK)
+ return Response(serializer.errors, status=status.HTTP_400_BAD_REQUEST)
+
+ @work_item_docs(
+ operation_id="delete_work_item",
+ summary="Delete work item",
+ description="Permanently delete an existing work item from the project. Only admins or the item creator can perform this action.", # noqa: E501
+ parameters=[
+ PROJECT_ID_PARAMETER,
+ ],
+ responses={
+ 204: DELETED_RESPONSE,
+ 403: ADMIN_ONLY_RESPONSE,
+ 404: WORK_ITEM_NOT_FOUND_RESPONSE,
+ },
+ )
+ def delete(self, request, slug, project_id, pk):
+ """Delete work item
+
+ Permanently delete an existing work item from the project.
+ Only admins or the item creator can perform this action.
+ """
+ issue = Issue.objects.get(workspace__slug=slug, project_id=project_id, pk=pk)
+ if issue.created_by_id != request.user.id and (
+ not ProjectMember.objects.filter(
+ workspace__slug=slug,
+ member=request.user,
+ role=20,
+ project_id=project_id,
+ is_active=True,
+ ).exists()
+ ):
+ return Response(
+ {"error": "Only admin or creator can delete the work item"},
+ status=status.HTTP_403_FORBIDDEN,
+ )
+ current_instance = json.dumps(IssueSerializer(issue).data, cls=DjangoJSONEncoder)
+ issue.delete()
+ issue_activity.delay(
+ type="issue.activity.deleted",
+ requested_data=json.dumps({"issue_id": str(pk)}),
+ actor_id=str(request.user.id),
+ issue_id=str(pk),
+ project_id=str(project_id),
+ current_instance=current_instance,
+ epoch=int(timezone.now().timestamp()),
+ )
+ return Response(status=status.HTTP_204_NO_CONTENT)
+
+
+class LabelListCreateAPIEndpoint(BaseAPIView):
+ """Label List and Create Endpoint"""
+
+ serializer_class = LabelSerializer
+ model = Label
+ permission_classes = [ProjectMemberPermission]
+ use_read_replica = True
+
+ def get_queryset(self):
+ return (
+ Label.objects.filter(workspace__slug=self.kwargs.get("slug"))
+ .filter(project_id=self.kwargs.get("project_id"))
+ .filter(
+ project__project_projectmember__member=self.request.user,
+ project__project_projectmember__is_active=True,
+ )
+ .filter(project__archived_at__isnull=True)
+ .select_related("project")
+ .select_related("workspace")
+ .select_related("parent")
+ .distinct()
+ .order_by(self.kwargs.get("order_by", "-created_at"))
+ )
+
+ @label_docs(
+ operation_id="create_label",
+ description="Create a new label in the specified project with name, color, and description.",
+ request=OpenApiRequest(
+ request=LabelCreateUpdateSerializer,
+ examples=[LABEL_CREATE_EXAMPLE],
+ ),
+ responses={
+ 201: OpenApiResponse(
+ description="Label created successfully",
+ response=LabelSerializer,
+ examples=[LABEL_EXAMPLE],
+ ),
+ 400: INVALID_REQUEST_RESPONSE,
+ 409: LABEL_NAME_EXISTS_RESPONSE,
+ },
+ )
+ def post(self, request, slug, project_id):
+ """Create label
+
+ Create a new label in the specified project with name, color, and description.
+ Supports external ID tracking for integration purposes.
+ """
+ try:
+ serializer = LabelCreateUpdateSerializer(data=request.data)
+ if serializer.is_valid():
+ if (
+ request.data.get("external_id")
+ and request.data.get("external_source")
+ and Label.objects.filter(
+ project_id=project_id,
+ workspace__slug=slug,
+ external_source=request.data.get("external_source"),
+ external_id=request.data.get("external_id"),
+ ).exists()
+ ):
+ label = Label.objects.filter(
+ workspace__slug=slug,
+ project_id=project_id,
+ external_id=request.data.get("external_id"),
+ external_source=request.data.get("external_source"),
+ ).first()
+ return Response(
+ {
+ "error": "Label with the same external id and external source already exists",
+ "id": str(label.id),
+ },
+ status=status.HTTP_409_CONFLICT,
+ )
+
+ serializer.save(project_id=project_id)
+ label = Label.objects.get(pk=serializer.instance.id)
+ serializer = LabelSerializer(label)
+ return Response(serializer.data, status=status.HTTP_201_CREATED)
+ return Response(serializer.errors, status=status.HTTP_400_BAD_REQUEST)
+ except IntegrityError:
+ label = Label.objects.filter(
+ workspace__slug=slug,
+ project_id=project_id,
+ name=request.data.get("name"),
+ ).first()
+ return Response(
+ {
+ "error": "Label with the same name already exists in the project",
+ "id": str(label.id),
+ },
+ status=status.HTTP_409_CONFLICT,
+ )
+
+ @label_docs(
+ operation_id="list_labels",
+ description="Retrieve all labels in a project. Supports filtering by name and color.",
+ parameters=[
+ CURSOR_PARAMETER,
+ PER_PAGE_PARAMETER,
+ ORDER_BY_PARAMETER,
+ FIELDS_PARAMETER,
+ EXPAND_PARAMETER,
+ ],
+ responses={
+ 200: create_paginated_response(
+ LabelSerializer,
+ "PaginatedLabelResponse",
+ "Paginated list of labels",
+ "Paginated Labels",
+ ),
+ 400: INVALID_REQUEST_RESPONSE,
+ 404: PROJECT_NOT_FOUND_RESPONSE,
+ },
+ )
+ def get(self, request, slug, project_id):
+ """List labels
+
+ Retrieve all labels in the project.
+ """
+ return self.paginate(
+ request=request,
+ queryset=(self.get_queryset()),
+ on_results=lambda labels: LabelSerializer(labels, many=True, fields=self.fields, expand=self.expand).data,
+ )
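+
+    # Example list call (illustrative; cursor values are opaque strings taken
+    # from a previous page's response, so none is shown here):
+    #
+    #   GET .../projects/<project_id>/labels/?per_page=50
+    #
+    # The response is the paginated envelope declared above, wrapping the
+    # serialized labels together with next/previous cursors.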
+
+
+class LabelDetailAPIEndpoint(LabelListCreateAPIEndpoint):
+ """Label Detail Endpoint"""
+
+ serializer_class = LabelSerializer
+ model = Label
+ permission_classes = [ProjectMemberPermission]
+ use_read_replica = True
+
+ @label_docs(
+ operation_id="get_labels",
+ description="Retrieve details of a specific label.",
+ parameters=[
+ LABEL_ID_PARAMETER,
+ ],
+ responses={
+ 200: OpenApiResponse(
+ description="Labels",
+ response=LabelSerializer,
+ examples=[LABEL_EXAMPLE],
+ ),
+ 404: LABEL_NOT_FOUND_RESPONSE,
+ },
+ )
+ def get(self, request, slug, project_id, pk):
+ """Retrieve label
+
+ Retrieve details of a specific label.
+ """
+ label = self.get_queryset().get(pk=pk)
+ serializer = LabelSerializer(label)
+ return Response(serializer.data, status=status.HTTP_200_OK)
+
+ @label_docs(
+ operation_id="update_label",
+ description="Partially update an existing label's properties like name, color, or description.",
+ parameters=[
+ LABEL_ID_PARAMETER,
+ ],
+ request=OpenApiRequest(
+ request=LabelCreateUpdateSerializer,
+ examples=[LABEL_UPDATE_EXAMPLE],
+ ),
+ responses={
+ 200: OpenApiResponse(
+ description="Label updated successfully",
+ response=LabelSerializer,
+ examples=[LABEL_EXAMPLE],
+ ),
+ 400: INVALID_REQUEST_RESPONSE,
+ 404: LABEL_NOT_FOUND_RESPONSE,
+ 409: EXTERNAL_ID_EXISTS_RESPONSE,
+ },
+ )
+ def patch(self, request, slug, project_id, pk):
+ """Update label
+
+ Partially update an existing label's properties like name, color, or description.
+ Validates external ID uniqueness if provided.
+ """
+ label = self.get_queryset().get(pk=pk)
+ serializer = LabelCreateUpdateSerializer(label, data=request.data, partial=True)
+ if serializer.is_valid():
+ if (
+                request.data.get("external_id")
+ and (label.external_id != str(request.data.get("external_id")))
+ and Label.objects.filter(
+ project_id=project_id,
+ workspace__slug=slug,
+ external_source=request.data.get("external_source", label.external_source),
+ external_id=request.data.get("external_id"),
+ )
+ .exclude(id=pk)
+ .exists()
+ ):
+ return Response(
+ {
+ "error": "Label with the same external id and external source already exists",
+ "id": str(label.id),
+ },
+ status=status.HTTP_409_CONFLICT,
+ )
+ serializer.save()
+ label = Label.objects.get(pk=serializer.instance.id)
+ serializer = LabelSerializer(label)
+ return Response(serializer.data, status=status.HTTP_200_OK)
+ return Response(serializer.errors, status=status.HTTP_400_BAD_REQUEST)
+
+ @label_docs(
+ operation_id="delete_label",
+ description="Permanently remove a label from the project. This action cannot be undone.",
+ parameters=[
+ LABEL_ID_PARAMETER,
+ ],
+ responses={
+ 204: DELETED_RESPONSE,
+ 404: LABEL_NOT_FOUND_RESPONSE,
+ },
+ )
+ def delete(self, request, slug, project_id, pk):
+ """Delete label
+
+ Permanently remove a label from the project.
+ This action cannot be undone.
+ """
+ label = self.get_queryset().get(pk=pk)
+ label.delete()
+ return Response(status=status.HTTP_204_NO_CONTENT)
+
+
+class IssueLinkListCreateAPIEndpoint(BaseAPIView):
+ """Work Item Link List and Create Endpoint"""
+
+ serializer_class = IssueLinkSerializer
+ model = IssueLink
+ permission_classes = [ProjectEntityPermission]
+ use_read_replica = True
+
+ def get_queryset(self):
+ return (
+ IssueLink.objects.filter(workspace__slug=self.kwargs.get("slug"))
+ .filter(project_id=self.kwargs.get("project_id"))
+ .filter(issue_id=self.kwargs.get("issue_id"))
+ .filter(
+ project__project_projectmember__member=self.request.user,
+ project__project_projectmember__is_active=True,
+ )
+ .filter(project__archived_at__isnull=True)
+ .order_by(self.kwargs.get("order_by", "-created_at"))
+ .distinct()
+ )
+
+ @issue_link_docs(
+ operation_id="list_work_item_links",
+ description="Retrieve all links associated with a work item. Supports filtering by URL, title, and metadata.",
+ parameters=[
+ ISSUE_ID_PARAMETER,
+ CURSOR_PARAMETER,
+ PER_PAGE_PARAMETER,
+ ORDER_BY_PARAMETER,
+ FIELDS_PARAMETER,
+ EXPAND_PARAMETER,
+ ],
+ responses={
+ 200: create_paginated_response(
+ IssueLinkSerializer,
+ "PaginatedIssueLinkResponse",
+ "Paginated list of work item links",
+ "Paginated Work Item Links",
+ ),
+ 400: INVALID_REQUEST_RESPONSE,
+ 404: ISSUE_NOT_FOUND_RESPONSE,
+ },
+ )
+ def get(self, request, slug, project_id, issue_id):
+ """List work item links
+
+ Retrieve all links associated with a work item.
+ """
+ return self.paginate(
+ request=request,
+ queryset=(self.get_queryset()),
+ on_results=lambda issue_links: IssueLinkSerializer(
+ issue_links, many=True, fields=self.fields, expand=self.expand
+ ).data,
+ )
+
+ @issue_link_docs(
+ operation_id="create_work_item_link",
+ description="Add a new external link to a work item with URL, title, and metadata.",
+ parameters=[
+ ISSUE_ID_PARAMETER,
+ ],
+ request=OpenApiRequest(
+ request=IssueLinkCreateSerializer,
+ examples=[ISSUE_LINK_CREATE_EXAMPLE],
+ ),
+ responses={
+ 201: OpenApiResponse(
+ description="Work item link created successfully",
+ response=IssueLinkSerializer,
+ examples=[ISSUE_LINK_EXAMPLE],
+ ),
+ 400: INVALID_REQUEST_RESPONSE,
+ 404: ISSUE_NOT_FOUND_RESPONSE,
+ },
+ )
+ def post(self, request, slug, project_id, issue_id):
+ """Create issue link
+
+ Add a new external link to a work item with URL, title, and metadata.
+ Automatically tracks link creation activity.
+ """
+ serializer = IssueLinkCreateSerializer(data=request.data)
+ if serializer.is_valid():
+ serializer.save(project_id=project_id, issue_id=issue_id)
+ crawl_work_item_link_title.delay(serializer.instance.id, serializer.instance.url)
+ link = IssueLink.objects.get(pk=serializer.instance.id)
+ link.created_by_id = request.data.get("created_by", request.user.id)
+ link.save(update_fields=["created_by"])
+ issue_activity.delay(
+ type="link.activity.created",
+ requested_data=json.dumps(serializer.data, cls=DjangoJSONEncoder),
+ issue_id=str(self.kwargs.get("issue_id")),
+ project_id=str(self.kwargs.get("project_id")),
+ actor_id=str(link.created_by_id),
+ current_instance=None,
+ epoch=int(timezone.now().timestamp()),
+ )
+ serializer = IssueLinkSerializer(link)
+ return Response(serializer.data, status=status.HTTP_201_CREATED)
+ return Response(serializer.errors, status=status.HTTP_400_BAD_REQUEST)
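+
+    # Minimal create payload (a sketch; the accepted fields are defined by
+    # IssueLinkCreateSerializer elsewhere in this codebase):
+    #
+    #   POST .../issues/<issue_id>/links/
+    #   {"url": "https://example.com/spec", "title": "Spec"}
+    #
+    # After the 201, crawl_work_item_link_title runs asynchronously and may
+    # refresh the stored title from the target page.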
+
+
+class IssueLinkDetailAPIEndpoint(BaseAPIView):
+ """Issue Link Detail Endpoint"""
+
+ permission_classes = [ProjectEntityPermission]
+
+ model = IssueLink
+ serializer_class = IssueLinkSerializer
+ use_read_replica = True
+
+ def get_queryset(self):
+ return (
+ IssueLink.objects.filter(workspace__slug=self.kwargs.get("slug"))
+ .filter(project_id=self.kwargs.get("project_id"))
+ .filter(issue_id=self.kwargs.get("issue_id"))
+ .filter(
+ project__project_projectmember__member=self.request.user,
+ project__project_projectmember__is_active=True,
+ )
+ .filter(project__archived_at__isnull=True)
+ .order_by(self.kwargs.get("order_by", "-created_at"))
+ .distinct()
+ )
+
+ @issue_link_docs(
+ operation_id="retrieve_work_item_link",
+ description="Retrieve details of a specific work item link.",
+ parameters=[
+ ISSUE_ID_PARAMETER,
+ LINK_ID_PARAMETER,
+ CURSOR_PARAMETER,
+ PER_PAGE_PARAMETER,
+ FIELDS_PARAMETER,
+ EXPAND_PARAMETER,
+ ],
+ responses={
+ 200: create_paginated_response(
+ IssueLinkSerializer,
+ "PaginatedIssueLinkDetailResponse",
+ "Work item link details or paginated list",
+ "Work Item Link Details",
+ ),
+ 404: OpenApiResponse(description="Issue not found"),
+ },
+ )
+ def get(self, request, slug, project_id, issue_id, pk):
+ """Retrieve work item link
+
+ Retrieve details of a specific work item link.
+ """
+        if pk is None:
+            return self.paginate(
+ request=request,
+ queryset=(self.get_queryset()),
+ on_results=lambda issue_links: IssueLinkSerializer(
+ issue_links, many=True, fields=self.fields, expand=self.expand
+ ).data,
+ )
+ issue_link = self.get_queryset().get(pk=pk)
+ serializer = IssueLinkSerializer(issue_link, fields=self.fields, expand=self.expand)
+ return Response(serializer.data, status=status.HTTP_200_OK)
+
+ @issue_link_docs(
+ operation_id="update_issue_link",
+ description="Modify the URL, title, or metadata of an existing issue link.",
+ parameters=[
+ ISSUE_ID_PARAMETER,
+ LINK_ID_PARAMETER,
+ ],
+ request=OpenApiRequest(
+ request=IssueLinkUpdateSerializer,
+ examples=[ISSUE_LINK_UPDATE_EXAMPLE],
+ ),
+ responses={
+ 200: OpenApiResponse(
+ description="Issue link updated successfully",
+ response=IssueLinkSerializer,
+ examples=[ISSUE_LINK_EXAMPLE],
+ ),
+ 400: INVALID_REQUEST_RESPONSE,
+ 404: LINK_NOT_FOUND_RESPONSE,
+ },
+ )
+ def patch(self, request, slug, project_id, issue_id, pk):
+ """Update issue link
+
+ Modify the URL, title, or metadata of an existing issue link.
+ Tracks all changes in issue activity logs.
+ """
+ issue_link = IssueLink.objects.get(workspace__slug=slug, project_id=project_id, issue_id=issue_id, pk=pk)
+ requested_data = json.dumps(request.data, cls=DjangoJSONEncoder)
+ current_instance = json.dumps(IssueLinkSerializer(issue_link).data, cls=DjangoJSONEncoder)
+ serializer = IssueLinkSerializer(issue_link, data=request.data, partial=True)
+ if serializer.is_valid():
+ serializer.save()
+ crawl_work_item_link_title.delay(serializer.data.get("id"), serializer.data.get("url"))
+ issue_activity.delay(
+ type="link.activity.updated",
+ requested_data=requested_data,
+ actor_id=str(request.user.id),
+ issue_id=str(issue_id),
+ project_id=str(project_id),
+ current_instance=current_instance,
+ epoch=int(timezone.now().timestamp()),
+ )
+ serializer = IssueLinkSerializer(issue_link)
+ return Response(serializer.data, status=status.HTTP_200_OK)
+ return Response(serializer.errors, status=status.HTTP_400_BAD_REQUEST)
+
+ @issue_link_docs(
+ operation_id="delete_work_item_link",
+ description="Permanently remove an external link from a work item.",
+ parameters=[
+ ISSUE_ID_PARAMETER,
+ LINK_ID_PARAMETER,
+ ],
+ responses={
+ 204: OpenApiResponse(description="Work item link deleted successfully"),
+ 404: OpenApiResponse(description="Work item link not found"),
+ },
+ )
+ def delete(self, request, slug, project_id, issue_id, pk):
+ """Delete work item link
+
+ Permanently remove an external link from a work item.
+ Records deletion activity for audit purposes.
+ """
+ issue_link = IssueLink.objects.get(workspace__slug=slug, project_id=project_id, issue_id=issue_id, pk=pk)
+ current_instance = json.dumps(IssueLinkSerializer(issue_link).data, cls=DjangoJSONEncoder)
+ issue_activity.delay(
+ type="link.activity.deleted",
+ requested_data=json.dumps({"link_id": str(pk)}),
+ actor_id=str(request.user.id),
+ issue_id=str(issue_id),
+ project_id=str(project_id),
+ current_instance=current_instance,
+ epoch=int(timezone.now().timestamp()),
+ )
+ issue_link.delete()
+ return Response(status=status.HTTP_204_NO_CONTENT)
+
+
+class IssueCommentListCreateAPIEndpoint(BaseAPIView):
+ """Issue Comment List and Create Endpoint"""
+
+ serializer_class = IssueCommentSerializer
+ model = IssueComment
+ webhook_event = "issue_comment"
+ permission_classes = [ProjectLitePermission]
+ use_read_replica = True
+
+ def get_queryset(self):
+ return (
+ IssueComment.objects.filter(workspace__slug=self.kwargs.get("slug"))
+ .filter(project_id=self.kwargs.get("project_id"))
+ .filter(issue_id=self.kwargs.get("issue_id"))
+ .filter(
+ project__project_projectmember__member=self.request.user,
+ project__project_projectmember__is_active=True,
+ )
+ .filter(project__archived_at__isnull=True)
+ .select_related("workspace", "project", "issue", "actor")
+ .annotate(
+ is_member=Exists(
+ ProjectMember.objects.filter(
+ workspace__slug=self.kwargs.get("slug"),
+ project_id=self.kwargs.get("project_id"),
+ member_id=self.request.user.id,
+ is_active=True,
+ )
+ )
+ )
+ .order_by(self.kwargs.get("order_by", "-created_at"))
+ .distinct()
+ )
+
+ @issue_comment_docs(
+ operation_id="list_work_item_comments",
+ description="Retrieve all comments for a work item.",
+ parameters=[
+ ISSUE_ID_PARAMETER,
+ CURSOR_PARAMETER,
+ PER_PAGE_PARAMETER,
+ ORDER_BY_PARAMETER,
+ FIELDS_PARAMETER,
+ EXPAND_PARAMETER,
+ ],
+ responses={
+ 200: create_paginated_response(
+ IssueCommentSerializer,
+ "PaginatedIssueCommentResponse",
+ "Paginated list of work item comments",
+ "Paginated Work Item Comments",
+ ),
+ 404: OpenApiResponse(description="Issue not found"),
+ },
+ )
+ def get(self, request, slug, project_id, issue_id):
+ """List work item comments
+
+ Retrieve all comments for a work item.
+ """
+ return self.paginate(
+ request=request,
+ queryset=(self.get_queryset()),
+ on_results=lambda issue_comments: IssueCommentSerializer(
+ issue_comments, many=True, fields=self.fields, expand=self.expand
+ ).data,
+ )
+
+ @issue_comment_docs(
+ operation_id="create_work_item_comment",
+ description="Add a new comment to a work item with HTML content.",
+ parameters=[
+ ISSUE_ID_PARAMETER,
+ ],
+ request=OpenApiRequest(
+ request=IssueCommentCreateSerializer,
+ examples=[ISSUE_COMMENT_CREATE_EXAMPLE],
+ ),
+ responses={
+ 201: OpenApiResponse(
+ description="Work item comment created successfully",
+ response=IssueCommentSerializer,
+ examples=[ISSUE_COMMENT_EXAMPLE],
+ ),
+ 400: INVALID_REQUEST_RESPONSE,
+ 404: ISSUE_NOT_FOUND_RESPONSE,
+ 409: EXTERNAL_ID_EXISTS_RESPONSE,
+ },
+ )
+ def post(self, request, slug, project_id, issue_id):
+ """Create work item comment
+
+ Add a new comment to a work item with HTML content.
+ Supports external ID tracking for integration purposes.
+ """
+        # Validation check if the comment already exists
+ if (
+ request.data.get("external_id")
+ and request.data.get("external_source")
+ and IssueComment.objects.filter(
+ project_id=project_id,
+ workspace__slug=slug,
+ external_source=request.data.get("external_source"),
+ external_id=request.data.get("external_id"),
+ ).exists()
+ ):
+ issue_comment = IssueComment.objects.filter(
+ workspace__slug=slug,
+ project_id=project_id,
+ external_id=request.data.get("external_id"),
+ external_source=request.data.get("external_source"),
+ ).first()
+ return Response(
+ {
+ "error": "Work item comment with the same external id and external source already exists",
+ "id": str(issue_comment.id),
+ },
+ status=status.HTTP_409_CONFLICT,
+ )
+
+ serializer = IssueCommentCreateSerializer(data=request.data)
+ if serializer.is_valid():
+ serializer.save(project_id=project_id, issue_id=issue_id, actor=request.user)
+ issue_comment = IssueComment.objects.get(pk=serializer.instance.id)
+            # Allow importers to backdate the comment and attribute it to the
+            # original author, then persist all three overridden fields
+            issue_comment.created_at = request.data.get("created_at", timezone.now())
+            issue_comment.created_by_id = request.data.get("created_by", request.user.id)
+            issue_comment.actor_id = request.data.get("created_by", request.user.id)
+            issue_comment.save(update_fields=["created_at", "created_by", "actor"])
+
+ issue_activity.delay(
+ type="comment.activity.created",
+ requested_data=json.dumps(serializer.data, cls=DjangoJSONEncoder),
+ actor_id=str(issue_comment.created_by_id),
+ issue_id=str(self.kwargs.get("issue_id")),
+ project_id=str(self.kwargs.get("project_id")),
+ current_instance=None,
+ epoch=int(timezone.now().timestamp()),
+ )
+
+ # Send the model activity
+ model_activity.delay(
+ model_name="issue_comment",
+ model_id=str(serializer.instance.id),
+ requested_data=request.data,
+ current_instance=None,
+ actor_id=request.user.id,
+ slug=slug,
+ origin=base_host(request=request, is_app=True),
+ )
+
+ serializer = IssueCommentSerializer(issue_comment)
+ return Response(serializer.data, status=status.HTTP_201_CREATED)
+ return Response(serializer.errors, status=status.HTTP_400_BAD_REQUEST)
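+
+    # Importer-style payload (illustrative; comment_html is the content field
+    # assumed from the create serializer). created_at/created_by let an
+    # integration preserve the original author and timestamp, and the
+    # external_* pair makes retries idempotent via the 409 branch above:
+    #
+    #   POST .../issues/<issue_id>/comments/
+    #   {"comment_html": "<p>Imported note</p>",
+    #    "created_at": "2024-01-01T00:00:00Z",
+    #    "created_by": "<user-uuid>",
+    #    "external_id": "note-17", "external_source": "jira"}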
+
+
+class IssueCommentDetailAPIEndpoint(BaseAPIView):
+ """Work Item Comment Detail Endpoint"""
+
+ serializer_class = IssueCommentSerializer
+ model = IssueComment
+ webhook_event = "issue_comment"
+ permission_classes = [ProjectLitePermission]
+ use_read_replica = True
+
+ def get_queryset(self):
+ return (
+ IssueComment.objects.filter(workspace__slug=self.kwargs.get("slug"))
+ .filter(project_id=self.kwargs.get("project_id"))
+ .filter(issue_id=self.kwargs.get("issue_id"))
+ .filter(
+ project__project_projectmember__member=self.request.user,
+ project__project_projectmember__is_active=True,
+ )
+ .filter(project__archived_at__isnull=True)
+ .select_related("workspace", "project", "issue", "actor")
+ .annotate(
+ is_member=Exists(
+ ProjectMember.objects.filter(
+ workspace__slug=self.kwargs.get("slug"),
+ project_id=self.kwargs.get("project_id"),
+ member_id=self.request.user.id,
+ is_active=True,
+ )
+ )
+ )
+ .order_by(self.kwargs.get("order_by", "-created_at"))
+ .distinct()
+ )
+
+ @issue_comment_docs(
+ operation_id="retrieve_work_item_comment",
+ description="Retrieve details of a specific comment.",
+ parameters=[
+ ISSUE_ID_PARAMETER,
+ COMMENT_ID_PARAMETER,
+ ],
+ responses={
+ 200: OpenApiResponse(
+ description="Work item comments",
+ response=IssueCommentSerializer,
+ examples=[ISSUE_COMMENT_EXAMPLE],
+ ),
+ 400: INVALID_REQUEST_RESPONSE,
+ 404: ISSUE_NOT_FOUND_RESPONSE,
+ },
+ )
+ def get(self, request, slug, project_id, issue_id, pk):
+ """Retrieve issue comment
+
+ Retrieve details of a specific comment.
+ """
+ issue_comment = self.get_queryset().get(pk=pk)
+ serializer = IssueCommentSerializer(issue_comment, fields=self.fields, expand=self.expand)
+ return Response(serializer.data, status=status.HTTP_200_OK)
+
+ @issue_comment_docs(
+ operation_id="update_work_item_comment",
+ description="Modify the content of an existing comment on a work item.",
+ parameters=[
+ ISSUE_ID_PARAMETER,
+ COMMENT_ID_PARAMETER,
+ ],
+ request=OpenApiRequest(
+ request=IssueCommentCreateSerializer,
+ examples=[ISSUE_COMMENT_UPDATE_EXAMPLE],
+ ),
+ responses={
+ 200: OpenApiResponse(
+ description="Work item comment updated successfully",
+ response=IssueCommentSerializer,
+ examples=[ISSUE_COMMENT_EXAMPLE],
+ ),
+ 400: INVALID_REQUEST_RESPONSE,
+ 404: COMMENT_NOT_FOUND_RESPONSE,
+ 409: EXTERNAL_ID_EXISTS_RESPONSE,
+ },
+ )
+ def patch(self, request, slug, project_id, issue_id, pk):
+ """Update work item comment
+
+ Modify the content of an existing comment on a work item.
+ Validates external ID uniqueness if provided.
+ """
+ issue_comment = IssueComment.objects.get(workspace__slug=slug, project_id=project_id, issue_id=issue_id, pk=pk)
+ requested_data = json.dumps(self.request.data, cls=DjangoJSONEncoder)
+ current_instance = json.dumps(IssueCommentSerializer(issue_comment).data, cls=DjangoJSONEncoder)
+
+        # Validation check if the comment already exists
+ if (
+ request.data.get("external_id")
+ and (issue_comment.external_id != str(request.data.get("external_id")))
+ and IssueComment.objects.filter(
+ project_id=project_id,
+ workspace__slug=slug,
+ external_source=request.data.get("external_source", issue_comment.external_source),
+ external_id=request.data.get("external_id"),
+ ).exists()
+ ):
+ return Response(
+ {
+ "error": "Work item comment with the same external id and external source already exists",
+ "id": str(issue_comment.id),
+ },
+ status=status.HTTP_409_CONFLICT,
+ )
+
+ serializer = IssueCommentCreateSerializer(issue_comment, data=request.data, partial=True)
+ if serializer.is_valid():
+ serializer.save()
+ issue_activity.delay(
+ type="comment.activity.updated",
+ requested_data=requested_data,
+ actor_id=str(request.user.id),
+ issue_id=str(issue_id),
+ project_id=str(project_id),
+ current_instance=current_instance,
+ epoch=int(timezone.now().timestamp()),
+ )
+ # Send the model activity
+ model_activity.delay(
+ model_name="issue_comment",
+ model_id=str(pk),
+ requested_data=request.data,
+ current_instance=current_instance,
+ actor_id=request.user.id,
+ slug=slug,
+ origin=base_host(request=request, is_app=True),
+ )
+
+ issue_comment = IssueComment.objects.get(pk=serializer.instance.id)
+ serializer = IssueCommentSerializer(issue_comment)
+ return Response(serializer.data, status=status.HTTP_200_OK)
+ return Response(serializer.errors, status=status.HTTP_400_BAD_REQUEST)
+
+ @issue_comment_docs(
+ operation_id="delete_work_item_comment",
+ description="Permanently remove a comment from a work item. Records deletion activity for audit purposes.",
+ parameters=[
+ ISSUE_ID_PARAMETER,
+ COMMENT_ID_PARAMETER,
+ ],
+ responses={
+ 204: OpenApiResponse(description="Work item comment deleted successfully"),
+ 404: COMMENT_NOT_FOUND_RESPONSE,
+ },
+ )
+ def delete(self, request, slug, project_id, issue_id, pk):
+ """Delete issue comment
+
+ Permanently remove a comment from a work item.
+ Records deletion activity for audit purposes.
+ """
+ issue_comment = IssueComment.objects.get(workspace__slug=slug, project_id=project_id, issue_id=issue_id, pk=pk)
+ current_instance = json.dumps(IssueCommentSerializer(issue_comment).data, cls=DjangoJSONEncoder)
+ issue_comment.delete()
+ issue_activity.delay(
+ type="comment.activity.deleted",
+ requested_data=json.dumps({"comment_id": str(pk)}),
+ actor_id=str(request.user.id),
+ issue_id=str(issue_id),
+ project_id=str(project_id),
+ current_instance=current_instance,
+ epoch=int(timezone.now().timestamp()),
+ )
+ return Response(status=status.HTTP_204_NO_CONTENT)
+
+
+class IssueActivityListAPIEndpoint(BaseAPIView):
+ permission_classes = [ProjectEntityPermission]
+ use_read_replica = True
+
+ @issue_activity_docs(
+ operation_id="list_work_item_activities",
+ description="Retrieve all activities for a work item. Supports filtering by activity type and date range.",
+ parameters=[
+ ISSUE_ID_PARAMETER,
+ CURSOR_PARAMETER,
+ PER_PAGE_PARAMETER,
+ ORDER_BY_PARAMETER,
+ FIELDS_PARAMETER,
+ EXPAND_PARAMETER,
+ ],
+ responses={
+ 200: create_paginated_response(
+ IssueActivitySerializer,
+ "PaginatedIssueActivityResponse",
+ "Paginated list of issue activities",
+ "Paginated Issue Activities",
+ ),
+ 400: INVALID_REQUEST_RESPONSE,
+ 404: ISSUE_NOT_FOUND_RESPONSE,
+ },
+ )
+ def get(self, request, slug, project_id, issue_id):
+ """List issue activities
+
+ Retrieve chronological activity logs for an issue.
+ Excludes comment, vote, reaction, and draft activities.
+ """
+ issue_activities = (
+ IssueActivity.objects.filter(issue_id=issue_id, workspace__slug=slug, project_id=project_id)
+ .filter(
+ ~Q(field__in=["comment", "vote", "reaction", "draft"]),
+ project__project_projectmember__member=self.request.user,
+ project__project_projectmember__is_active=True,
+ )
+ .filter(project__archived_at__isnull=True)
+ .select_related("actor", "workspace", "issue", "project")
+ ).order_by(request.GET.get("order_by", "created_at"))
+
+ return self.paginate(
+ request=request,
+ queryset=(issue_activities),
+ on_results=lambda issue_activity: IssueActivitySerializer(
+ issue_activity, many=True, fields=self.fields, expand=self.expand
+ ).data,
+ )
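+
+    # Example (illustrative): reverse-chronological field-change history.
+    # order_by is read from the query string here (request.GET above):
+    #
+    #   GET .../issues/<issue_id>/activities/?order_by=-created_at&per_page=20
+    #
+    # Comment, vote, reaction, and draft entries are excluded by the queryset.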
+
+
+class IssueActivityDetailAPIEndpoint(BaseAPIView):
+ """Issue Activity Detail Endpoint"""
+
+ permission_classes = [ProjectEntityPermission]
+ use_read_replica = True
+
+ @issue_activity_docs(
+ operation_id="retrieve_work_item_activity",
+ description="Retrieve details of a specific activity.",
+ parameters=[
+ ISSUE_ID_PARAMETER,
+ ACTIVITY_ID_PARAMETER,
+ CURSOR_PARAMETER,
+ PER_PAGE_PARAMETER,
+ ORDER_BY_PARAMETER,
+ FIELDS_PARAMETER,
+ EXPAND_PARAMETER,
+ ],
+ responses={
+ 200: create_paginated_response(
+ IssueActivitySerializer,
+ "PaginatedIssueActivityDetailResponse",
+ "Paginated list of work item activities",
+ "Work Item Activity Details",
+ ),
+ 400: INVALID_REQUEST_RESPONSE,
+ 404: ISSUE_NOT_FOUND_RESPONSE,
+ },
+ )
+ def get(self, request, slug, project_id, issue_id, pk):
+ """Retrieve issue activity
+
+ Retrieve details of a specific activity.
+ Excludes comment, vote, reaction, and draft activities.
+ """
+ issue_activities = (
+            IssueActivity.objects.filter(pk=pk, issue_id=issue_id, workspace__slug=slug, project_id=project_id)
+ .filter(
+ ~Q(field__in=["comment", "vote", "reaction", "draft"]),
+ project__project_projectmember__member=self.request.user,
+ project__project_projectmember__is_active=True,
+ )
+ .filter(project__archived_at__isnull=True)
+ .select_related("actor", "workspace", "issue", "project")
+ ).order_by(request.GET.get("order_by", "created_at"))
+
+ return self.paginate(
+ request=request,
+ queryset=(issue_activities),
+ on_results=lambda issue_activity: IssueActivitySerializer(
+ issue_activity, many=True, fields=self.fields, expand=self.expand
+ ).data,
+ )
+
+
+class IssueAttachmentListCreateAPIEndpoint(BaseAPIView):
+ """Issue Attachment List and Create Endpoint"""
+
+ serializer_class = IssueAttachmentSerializer
+ model = FileAsset
+ use_read_replica = True
+
+ @issue_attachment_docs(
+ operation_id="create_work_item_attachment",
+ description="Generate presigned URL for uploading file attachments to a work item.",
+ parameters=[
+ ISSUE_ID_PARAMETER,
+ ],
+ request=OpenApiRequest(
+ request=IssueAttachmentUploadSerializer,
+ examples=[ISSUE_ATTACHMENT_UPLOAD_EXAMPLE],
+ ),
+ responses={
+ 200: OpenApiResponse(
+ description="Presigned download URL generated successfully",
+ examples=[
+ OpenApiExample(
+ name="Work Item Attachment Response",
+ value={
+ "upload_data": {
+ "url": "https://s3.amazonaws.com/bucket/file.pdf?signed-url",
+ "fields": {
+ "key": "file.pdf",
+ "AWSAccessKeyId": "AKIAIOSFODNN7EXAMPLE",
+ "policy": "EXAMPLE",
+ "signature": "EXAMPLE",
+ "acl": "public-read",
+ "Content-Type": "application/pdf",
+ },
+ },
+ "asset_id": "550e8400-e29b-41d4-a716-446655440000",
+ "asset_url": "https://s3.amazonaws.com/bucket/file.pdf?signed-url",
+ "attachment": {
+ "id": "550e8400-e29b-41d4-a716-446655440000",
+ "name": "file.pdf",
+ "type": "application/pdf",
+ "size": 1234567890,
+ "url": "https://s3.amazonaws.com/bucket/file.pdf?signed-url",
+ },
+ },
+ )
+ ],
+ ),
+ 400: OpenApiResponse(
+ description="Validation error",
+ examples=[
+ OpenApiExample(
+ name="Missing required fields",
+ value={
+ "error": "Name and size are required fields.",
+ "status": False,
+ },
+ ),
+ OpenApiExample(
+ name="Invalid file type",
+ value={"error": "Invalid file type.", "status": False},
+ ),
+ ],
+ ),
+ 404: OpenApiResponse(
+ description="Issue or Project or Workspace not found",
+ examples=[
+ OpenApiExample(
+ name="Workspace not found",
+ value={"error": "Workspace not found"},
+ ),
+ OpenApiExample(name="Project not found", value={"error": "Project not found"}),
+ OpenApiExample(name="Issue not found", value={"error": "Issue not found"}),
+ ],
+ ),
+ },
+ )
+ def post(self, request, slug, project_id, issue_id):
+ """Create work item attachment
+
+ Generate presigned URL for uploading file attachments to a work item.
+ Validates file type and size before creating the attachment record.
+ """
+ issue = Issue.objects.get(pk=issue_id, workspace__slug=slug, project_id=project_id)
+        # Allow the upload if the user is the creator, an admin, or a member
+        if not user_has_issue_permission(
+            request.user,
+ project_id=project_id,
+ issue=issue,
+ allowed_roles=[ROLE.ADMIN.value, ROLE.MEMBER.value],
+ allow_creator=True,
+ ):
+ return Response(
+ {"error": "You are not allowed to upload this attachment"},
+ status=status.HTTP_403_FORBIDDEN,
+ )
+
+ name = request.data.get("name")
+ type = request.data.get("type", False)
+ size = request.data.get("size")
+ external_id = request.data.get("external_id")
+ external_source = request.data.get("external_source")
+
+ # Check if the request is valid
+ if not name or not size:
+ return Response(
+ {"error": "Invalid request.", "status": False},
+ status=status.HTTP_400_BAD_REQUEST,
+ )
+
+ size_limit = min(size, settings.FILE_SIZE_LIMIT)
+
+ if not type or type not in settings.ATTACHMENT_MIME_TYPES:
+ return Response(
+ {"error": "Invalid file type.", "status": False},
+ status=status.HTTP_400_BAD_REQUEST,
+ )
+
+ # Get the workspace
+ workspace = Workspace.objects.get(slug=slug)
+
+ # asset key
+ asset_key = f"{workspace.id}/{uuid.uuid4().hex}-{name}"
+
+ if (
+ request.data.get("external_id")
+ and request.data.get("external_source")
+ and FileAsset.objects.filter(
+ project_id=project_id,
+ workspace__slug=slug,
+ external_source=request.data.get("external_source"),
+ external_id=request.data.get("external_id"),
+ issue_id=issue_id,
+ entity_type=FileAsset.EntityTypeContext.ISSUE_ATTACHMENT,
+ ).exists()
+ ):
+ asset = FileAsset.objects.filter(
+ project_id=project_id,
+ workspace__slug=slug,
+ external_source=request.data.get("external_source"),
+ external_id=request.data.get("external_id"),
+ issue_id=issue_id,
+ entity_type=FileAsset.EntityTypeContext.ISSUE_ATTACHMENT,
+ ).first()
+ return Response(
+ {
+ "error": "Issue with the same external id and external source already exists",
+ "id": str(asset.id),
+ },
+ status=status.HTTP_409_CONFLICT,
+ )
+
+ # Create a File Asset
+ asset = FileAsset.objects.create(
+ attributes={"name": name, "type": type, "size": size_limit},
+ asset=asset_key,
+ size=size_limit,
+ workspace_id=workspace.id,
+ created_by=request.user,
+ issue_id=issue_id,
+ project_id=project_id,
+ entity_type=FileAsset.EntityTypeContext.ISSUE_ATTACHMENT,
+ external_id=external_id,
+ external_source=external_source,
+ )
+
+ # Get the presigned URL
+ storage = S3Storage(request=request)
+ # Generate a presigned URL to share an S3 object
+ presigned_url = storage.generate_presigned_post(object_name=asset_key, file_type=type, file_size=size_limit)
+ # Return the presigned URL
+ return Response(
+ {
+ "upload_data": presigned_url,
+ "asset_id": str(asset.id),
+ "attachment": IssueAttachmentSerializer(asset).data,
+ "asset_url": asset.asset_url,
+ },
+ status=status.HTTP_200_OK,
+ )
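+
+    # End-to-end client sketch (assumes the third-party `requests` package;
+    # URLs and header contents are illustrative). Step 1 is the POST above,
+    # step 2 sends the bytes straight to object storage, step 3 is the PATCH
+    # on the detail endpoint below that marks the asset uploaded:
+    #
+    #   resp = requests.post(api + "/attachments/", headers=auth, json={
+    #       "name": "spec.pdf", "type": "application/pdf", "size": 123456,
+    #   }).json()
+    #   requests.post(resp["upload_data"]["url"],
+    #                 data=resp["upload_data"]["fields"],
+    #                 files={"file": open("spec.pdf", "rb")})
+    #   requests.patch(api + f"/attachments/{resp['asset_id']}/",
+    #                  headers=auth, json={"is_uploaded": True})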
+
+ @issue_attachment_docs(
+ operation_id="list_work_item_attachments",
+ description="Retrieve all attachments for a work item.",
+ parameters=[
+ ISSUE_ID_PARAMETER,
+ ],
+ responses={
+ 200: OpenApiResponse(
+ description="Work item attachment",
+ response=IssueAttachmentSerializer,
+ examples=[ISSUE_ATTACHMENT_EXAMPLE],
+ ),
+ 400: INVALID_REQUEST_RESPONSE,
+ 404: ATTACHMENT_NOT_FOUND_RESPONSE,
+ },
+ )
+ def get(self, request, slug, project_id, issue_id):
+ """List issue attachments
+
+ List all attachments for an issue.
+ """
+ # Get all the attachments
+ issue_attachments = FileAsset.objects.filter(
+ issue_id=issue_id,
+ entity_type=FileAsset.EntityTypeContext.ISSUE_ATTACHMENT,
+ workspace__slug=slug,
+ project_id=project_id,
+ is_uploaded=True,
+ )
+ # Serialize the attachments
+ serializer = IssueAttachmentSerializer(issue_attachments, many=True)
+ return Response(serializer.data, status=status.HTTP_200_OK)
+
+
+class IssueAttachmentDetailAPIEndpoint(BaseAPIView):
+ """Issue Attachment Detail Endpoint"""
+
+ serializer_class = IssueAttachmentSerializer
+ model = FileAsset
+ use_read_replica = True
+
+ @issue_attachment_docs(
+ operation_id="delete_work_item_attachment",
+ description="Permanently remove an attachment from a work item. Records deletion activity for audit purposes.",
+ parameters=[
+ ATTACHMENT_ID_PARAMETER,
+ ],
+ responses={
+ 204: OpenApiResponse(description="Work item attachment deleted successfully"),
+ 404: ATTACHMENT_NOT_FOUND_RESPONSE,
+ },
+ )
+ def delete(self, request, slug, project_id, issue_id, pk):
+ """Delete work item attachment
+
+ Soft delete an attachment from a work item by marking it as deleted.
+ Records deletion activity and triggers metadata cleanup.
+ """
+ issue = Issue.objects.get(pk=issue_id, workspace__slug=slug, project_id=project_id)
+ # if the request user is creator or admin then delete the attachment
+ if not user_has_issue_permission(
+ request.user,
+ project_id=project_id,
+ issue=issue,
+ allowed_roles=[ROLE.ADMIN.value],
+ allow_creator=True,
+ ):
+ return Response(
+ {"error": "You are not allowed to delete this attachment"},
+ status=status.HTTP_403_FORBIDDEN,
+ )
+
+ issue_attachment = FileAsset.objects.get(pk=pk, workspace__slug=slug, project_id=project_id)
+ issue_attachment.is_deleted = True
+ issue_attachment.deleted_at = timezone.now()
+ issue_attachment.save()
+
+ issue_activity.delay(
+ type="attachment.activity.deleted",
+ requested_data=None,
+ actor_id=str(self.request.user.id),
+ issue_id=str(issue_id),
+ project_id=str(project_id),
+ current_instance=None,
+ epoch=int(timezone.now().timestamp()),
+ notification=True,
+ origin=base_host(request=request, is_app=True),
+ )
+
+ # Get the storage metadata
+ if not issue_attachment.storage_metadata:
+ get_asset_object_metadata.delay(str(issue_attachment.id))
+ issue_attachment.save()
+ return Response(status=status.HTTP_204_NO_CONTENT)
+
+ @issue_attachment_docs(
+ operation_id="retrieve_work_item_attachment",
+ description="Download attachment file. Returns a redirect to the presigned download URL.",
+ parameters=[
+ ATTACHMENT_ID_PARAMETER,
+ ],
+ responses={
+ 302: OpenApiResponse(
+ description="Redirect to presigned download URL",
+ ),
+ 400: OpenApiResponse(
+ description="Asset not uploaded",
+ response={
+ "type": "object",
+ "properties": {
+ "error": {
+ "type": "string",
+ "description": "Error message",
+ "example": "The asset is not uploaded.",
+ },
+ "status": {
+ "type": "boolean",
+ "description": "Request status",
+ "example": False,
+ },
+ },
+ },
+ examples=[ISSUE_ATTACHMENT_NOT_UPLOADED_EXAMPLE],
+ ),
+ 404: ATTACHMENT_NOT_FOUND_RESPONSE,
+ },
+ )
+ def get(self, request, slug, project_id, issue_id, pk):
+ """Retrieve work item attachment
+
+ Retrieve details of a specific attachment.
+ """
+ # if the user is part of the project then allow the download
+ if not user_has_issue_permission(
+ request.user,
+ project_id=project_id,
+ issue=None,
+ allowed_roles=None,
+ allow_creator=False,
+ ):
+ return Response(
+ {"error": "You are not allowed to download this attachment"},
+ status=status.HTTP_403_FORBIDDEN,
+ )
+
+ # Get the asset
+ asset = FileAsset.objects.get(id=pk, workspace__slug=slug, project_id=project_id)
+
+ # Check if the asset is uploaded
+ if not asset.is_uploaded:
+ return Response(
+ {"error": "The asset is not uploaded.", "status": False},
+ status=status.HTTP_400_BAD_REQUEST,
+ )
+
+ storage = S3Storage(request=request)
+ presigned_url = storage.generate_presigned_url(
+ object_name=asset.asset.name,
+ disposition="attachment",
+ filename=asset.attributes.get("name"),
+ )
+ return HttpResponseRedirect(presigned_url)
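+
+    # Clients should follow the redirect (e.g. `curl -L .../attachments/<pk>/`);
+    # the Location header carries a time-limited presigned URL whose
+    # Content-Disposition forces a download under the original filename.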
+
+ @issue_attachment_docs(
+ operation_id="upload_work_item_attachment",
+ description="Mark an attachment as uploaded after successful file transfer to storage.",
+ parameters=[
+ ATTACHMENT_ID_PARAMETER,
+ ],
+ request=OpenApiRequest(
+ request={
+ "application/json": {
+ "type": "object",
+ "properties": {
+ "is_uploaded": {
+ "type": "boolean",
+ "description": "Mark attachment as uploaded",
+ }
+ },
+ }
+ },
+ examples=[ATTACHMENT_UPLOAD_CONFIRM_EXAMPLE],
+ ),
+ responses={
+ 204: OpenApiResponse(description="Work item attachment uploaded successfully"),
+ 400: INVALID_REQUEST_RESPONSE,
+ 404: ATTACHMENT_NOT_FOUND_RESPONSE,
+ },
+ )
+ def patch(self, request, slug, project_id, issue_id, pk):
+ """Confirm attachment upload
+
+ Mark an attachment as uploaded after successful file transfer to storage.
+ Triggers activity logging and metadata extraction.
+ """
+
+ issue = Issue.objects.get(pk=issue_id, workspace__slug=slug, project_id=project_id)
+        # Allow the confirmation if the user is the creator, an admin, or a member
+ if not user_has_issue_permission(
+ request.user,
+ project_id=project_id,
+ issue=issue,
+ allowed_roles=[ROLE.ADMIN.value, ROLE.MEMBER.value],
+ allow_creator=True,
+ ):
+ return Response(
+ {"error": "You are not allowed to upload this attachment"},
+ status=status.HTTP_403_FORBIDDEN,
+ )
+
+ issue_attachment = FileAsset.objects.get(pk=pk, workspace__slug=slug, project_id=project_id)
+ serializer = IssueAttachmentSerializer(issue_attachment)
+
+ # Send this activity only if the attachment is not uploaded before
+ if not issue_attachment.is_uploaded:
+ issue_activity.delay(
+ type="attachment.activity.created",
+ requested_data=None,
+ actor_id=str(self.request.user.id),
+ issue_id=str(self.kwargs.get("issue_id", None)),
+ project_id=str(self.kwargs.get("project_id", None)),
+ current_instance=json.dumps(serializer.data, cls=DjangoJSONEncoder),
+ epoch=int(timezone.now().timestamp()),
+ notification=True,
+ origin=base_host(request=request, is_app=True),
+ )
+
+ # Update the attachment
+ issue_attachment.is_uploaded = True
+ issue_attachment.created_by = request.user
+
+ # Get the storage metadata
+ if not issue_attachment.storage_metadata:
+ get_asset_object_metadata.delay(str(issue_attachment.id))
+ issue_attachment.save()
+ return Response(status=status.HTTP_204_NO_CONTENT)
+
+
+class IssueSearchEndpoint(BaseAPIView):
+ """Endpoint to search across multiple fields in the issues"""
+
+ use_read_replica = True
+
+ @extend_schema(
+ operation_id="search_work_items",
+ tags=["Work Items"],
+ description="Perform semantic search across issue names, sequence IDs, and project identifiers.",
+ parameters=[
+ WORKSPACE_SLUG_PARAMETER,
+ SEARCH_PARAMETER_REQUIRED,
+ LIMIT_PARAMETER,
+ WORKSPACE_SEARCH_PARAMETER,
+ PROJECT_ID_QUERY_PARAMETER,
+ ],
+ responses={
+ 200: OpenApiResponse(
+ description="Work item search results",
+ response=IssueSearchSerializer,
+ examples=[ISSUE_SEARCH_EXAMPLE],
+ ),
+ 400: BAD_SEARCH_REQUEST_RESPONSE,
+ 401: UNAUTHORIZED_RESPONSE,
+ 403: FORBIDDEN_RESPONSE,
+ 404: WORKSPACE_NOT_FOUND_RESPONSE,
+ },
+ )
+ def get(self, request, slug):
+ """Search work items
+
+        Search across work item names, sequence IDs, and project identifiers.
+ Supports workspace-wide or project-specific search with configurable result limits.
+ """
+ query = request.query_params.get("search", False)
+ limit = request.query_params.get("limit", 10)
+ workspace_search = request.query_params.get("workspace_search", "false")
+ project_id = request.query_params.get("project_id", False)
+
+ if not query:
+ return Response({"issues": []}, status=status.HTTP_200_OK)
+
+ # Build search query
+ fields = ["name", "sequence_id", "project__identifier"]
+ q = Q()
+ for field in fields:
+ if field == "sequence_id":
+ # Match whole integers only (exclude decimal numbers)
+                sequences = re.findall(r"(?<!\d\.)\b\d+\b(?!\.\d)", query)
+ for sequence_id in sequences:
+ q |= Q(**{"sequence_id": sequence_id})
+ else:
+ q |= Q(**{f"{field}__icontains": query})
+
+ # Filter issues
+ issues = Issue.issue_objects.filter(
+ q,
+ project__project_projectmember__member=self.request.user,
+ project__project_projectmember__is_active=True,
+ project__archived_at__isnull=True,
+ workspace__slug=slug,
+ )
+
+ # Apply project filter if not searching across workspace
+ if workspace_search == "false" and project_id:
+ issues = issues.filter(project_id=project_id)
+
+ # Get results
+ issue_results = issues.distinct().values(
+ "name",
+ "id",
+ "sequence_id",
+ "project__identifier",
+ "project_id",
+ "workspace__slug",
+ )[: int(limit)]
+
+ return Response({"issues": issue_results}, status=status.HTTP_200_OK)
diff --git a/apps/api/plane/api/views/member.py b/apps/api/plane/api/views/member.py
new file mode 100644
index 00000000..f761d5c9
--- /dev/null
+++ b/apps/api/plane/api/views/member.py
@@ -0,0 +1,133 @@
+# Third Party imports
+from rest_framework.response import Response
+from rest_framework import status
+from drf_spectacular.utils import (
+ extend_schema,
+ OpenApiResponse,
+)
+
+# Module imports
+from .base import BaseAPIView
+from plane.api.serializers import UserLiteSerializer
+from plane.db.models import User, Workspace, WorkspaceMember, ProjectMember
+from plane.app.permissions import ProjectMemberPermission, WorkSpaceAdminPermission
+from plane.utils.openapi import (
+ WORKSPACE_SLUG_PARAMETER,
+ PROJECT_ID_PARAMETER,
+ UNAUTHORIZED_RESPONSE,
+ FORBIDDEN_RESPONSE,
+ WORKSPACE_NOT_FOUND_RESPONSE,
+ PROJECT_NOT_FOUND_RESPONSE,
+ WORKSPACE_MEMBER_EXAMPLE,
+ PROJECT_MEMBER_EXAMPLE,
+)
+
+
+class WorkspaceMemberAPIEndpoint(BaseAPIView):
+ permission_classes = [WorkSpaceAdminPermission]
+ use_read_replica = True
+
+ @extend_schema(
+ operation_id="get_workspace_members",
+ summary="List workspace members",
+ description="Retrieve all users who are members of the specified workspace.",
+ tags=["Members"],
+ parameters=[WORKSPACE_SLUG_PARAMETER],
+ responses={
+ 200: OpenApiResponse(
+ description="List of workspace members with their roles",
+ response={
+ "type": "array",
+ "items": {
+ "allOf": [
+ {"$ref": "#/components/schemas/UserLite"},
+ {
+ "type": "object",
+ "properties": {
+ "role": {
+ "type": "integer",
+ "description": "Member role in the workspace",
+ }
+ },
+ },
+ ]
+ },
+ },
+ examples=[WORKSPACE_MEMBER_EXAMPLE],
+ ),
+ 401: UNAUTHORIZED_RESPONSE,
+ 403: FORBIDDEN_RESPONSE,
+ 404: WORKSPACE_NOT_FOUND_RESPONSE,
+ },
+ )
+ # Get all the users that are present inside the workspace
+ def get(self, request, slug):
+ """List workspace members
+
+ Retrieve all users who are members of the specified workspace.
+ Returns user profiles with their respective workspace roles and permissions.
+ """
+ # Check if the workspace exists
+ if not Workspace.objects.filter(slug=slug).exists():
+ return Response(
+ {"error": "Provided workspace does not exist"},
+                status=status.HTTP_404_NOT_FOUND,
+ )
+
+ workspace_members = WorkspaceMember.objects.filter(workspace__slug=slug).select_related("member")
+
+ # Get all the users with their roles
+ users_with_roles = []
+ for workspace_member in workspace_members:
+ user_data = UserLiteSerializer(workspace_member.member).data
+ user_data["role"] = workspace_member.role
+ users_with_roles.append(user_data)
+
+ return Response(users_with_roles, status=status.HTTP_200_OK)
+
+
+# API endpoint to list the users that are members of a project
+class ProjectMemberAPIEndpoint(BaseAPIView):
+ permission_classes = [ProjectMemberPermission]
+ use_read_replica = True
+
+ @extend_schema(
+ operation_id="get_project_members",
+ summary="List project members",
+ description="Retrieve all users who are members of the specified project.",
+ tags=["Members"],
+ parameters=[WORKSPACE_SLUG_PARAMETER, PROJECT_ID_PARAMETER],
+ responses={
+ 200: OpenApiResponse(
+ description="List of project members with their roles",
+ response=UserLiteSerializer,
+ examples=[PROJECT_MEMBER_EXAMPLE],
+ ),
+ 401: UNAUTHORIZED_RESPONSE,
+ 403: FORBIDDEN_RESPONSE,
+ 404: PROJECT_NOT_FOUND_RESPONSE,
+ },
+ )
+    # Get all the users that are present inside the project
+ def get(self, request, slug, project_id):
+ """List project members
+
+ Retrieve all users who are members of the specified project.
+ Returns user profiles with their project-specific roles and access levels.
+ """
+ # Check if the workspace exists
+ if not Workspace.objects.filter(slug=slug).exists():
+ return Response(
+ {"error": "Provided workspace does not exist"},
+                status=status.HTTP_404_NOT_FOUND,
+ )
+
+        # Get the member ids of the project's members
+ project_members = ProjectMember.objects.filter(project_id=project_id, workspace__slug=slug).values_list(
+ "member_id", flat=True
+ )
+
+        # Get all the users that are members of the project
+ users = UserLiteSerializer(User.objects.filter(id__in=project_members), many=True).data
+
+ return Response(users, status=status.HTTP_200_OK)
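+
+
+# Illustrative workspace-members response (field set is abbreviated; the full
+# shape comes from UserLiteSerializer, and 20 is the admin role integer used
+# elsewhere in this codebase):
+#
+#   [
+#       {"id": "<uuid>", "display_name": "ada", "role": 20},
+#       ...
+#   ]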
diff --git a/apps/api/plane/api/views/module.py b/apps/api/plane/api/views/module.py
new file mode 100644
index 00000000..d79b9408
--- /dev/null
+++ b/apps/api/plane/api/views/module.py
@@ -0,0 +1,1073 @@
+# Python imports
+import json
+
+# Django imports
+from django.core import serializers
+from django.db.models import Count, F, Func, OuterRef, Prefetch, Q
+from django.utils import timezone
+from django.core.serializers.json import DjangoJSONEncoder
+
+# Third party imports
+from rest_framework import status
+from rest_framework.response import Response
+from drf_spectacular.utils import OpenApiResponse, OpenApiRequest
+
+# Module imports
+from plane.api.serializers import (
+ IssueSerializer,
+ ModuleIssueSerializer,
+ ModuleSerializer,
+ ModuleIssueRequestSerializer,
+ ModuleCreateSerializer,
+ ModuleUpdateSerializer,
+)
+from plane.app.permissions import ProjectEntityPermission
+from plane.bgtasks.issue_activities_task import issue_activity
+from plane.db.models import (
+ Issue,
+ FileAsset,
+ IssueLink,
+ Module,
+ ModuleIssue,
+ ModuleLink,
+ Project,
+ ProjectMember,
+ UserFavorite,
+)
+
+from .base import BaseAPIView
+from plane.bgtasks.webhook_task import model_activity
+from plane.utils.host import base_host
+from plane.utils.openapi import (
+ module_docs,
+ module_issue_docs,
+ MODULE_ID_PARAMETER,
+ MODULE_PK_PARAMETER,
+ ISSUE_ID_PARAMETER,
+ CURSOR_PARAMETER,
+ PER_PAGE_PARAMETER,
+ ORDER_BY_PARAMETER,
+ FIELDS_PARAMETER,
+ EXPAND_PARAMETER,
+ create_paginated_response,
+ # Request Examples
+ MODULE_CREATE_EXAMPLE,
+ MODULE_UPDATE_EXAMPLE,
+ MODULE_ISSUE_REQUEST_EXAMPLE,
+ # Response Examples
+ MODULE_EXAMPLE,
+ MODULE_ISSUE_EXAMPLE,
+ INVALID_REQUEST_RESPONSE,
+ PROJECT_NOT_FOUND_RESPONSE,
+ EXTERNAL_ID_EXISTS_RESPONSE,
+ MODULE_NOT_FOUND_RESPONSE,
+ DELETED_RESPONSE,
+ ADMIN_ONLY_RESPONSE,
+ REQUIRED_FIELDS_RESPONSE,
+ MODULE_ISSUE_NOT_FOUND_RESPONSE,
+ ARCHIVED_RESPONSE,
+ CANNOT_ARCHIVE_RESPONSE,
+ UNARCHIVED_RESPONSE,
+)
+
+
+class ModuleListCreateAPIEndpoint(BaseAPIView):
+ """Module List and Create Endpoint"""
+
+ serializer_class = ModuleSerializer
+ model = Module
+ webhook_event = "module"
+ permission_classes = [ProjectEntityPermission]
+ use_read_replica = True
+
+ def get_queryset(self):
+ return (
+ Module.objects.filter(project_id=self.kwargs.get("project_id"))
+ .filter(workspace__slug=self.kwargs.get("slug"))
+ .select_related("project")
+ .select_related("workspace")
+ .select_related("lead")
+ .prefetch_related("members")
+ .prefetch_related(
+ Prefetch(
+ "link_module",
+ queryset=ModuleLink.objects.select_related("module", "created_by"),
+ )
+ )
+ .annotate(
+ total_issues=Count(
+ "issue_module",
+ filter=Q(
+ issue_module__issue__archived_at__isnull=True,
+ issue_module__issue__is_draft=False,
+ issue_module__deleted_at__isnull=True,
+ ),
+ distinct=True,
+ )
+ )
+ .annotate(
+ completed_issues=Count(
+ "issue_module__issue__state__group",
+ filter=Q(
+ issue_module__issue__state__group="completed",
+ issue_module__issue__archived_at__isnull=True,
+ issue_module__issue__is_draft=False,
+ issue_module__deleted_at__isnull=True,
+ ),
+ distinct=True,
+ )
+ )
+ .annotate(
+ cancelled_issues=Count(
+ "issue_module__issue__state__group",
+ filter=Q(
+ issue_module__issue__state__group="cancelled",
+ issue_module__issue__archived_at__isnull=True,
+ issue_module__issue__is_draft=False,
+ issue_module__deleted_at__isnull=True,
+ ),
+ distinct=True,
+ )
+ )
+ .annotate(
+ started_issues=Count(
+ "issue_module__issue__state__group",
+ filter=Q(
+ issue_module__issue__state__group="started",
+ issue_module__issue__archived_at__isnull=True,
+ issue_module__issue__is_draft=False,
+ issue_module__deleted_at__isnull=True,
+ ),
+ distinct=True,
+ )
+ )
+ .annotate(
+ unstarted_issues=Count(
+ "issue_module__issue__state__group",
+ filter=Q(
+ issue_module__issue__state__group="unstarted",
+ issue_module__issue__archived_at__isnull=True,
+ issue_module__issue__is_draft=False,
+ issue_module__deleted_at__isnull=True,
+ ),
+ distinct=True,
+ )
+ )
+ .annotate(
+ backlog_issues=Count(
+ "issue_module__issue__state__group",
+ filter=Q(
+ issue_module__issue__state__group="backlog",
+ issue_module__issue__archived_at__isnull=True,
+ issue_module__issue__is_draft=False,
+ issue_module__deleted_at__isnull=True,
+ ),
+ distinct=True,
+ )
+ )
+ .order_by(self.kwargs.get("order_by", "-created_at"))
+ )
+
+ @module_docs(
+ operation_id="create_module",
+ summary="Create module",
+ description="Create a new project module with specified name, description, and timeline.",
+ request=OpenApiRequest(
+ request=ModuleCreateSerializer,
+ examples=[MODULE_CREATE_EXAMPLE],
+ ),
+ responses={
+ 201: OpenApiResponse(
+ description="Module created",
+ response=ModuleSerializer,
+ examples=[MODULE_EXAMPLE],
+ ),
+ 400: INVALID_REQUEST_RESPONSE,
+ 404: PROJECT_NOT_FOUND_RESPONSE,
+ 409: EXTERNAL_ID_EXISTS_RESPONSE,
+ },
+ )
+ def post(self, request, slug, project_id):
+ """Create module
+
+ Create a new project module with specified name, description, and timeline.
+ Automatically assigns the creator as module lead and tracks activity.
+ """
+ project = Project.objects.get(pk=project_id, workspace__slug=slug)
+ serializer = ModuleCreateSerializer(
+ data=request.data,
+ context={"project_id": project_id, "workspace_id": project.workspace_id},
+ )
+ if serializer.is_valid():
+ if (
+ request.data.get("external_id")
+ and request.data.get("external_source")
+ and Module.objects.filter(
+ project_id=project_id,
+ workspace__slug=slug,
+ external_source=request.data.get("external_source"),
+ external_id=request.data.get("external_id"),
+ ).exists()
+ ):
+ module = Module.objects.filter(
+ project_id=project_id,
+ workspace__slug=slug,
+ external_source=request.data.get("external_source"),
+ external_id=request.data.get("external_id"),
+ ).first()
+ return Response(
+ {
+ "error": "Module with the same external id and external source already exists",
+ "id": str(module.id),
+ },
+ status=status.HTTP_409_CONFLICT,
+ )
+ serializer.save()
+ # Send the model activity
+ model_activity.delay(
+ model_name="module",
+ model_id=str(serializer.instance.id),
+ requested_data=request.data,
+ current_instance=None,
+ actor_id=request.user.id,
+ slug=slug,
+ origin=base_host(request=request, is_app=True),
+ )
+ module = Module.objects.get(pk=serializer.instance.id)
+ serializer = ModuleSerializer(module)
+ return Response(serializer.data, status=status.HTTP_201_CREATED)
+ return Response(serializer.errors, status=status.HTTP_400_BAD_REQUEST)
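+
+    # Minimal create payload (illustrative; ModuleCreateSerializer defines the
+    # accepted fields and only a subset is shown):
+    #
+    #   POST .../projects/<project_id>/modules/
+    #   {"name": "Q3 Release", "start_date": "2025-07-01",
+    #    "target_date": "2025-09-30"}
+    #
+    # Resubmitting with the same external_id/external_source pair returns 409
+    # with the existing module's id, which keeps importer retries idempotent.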
+
+ @module_docs(
+ operation_id="list_modules",
+ summary="List modules",
+ description="Retrieve all modules in a project.",
+ parameters=[
+ CURSOR_PARAMETER,
+ PER_PAGE_PARAMETER,
+ ORDER_BY_PARAMETER,
+ FIELDS_PARAMETER,
+ EXPAND_PARAMETER,
+ ],
+ responses={
+ 200: create_paginated_response(
+ ModuleSerializer,
+ "PaginatedModuleResponse",
+ "Paginated list of modules",
+ "Paginated Modules",
+ ),
+ 404: OpenApiResponse(description="Module not found"),
+ },
+ )
+ def get(self, request, slug, project_id):
+ """List or retrieve modules
+
+ Retrieve all modules in a project or get details of a specific module.
+ Returns paginated results with module statistics and member information.
+ """
+ return self.paginate(
+ request=request,
+ queryset=(self.get_queryset().filter(archived_at__isnull=True)),
+ on_results=lambda modules: ModuleSerializer(
+ modules, many=True, fields=self.fields, expand=self.expand
+ ).data,
+ )
+
+
+class ModuleDetailAPIEndpoint(BaseAPIView):
+ """Module Detail Endpoint"""
+
+ model = Module
+ permission_classes = [ProjectEntityPermission]
+ serializer_class = ModuleSerializer
+ webhook_event = "module"
+ use_read_replica = True
+
+ def get_queryset(self):
+ return (
+ Module.objects.filter(project_id=self.kwargs.get("project_id"))
+ .filter(workspace__slug=self.kwargs.get("slug"))
+ .select_related("project")
+ .select_related("workspace")
+ .select_related("lead")
+ .prefetch_related("members")
+ .prefetch_related(
+ Prefetch(
+ "link_module",
+ queryset=ModuleLink.objects.select_related("module", "created_by"),
+ )
+ )
+ .annotate(
+ total_issues=Count(
+ "issue_module",
+ filter=Q(
+ issue_module__issue__archived_at__isnull=True,
+ issue_module__issue__is_draft=False,
+ issue_module__deleted_at__isnull=True,
+ ),
+ distinct=True,
+ )
+ )
+ .annotate(
+ completed_issues=Count(
+ "issue_module__issue__state__group",
+ filter=Q(
+ issue_module__issue__state__group="completed",
+ issue_module__issue__archived_at__isnull=True,
+ issue_module__issue__is_draft=False,
+ issue_module__deleted_at__isnull=True,
+ ),
+ distinct=True,
+ )
+ )
+ .annotate(
+ cancelled_issues=Count(
+ "issue_module__issue__state__group",
+ filter=Q(
+ issue_module__issue__state__group="cancelled",
+ issue_module__issue__archived_at__isnull=True,
+ issue_module__issue__is_draft=False,
+ issue_module__deleted_at__isnull=True,
+ ),
+ distinct=True,
+ )
+ )
+ .annotate(
+ started_issues=Count(
+ "issue_module__issue__state__group",
+ filter=Q(
+ issue_module__issue__state__group="started",
+ issue_module__issue__archived_at__isnull=True,
+ issue_module__issue__is_draft=False,
+ issue_module__deleted_at__isnull=True,
+ ),
+ distinct=True,
+ )
+ )
+ .annotate(
+ unstarted_issues=Count(
+ "issue_module__issue__state__group",
+ filter=Q(
+ issue_module__issue__state__group="unstarted",
+ issue_module__issue__archived_at__isnull=True,
+ issue_module__issue__is_draft=False,
+ issue_module__deleted_at__isnull=True,
+ ),
+ distinct=True,
+ )
+ )
+ .annotate(
+ backlog_issues=Count(
+ "issue_module__issue__state__group",
+ filter=Q(
+ issue_module__issue__state__group="backlog",
+ issue_module__issue__archived_at__isnull=True,
+ issue_module__issue__is_draft=False,
+ issue_module__deleted_at__isnull=True,
+ ),
+ distinct=True,
+ )
+ )
+ .order_by(self.kwargs.get("order_by", "-created_at"))
+ )
+
+ @module_docs(
+ operation_id="update_module",
+ summary="Update module",
+ description="Modify an existing module's properties like name, description, status, or timeline.",
+ parameters=[
+ MODULE_PK_PARAMETER,
+ ],
+ request=OpenApiRequest(
+ request=ModuleUpdateSerializer,
+ examples=[MODULE_UPDATE_EXAMPLE],
+ ),
+ responses={
+ 200: OpenApiResponse(
+ description="Module updated successfully",
+ response=ModuleSerializer,
+ examples=[MODULE_EXAMPLE],
+ ),
+ 400: OpenApiResponse(
+ description="Invalid request data",
+ response=ModuleSerializer,
+ examples=[MODULE_UPDATE_EXAMPLE],
+ ),
+ 404: OpenApiResponse(description="Module not found"),
+ 409: OpenApiResponse(description="Module with same external ID already exists"),
+ },
+ )
+ def patch(self, request, slug, project_id, pk):
+ """Update module
+
+ Modify an existing module's properties like name, description, status, or timeline.
+ Tracks all changes in model activity logs for audit purposes.
+ """
+ module = Module.objects.get(pk=pk, project_id=project_id, workspace__slug=slug)
+
+ current_instance = json.dumps(ModuleSerializer(module).data, cls=DjangoJSONEncoder)
+
+ if module.archived_at:
+ return Response(
+ {"error": "Archived module cannot be edited"},
+ status=status.HTTP_400_BAD_REQUEST,
+ )
+ serializer = ModuleSerializer(module, data=request.data, context={"project_id": project_id}, partial=True)
+ if serializer.is_valid():
+ if (
+ request.data.get("external_id")
+ and (module.external_id != request.data.get("external_id"))
+ and Module.objects.filter(
+ project_id=project_id,
+ workspace__slug=slug,
+ external_source=request.data.get("external_source", module.external_source),
+ external_id=request.data.get("external_id"),
+ ).exists()
+ ):
+ return Response(
+ {
+ "error": "Module with the same external id and external source already exists",
+ "id": str(module.id),
+ },
+ status=status.HTTP_409_CONFLICT,
+ )
+ serializer.save()
+
+ # Send the model activity
+ model_activity.delay(
+ model_name="module",
+ model_id=str(serializer.instance.id),
+ requested_data=request.data,
+ current_instance=current_instance,
+ actor_id=request.user.id,
+ slug=slug,
+ origin=base_host(request=request, is_app=True),
+ )
+
+ return Response(serializer.data, status=status.HTTP_200_OK)
+ return Response(serializer.errors, status=status.HTTP_400_BAD_REQUEST)
+
+ @module_docs(
+ operation_id="retrieve_module",
+ summary="Retrieve module",
+ description="Retrieve details of a specific module.",
+ parameters=[
+ MODULE_PK_PARAMETER,
+ ],
+ responses={
+ 200: OpenApiResponse(
+ description="Module",
+ response=ModuleSerializer,
+ examples=[MODULE_EXAMPLE],
+ ),
+ 404: OpenApiResponse(description="Module not found"),
+ },
+ )
+ def get(self, request, slug, project_id, pk):
+ """Retrieve module
+
+ Retrieve details of a specific module.
+ """
+ module = self.get_queryset().filter(archived_at__isnull=True).get(pk=pk)
+ data = ModuleSerializer(module, fields=self.fields, expand=self.expand).data
+ return Response(data, status=status.HTTP_200_OK)
+
+ @module_docs(
+ operation_id="delete_module",
+ summary="Delete module",
+ description="Permanently remove a module and all its associated issue relationships.",
+ parameters=[
+ MODULE_PK_PARAMETER,
+ ],
+ responses={
+ 204: DELETED_RESPONSE,
+ 403: ADMIN_ONLY_RESPONSE,
+ 404: MODULE_NOT_FOUND_RESPONSE,
+ },
+ )
+ def delete(self, request, slug, project_id, pk):
+ """Delete module
+
+ Permanently remove a module and all its associated issue relationships.
+ Only admins or the module creator can perform this action.
+ """
+ module = Module.objects.get(workspace__slug=slug, project_id=project_id, pk=pk)
+ if module.created_by_id != request.user.id and (
+ not ProjectMember.objects.filter(
+ workspace__slug=slug,
+ member=request.user,
+ role=20,
+ project_id=project_id,
+ is_active=True,
+ ).exists()
+ ):
+ return Response(
+ {"error": "Only admin or creator can delete the module"},
+ status=status.HTTP_403_FORBIDDEN,
+ )
+
+ module_issues = list(ModuleIssue.objects.filter(module_id=pk).values_list("issue", flat=True))
+ issue_activity.delay(
+ type="module.activity.deleted",
+ requested_data=json.dumps(
+ {
+ "module_id": str(pk),
+ "module_name": str(module.name),
+ "issues": [str(issue_id) for issue_id in module_issues],
+ }
+ ),
+ actor_id=str(request.user.id),
+ issue_id=None,
+ project_id=str(project_id),
+ current_instance=json.dumps({"module_name": str(module.name)}),
+ epoch=int(timezone.now().timestamp()),
+ origin=base_host(request=request, is_app=True),
+ )
+ module.delete()
+ # Delete the module issues
+ ModuleIssue.objects.filter(module=pk, project_id=project_id).delete()
+ # Delete the user favorite module
+ UserFavorite.objects.filter(entity_type="module", entity_identifier=pk, project_id=project_id).delete()
+ return Response(status=status.HTTP_204_NO_CONTENT)
+
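+# --- Editorial sketch (not part of this commit) ---------------------------
+# The PATCH handler above rejects edits to archived modules (400) and
+# duplicate external ids (409, returning the id of the existing module).
+# A caller can branch on those statuses; `requests` and the URL path are
+# assumptions for illustration.
+def _example_update_module(base_url, slug, project_id, module_id, api_key, data):
+    import requests
+
+    resp = requests.patch(
+        f"{base_url}/api/v1/workspaces/{slug}/projects/{project_id}/modules/{module_id}/",
+        headers={"X-Api-Key": api_key},
+        json=data,
+    )
+    if resp.status_code == 409:
+        # A module with the same external id/source exists; the body carries
+        # its id so the caller can target that module instead.
+        return resp.json()["id"]
+    resp.raise_for_status()
+    return resp.json()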
+
+class ModuleIssueListCreateAPIEndpoint(BaseAPIView):
+ """Module Work Item List and Create Endpoint"""
+
+ serializer_class = ModuleIssueSerializer
+ model = ModuleIssue
+ webhook_event = "module_issue"
+ permission_classes = [ProjectEntityPermission]
+ use_read_replica = True
+
+ def get_queryset(self):
+ return (
+ ModuleIssue.objects.annotate(
+ sub_issues_count=Issue.issue_objects.filter(parent=OuterRef("issue"))
+ .order_by()
+ .annotate(count=Func(F("id"), function="Count"))
+ .values("count")
+ )
+ .filter(workspace__slug=self.kwargs.get("slug"))
+ .filter(project_id=self.kwargs.get("project_id"))
+ .filter(module_id=self.kwargs.get("module_id"))
+ .filter(
+ project__project_projectmember__member=self.request.user,
+ project__project_projectmember__is_active=True,
+ )
+ .filter(project__archived_at__isnull=True)
+ .select_related("project")
+ .select_related("workspace")
+ .select_related("module")
+ .select_related("issue", "issue__state", "issue__project")
+ .prefetch_related("issue__assignees", "issue__labels")
+ .prefetch_related("module__members")
+ .order_by(self.kwargs.get("order_by", "-created_at"))
+ .distinct()
+ )
+
+ @module_issue_docs(
+ operation_id="list_module_work_items",
+ summary="List module work items",
+ description="Retrieve all work items assigned to a module with detailed information.",
+ parameters=[
+ MODULE_ID_PARAMETER,
+ CURSOR_PARAMETER,
+ PER_PAGE_PARAMETER,
+ ORDER_BY_PARAMETER,
+ FIELDS_PARAMETER,
+ EXPAND_PARAMETER,
+ ],
+ request={},
+ responses={
+ 200: create_paginated_response(
+ IssueSerializer,
+ "PaginatedModuleIssueResponse",
+ "Paginated list of module work items",
+ "Paginated Module Work Items",
+ ),
+ 404: OpenApiResponse(description="Module not found"),
+ },
+ )
+ def get(self, request, slug, project_id, module_id):
+ """List module work items
+
+ Retrieve all work items assigned to a module with detailed information.
+ Returns paginated results including assignees, labels, and attachments.
+ """
+ order_by = request.GET.get("order_by", "created_at")
+ issues = (
+ Issue.issue_objects.filter(issue_module__module_id=module_id, issue_module__deleted_at__isnull=True)
+ .annotate(
+ sub_issues_count=Issue.issue_objects.filter(parent=OuterRef("id"))
+ .order_by()
+ .annotate(count=Func(F("id"), function="Count"))
+ .values("count")
+ )
+ .annotate(bridge_id=F("issue_module__id"))
+ .filter(project_id=project_id)
+ .filter(workspace__slug=slug)
+ .select_related("project")
+ .select_related("workspace")
+ .select_related("state")
+ .select_related("parent")
+ .prefetch_related("assignees")
+ .prefetch_related("labels")
+ .order_by(order_by)
+ .annotate(
+ link_count=IssueLink.objects.filter(issue=OuterRef("id"))
+ .order_by()
+ .annotate(count=Func(F("id"), function="Count"))
+ .values("count")
+ )
+ .annotate(
+ attachment_count=FileAsset.objects.filter(
+ issue_id=OuterRef("id"),
+ entity_type=FileAsset.EntityTypeContext.ISSUE_ATTACHMENT,
+ )
+ .order_by()
+ .annotate(count=Func(F("id"), function="Count"))
+ .values("count")
+ )
+ )
+ return self.paginate(
+ request=request,
+ queryset=(issues),
+ on_results=lambda issues: IssueSerializer(issues, many=True, fields=self.fields, expand=self.expand).data,
+ )
+
+ @module_issue_docs(
+ operation_id="add_module_work_items",
+ summary="Add Work Items to Module",
+ description="Assign multiple work items to a module or move them from another module. Automatically handles bulk creation and updates with activity tracking.", # noqa: E501
+ parameters=[
+ MODULE_ID_PARAMETER,
+ ],
+ request=OpenApiRequest(
+ request=ModuleIssueRequestSerializer,
+ examples=[MODULE_ISSUE_REQUEST_EXAMPLE],
+ ),
+ responses={
+ 200: OpenApiResponse(
+ description="Module issues added",
+ response=ModuleIssueSerializer,
+ examples=[MODULE_ISSUE_EXAMPLE],
+ ),
+ 400: REQUIRED_FIELDS_RESPONSE,
+ 404: MODULE_NOT_FOUND_RESPONSE,
+ },
+ )
+ def post(self, request, slug, project_id, module_id):
+ """Add module work items
+
+ Assign multiple work items to a module or move them from another module.
+ Automatically handles bulk creation and updates with activity tracking.
+ """
+ issues = request.data.get("issues", [])
+ if not issues:
+ return Response({"error": "Issues are required"}, status=status.HTTP_400_BAD_REQUEST)
+ module = Module.objects.get(workspace__slug=slug, project_id=project_id, pk=module_id)
+
+ issues = Issue.objects.filter(workspace__slug=slug, project_id=project_id, pk__in=issues).values_list(
+ "id", flat=True
+ )
+
+ module_issues = list(ModuleIssue.objects.filter(issue_id__in=issues))
+
+ update_module_issue_activity = []
+ records_to_update = []
+ record_to_create = []
+
+ for issue in issues:
+ # Find the existing module link for this issue, if any
+ module_issue = [module_issue for module_issue in module_issues if module_issue.issue_id == issue]
+
+ if len(module_issue):
+ if module_issue[0].module_id != module_id:
+ update_module_issue_activity.append(
+ {
+ "old_module_id": str(module_issue[0].module_id),
+ "new_module_id": str(module_id),
+ "issue_id": str(module_issue[0].issue_id),
+ }
+ )
+ module_issue[0].module_id = module_id
+ records_to_update.append(module_issue[0])
+ else:
+ record_to_create.append(
+ ModuleIssue(
+ module=module,
+ issue_id=issue,
+ project_id=project_id,
+ workspace=module.workspace,
+ created_by=request.user,
+ updated_by=request.user,
+ )
+ )
+
+ ModuleIssue.objects.bulk_create(record_to_create, batch_size=10, ignore_conflicts=True)
+
+ ModuleIssue.objects.bulk_update(records_to_update, ["module"], batch_size=10)
+
+ # Capture Issue Activity
+ issue_activity.delay(
+ type="module.activity.created",
+ requested_data=json.dumps({"modules_list": str(issues)}),
+ actor_id=str(self.request.user.id),
+ issue_id=None,
+ project_id=str(self.kwargs.get("project_id", None)),
+ current_instance=json.dumps(
+ {
+ "updated_module_issues": update_module_issue_activity,
+ "created_module_issues": serializers.serialize("json", record_to_create),
+ }
+ ),
+ epoch=int(timezone.now().timestamp()),
+ origin=base_host(request=request, is_app=True),
+ )
+
+ return Response(
+ ModuleIssueSerializer(self.get_queryset(), many=True).data,
+ status=status.HTTP_200_OK,
+ )
+
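+# --- Editorial sketch (not part of this commit) ---------------------------
+# The POST handler above accepts {"issues": [...]} and either creates a new
+# module link for each work item or moves an existing link over from another
+# module, logging both as activity. A minimal client call; `requests` and
+# the URL path are assumptions.
+def _example_add_work_items(base_url, slug, project_id, module_id, api_key, issue_ids):
+    import requests
+
+    resp = requests.post(
+        f"{base_url}/api/v1/workspaces/{slug}/projects/{project_id}/modules/{module_id}/module-issues/",
+        headers={"X-Api-Key": api_key},
+        json={"issues": issue_ids},
+    )
+    if resp.status_code == 400:
+        # The handler requires a non-empty list of issue ids
+        raise ValueError("at least one issue id is required")
+    resp.raise_for_status()
+    return resp.json()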
+
+class ModuleIssueDetailAPIEndpoint(BaseAPIView):
+ """
+ This viewset automatically provides `list`, `create`, `retrieve`,
+ `update` and `destroy` actions related to module work items.
+
+ """
+
+ serializer_class = ModuleIssueSerializer
+ model = ModuleIssue
+ webhook_event = "module_issue"
+ bulk = True
+ use_read_replica = True
+
+ permission_classes = [ProjectEntityPermission]
+
+ def get_queryset(self):
+ return (
+ ModuleIssue.objects.annotate(
+ sub_issues_count=Issue.issue_objects.filter(parent=OuterRef("issue"))
+ .order_by()
+ .annotate(count=Func(F("id"), function="Count"))
+ .values("count")
+ )
+ .filter(workspace__slug=self.kwargs.get("slug"))
+ .filter(project_id=self.kwargs.get("project_id"))
+ .filter(module_id=self.kwargs.get("module_id"))
+ .filter(
+ project__project_projectmember__member=self.request.user,
+ project__project_projectmember__is_active=True,
+ )
+ .filter(project__archived_at__isnull=True)
+ .select_related("project")
+ .select_related("workspace")
+ .select_related("module")
+ .select_related("issue", "issue__state", "issue__project")
+ .prefetch_related("issue__assignees", "issue__labels")
+ .prefetch_related("module__members")
+ .order_by(self.kwargs.get("order_by", "-created_at"))
+ .distinct()
+ )
+
+ @module_issue_docs(
+ operation_id="retrieve_module_work_item",
+ summary="Retrieve module work item",
+ description="Retrieve details of a specific module work item.",
+ parameters=[
+ MODULE_ID_PARAMETER,
+ ISSUE_ID_PARAMETER,
+ CURSOR_PARAMETER,
+ PER_PAGE_PARAMETER,
+ ORDER_BY_PARAMETER,
+ FIELDS_PARAMETER,
+ EXPAND_PARAMETER,
+ ],
+ responses={
+ 200: create_paginated_response(
+ IssueSerializer,
+ "PaginatedModuleIssueDetailResponse",
+ "Paginated list of module work item details",
+ "Module Work Item Details",
+ ),
+ 404: OpenApiResponse(description="Module not found"),
+ },
+ )
+ def get(self, request, slug, project_id, module_id, issue_id):
+ """List module work items
+
+ Retrieve all work items assigned to a module with detailed information.
+ Returns paginated results including assignees, labels, and attachments.
+ """
+ order_by = request.GET.get("order_by", "created_at")
+ issues = (
+ Issue.issue_objects.filter(
+ issue_module__module_id=module_id,
+ issue_module__deleted_at__isnull=True,
+ pk=issue_id,
+ )
+ .annotate(
+ sub_issues_count=Issue.issue_objects.filter(parent=OuterRef("id"))
+ .order_by()
+ .annotate(count=Func(F("id"), function="Count"))
+ .values("count")
+ )
+ .annotate(bridge_id=F("issue_module__id"))
+ .filter(project_id=project_id)
+ .filter(workspace__slug=slug)
+ .select_related("project")
+ .select_related("workspace")
+ .select_related("state")
+ .select_related("parent")
+ .prefetch_related("assignees")
+ .prefetch_related("labels")
+ .order_by(order_by)
+ .annotate(
+ link_count=IssueLink.objects.filter(issue=OuterRef("id"))
+ .order_by()
+ .annotate(count=Func(F("id"), function="Count"))
+ .values("count")
+ )
+ .annotate(
+ attachment_count=FileAsset.objects.filter(
+ issue_id=OuterRef("id"),
+ entity_type=FileAsset.EntityTypeContext.ISSUE_ATTACHMENT,
+ )
+ .order_by()
+ .annotate(count=Func(F("id"), function="Count"))
+ .values("count")
+ )
+ )
+ return self.paginate(
+ request=request,
+ queryset=(issues),
+ on_results=lambda issues: IssueSerializer(issues, many=True, fields=self.fields, expand=self.expand).data,
+ )
+
+ @module_issue_docs(
+ operation_id="delete_module_work_item",
+ summary="Delete module work item",
+ description="Remove a work item from a module while keeping the work item in the project.",
+ parameters=[
+ MODULE_ID_PARAMETER,
+ ISSUE_ID_PARAMETER,
+ ],
+ responses={
+ 204: DELETED_RESPONSE,
+ 404: MODULE_ISSUE_NOT_FOUND_RESPONSE,
+ },
+ )
+ def delete(self, request, slug, project_id, module_id, issue_id):
+ """Remove module work item
+
+ Remove a work item from a module while keeping the work item in the project.
+ Records the removal activity for tracking purposes.
+ """
+ module_issue = ModuleIssue.objects.get(
+ workspace__slug=slug,
+ project_id=project_id,
+ module_id=module_id,
+ issue_id=issue_id,
+ )
+ module_issue.delete()
+ issue_activity.delay(
+ type="module.activity.deleted",
+ requested_data=json.dumps({"module_id": str(module_id), "issues": [str(module_issue.issue_id)]}),
+ actor_id=str(request.user.id),
+ issue_id=str(issue_id),
+ project_id=str(project_id),
+ current_instance=None,
+ epoch=int(timezone.now().timestamp()),
+ )
+ return Response(status=status.HTTP_204_NO_CONTENT)
+
+
+class ModuleArchiveUnarchiveAPIEndpoint(BaseAPIView):
+ permission_classes = [ProjectEntityPermission]
+ use_read_replica = True
+
+ def get_queryset(self):
+ return (
+ Module.objects.filter(project_id=self.kwargs.get("project_id"))
+ .filter(workspace__slug=self.kwargs.get("slug"))
+ .filter(archived_at__isnull=False)
+ .select_related("project")
+ .select_related("workspace")
+ .select_related("lead")
+ .prefetch_related("members")
+ .prefetch_related(
+ Prefetch(
+ "link_module",
+ queryset=ModuleLink.objects.select_related("module", "created_by"),
+ )
+ )
+ .annotate(
+ total_issues=Count(
+ "issue_module",
+ filter=Q(
+ issue_module__issue__archived_at__isnull=True,
+ issue_module__issue__is_draft=False,
+ issue_module__deleted_at__isnull=True,
+ ),
+ distinct=True,
+ )
+ )
+ .annotate(
+ completed_issues=Count(
+ "issue_module__issue__state__group",
+ filter=Q(
+ issue_module__issue__state__group="completed",
+ issue_module__issue__archived_at__isnull=True,
+ issue_module__issue__is_draft=False,
+ issue_module__deleted_at__isnull=True,
+ ),
+ distinct=True,
+ )
+ )
+ .annotate(
+ cancelled_issues=Count(
+ "issue_module__issue__state__group",
+ filter=Q(
+ issue_module__issue__state__group="cancelled",
+ issue_module__issue__archived_at__isnull=True,
+ issue_module__issue__is_draft=False,
+ issue_module__deleted_at__isnull=True,
+ ),
+ distinct=True,
+ )
+ )
+ .annotate(
+ started_issues=Count(
+ "issue_module__issue__state__group",
+ filter=Q(
+ issue_module__issue__state__group="started",
+ issue_module__issue__archived_at__isnull=True,
+ issue_module__issue__is_draft=False,
+ issue_module__deleted_at__isnull=True,
+ ),
+ distinct=True,
+ )
+ )
+ .annotate(
+ unstarted_issues=Count(
+ "issue_module__issue__state__group",
+ filter=Q(
+ issue_module__issue__state__group="unstarted",
+ issue_module__issue__archived_at__isnull=True,
+ issue_module__issue__is_draft=False,
+ issue_module__deleted_at__isnull=True,
+ ),
+ distinct=True,
+ )
+ )
+ .annotate(
+ backlog_issues=Count(
+ "issue_module__issue__state__group",
+ filter=Q(
+ issue_module__issue__state__group="backlog",
+ issue_module__issue__archived_at__isnull=True,
+ issue_module__issue__is_draft=False,
+ issue_module__deleted_at__isnull=True,
+ ),
+ distinct=True,
+ )
+ )
+ .order_by(self.kwargs.get("order_by", "-created_at"))
+ )
+
+ @module_docs(
+ operation_id="list_archived_modules",
+ summary="List archived modules",
+ description="Retrieve all modules that have been archived in the project.",
+ parameters=[
+ CURSOR_PARAMETER,
+ PER_PAGE_PARAMETER,
+ ORDER_BY_PARAMETER,
+ FIELDS_PARAMETER,
+ EXPAND_PARAMETER,
+ ],
+ request={},
+ responses={
+ 200: create_paginated_response(
+ ModuleSerializer,
+ "PaginatedArchivedModuleResponse",
+ "Paginated list of archived modules",
+ "Paginated Archived Modules",
+ ),
+ 404: OpenApiResponse(description="Project not found"),
+ },
+ )
+ def get(self, request, slug, project_id):
+ """List archived modules
+
+ Retrieve all modules that have been archived in the project.
+ Returns paginated results with module statistics.
+ """
+ return self.paginate(
+ request=request,
+ queryset=(self.get_queryset()),
+ on_results=lambda modules: ModuleSerializer(
+ modules, many=True, fields=self.fields, expand=self.expand
+ ).data,
+ )
+
+ @module_docs(
+ operation_id="archive_module",
+ summary="Archive module",
+ description="Move a module to archived status for historical tracking.",
+ parameters=[
+ MODULE_PK_PARAMETER,
+ ],
+ request={},
+ responses={
+ 204: ARCHIVED_RESPONSE,
+ 400: CANNOT_ARCHIVE_RESPONSE,
+ 404: MODULE_NOT_FOUND_RESPONSE,
+ },
+ )
+ def post(self, request, slug, project_id, pk):
+ """Archive module
+
+ Move a completed or cancelled module to archived status for historical tracking.
+ Only modules with completed or cancelled status can be archived.
+ """
+ module = Module.objects.get(pk=pk, project_id=project_id, workspace__slug=slug)
+ if module.status not in ["completed", "cancelled"]:
+ return Response(
+ {"error": "Only completed or cancelled modules can be archived"},
+ status=status.HTTP_400_BAD_REQUEST,
+ )
+ module.archived_at = timezone.now()
+ module.save()
+ UserFavorite.objects.filter(
+ entity_type="module",
+ entity_identifier=pk,
+ project_id=project_id,
+ workspace__slug=slug,
+ ).delete()
+ return Response(status=status.HTTP_204_NO_CONTENT)
+
+ @module_docs(
+ operation_id="unarchive_module",
+ summary="Unarchive module",
+ description="Restore an archived module to active status, making it available for regular use.",
+ parameters=[
+ MODULE_PK_PARAMETER,
+ ],
+ responses={
+ 204: UNARCHIVED_RESPONSE,
+ 404: MODULE_NOT_FOUND_RESPONSE,
+ },
+ )
+ def delete(self, request, slug, project_id, pk):
+ """Unarchive module
+
+ Restore an archived module to active status, making it available for regular use.
+ The module will reappear in active module lists and become fully functional.
+ """
+ module = Module.objects.get(pk=pk, project_id=project_id, workspace__slug=slug)
+ module.archived_at = None
+ module.save()
+ return Response(status=status.HTTP_204_NO_CONTENT)
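+
+
+# --- Editorial sketch (not part of this commit) ---------------------------
+# Archive/unarchive is a POST/DELETE pair on the same resource: POST archives
+# a module (only "completed" or "cancelled" modules are accepted), DELETE
+# restores it. Round-trip sketch; `requests` and the URL path are assumed.
+def _example_archive_roundtrip(base_url, slug, project_id, module_id, api_key):
+    import requests
+
+    url = f"{base_url}/api/v1/workspaces/{slug}/projects/{project_id}/modules/{module_id}/archive/"
+    headers = {"X-Api-Key": api_key}
+
+    archived = requests.post(url, headers=headers)
+    if archived.status_code == 400:
+        return False  # module is not completed/cancelled, cannot archive
+    archived.raise_for_status()
+
+    # ...later, restore the module to the active lists
+    requests.delete(url, headers=headers).raise_for_status()
+    return True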
diff --git a/apps/api/plane/api/views/project.py b/apps/api/plane/api/views/project.py
new file mode 100644
index 00000000..131932bf
--- /dev/null
+++ b/apps/api/plane/api/views/project.py
@@ -0,0 +1,586 @@
+# Python imports
+import json
+
+# Django imports
+from django.db import IntegrityError
+from django.db.models import Exists, F, Func, OuterRef, Prefetch, Q, Subquery
+from django.utils import timezone
+from django.core.serializers.json import DjangoJSONEncoder
+
+# Third party imports
+from rest_framework import status
+from rest_framework.response import Response
+from rest_framework.serializers import ValidationError
+from drf_spectacular.utils import OpenApiResponse, OpenApiRequest
+
+
+# Module imports
+from plane.db.models import (
+ Cycle,
+ Intake,
+ IssueUserProperty,
+ Module,
+ Project,
+ DeployBoard,
+ ProjectMember,
+ State,
+ Workspace,
+ UserFavorite,
+)
+from plane.bgtasks.webhook_task import model_activity, webhook_activity
+from .base import BaseAPIView
+from plane.utils.host import base_host
+from plane.api.serializers import (
+ ProjectSerializer,
+ ProjectCreateSerializer,
+ ProjectUpdateSerializer,
+)
+from plane.app.permissions import ProjectBasePermission
+from plane.utils.openapi import (
+ project_docs,
+ PROJECT_ID_PARAMETER,
+ PROJECT_PK_PARAMETER,
+ CURSOR_PARAMETER,
+ PER_PAGE_PARAMETER,
+ ORDER_BY_PARAMETER,
+ FIELDS_PARAMETER,
+ EXPAND_PARAMETER,
+ create_paginated_response,
+ # Request Examples
+ PROJECT_CREATE_EXAMPLE,
+ PROJECT_UPDATE_EXAMPLE,
+ # Response Examples
+ PROJECT_EXAMPLE,
+ PROJECT_NOT_FOUND_RESPONSE,
+ WORKSPACE_NOT_FOUND_RESPONSE,
+ PROJECT_NAME_TAKEN_RESPONSE,
+ DELETED_RESPONSE,
+ ARCHIVED_RESPONSE,
+ UNARCHIVED_RESPONSE,
+)
+
+
+class ProjectListCreateAPIEndpoint(BaseAPIView):
+ """Project List and Create Endpoint"""
+
+ serializer_class = ProjectSerializer
+ model = Project
+ webhook_event = "project"
+ permission_classes = [ProjectBasePermission]
+ use_read_replica = True
+
+ def get_queryset(self):
+ return (
+ Project.objects.filter(workspace__slug=self.kwargs.get("slug"))
+ .filter(
+ Q(
+ project_projectmember__member=self.request.user,
+ project_projectmember__is_active=True,
+ )
+ | Q(network=2)
+ )
+ .select_related("project_lead")
+ .annotate(
+ is_member=Exists(
+ ProjectMember.objects.filter(
+ member=self.request.user,
+ project_id=OuterRef("pk"),
+ workspace__slug=self.kwargs.get("slug"),
+ is_active=True,
+ )
+ )
+ )
+ .annotate(
+ total_members=ProjectMember.objects.filter(
+ project_id=OuterRef("id"), member__is_bot=False, is_active=True
+ )
+ .order_by()
+ .annotate(count=Func(F("id"), function="Count"))
+ .values("count")
+ )
+ .annotate(
+ total_cycles=Cycle.objects.filter(project_id=OuterRef("id"))
+ .order_by()
+ .annotate(count=Func(F("id"), function="Count"))
+ .values("count")
+ )
+ .annotate(
+ total_modules=Module.objects.filter(project_id=OuterRef("id"))
+ .order_by()
+ .annotate(count=Func(F("id"), function="Count"))
+ .values("count")
+ )
+ .annotate(
+ member_role=ProjectMember.objects.filter(
+ project_id=OuterRef("pk"),
+ member_id=self.request.user.id,
+ is_active=True,
+ ).values("role")
+ )
+ .annotate(
+ is_deployed=Exists(
+ DeployBoard.objects.filter(
+ project_id=OuterRef("pk"),
+ workspace__slug=self.kwargs.get("slug"),
+ )
+ )
+ )
+ .order_by(self.kwargs.get("order_by", "-created_at"))
+ .distinct()
+ )
+
+ @project_docs(
+ operation_id="list_projects",
+ summary="List or retrieve projects",
+ description="Retrieve all projects in a workspace or get details of a specific project.",
+ parameters=[
+ CURSOR_PARAMETER,
+ PER_PAGE_PARAMETER,
+ ORDER_BY_PARAMETER,
+ FIELDS_PARAMETER,
+ EXPAND_PARAMETER,
+ ],
+ responses={
+ 200: create_paginated_response(
+ ProjectSerializer,
+ "PaginatedProjectResponse",
+ "Paginated list of projects",
+ "Paginated Projects",
+ ),
+ 404: PROJECT_NOT_FOUND_RESPONSE,
+ },
+ )
+ def get(self, request, slug):
+ """List projects
+
+ Retrieve all projects in a workspace or get details of a specific project.
+ Returns projects ordered by user's custom sort order with member information.
+ """
+ sort_order_query = ProjectMember.objects.filter(
+ member=request.user,
+ project_id=OuterRef("pk"),
+ workspace__slug=self.kwargs.get("slug"),
+ is_active=True,
+ ).values("sort_order")
+ projects = (
+ self.get_queryset()
+ .annotate(sort_order=Subquery(sort_order_query))
+ .prefetch_related(
+ Prefetch(
+ "project_projectmember",
+ queryset=ProjectMember.objects.filter(workspace__slug=slug, is_active=True).select_related(
+ "member"
+ ),
+ )
+ )
+ .order_by(request.GET.get("order_by", "sort_order"))
+ )
+ return self.paginate(
+ request=request,
+ queryset=(projects),
+ on_results=lambda projects: ProjectSerializer(
+ projects, many=True, fields=self.fields, expand=self.expand
+ ).data,
+ )
+
+ @project_docs(
+ operation_id="create_project",
+ summary="Create project",
+ description="Create a new project in the workspace with default states and member assignments.",
+ request=OpenApiRequest(
+ request=ProjectCreateSerializer,
+ examples=[PROJECT_CREATE_EXAMPLE],
+ ),
+ responses={
+ 201: OpenApiResponse(
+ description="Project created successfully",
+ response=ProjectSerializer,
+ examples=[PROJECT_EXAMPLE],
+ ),
+ 404: WORKSPACE_NOT_FOUND_RESPONSE,
+ 409: PROJECT_NAME_TAKEN_RESPONSE,
+ },
+ )
+ def post(self, request, slug):
+ """Create project
+
+ Create a new project in the workspace with default states and member assignments.
+ Automatically adds the creator as admin and sets up default workflow states.
+ """
+ try:
+ workspace = Workspace.objects.get(slug=slug)
+ serializer = ProjectCreateSerializer(data={**request.data}, context={"workspace_id": workspace.id})
+ if serializer.is_valid():
+ serializer.save()
+
+ # Add the user as Administrator to the project
+ _ = ProjectMember.objects.create(project_id=serializer.instance.id, member=request.user, role=20)
+ # Also create the issue property for the user
+ _ = IssueUserProperty.objects.create(project_id=serializer.instance.id, user=request.user)
+
+ if serializer.instance.project_lead is not None and str(serializer.instance.project_lead) != str(
+ request.user.id
+ ):
+ ProjectMember.objects.create(
+ project_id=serializer.instance.id,
+ member_id=serializer.instance.project_lead,
+ role=20,
+ )
+ # Also create the issue property for the user
+ IssueUserProperty.objects.create(
+ project_id=serializer.instance.id,
+ user_id=serializer.instance.project_lead,
+ )
+
+ # Default states
+ states = [
+ {
+ "name": "Backlog",
+ "color": "#60646C",
+ "sequence": 15000,
+ "group": "backlog",
+ "default": True,
+ },
+ {
+ "name": "Todo",
+ "color": "#60646C",
+ "sequence": 25000,
+ "group": "unstarted",
+ },
+ {
+ "name": "In Progress",
+ "color": "#F59E0B",
+ "sequence": 35000,
+ "group": "started",
+ },
+ {
+ "name": "Done",
+ "color": "#46A758",
+ "sequence": 45000,
+ "group": "completed",
+ },
+ {
+ "name": "Cancelled",
+ "color": "#9AA4BC",
+ "sequence": 55000,
+ "group": "cancelled",
+ },
+ ]
+
+ State.objects.bulk_create(
+ [
+ State(
+ name=state["name"],
+ color=state["color"],
+ project=serializer.instance,
+ sequence=state["sequence"],
+ workspace=serializer.instance.workspace,
+ group=state["group"],
+ default=state.get("default", False),
+ created_by=request.user,
+ )
+ for state in states
+ ]
+ )
+
+ project = self.get_queryset().filter(pk=serializer.instance.id).first()
+
+ # Model activity
+ model_activity.delay(
+ model_name="project",
+ model_id=str(project.id),
+ requested_data=request.data,
+ current_instance=None,
+ actor_id=request.user.id,
+ slug=slug,
+ origin=base_host(request=request, is_app=True),
+ )
+
+ serializer = ProjectSerializer(project)
+ return Response(serializer.data, status=status.HTTP_201_CREATED)
+ return Response(serializer.errors, status=status.HTTP_400_BAD_REQUEST)
+ except IntegrityError as e:
+ if "already exists" in str(e):
+ return Response(
+ {"name": "The project name is already taken"},
+ status=status.HTTP_409_CONFLICT,
+ )
+ except Workspace.DoesNotExist:
+ return Response({"error": "Workspace does not exist"}, status=status.HTTP_404_NOT_FOUND)
+ except ValidationError:
+ return Response(
+ {"identifier": "The project identifier is already taken"},
+ status=status.HTTP_409_CONFLICT,
+ )
+
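+# --- Editorial sketch (not part of this commit) ---------------------------
+# Project creation above maps a duplicate name (IntegrityError) and a
+# duplicate identifier (ValidationError) to 409 responses with different
+# payload keys ("name" vs "identifier"). A client can tell the two conflicts
+# apart as below; `requests` and the URL path are assumptions.
+def _example_create_project(base_url, slug, api_key, name, identifier):
+    import requests
+
+    resp = requests.post(
+        f"{base_url}/api/v1/workspaces/{slug}/projects/",
+        headers={"X-Api-Key": api_key},
+        json={"name": name, "identifier": identifier},
+    )
+    if resp.status_code == 409:
+        body = resp.json()
+        taken = "name" if "name" in body else "identifier"
+        raise ValueError(f"project {taken} already taken")
+    resp.raise_for_status()
+    return resp.json()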
+
+class ProjectDetailAPIEndpoint(BaseAPIView):
+ """Project Endpoints to update, retrieve and delete endpoint"""
+
+ serializer_class = ProjectSerializer
+ model = Project
+ webhook_event = "project"
+
+ permission_classes = [ProjectBasePermission]
+ use_read_replica = True
+
+ def get_queryset(self):
+ return (
+ Project.objects.filter(workspace__slug=self.kwargs.get("slug"))
+ .filter(
+ Q(
+ project_projectmember__member=self.request.user,
+ project_projectmember__is_active=True,
+ )
+ | Q(network=2)
+ )
+ .select_related("workspace", "workspace__owner", "default_assignee", "project_lead")
+ .annotate(
+ is_member=Exists(
+ ProjectMember.objects.filter(
+ member=self.request.user,
+ project_id=OuterRef("pk"),
+ workspace__slug=self.kwargs.get("slug"),
+ is_active=True,
+ )
+ )
+ )
+ .annotate(
+ total_members=ProjectMember.objects.filter(
+ project_id=OuterRef("id"), member__is_bot=False, is_active=True
+ )
+ .order_by()
+ .annotate(count=Func(F("id"), function="Count"))
+ .values("count")
+ )
+ .annotate(
+ total_cycles=Cycle.objects.filter(project_id=OuterRef("id"))
+ .order_by()
+ .annotate(count=Func(F("id"), function="Count"))
+ .values("count")
+ )
+ .annotate(
+ total_modules=Module.objects.filter(project_id=OuterRef("id"))
+ .order_by()
+ .annotate(count=Func(F("id"), function="Count"))
+ .values("count")
+ )
+ .annotate(
+ member_role=ProjectMember.objects.filter(
+ project_id=OuterRef("pk"),
+ member_id=self.request.user.id,
+ is_active=True,
+ ).values("role")
+ )
+ .annotate(
+ is_deployed=Exists(
+ DeployBoard.objects.filter(
+ project_id=OuterRef("pk"),
+ workspace__slug=self.kwargs.get("slug"),
+ )
+ )
+ )
+ .order_by(self.kwargs.get("order_by", "-created_at"))
+ .distinct()
+ )
+
+ @project_docs(
+ operation_id="retrieve_project",
+ summary="Retrieve project",
+ description="Retrieve details of a specific project.",
+ parameters=[
+ PROJECT_PK_PARAMETER,
+ ],
+ responses={
+ 200: OpenApiResponse(
+ description="Project details",
+ response=ProjectSerializer,
+ examples=[PROJECT_EXAMPLE],
+ ),
+ 404: PROJECT_NOT_FOUND_RESPONSE,
+ },
+ )
+ def get(self, request, slug, pk):
+ """Retrieve project
+
+ Retrieve details of a specific project.
+ """
+ project = self.get_queryset().get(workspace__slug=slug, pk=pk)
+ serializer = ProjectSerializer(project, fields=self.fields, expand=self.expand)
+ return Response(serializer.data, status=status.HTTP_200_OK)
+
+ @project_docs(
+ operation_id="update_project",
+ summary="Update project",
+ description="Partially update an existing project's properties like name, description, or settings.",
+ parameters=[
+ PROJECT_PK_PARAMETER,
+ ],
+ request=OpenApiRequest(
+ request=ProjectUpdateSerializer,
+ examples=[PROJECT_UPDATE_EXAMPLE],
+ ),
+ responses={
+ 200: OpenApiResponse(
+ description="Project updated successfully",
+ response=ProjectSerializer,
+ examples=[PROJECT_EXAMPLE],
+ ),
+ 404: PROJECT_NOT_FOUND_RESPONSE,
+ 409: PROJECT_NAME_TAKEN_RESPONSE,
+ },
+ )
+ def patch(self, request, slug, pk):
+ """Update project
+
+ Partially update an existing project's properties like name, description, or settings.
+ Tracks changes in model activity logs for audit purposes.
+ """
+ try:
+ workspace = Workspace.objects.get(slug=slug)
+ project = Project.objects.get(pk=pk)
+ current_instance = json.dumps(ProjectSerializer(project).data, cls=DjangoJSONEncoder)
+
+ intake_view = request.data.get("intake_view", project.intake_view)
+
+ if project.archived_at:
+ return Response(
+ {"error": "Archived project cannot be updated"},
+ status=status.HTTP_400_BAD_REQUEST,
+ )
+
+ serializer = ProjectUpdateSerializer(
+ project,
+ data={**request.data, "intake_view": intake_view},
+ context={"workspace_id": workspace.id},
+ partial=True,
+ )
+
+ if serializer.is_valid():
+ serializer.save()
+ if serializer.data["intake_view"]:
+ intake = Intake.objects.filter(project=project, is_default=True).first()
+ if not intake:
+ Intake.objects.create(
+ name=f"{project.name} Intake",
+ project=project,
+ is_default=True,
+ )
+
+ project = self.get_queryset().filter(pk=serializer.instance.id).first()
+
+ model_activity.delay(
+ model_name="project",
+ model_id=str(project.id),
+ requested_data=request.data,
+ current_instance=current_instance,
+ actor_id=request.user.id,
+ slug=slug,
+ origin=base_host(request=request, is_app=True),
+ )
+
+ serializer = ProjectSerializer(project)
+ return Response(serializer.data, status=status.HTTP_200_OK)
+ return Response(serializer.errors, status=status.HTTP_400_BAD_REQUEST)
+ except IntegrityError as e:
+ if "already exists" in str(e):
+ return Response(
+ {"name": "The project name is already taken"},
+ status=status.HTTP_409_CONFLICT,
+ )
+ except (Project.DoesNotExist, Workspace.DoesNotExist):
+ return Response({"error": "Project does not exist"}, status=status.HTTP_404_NOT_FOUND)
+ except ValidationError:
+ return Response(
+ {"identifier": "The project identifier is already taken"},
+ status=status.HTTP_409_CONFLICT,
+ )
+
+ @project_docs(
+ operation_id="delete_project",
+ summary="Delete project",
+ description="Permanently remove a project and all its associated data from the workspace.",
+ parameters=[
+ PROJECT_PK_PARAMETER,
+ ],
+ responses={
+ 204: DELETED_RESPONSE,
+ },
+ )
+ def delete(self, request, slug, pk):
+ """Delete project
+
+ Permanently remove a project and all its associated data from the workspace.
+ Only admins can delete projects and the action cannot be undone.
+ """
+ project = Project.objects.get(pk=pk, workspace__slug=slug)
+ # Delete the user favorite entries for the project
+ UserFavorite.objects.filter(entity_type="project", entity_identifier=pk, project_id=pk).delete()
+ project.delete()
+ webhook_activity.delay(
+ event="project",
+ verb="deleted",
+ field=None,
+ old_value=None,
+ new_value=None,
+ actor_id=request.user.id,
+ slug=slug,
+ current_site=base_host(request=request, is_app=True),
+ event_id=project.id,
+ old_identifier=None,
+ new_identifier=None,
+ )
+ return Response(status=status.HTTP_204_NO_CONTENT)
+
+
+class ProjectArchiveUnarchiveAPIEndpoint(BaseAPIView):
+ """Project Archive and Unarchive Endpoint"""
+
+ permission_classes = [ProjectBasePermission]
+
+ @project_docs(
+ operation_id="archive_project",
+ summary="Archive project",
+ description="Move a project to archived status, hiding it from active project lists.",
+ parameters=[
+ PROJECT_ID_PARAMETER,
+ ],
+ request={},
+ responses={
+ 204: ARCHIVED_RESPONSE,
+ },
+ )
+ def post(self, request, slug, project_id):
+ """Archive project
+
+ Move a project to archived status, hiding it from active project lists.
+ Archived projects remain accessible but are excluded from regular workflows.
+ """
+ project = Project.objects.get(pk=project_id, workspace__slug=slug)
+ project.archived_at = timezone.now()
+ project.save()
+ UserFavorite.objects.filter(workspace__slug=slug, project=project_id).delete()
+ return Response(status=status.HTTP_204_NO_CONTENT)
+
+ @project_docs(
+ operation_id="unarchive_project",
+ summary="Unarchive project",
+ description="Restore an archived project to active status, making it available in regular workflows.",
+ parameters=[
+ PROJECT_ID_PARAMETER,
+ ],
+ request={},
+ responses={
+ 204: UNARCHIVED_RESPONSE,
+ },
+ )
+ def delete(self, request, slug, project_id):
+ """Unarchive project
+
+ Restore an archived project to active status, making it available in regular workflows.
+ The project will reappear in active project lists and become fully functional.
+ """
+ project = Project.objects.get(pk=project_id, workspace__slug=slug)
+ project.archived_at = None
+ project.save()
+ return Response(status=status.HTTP_204_NO_CONTENT)
diff --git a/apps/api/plane/api/views/state.py b/apps/api/plane/api/views/state.py
new file mode 100644
index 00000000..bd91de39
--- /dev/null
+++ b/apps/api/plane/api/views/state.py
@@ -0,0 +1,296 @@
+# Django imports
+from django.db import IntegrityError
+
+# Third party imports
+from rest_framework import status
+from rest_framework.response import Response
+from drf_spectacular.utils import OpenApiResponse, OpenApiRequest
+
+# Module imports
+from plane.api.serializers import StateSerializer
+from plane.app.permissions import ProjectEntityPermission
+from plane.db.models import Issue, State
+from .base import BaseAPIView
+from plane.utils.openapi import (
+ state_docs,
+ STATE_ID_PARAMETER,
+ CURSOR_PARAMETER,
+ PER_PAGE_PARAMETER,
+ FIELDS_PARAMETER,
+ EXPAND_PARAMETER,
+ create_paginated_response,
+ # Request Examples
+ STATE_CREATE_EXAMPLE,
+ STATE_UPDATE_EXAMPLE,
+ # Response Examples
+ STATE_EXAMPLE,
+ INVALID_REQUEST_RESPONSE,
+ STATE_NAME_EXISTS_RESPONSE,
+ DELETED_RESPONSE,
+ STATE_CANNOT_DELETE_RESPONSE,
+ EXTERNAL_ID_EXISTS_RESPONSE,
+)
+
+
+class StateListCreateAPIEndpoint(BaseAPIView):
+ """State List and Create Endpoint"""
+
+ serializer_class = StateSerializer
+ model = State
+ permission_classes = [ProjectEntityPermission]
+ use_read_replica = True
+
+ def get_queryset(self):
+ return (
+ State.objects.filter(workspace__slug=self.kwargs.get("slug"))
+ .filter(project_id=self.kwargs.get("project_id"))
+ .filter(
+ project__project_projectmember__member=self.request.user,
+ project__project_projectmember__is_active=True,
+ )
+ .filter(is_triage=False)
+ .filter(project__archived_at__isnull=True)
+ .select_related("project")
+ .select_related("workspace")
+ .distinct()
+ )
+
+ @state_docs(
+ operation_id="create_state",
+ summary="Create state",
+ description="Create a new workflow state for a project with specified name, color, and group.",
+ request=OpenApiRequest(
+ request=StateSerializer,
+ examples=[STATE_CREATE_EXAMPLE],
+ ),
+ responses={
+ 200: OpenApiResponse(
+ description="State created",
+ response=StateSerializer,
+ examples=[STATE_EXAMPLE],
+ ),
+ 400: INVALID_REQUEST_RESPONSE,
+ 409: STATE_NAME_EXISTS_RESPONSE,
+ },
+ )
+ def post(self, request, slug, project_id):
+ """Create state
+
+ Create a new workflow state for a project with specified name, color, and group.
+ Supports external ID tracking for integration purposes.
+ """
+ try:
+ serializer = StateSerializer(data=request.data, context={"project_id": project_id})
+ if serializer.is_valid():
+ if (
+ request.data.get("external_id")
+ and request.data.get("external_source")
+ and State.objects.filter(
+ project_id=project_id,
+ workspace__slug=slug,
+ external_source=request.data.get("external_source"),
+ external_id=request.data.get("external_id"),
+ ).exists()
+ ):
+ state = State.objects.filter(
+ workspace__slug=slug,
+ project_id=project_id,
+ external_id=request.data.get("external_id"),
+ external_source=request.data.get("external_source"),
+ ).first()
+ return Response(
+ {
+ "error": "State with the same external id and external source already exists",
+ "id": str(state.id),
+ },
+ status=status.HTTP_409_CONFLICT,
+ )
+
+ serializer.save(project_id=project_id)
+ return Response(serializer.data, status=status.HTTP_200_OK)
+ return Response(serializer.errors, status=status.HTTP_400_BAD_REQUEST)
+ except IntegrityError:
+ state = State.objects.filter(
+ workspace__slug=slug,
+ project_id=project_id,
+ name=request.data.get("name"),
+ ).first()
+ return Response(
+ {
+ "error": "State with the same name already exists in the project",
+ "id": str(state.id),
+ },
+ status=status.HTTP_409_CONFLICT,
+ )
+
+ @state_docs(
+ operation_id="list_states",
+ summary="List states",
+ description="Retrieve all workflow states for a project.",
+ parameters=[
+ CURSOR_PARAMETER,
+ PER_PAGE_PARAMETER,
+ FIELDS_PARAMETER,
+ EXPAND_PARAMETER,
+ ],
+ responses={
+ 200: create_paginated_response(
+ StateSerializer,
+ "PaginatedStateResponse",
+ "Paginated list of states",
+ "Paginated States",
+ ),
+ },
+ )
+ def get(self, request, slug, project_id):
+ """List states
+
+ Retrieve all workflow states for a project.
+ Returns paginated results when listing all states.
+ """
+ return self.paginate(
+ request=request,
+ queryset=(self.get_queryset()),
+ on_results=lambda states: StateSerializer(states, many=True, fields=self.fields, expand=self.expand).data,
+ )
+
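+# --- Editorial sketch (not part of this commit) ---------------------------
+# The create handler above deduplicates on (external_source, external_id)
+# and answers 409 with the existing state's id, which makes integration
+# imports idempotent: on conflict, reuse the returned id instead of creating
+# a duplicate. `requests` and the URL path are assumptions.
+def _example_upsert_state(base_url, slug, project_id, api_key, payload):
+    import requests
+
+    resp = requests.post(
+        f"{base_url}/api/v1/workspaces/{slug}/projects/{project_id}/states/",
+        headers={"X-Api-Key": api_key},
+        json=payload,  # name/color/group plus external_source/external_id
+    )
+    if resp.status_code == 409:
+        return resp.json()["id"]  # the already-existing state
+    resp.raise_for_status()
+    return resp.json()["id"]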
+
+class StateDetailAPIEndpoint(BaseAPIView):
+ """State Detail Endpoint"""
+
+ serializer_class = StateSerializer
+ model = State
+ permission_classes = [ProjectEntityPermission]
+ use_read_replica = True
+
+ def get_queryset(self):
+ return (
+ State.objects.filter(workspace__slug=self.kwargs.get("slug"))
+ .filter(project_id=self.kwargs.get("project_id"))
+ .filter(
+ project__project_projectmember__member=self.request.user,
+ project__project_projectmember__is_active=True,
+ )
+ .filter(is_triage=False)
+ .filter(project__archived_at__isnull=True)
+ .select_related("project")
+ .select_related("workspace")
+ .distinct()
+ )
+
+ @state_docs(
+ operation_id="retrieve_state",
+ summary="Retrieve state",
+ description="Retrieve details of a specific state.",
+ parameters=[
+ STATE_ID_PARAMETER,
+ ],
+ responses={
+ 200: OpenApiResponse(
+ description="State retrieved",
+ response=StateSerializer,
+ examples=[STATE_EXAMPLE],
+ ),
+ },
+ )
+ def get(self, request, slug, project_id, state_id):
+ """Retrieve state
+
+ Retrieve details of a specific state.
+ """
+ serializer = StateSerializer(
+ self.get_queryset().get(pk=state_id),
+ fields=self.fields,
+ expand=self.expand,
+ )
+ return Response(serializer.data, status=status.HTTP_200_OK)
+
+ @state_docs(
+ operation_id="delete_state",
+ summary="Delete state",
+ description="Permanently remove a workflow state from a project. Default states and states with existing work items cannot be deleted.", # noqa: E501
+ parameters=[
+ STATE_ID_PARAMETER,
+ ],
+ responses={
+ 204: DELETED_RESPONSE,
+ 400: STATE_CANNOT_DELETE_RESPONSE,
+ },
+ )
+ def delete(self, request, slug, project_id, state_id):
+ """Delete state
+
+ Permanently remove a workflow state from a project.
+ Default states and states with existing work items cannot be deleted.
+ """
+ state = State.objects.get(is_triage=False, pk=state_id, project_id=project_id, workspace__slug=slug)
+
+ if state.default:
+ return Response(
+ {"error": "Default state cannot be deleted"},
+ status=status.HTTP_400_BAD_REQUEST,
+ )
+
+ # Check for any issues in the state
+ issue_exist = Issue.issue_objects.filter(state=state_id).exists()
+
+ if issue_exist:
+ return Response(
+ {"error": "The state is not empty, only empty states can be deleted"},
+ status=status.HTTP_400_BAD_REQUEST,
+ )
+
+ state.delete()
+ return Response(status=status.HTTP_204_NO_CONTENT)
+
+ @state_docs(
+ operation_id="update_state",
+ summary="Update state",
+ description="Partially update an existing workflow state's properties like name, color, or group.",
+ parameters=[
+ STATE_ID_PARAMETER,
+ ],
+ request=OpenApiRequest(
+ request=StateSerializer,
+ examples=[STATE_UPDATE_EXAMPLE],
+ ),
+ responses={
+ 200: OpenApiResponse(
+ description="State updated",
+ response=StateSerializer,
+ examples=[STATE_EXAMPLE],
+ ),
+ 400: INVALID_REQUEST_RESPONSE,
+ 409: EXTERNAL_ID_EXISTS_RESPONSE,
+ },
+ )
+ def patch(self, request, slug, project_id, state_id):
+ """Update state
+
+ Partially update an existing workflow state's properties like name, color, or group.
+ Validates external ID uniqueness if provided.
+ """
+ state = State.objects.get(workspace__slug=slug, project_id=project_id, pk=state_id)
+ serializer = StateSerializer(state, data=request.data, partial=True)
+ if serializer.is_valid():
+ if (
+ request.data.get("external_id")
+ and (state.external_id != str(request.data.get("external_id")))
+ and State.objects.filter(
+ project_id=project_id,
+ workspace__slug=slug,
+ external_source=request.data.get("external_source", state.external_source),
+ external_id=request.data.get("external_id"),
+ ).exists()
+ ):
+ return Response(
+ {
+ "error": "State with the same external id and external source already exists",
+ "id": str(state.id),
+ },
+ status=status.HTTP_409_CONFLICT,
+ )
+ serializer.save()
+ return Response(serializer.data, status=status.HTTP_200_OK)
+ return Response(serializer.errors, status=status.HTTP_400_BAD_REQUEST)
diff --git a/apps/api/plane/api/views/user.py b/apps/api/plane/api/views/user.py
new file mode 100644
index 00000000..b874cec1
--- /dev/null
+++ b/apps/api/plane/api/views/user.py
@@ -0,0 +1,37 @@
+# Third party imports
+from rest_framework import status
+from rest_framework.response import Response
+from drf_spectacular.utils import OpenApiResponse
+
+# Module imports
+from plane.api.serializers import UserLiteSerializer
+from plane.api.views.base import BaseAPIView
+from plane.db.models import User
+from plane.utils.openapi.decorators import user_docs
+from plane.utils.openapi import USER_EXAMPLE
+
+
+class UserEndpoint(BaseAPIView):
+ serializer_class = UserLiteSerializer
+ model = User
+
+ @user_docs(
+ operation_id="get_current_user",
+ summary="Get current user",
+ description="Retrieve the authenticated user's profile information including basic details.",
+ responses={
+ 200: OpenApiResponse(
+ description="Current user profile",
+ response=UserLiteSerializer,
+ examples=[USER_EXAMPLE],
+ ),
+ },
+ )
+ def get(self, request):
+ """Get current user
+
+ Retrieve the authenticated user's profile information including basic details.
+ Returns user data based on the current authentication context.
+ """
+ serializer = UserLiteSerializer(request.user)
+ return Response(serializer.data, status=status.HTTP_200_OK)
diff --git a/apps/api/plane/app/__init__.py b/apps/api/plane/app/__init__.py
new file mode 100644
index 00000000..e69de29b
diff --git a/apps/api/plane/app/apps.py b/apps/api/plane/app/apps.py
new file mode 100644
index 00000000..e3277fc4
--- /dev/null
+++ b/apps/api/plane/app/apps.py
@@ -0,0 +1,5 @@
+from django.apps import AppConfig
+
+
+class AppApiConfig(AppConfig):
+ name = "plane.app"
diff --git a/apps/api/plane/app/middleware/__init__.py b/apps/api/plane/app/middleware/__init__.py
new file mode 100644
index 00000000..e69de29b
diff --git a/apps/api/plane/app/middleware/api_authentication.py b/apps/api/plane/app/middleware/api_authentication.py
new file mode 100644
index 00000000..ddabb413
--- /dev/null
+++ b/apps/api/plane/app/middleware/api_authentication.py
@@ -0,0 +1,47 @@
+# Django imports
+from django.utils import timezone
+from django.db.models import Q
+
+# Third party imports
+from rest_framework import authentication
+from rest_framework.exceptions import AuthenticationFailed
+
+# Module imports
+from plane.db.models import APIToken
+
+
+class APIKeyAuthentication(authentication.BaseAuthentication):
+ """
+ Authentication with an API Key
+ """
+
+ www_authenticate_realm = "api"
+ media_type = "application/json"
+ auth_header_name = "X-Api-Key"
+
+ def get_api_token(self, request):
+ return request.headers.get(self.auth_header_name)
+
+ def validate_api_token(self, token):
+ try:
+ api_token = APIToken.objects.get(
+ Q(Q(expired_at__gt=timezone.now()) | Q(expired_at__isnull=True)),
+ token=token,
+ is_active=True,
+ )
+ except APIToken.DoesNotExist:
+ raise AuthenticationFailed("Given API token is not valid")
+
+ # save api token last used
+ api_token.last_used = timezone.now()
+ api_token.save(update_fields=["last_used"])
+ return (api_token.user, api_token.token)
+
+ def authenticate(self, request):
+ token = self.get_api_token(request=request)
+ if not token:
+ return None
+
+ # Validate the API token
+ user, token = self.validate_api_token(token)
+ return user, token
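+
+
+# --- Editorial sketch (not part of this commit) ---------------------------
+# The authentication class above reads the token from the "X-Api-Key" header
+# and rejects expired or inactive tokens. A client therefore only needs to
+# attach that header; the URL arguments here are illustrative.
+def _example_authenticated_get(base_url, path, api_key):
+    import requests
+
+    resp = requests.get(f"{base_url}{path}", headers={"X-Api-Key": api_key})
+    if resp.status_code == 401:
+        # Token missing, expired, or deactivated
+        raise PermissionError("Given API token is not valid")
+    resp.raise_for_status()
+    return resp.json()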
diff --git a/apps/api/plane/app/permissions/__init__.py b/apps/api/plane/app/permissions/__init__.py
new file mode 100644
index 00000000..95ee038e
--- /dev/null
+++ b/apps/api/plane/app/permissions/__init__.py
@@ -0,0 +1,16 @@
+from .workspace import (
+ WorkSpaceBasePermission,
+ WorkspaceOwnerPermission,
+ WorkSpaceAdminPermission,
+ WorkspaceEntityPermission,
+ WorkspaceViewerPermission,
+ WorkspaceUserPermission,
+)
+from .project import (
+ ProjectBasePermission,
+ ProjectEntityPermission,
+ ProjectMemberPermission,
+ ProjectLitePermission,
+)
+from .base import allow_permission, ROLE
+from .page import ProjectPagePermission
diff --git a/apps/api/plane/app/permissions/base.py b/apps/api/plane/app/permissions/base.py
new file mode 100644
index 00000000..a2b1a18f
--- /dev/null
+++ b/apps/api/plane/app/permissions/base.py
@@ -0,0 +1,73 @@
+from plane.db.models import WorkspaceMember, ProjectMember
+from functools import wraps
+from rest_framework.response import Response
+from rest_framework import status
+
+from enum import Enum
+
+
+class ROLE(Enum):
+ ADMIN = 20
+ MEMBER = 15
+ GUEST = 5
+
+
+def allow_permission(allowed_roles, level="PROJECT", creator=False, model=None):
+ def decorator(view_func):
+ @wraps(view_func)
+ def _wrapped_view(instance, request, *args, **kwargs):
+ # Check for creator if required
+ if creator and model:
+ obj = model.objects.filter(id=kwargs["pk"], created_by=request.user).exists()
+ if obj:
+ return view_func(instance, request, *args, **kwargs)
+
+ # Convert allowed_roles to their values if they are enum members
+ allowed_role_values = [role.value if isinstance(role, ROLE) else role for role in allowed_roles]
+
+ # Check role permissions
+ if level == "WORKSPACE":
+ if WorkspaceMember.objects.filter(
+ member=request.user,
+ workspace__slug=kwargs["slug"],
+ role__in=allowed_role_values,
+ is_active=True,
+ ).exists():
+ return view_func(instance, request, *args, **kwargs)
+ else:
+ is_user_has_allowed_role = ProjectMember.objects.filter(
+ member=request.user,
+ workspace__slug=kwargs["slug"],
+ project_id=kwargs["project_id"],
+ role__in=allowed_role_values,
+ is_active=True,
+ ).exists()
+
+ # Allow if the user has one of the allowed project roles, or is a workspace admin who is part of the project (any project role)  # noqa: E501
+ if is_user_has_allowed_role:
+ return view_func(instance, request, *args, **kwargs)
+ elif (
+ ProjectMember.objects.filter(
+ member=request.user,
+ workspace__slug=kwargs["slug"],
+ project_id=kwargs["project_id"],
+ is_active=True,
+ ).exists()
+ and WorkspaceMember.objects.filter(
+ member=request.user,
+ workspace__slug=kwargs["slug"],
+ role=ROLE.ADMIN.value,
+ is_active=True,
+ ).exists()
+ ):
+ return view_func(instance, request, *args, **kwargs)
+
+ # Return permission denied if no conditions are met
+ return Response(
+ {"error": "You don't have the required permissions."},
+ status=status.HTTP_403_FORBIDDEN,
+ )
+
+ return _wrapped_view
+
+ return decorator
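+
+
+# --- Editorial sketch (not part of this commit) ---------------------------
+# How a view method might apply the decorator above. The view class is
+# illustrative; the decorator only requires that the view receive `slug`
+# (and `project_id` for PROJECT-level checks) in its kwargs, plus `pk` when
+# creator=True is combined with a model.
+class _ExampleModuleView:
+    @allow_permission([ROLE.ADMIN, ROLE.MEMBER], level="PROJECT")
+    def patch(self, request, slug, project_id, pk):
+        # Only project admins/members (or a workspace admin who belongs to
+        # the project) reach this body; everyone else receives the 403 above.
+        ...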
diff --git a/apps/api/plane/app/permissions/page.py b/apps/api/plane/app/permissions/page.py
new file mode 100644
index 00000000..bea878f4
--- /dev/null
+++ b/apps/api/plane/app/permissions/page.py
@@ -0,0 +1,121 @@
+from plane.db.models import ProjectMember, Page
+from plane.app.permissions import ROLE
+
+
+from rest_framework.permissions import BasePermission, SAFE_METHODS
+
+
+# Permission Mappings for workspace members
+ADMIN = ROLE.ADMIN.value
+MEMBER = ROLE.MEMBER.value
+GUEST = ROLE.GUEST.value
+
+
+class ProjectPagePermission(BasePermission):
+ """
+ Custom permission to control access to pages within a workspace
+ based on user roles, page visibility (public/private), and feature flags.
+ """
+
+ def has_permission(self, request, view):
+ """
+ Check basic project-level permissions before checking object-level permissions.
+ """
+ if request.user.is_anonymous:
+ return False
+
+ user_id = request.user.id
+ slug = view.kwargs.get("slug")
+ page_id = view.kwargs.get("page_id")
+ project_id = view.kwargs.get("project_id")
+
+ # Hook for extended validation
+ extended_access, role = self._check_access_and_get_role(request, slug, project_id)
+ if extended_access is False:
+ return False
+
+ if page_id:
+ page = Page.objects.get(id=page_id, workspace__slug=slug)
+
+ # Allow access if the user is the owner of the page
+ if page.owned_by_id == user_id:
+ return True
+
+ # Handle private page access
+ if page.access == Page.PRIVATE_ACCESS:
+ return self._has_private_page_action_access(request, slug, page, project_id)
+
+ # Handle public page access
+ return self._has_public_page_action_access(request, role)
+
+ # No page in the URL (list/create requests): fall back to the role-based
+ # action check instead of implicitly returning None, which would deny all
+ return self._has_public_page_action_access(request, role)
+
+ def _check_project_member_access(self, request, slug, project_id):
+ """
+ Check if the user is a project member.
+ """
+ return (
+ ProjectMember.objects.filter(
+ member=request.user,
+ workspace__slug=slug,
+ is_active=True,
+ project_id=project_id,
+ )
+ .values_list("role", flat=True)
+ .first()
+ )
+
+ def _check_access_and_get_role(self, request, slug, project_id):
+ """
+ Hook for extended access checking
+ Returns: True (allow), False (deny), None (continue with normal flow)
+ """
+ role = self._check_project_member_access(request, slug, project_id)
+ if not role:
+ return False, None
+ return True, role
+
+ def _has_private_page_action_access(self, request, slug, page, project_id):
+ """
+ Check access to private pages. Override for feature flag logic.
+ """
+ # Base implementation: only owner can access private pages
+ return False
+
+ def _check_project_action_access(self, request, role):
+ method = request.method
+
+ # Admins and members can create (POST) pages
+ if method == "POST":
+ if role in [ADMIN, MEMBER]:
+ return True
+ return False
+
+ # Safe methods (GET, HEAD, OPTIONS) allowed for all active roles
+ if method in SAFE_METHODS:
+ if role in [ADMIN, MEMBER, GUEST]:
+ return True
+ return False
+
+ # PUT/PATCH: Admins and members can update
+ if method in ["PUT", "PATCH"]:
+ if role in [ADMIN, MEMBER]:
+ return True
+ return False
+
+ # DELETE: Only admins can delete
+ if method == "DELETE":
+ if role in [ADMIN]:
+ return True
+ return False
+
+ # Deny by default
+ return False
+
+ def _has_public_page_action_access(self, request, role):
+ """
+ Check if the user has permission to access a public page
+ and can perform operations on the page.
+ """
+ project_member_exists = self._check_project_action_access(request, role)
+ if not project_member_exists:
+ return False
+ return True
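+
+
+# --- Editorial sketch (not part of this commit) ---------------------------
+# `_has_private_page_action_access` is an explicit override hook: the base
+# class denies every non-owner request to a private page. A subclass that
+# let project admins read private pages might look like this (illustrative):
+class _ExampleAdminReadablePagePermission(ProjectPagePermission):
+    def _has_private_page_action_access(self, request, slug, page, project_id):
+        if request.method in SAFE_METHODS:
+            role = self._check_project_member_access(request, slug, project_id)
+            return role == ADMIN
+        return False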
diff --git a/apps/api/plane/app/permissions/project.py b/apps/api/plane/app/permissions/project.py
new file mode 100644
index 00000000..e095ffed
--- /dev/null
+++ b/apps/api/plane/app/permissions/project.py
@@ -0,0 +1,125 @@
+# Third Party imports
+from rest_framework.permissions import SAFE_METHODS, BasePermission
+
+# Module import
+from plane.db.models import ProjectMember, WorkspaceMember
+from plane.db.models.project import ROLE
+
+
+class ProjectBasePermission(BasePermission):
+ def has_permission(self, request, view):
+ if request.user.is_anonymous:
+ return False
+
+ ## Safe Methods -> Handle the filtering logic in queryset
+ if request.method in SAFE_METHODS:
+ return WorkspaceMember.objects.filter(
+ workspace__slug=view.workspace_slug, member=request.user, is_active=True
+ ).exists()
+
+ ## Only workspace admins or members can create projects
+ if request.method == "POST":
+ return WorkspaceMember.objects.filter(
+ workspace__slug=view.workspace_slug,
+ member=request.user,
+ role__in=[ROLE.ADMIN.value, ROLE.MEMBER.value],
+ is_active=True,
+ ).exists()
+
+ project_member_qs = ProjectMember.objects.filter(
+ workspace__slug=view.workspace_slug,
+ member=request.user,
+ project_id=view.project_id,
+ is_active=True,
+ )
+
+ ## Only project admins, or a workspace admin who is part of the project, can access
+
+ if project_member_qs.filter(role=ROLE.ADMIN.value).exists():
+ return True
+ else:
+ return (
+ project_member_qs.exists()
+ and WorkspaceMember.objects.filter(
+ member=request.user,
+ workspace__slug=view.workspace_slug,
+ role=ROLE.ADMIN.value,
+ is_active=True,
+ ).exists()
+ )
+
+
+class ProjectMemberPermission(BasePermission):
+ def has_permission(self, request, view):
+ if request.user.is_anonymous:
+ return False
+
+ ## Safe Methods -> Handle the filtering logic in queryset
+ if request.method in SAFE_METHODS:
+ return ProjectMember.objects.filter(
+ workspace__slug=view.workspace_slug, member=request.user, is_active=True
+ ).exists()
+ ## Only workspace admins or members can create projects
+ if request.method == "POST":
+ return WorkspaceMember.objects.filter(
+ workspace__slug=view.workspace_slug,
+ member=request.user,
+ role__in=[ROLE.ADMIN.value, ROLE.MEMBER.value],
+ is_active=True,
+ ).exists()
+
+ ## Only project admins and members can update project attributes
+ return ProjectMember.objects.filter(
+ workspace__slug=view.workspace_slug,
+ member=request.user,
+ role__in=[ROLE.ADMIN.value, ROLE.MEMBER.value],
+ project_id=view.project_id,
+ is_active=True,
+ ).exists()
+
+
+class ProjectEntityPermission(BasePermission):
+ def has_permission(self, request, view):
+ if request.user.is_anonymous:
+ return False
+
+ # Handle requests that reference the project by its identifier instead of its id
+ if hasattr(view, "project_identifier") and view.project_identifier:
+ if request.method in SAFE_METHODS:
+ return ProjectMember.objects.filter(
+ workspace__slug=view.workspace_slug,
+ member=request.user,
+ project__identifier=view.project_identifier,
+ is_active=True,
+ ).exists()
+
+ ## Safe Methods -> Handle the filtering logic in queryset
+ if request.method in SAFE_METHODS:
+ return ProjectMember.objects.filter(
+ workspace__slug=view.workspace_slug,
+ member=request.user,
+ project_id=view.project_id,
+ is_active=True,
+ ).exists()
+
+ ## Only project admins or members can create and edit project entities
+ return ProjectMember.objects.filter(
+ workspace__slug=view.workspace_slug,
+ member=request.user,
+ role__in=[ROLE.ADMIN.value, ROLE.MEMBER.value],
+ project_id=view.project_id,
+ is_active=True,
+ ).exists()
+
+
+class ProjectLitePermission(BasePermission):
+ def has_permission(self, request, view):
+ if request.user.is_anonymous:
+ return False
+
+ return ProjectMember.objects.filter(
+ workspace__slug=view.workspace_slug,
+ member=request.user,
+ project_id=view.project_id,
+ is_active=True,
+ ).exists()
diff --git a/apps/api/plane/app/permissions/workspace.py b/apps/api/plane/app/permissions/workspace.py
new file mode 100644
index 00000000..8dc791c0
--- /dev/null
+++ b/apps/api/plane/app/permissions/workspace.py
@@ -0,0 +1,106 @@
+# Third Party imports
+from rest_framework.permissions import BasePermission, SAFE_METHODS
+
+# Module imports
+from plane.db.models import WorkspaceMember
+
+
+# Permission Mappings
+Admin = 20
+Member = 15
+Guest = 5
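+
+ # These levels are assumed to mirror the ROLE values used by the project
+ # permissions (ADMIN=20, MEMBER=15, GUEST=5); higher numbers imply broader
+ # access, which is what the role__in checks below rely on.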
+
+
+ # TODO: Move the logic below to a Python match statement (requires Python 3.10+)
+class WorkSpaceBasePermission(BasePermission):
+ def has_permission(self, request, view):
+ # deny anonymous users outright
+ if request.user.is_anonymous:
+ return False
+
+ # allow any authenticated user to create a workspace
+ if request.method == "POST":
+ return True
+
+ ## Safe Methods
+ if request.method in SAFE_METHODS:
+ return True
+
+ # allow only admins and members to update the workspace settings
+ if request.method in ["PUT", "PATCH"]:
+ return WorkspaceMember.objects.filter(
+ member=request.user,
+ workspace__slug=view.workspace_slug,
+ role__in=[Admin, Member],
+ is_active=True,
+ ).exists()
+
+ # allow only admins to delete the workspace
+ if request.method == "DELETE":
+ return WorkspaceMember.objects.filter(
+ member=request.user,
+ workspace__slug=view.workspace_slug,
+ role=Admin,
+ is_active=True,
+ ).exists()
+
+ # Explicitly deny any other method
+ return False
+
+
+class WorkspaceOwnerPermission(BasePermission):
+ def has_permission(self, request, view):
+ if request.user.is_anonymous:
+ return False
+
+ return WorkspaceMember.objects.filter(
+ workspace__slug=view.workspace_slug, member=request.user, role=Admin, is_active=True
+ ).exists()
+
+
+class WorkSpaceAdminPermission(BasePermission):
+ def has_permission(self, request, view):
+ if request.user.is_anonymous:
+ return False
+
+ return WorkspaceMember.objects.filter(
+ member=request.user,
+ workspace__slug=view.workspace_slug,
+ role__in=[Admin, Member],
+ is_active=True,
+ ).exists()
+
+
+class WorkspaceEntityPermission(BasePermission):
+ def has_permission(self, request, view):
+ if request.user.is_anonymous:
+ return False
+
+ ## Safe Methods -> Handle the filtering logic in queryset
+ if request.method in SAFE_METHODS:
+ return WorkspaceMember.objects.filter(
+ workspace__slug=view.workspace_slug, member=request.user, is_active=True
+ ).exists()
+
+ return WorkspaceMember.objects.filter(
+ member=request.user,
+ workspace__slug=view.workspace_slug,
+ role__in=[Admin, Member],
+ is_active=True,
+ ).exists()
+
+
+class WorkspaceViewerPermission(BasePermission):
+ def has_permission(self, request, view):
+ if request.user.is_anonymous:
+ return False
+
+ return WorkspaceMember.objects.filter(
+ member=request.user, workspace__slug=view.workspace_slug, is_active=True
+ ).exists()
+
+
+class WorkspaceUserPermission(BasePermission):
+ def has_permission(self, request, view):
+ if request.user.is_anonymous:
+ return False
+
+ return WorkspaceMember.objects.filter(
+ member=request.user, workspace__slug=view.workspace_slug, is_active=True
+ ).exists()
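+
+ # Note: WorkspaceViewerPermission and WorkspaceUserPermission are currently
+ # identical (any active workspace member passes); presumably kept separate
+ # so the two checks can diverge later.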
diff --git a/apps/api/plane/app/serializers/__init__.py b/apps/api/plane/app/serializers/__init__.py
new file mode 100644
index 00000000..18be363c
--- /dev/null
+++ b/apps/api/plane/app/serializers/__init__.py
@@ -0,0 +1,130 @@
+from .base import BaseSerializer
+from .user import (
+ UserSerializer,
+ UserLiteSerializer,
+ ChangePasswordSerializer,
+ ResetPasswordSerializer,
+ UserAdminLiteSerializer,
+ UserMeSerializer,
+ UserMeSettingsSerializer,
+ ProfileSerializer,
+ AccountSerializer,
+)
+from .workspace import (
+ WorkSpaceSerializer,
+ WorkSpaceMemberSerializer,
+ WorkSpaceMemberInviteSerializer,
+ WorkspaceLiteSerializer,
+ WorkspaceThemeSerializer,
+ WorkspaceMemberAdminSerializer,
+ WorkspaceMemberMeSerializer,
+ WorkspaceUserPropertiesSerializer,
+ WorkspaceUserLinkSerializer,
+ WorkspaceRecentVisitSerializer,
+ WorkspaceHomePreferenceSerializer,
+ StickySerializer,
+)
+from .project import (
+ ProjectSerializer,
+ ProjectListSerializer,
+ ProjectDetailSerializer,
+ ProjectMemberSerializer,
+ ProjectMemberInviteSerializer,
+ ProjectIdentifierSerializer,
+ ProjectLiteSerializer,
+ ProjectMemberLiteSerializer,
+ DeployBoardSerializer,
+ ProjectMemberAdminSerializer,
+ ProjectPublicMemberSerializer,
+ ProjectMemberRoleSerializer,
+)
+from .state import StateSerializer, StateLiteSerializer
+from .view import IssueViewSerializer, ViewIssueListSerializer
+from .cycle import (
+ CycleSerializer,
+ CycleIssueSerializer,
+ CycleWriteSerializer,
+ CycleUserPropertiesSerializer,
+)
+from .asset import FileAssetSerializer
+from .issue import (
+ IssueCreateSerializer,
+ IssueActivitySerializer,
+ IssueCommentSerializer,
+ IssueUserPropertySerializer,
+ IssueAssigneeSerializer,
+ LabelSerializer,
+ IssueSerializer,
+ IssueFlatSerializer,
+ IssueStateSerializer,
+ IssueLinkSerializer,
+ IssueIntakeSerializer,
+ IssueLiteSerializer,
+ IssueAttachmentSerializer,
+ IssueSubscriberSerializer,
+ IssueReactionSerializer,
+ CommentReactionSerializer,
+ IssueVoteSerializer,
+ IssueRelationSerializer,
+ RelatedIssueSerializer,
+ IssuePublicSerializer,
+ IssueDetailSerializer,
+ IssueReactionLiteSerializer,
+ IssueAttachmentLiteSerializer,
+ IssueLinkLiteSerializer,
+ IssueVersionDetailSerializer,
+ IssueDescriptionVersionDetailSerializer,
+ IssueListDetailSerializer,
+)
+
+from .module import (
+ ModuleDetailSerializer,
+ ModuleWriteSerializer,
+ ModuleSerializer,
+ ModuleIssueSerializer,
+ ModuleLinkSerializer,
+ ModuleUserPropertiesSerializer,
+)
+
+from .api import APITokenSerializer, APITokenReadSerializer
+
+from .importer import ImporterSerializer
+
+from .page import (
+ PageSerializer,
+ PageDetailSerializer,
+ PageVersionSerializer,
+ PageBinaryUpdateSerializer,
+ PageVersionDetailSerializer,
+)
+
+from .estimate import (
+ EstimateSerializer,
+ EstimatePointSerializer,
+ EstimateReadSerializer,
+ WorkspaceEstimateSerializer,
+)
+
+from .intake import (
+ IntakeSerializer,
+ IntakeIssueSerializer,
+ IssueStateIntakeSerializer,
+ IntakeIssueLiteSerializer,
+ IntakeIssueDetailSerializer,
+)
+
+from .analytic import AnalyticViewSerializer
+
+from .notification import NotificationSerializer, UserNotificationPreferenceSerializer
+
+from .exporter import ExporterHistorySerializer
+
+from .webhook import WebhookSerializer, WebhookLogSerializer
+
+from .favorite import UserFavoriteSerializer
+
+from .draft import (
+ DraftIssueCreateSerializer,
+ DraftIssueSerializer,
+ DraftIssueDetailSerializer,
+)
diff --git a/apps/api/plane/app/serializers/analytic.py b/apps/api/plane/app/serializers/analytic.py
new file mode 100644
index 00000000..13b24d14
--- /dev/null
+++ b/apps/api/plane/app/serializers/analytic.py
@@ -0,0 +1,27 @@
+from .base import BaseSerializer
+from plane.db.models import AnalyticView
+from plane.utils.issue_filters import issue_filters
+
+
+class AnalyticViewSerializer(BaseSerializer):
+ class Meta:
+ model = AnalyticView
+ fields = "__all__"
+ read_only_fields = ["workspace", "query"]
+
+ def create(self, validated_data):
+ query_params = validated_data.get("query_dict", {})
+ if bool(query_params):
+ validated_data["query"] = issue_filters(query_params, "POST")
+ else:
+ validated_data["query"] = {}
+ return AnalyticView.objects.create(**validated_data)
+
+ def update(self, instance, validated_data):
+ query_params = validated_data.get("query_data", {})
+ if bool(query_params):
+ validated_data["query"] = issue_filters(query_params, "POST")
+ else:
+ validated_data["query"] = {}
+ validated_data["query"] = issue_filters(query_params, "PATCH")
+ return super().update(instance, validated_data)
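+
+ # Usage sketch (illustrative): the caller is assumed to supply the raw
+ # filter payload under "query_dict" on create and "query_data" on update:
+ # AnalyticViewSerializer(data={"name": "Bugs", "query_dict": {"state": ["backlog"]}})
+ # An empty payload resets `query` to {}.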
diff --git a/apps/api/plane/app/serializers/api.py b/apps/api/plane/app/serializers/api.py
new file mode 100644
index 00000000..009f7a61
--- /dev/null
+++ b/apps/api/plane/app/serializers/api.py
@@ -0,0 +1,37 @@
+from .base import BaseSerializer
+from plane.db.models import APIToken, APIActivityLog
+from rest_framework import serializers
+from django.utils import timezone
+
+
+class APITokenSerializer(BaseSerializer):
+ class Meta:
+ model = APIToken
+ fields = "__all__"
+ read_only_fields = [
+ "token",
+ "expired_at",
+ "created_at",
+ "updated_at",
+ "workspace",
+ "user",
+ ]
+
+
+class APITokenReadSerializer(BaseSerializer):
+ is_active = serializers.SerializerMethodField()
+
+ class Meta:
+ model = APIToken
+ exclude = ("token",)
+
+ def get_is_active(self, obj: APIToken) -> bool:
+ if obj.expired_at is None:
+ return True
+ return timezone.now() < obj.expired_at
+
+
+class APIActivityLogSerializer(BaseSerializer):
+ class Meta:
+ model = APIActivityLog
+ fields = "__all__"
diff --git a/apps/api/plane/app/serializers/asset.py b/apps/api/plane/app/serializers/asset.py
new file mode 100644
index 00000000..560cd353
--- /dev/null
+++ b/apps/api/plane/app/serializers/asset.py
@@ -0,0 +1,9 @@
+from .base import BaseSerializer
+from plane.db.models import FileAsset
+
+
+class FileAssetSerializer(BaseSerializer):
+ class Meta:
+ model = FileAsset
+ fields = "__all__"
+ read_only_fields = ["created_by", "updated_by", "created_at", "updated_at"]
diff --git a/apps/api/plane/app/serializers/base.py b/apps/api/plane/app/serializers/base.py
new file mode 100644
index 00000000..0d8c855c
--- /dev/null
+++ b/apps/api/plane/app/serializers/base.py
@@ -0,0 +1,197 @@
+from rest_framework import serializers
+
+
+class BaseSerializer(serializers.ModelSerializer):
+ id = serializers.PrimaryKeyRelatedField(read_only=True)
+
+
+class DynamicBaseSerializer(BaseSerializer):
+ def __init__(self, *args, **kwargs):
+ # If 'fields' is provided in the arguments, remove it and store it separately.
+ # This is done so as not to pass this custom argument up to the superclass.
+ fields = kwargs.pop("fields", [])
+ self.expand = kwargs.pop("expand", []) or []
+ # Honour both kwargs: expanded fields must also be present in the field map
+ fields = fields + self.expand
+
+ # Call the initialization of the superclass.
+ super().__init__(*args, **kwargs)
+ # If a field list was resolved, filter the serializer's fields accordingly.
+ if fields:
+ self.fields = self._filter_fields(fields)
+
+ def _filter_fields(self, fields):
+ """
+ Adjust the serializer's fields based on the provided 'fields' list.
+
+ :param fields: List or dictionary specifying which fields to include in the serializer.
+ :return: The updated fields for the serializer.
+ """
+ # Check each field_name in the provided fields.
+ for field_name in fields:
+ # If the field is a dictionary (indicating nested fields),
+ # loop through its keys and values.
+ if isinstance(field_name, dict):
+ for key, value in field_name.items():
+ # If the value of this nested field is a list, recursively
+ # filter the nested serializer's fields with it.
+ if isinstance(value, list):
+ nested = self.fields.get(key)
+ if hasattr(nested, "_filter_fields"):
+ nested._filter_fields(value)
+
+ # Create a list to store allowed fields.
+ allowed = []
+ for item in fields:
+ # If the item is a string, it directly represents a field's name.
+ if isinstance(item, str):
+ allowed.append(item)
+ # If the item is a dictionary, it represents a nested field.
+ # Add the key of this dictionary to the allowed list.
+ elif isinstance(item, dict):
+ allowed.append(list(item.keys())[0])
+
+ for field in allowed:
+ if field not in self.fields:
+ from . import (
+ WorkspaceLiteSerializer,
+ ProjectLiteSerializer,
+ UserLiteSerializer,
+ StateLiteSerializer,
+ IssueSerializer,
+ LabelSerializer,
+ CycleIssueSerializer,
+ IssueLiteSerializer,
+ IssueRelationSerializer,
+ IntakeIssueLiteSerializer,
+ IssueReactionLiteSerializer,
+ IssueLinkLiteSerializer,
+ RelatedIssueSerializer,
+ )
+
+ # Expansion mapper
+ expansion = {
+ "user": UserLiteSerializer,
+ "workspace": WorkspaceLiteSerializer,
+ "project": ProjectLiteSerializer,
+ "default_assignee": UserLiteSerializer,
+ "project_lead": UserLiteSerializer,
+ "state": StateLiteSerializer,
+ "created_by": UserLiteSerializer,
+ "issue": IssueSerializer,
+ "actor": UserLiteSerializer,
+ "owned_by": UserLiteSerializer,
+ "members": UserLiteSerializer,
+ "assignees": UserLiteSerializer,
+ "labels": LabelSerializer,
+ "issue_cycle": CycleIssueSerializer,
+ "parent": IssueLiteSerializer,
+ "issue_relation": IssueRelationSerializer,
+ "issue_intake": IntakeIssueLiteSerializer,
+ "issue_related": RelatedIssueSerializer,
+ "issue_reactions": IssueReactionLiteSerializer,
+ "issue_link": IssueLinkLiteSerializer,
+ "sub_issues": IssueLiteSerializer,
+ }
+
+ if field not in self.fields and field in expansion:
+ self.fields[field] = expansion[field](
+ many=(
+ True
+ if field
+ in [
+ "members",
+ "assignees",
+ "labels",
+ "issue_cycle",
+ "issue_relation",
+ "issue_intake",
+ "issue_reactions",
+ "issue_attachment",
+ "issue_link",
+ "sub_issues",
+ "issue_related",
+ ]
+ else False
+ )
+ )
+
+ return self.fields
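+
+ # Illustrative call (call site hypothetical):
+ # IssueSerializer(issues, many=True, fields=["id", "name"], expand=["state"])
+ # As written, this only *adds* expandable fields from the map above when
+ # they are requested but missing; existing fields are left untouched.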
+
+ def to_representation(self, instance):
+ response = super().to_representation(instance)
+
+ # Ensure 'expand' is iterable before processing
+ if self.expand:
+ for expand in self.expand:
+ if expand in self.fields:
+ # Import all the expandable serializers
+ from . import (
+ WorkspaceLiteSerializer,
+ ProjectLiteSerializer,
+ UserLiteSerializer,
+ StateLiteSerializer,
+ IssueSerializer,
+ LabelSerializer,
+ CycleIssueSerializer,
+ IssueRelationSerializer,
+ IntakeIssueLiteSerializer,
+ IssueLiteSerializer,
+ IssueReactionLiteSerializer,
+ IssueAttachmentLiteSerializer,
+ IssueLinkLiteSerializer,
+ RelatedIssueSerializer,
+ )
+
+ # Expansion mapper
+ expansion = {
+ "user": UserLiteSerializer,
+ "workspace": WorkspaceLiteSerializer,
+ "project": ProjectLiteSerializer,
+ "default_assignee": UserLiteSerializer,
+ "project_lead": UserLiteSerializer,
+ "state": StateLiteSerializer,
+ "created_by": UserLiteSerializer,
+ "issue": IssueSerializer,
+ "actor": UserLiteSerializer,
+ "owned_by": UserLiteSerializer,
+ "members": UserLiteSerializer,
+ "assignees": UserLiteSerializer,
+ "labels": LabelSerializer,
+ "issue_cycle": CycleIssueSerializer,
+ "parent": IssueLiteSerializer,
+ "issue_relation": IssueRelationSerializer,
+ "issue_intake": IntakeIssueLiteSerializer,
+ "issue_related": RelatedIssueSerializer,
+ "issue_reactions": IssueReactionLiteSerializer,
+ "issue_attachment": IssueAttachmentLiteSerializer,
+ "issue_link": IssueLinkLiteSerializer,
+ "sub_issues": IssueLiteSerializer,
+ }
+ # Check if field in expansion then expand the field
+ if expand in expansion:
+ if isinstance(response.get(expand), list):
+ exp_serializer = expansion[expand](getattr(instance, expand), many=True)
+ else:
+ exp_serializer = expansion[expand](getattr(instance, expand))
+ response[expand] = exp_serializer.data
+ else:
+ # Field is not expandable; fall back to the raw foreign-key id
+ response[expand] = getattr(instance, f"{expand}_id", None)
+
+ # Check if issue_attachments is in fields or expand
+ if "issue_attachments" in self.fields or "issue_attachments" in self.expand:
+ # Import here to avoid circular imports; IssueAttachmentLiteSerializer must
+ # be in scope even when the expand loop above did not run its imports
+ from plane.db.models import FileAsset
+ from . import IssueAttachmentLiteSerializer
+
+ issue_id = getattr(instance, "id", None)
+
+ if issue_id:
+ # Fetch related issue_attachments
+ issue_attachments = FileAsset.objects.filter(
+ issue_id=issue_id,
+ entity_type=FileAsset.EntityTypeContext.ISSUE_ATTACHMENT,
+ )
+ # Serialize issue_attachments and add them to the response
+ response["issue_attachments"] = IssueAttachmentLiteSerializer(issue_attachments, many=True).data
+ else:
+ response["issue_attachments"] = []
+
+ return response
diff --git a/apps/api/plane/app/serializers/cycle.py b/apps/api/plane/app/serializers/cycle.py
new file mode 100644
index 00000000..89a5efc0
--- /dev/null
+++ b/apps/api/plane/app/serializers/cycle.py
@@ -0,0 +1,102 @@
+# Third party imports
+from rest_framework import serializers
+
+# Module imports
+from .base import BaseSerializer
+from .issue import IssueStateSerializer
+from plane.db.models import Cycle, CycleIssue, CycleUserProperties
+from plane.utils.timezone_converter import convert_to_utc
+
+
+class CycleWriteSerializer(BaseSerializer):
+ def validate(self, data):
+ if (
+ data.get("start_date", None) is not None
+ and data.get("end_date", None) is not None
+ and data.get("start_date", None) > data.get("end_date", None)
+ ):
+ raise serializers.ValidationError("Start date cannot exceed end date")
+ if data.get("start_date", None) is not None and data.get("end_date", None) is not None:
+ project_id = (
+ self.initial_data.get("project_id", None)
+ or (self.instance and self.instance.project_id)
+ or self.context.get("project_id", None)
+ )
+ data["start_date"] = convert_to_utc(
+ date=str(data.get("start_date").date()),
+ project_id=project_id,
+ is_start_date=True,
+ )
+ data["end_date"] = convert_to_utc(
+ date=str(data.get("end_date", None).date()),
+ project_id=project_id,
+ )
+ return data
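+
+ # convert_to_utc is assumed to interpret the bare dates in the project's
+ # timezone and return UTC datetimes (start of day when is_start_date=True,
+ # end of day otherwise), so a cycle spans whole local days regardless of
+ # the server timezone.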
+
+ class Meta:
+ model = Cycle
+ fields = "__all__"
+ read_only_fields = ["workspace", "project", "owned_by", "archived_at"]
+
+
+class CycleSerializer(BaseSerializer):
+ # favorite
+ is_favorite = serializers.BooleanField(read_only=True)
+ total_issues = serializers.IntegerField(read_only=True)
+ # state group wise distribution
+ cancelled_issues = serializers.IntegerField(read_only=True)
+ completed_issues = serializers.IntegerField(read_only=True)
+ started_issues = serializers.IntegerField(read_only=True)
+ unstarted_issues = serializers.IntegerField(read_only=True)
+ backlog_issues = serializers.IntegerField(read_only=True)
+
+ # active | draft | upcoming | completed
+ status = serializers.CharField(read_only=True)
+
+ class Meta:
+ model = Cycle
+ fields = [
+ # necessary fields
+ "id",
+ "workspace_id",
+ "project_id",
+ # model fields
+ "name",
+ "description",
+ "start_date",
+ "end_date",
+ "owned_by_id",
+ "view_props",
+ "sort_order",
+ "external_source",
+ "external_id",
+ "progress_snapshot",
+ "logo_props",
+ # meta fields
+ "is_favorite",
+ "total_issues",
+ "cancelled_issues",
+ "completed_issues",
+ "started_issues",
+ "unstarted_issues",
+ "backlog_issues",
+ "status",
+ ]
+ read_only_fields = fields
+
+
+class CycleIssueSerializer(BaseSerializer):
+ issue_detail = IssueStateSerializer(read_only=True, source="issue")
+ sub_issues_count = serializers.IntegerField(read_only=True)
+
+ class Meta:
+ model = CycleIssue
+ fields = "__all__"
+ read_only_fields = ["workspace", "project", "cycle"]
+
+
+class CycleUserPropertiesSerializer(BaseSerializer):
+ class Meta:
+ model = CycleUserProperties
+ fields = "__all__"
+ read_only_fields = ["workspace", "project", "cycle", "user"]
diff --git a/apps/api/plane/app/serializers/draft.py b/apps/api/plane/app/serializers/draft.py
new file mode 100644
index 00000000..b017a03b
--- /dev/null
+++ b/apps/api/plane/app/serializers/draft.py
@@ -0,0 +1,338 @@
+# Django imports
+from django.utils import timezone
+
+# Third Party imports
+from rest_framework import serializers
+
+# Module imports
+from .base import BaseSerializer
+from plane.db.models import (
+ User,
+ Issue,
+ Label,
+ State,
+ DraftIssue,
+ DraftIssueAssignee,
+ DraftIssueLabel,
+ DraftIssueCycle,
+ DraftIssueModule,
+ ProjectMember,
+ EstimatePoint,
+)
+from plane.utils.content_validator import (
+ validate_html_content,
+ validate_binary_data,
+)
+from plane.app.permissions import ROLE
+
+
+class DraftIssueCreateSerializer(BaseSerializer):
+ # ids
+ state_id = serializers.PrimaryKeyRelatedField(
+ source="state", queryset=State.objects.all(), required=False, allow_null=True
+ )
+ parent_id = serializers.PrimaryKeyRelatedField(
+ source="parent", queryset=Issue.objects.all(), required=False, allow_null=True
+ )
+ label_ids = serializers.ListField(
+ child=serializers.PrimaryKeyRelatedField(queryset=Label.objects.all()),
+ write_only=True,
+ required=False,
+ )
+ assignee_ids = serializers.ListField(
+ child=serializers.PrimaryKeyRelatedField(queryset=User.objects.all()),
+ write_only=True,
+ required=False,
+ )
+
+ class Meta:
+ model = DraftIssue
+ fields = "__all__"
+ read_only_fields = [
+ "workspace",
+ "created_by",
+ "updated_by",
+ "created_at",
+ "updated_at",
+ ]
+
+ def to_representation(self, instance):
+ data = super().to_representation(instance)
+ assignee_ids = self.initial_data.get("assignee_ids")
+ data["assignee_ids"] = assignee_ids if assignee_ids else []
+ label_ids = self.initial_data.get("label_ids")
+ data["label_ids"] = label_ids if label_ids else []
+ return data
+
+ def validate(self, attrs):
+ if (
+ attrs.get("start_date", None) is not None
+ and attrs.get("target_date", None) is not None
+ and attrs.get("start_date", None) > attrs.get("target_date", None)
+ ):
+ raise serializers.ValidationError("Start date cannot exceed target date")
+
+ # Validate description content for security
+ if "description_html" in attrs and attrs["description_html"]:
+ is_valid, error_msg, sanitized_html = validate_html_content(attrs["description_html"])
+ if not is_valid:
+ raise serializers.ValidationError({"error": "html content is not valid"})
+ # Update the attrs with sanitized HTML if available
+ if sanitized_html is not None:
+ attrs["description_html"] = sanitized_html
+
+ if "description_binary" in attrs and attrs["description_binary"]:
+ is_valid, error_msg = validate_binary_data(attrs["description_binary"])
+ if not is_valid:
+ raise serializers.ValidationError({"description_binary": "Invalid binary data"})
+
+ # Validate assignees are active project members (member role or above)
+ if attrs.get("assignee_ids", []):
+ attrs["assignee_ids"] = ProjectMember.objects.filter(
+ project_id=self.context["project_id"],
+ role__gte=ROLE.MEMBER.value,
+ is_active=True,
+ member_id__in=attrs["assignee_ids"],
+ ).values_list("member_id", flat=True)
+
+ # Validate labels are from project
+ if attrs.get("label_ids"):
+ label_ids = [label.id for label in attrs["label_ids"]]
+ attrs["label_ids"] = list(
+ Label.objects.filter(project_id=self.context.get("project_id"), id__in=label_ids).values_list(
+ "id", flat=True
+ )
+ )
+
+ # Check the state belongs to the project, else raise a validation error
+ if (
+ attrs.get("state")
+ and not State.objects.filter(
+ project_id=self.context.get("project_id"),
+ pk=attrs.get("state").id,
+ ).exists()
+ ):
+ raise serializers.ValidationError("State is not valid please pass a valid state_id")
+
+ # Check the parent issue belongs to the project (the client may pass any issue id)
+ if (
+ attrs.get("parent")
+ and not Issue.objects.filter(
+ project_id=self.context.get("project_id"),
+ pk=attrs.get("parent").id,
+ ).exists()
+ ):
+ raise serializers.ValidationError("Parent is not valid issue_id please pass a valid issue_id")
+
+ if (
+ attrs.get("estimate_point")
+ and not EstimatePoint.objects.filter(
+ project_id=self.context.get("project_id"),
+ pk=attrs.get("estimate_point").id,
+ ).exists()
+ ):
+ raise serializers.ValidationError("Estimate point is not valid please pass a valid estimate_point_id")
+
+ return attrs
+
+ def create(self, validated_data):
+ assignees = validated_data.pop("assignee_ids", None)
+ labels = validated_data.pop("label_ids", None)
+ modules = validated_data.pop("module_ids", None)
+ cycle_id = self.initial_data.get("cycle_id", None)
+ modules = self.initial_data.get("module_ids", None)
+
+ workspace_id = self.context["workspace_id"]
+ project_id = self.context["project_id"]
+
+ # Create Issue
+ issue = DraftIssue.objects.create(**validated_data, workspace_id=workspace_id, project_id=project_id)
+
+ # Issue Audit Users
+ created_by_id = issue.created_by_id
+ updated_by_id = issue.updated_by_id
+
+ if assignees is not None and len(assignees):
+ DraftIssueAssignee.objects.bulk_create(
+ [
+ DraftIssueAssignee(
+ assignee_id=assignee_id,
+ draft_issue=issue,
+ workspace_id=workspace_id,
+ project_id=project_id,
+ created_by_id=created_by_id,
+ updated_by_id=updated_by_id,
+ )
+ for assignee_id in assignees
+ ],
+ batch_size=10,
+ )
+
+ if labels is not None and len(labels):
+ DraftIssueLabel.objects.bulk_create(
+ [
+ DraftIssueLabel(
+ label_id=label_id,
+ draft_issue=issue,
+ project_id=project_id,
+ workspace_id=workspace_id,
+ created_by_id=created_by_id,
+ updated_by_id=updated_by_id,
+ )
+ for label_id in labels
+ ],
+ batch_size=10,
+ )
+
+ if cycle_id is not None:
+ DraftIssueCycle.objects.create(
+ cycle_id=cycle_id,
+ draft_issue=issue,
+ project_id=project_id,
+ workspace_id=workspace_id,
+ created_by_id=created_by_id,
+ updated_by_id=updated_by_id,
+ )
+
+ if modules is not None and len(modules):
+ DraftIssueModule.objects.bulk_create(
+ [
+ DraftIssueModule(
+ module_id=module_id,
+ draft_issue=issue,
+ project_id=project_id,
+ workspace_id=workspace_id,
+ created_by_id=created_by_id,
+ updated_by_id=updated_by_id,
+ )
+ for module_id in modules
+ ],
+ batch_size=10,
+ )
+
+ return issue
+
+ def update(self, instance, validated_data):
+ assignees = validated_data.pop("assignee_ids", None)
+ labels = validated_data.pop("label_ids", None)
+ cycle_id = self.context.get("cycle_id", None)
+ modules = self.initial_data.get("module_ids", None)
+
+ # Related models
+ workspace_id = instance.workspace_id
+ project_id = instance.project_id
+
+ created_by_id = instance.created_by_id
+ updated_by_id = instance.updated_by_id
+
+ if assignees is not None:
+ DraftIssueAssignee.objects.filter(draft_issue=instance).delete()
+ DraftIssueAssignee.objects.bulk_create(
+ [
+ DraftIssueAssignee(
+ assignee_id=assignee_id,
+ draft_issue=instance,
+ workspace_id=workspace_id,
+ project_id=project_id,
+ created_by_id=created_by_id,
+ updated_by_id=updated_by_id,
+ )
+ for assignee_id in assignees
+ ],
+ batch_size=10,
+ )
+
+ if labels is not None:
+ DraftIssueLabel.objects.filter(draft_issue=instance).delete()
+ DraftIssueLabel.objects.bulk_create(
+ [
+ DraftIssueLabel(
+ label_id=label,
+ draft_issue=instance,
+ workspace_id=workspace_id,
+ project_id=project_id,
+ created_by_id=created_by_id,
+ updated_by_id=updated_by_id,
+ )
+ for label in labels
+ ],
+ batch_size=10,
+ )
+
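+ # The view is assumed to pass cycle_id in the serializer context with the
+ # sentinel "not_provided" when the payload omits it, so an explicit null
+ # can still clear the existing cycle below.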
+ if cycle_id != "not_provided":
+ DraftIssueCycle.objects.filter(draft_issue=instance).delete()
+ if cycle_id:
+ DraftIssueCycle.objects.create(
+ cycle_id=cycle_id,
+ draft_issue=instance,
+ workspace_id=workspace_id,
+ project_id=project_id,
+ created_by_id=created_by_id,
+ updated_by_id=updated_by_id,
+ )
+
+ if modules is not None:
+ DraftIssueModule.objects.filter(draft_issue=instance).delete()
+ DraftIssueModule.objects.bulk_create(
+ [
+ DraftIssueModule(
+ module_id=module_id,
+ draft_issue=instance,
+ workspace_id=workspace_id,
+ project_id=project_id,
+ created_by_id=created_by_id,
+ updated_by_id=updated_by_id,
+ )
+ for module_id in modules
+ ],
+ batch_size=10,
+ )
+
+ # Touch updated_at even when only related models were updated
+ instance.updated_at = timezone.now()
+ return super().update(instance, validated_data)
+
+
+class DraftIssueSerializer(BaseSerializer):
+ # ids
+ cycle_id = serializers.PrimaryKeyRelatedField(read_only=True)
+ module_ids = serializers.ListField(child=serializers.UUIDField(), required=False)
+
+ # Many to many
+ label_ids = serializers.ListField(child=serializers.UUIDField(), required=False)
+ assignee_ids = serializers.ListField(child=serializers.UUIDField(), required=False)
+
+ class Meta:
+ model = DraftIssue
+ fields = [
+ "id",
+ "name",
+ "state_id",
+ "sort_order",
+ "completed_at",
+ "estimate_point",
+ "priority",
+ "start_date",
+ "target_date",
+ "project_id",
+ "parent_id",
+ "cycle_id",
+ "module_ids",
+ "label_ids",
+ "assignee_ids",
+ "created_at",
+ "updated_at",
+ "created_by",
+ "updated_by",
+ "type_id",
+ "description_html",
+ ]
+ read_only_fields = fields
+
+
+class DraftIssueDetailSerializer(DraftIssueSerializer):
+ description_html = serializers.CharField()
+
+ class Meta(DraftIssueSerializer.Meta):
+ fields = DraftIssueSerializer.Meta.fields + ["description_html"]
+ read_only_fields = fields
diff --git a/apps/api/plane/app/serializers/estimate.py b/apps/api/plane/app/serializers/estimate.py
new file mode 100644
index 00000000..b2d65ef8
--- /dev/null
+++ b/apps/api/plane/app/serializers/estimate.py
@@ -0,0 +1,46 @@
+# Module imports
+from .base import BaseSerializer
+
+from plane.db.models import Estimate, EstimatePoint
+
+from rest_framework import serializers
+
+
+class EstimateSerializer(BaseSerializer):
+ class Meta:
+ model = Estimate
+ fields = "__all__"
+ read_only_fields = ["workspace", "project"]
+
+
+class EstimatePointSerializer(BaseSerializer):
+ def validate(self, data):
+ if not data:
+ raise serializers.ValidationError("Estimate points are required")
+ value = data.get("value")
+ if value and len(value) > 20:
+ raise serializers.ValidationError("Value can't be more than 20 characters")
+ return data
+
+ class Meta:
+ model = EstimatePoint
+ fields = "__all__"
+ read_only_fields = ["estimate", "workspace", "project"]
+
+
+class EstimateReadSerializer(BaseSerializer):
+ points = EstimatePointSerializer(read_only=True, many=True)
+
+ class Meta:
+ model = Estimate
+ fields = "__all__"
+ read_only_fields = ["points", "name", "description"]
+
+
+class WorkspaceEstimateSerializer(BaseSerializer):
+ points = EstimatePointSerializer(read_only=True, many=True)
+
+ class Meta:
+ model = Estimate
+ fields = "__all__"
+ read_only_fields = ["points", "name", "description"]
diff --git a/apps/api/plane/app/serializers/exporter.py b/apps/api/plane/app/serializers/exporter.py
new file mode 100644
index 00000000..5c78cfa6
--- /dev/null
+++ b/apps/api/plane/app/serializers/exporter.py
@@ -0,0 +1,26 @@
+# Module imports
+from .base import BaseSerializer
+from plane.db.models import ExporterHistory
+from .user import UserLiteSerializer
+
+
+class ExporterHistorySerializer(BaseSerializer):
+ initiated_by_detail = UserLiteSerializer(source="initiated_by", read_only=True)
+
+ class Meta:
+ model = ExporterHistory
+ fields = [
+ "id",
+ "created_at",
+ "updated_at",
+ "project",
+ "provider",
+ "status",
+ "url",
+ "initiated_by",
+ "initiated_by_detail",
+ "token",
+ "created_by",
+ "updated_by",
+ ]
+ read_only_fields = fields
diff --git a/apps/api/plane/app/serializers/favorite.py b/apps/api/plane/app/serializers/favorite.py
new file mode 100644
index 00000000..246461f8
--- /dev/null
+++ b/apps/api/plane/app/serializers/favorite.py
@@ -0,0 +1,85 @@
+from rest_framework import serializers
+
+from plane.db.models import UserFavorite, Cycle, Module, Issue, IssueView, Page, Project
+
+
+class ProjectFavoriteLiteSerializer(serializers.ModelSerializer):
+ class Meta:
+ model = Project
+ fields = ["id", "name", "logo_props"]
+
+
+class PageFavoriteLiteSerializer(serializers.ModelSerializer):
+ project_id = serializers.SerializerMethodField()
+
+ class Meta:
+ model = Page
+ fields = ["id", "name", "logo_props", "project_id"]
+
+ def get_project_id(self, obj):
+ project = obj.projects.first() # This gets the first project related to the Page
+ return project.id if project else None
+
+
+class CycleFavoriteLiteSerializer(serializers.ModelSerializer):
+ class Meta:
+ model = Cycle
+ fields = ["id", "name", "logo_props", "project_id"]
+
+
+class ModuleFavoriteLiteSerializer(serializers.ModelSerializer):
+ class Meta:
+ model = Module
+ fields = ["id", "name", "logo_props", "project_id"]
+
+
+class ViewFavoriteSerializer(serializers.ModelSerializer):
+ class Meta:
+ model = IssueView
+ fields = ["id", "name", "logo_props", "project_id"]
+
+
+def get_entity_model_and_serializer(entity_type):
+ entity_map = {
+ "cycle": (Cycle, CycleFavoriteLiteSerializer),
+ "issue": (Issue, None),
+ "module": (Module, ModuleFavoriteLiteSerializer),
+ "view": (IssueView, ViewFavoriteSerializer),
+ "page": (Page, PageFavoriteLiteSerializer),
+ "project": (Project, ProjectFavoriteLiteSerializer),
+ "folder": (None, None),
+ }
+ return entity_map.get(entity_type, (None, None))
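+
+ # Example (illustrative):
+ # get_entity_model_and_serializer("cycle") -> (Cycle, CycleFavoriteLiteSerializer)
+ # get_entity_model_and_serializer("folder") -> (None, None)
+ # Unknown entity types also resolve to (None, None), so entity_data
+ # serializes to None for them.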
+
+
+class UserFavoriteSerializer(serializers.ModelSerializer):
+ entity_data = serializers.SerializerMethodField()
+
+ class Meta:
+ model = UserFavorite
+ fields = [
+ "id",
+ "entity_type",
+ "entity_identifier",
+ "entity_data",
+ "name",
+ "is_folder",
+ "sequence",
+ "parent",
+ "workspace_id",
+ "project_id",
+ ]
+ read_only_fields = ["workspace", "created_by", "updated_by"]
+
+ def get_entity_data(self, obj):
+ entity_type = obj.entity_type
+ entity_identifier = obj.entity_identifier
+
+ entity_model, entity_serializer = get_entity_model_and_serializer(entity_type)
+ if entity_model and entity_serializer:
+ try:
+ entity = entity_model.objects.get(pk=entity_identifier)
+ return entity_serializer(entity).data
+ except entity_model.DoesNotExist:
+ return None
+ return None
diff --git a/apps/api/plane/app/serializers/importer.py b/apps/api/plane/app/serializers/importer.py
new file mode 100644
index 00000000..8997f639
--- /dev/null
+++ b/apps/api/plane/app/serializers/importer.py
@@ -0,0 +1,16 @@
+# Module imports
+from .base import BaseSerializer
+from .user import UserLiteSerializer
+from .project import ProjectLiteSerializer
+from .workspace import WorkspaceLiteSerializer
+from plane.db.models import Importer
+
+
+class ImporterSerializer(BaseSerializer):
+ initiated_by_detail = UserLiteSerializer(source="initiated_by", read_only=True)
+ project_detail = ProjectLiteSerializer(source="project", read_only=True)
+ workspace_detail = WorkspaceLiteSerializer(source="workspace", read_only=True)
+
+ class Meta:
+ model = Importer
+ fields = "__all__"
diff --git a/apps/api/plane/app/serializers/intake.py b/apps/api/plane/app/serializers/intake.py
new file mode 100644
index 00000000..7bc25822
--- /dev/null
+++ b/apps/api/plane/app/serializers/intake.py
@@ -0,0 +1,90 @@
+# Third party frameworks
+from rest_framework import serializers
+
+# Module imports
+from .base import BaseSerializer
+from .issue import IssueIntakeSerializer, LabelLiteSerializer, IssueDetailSerializer
+from .project import ProjectLiteSerializer
+from .state import StateLiteSerializer
+from .user import UserLiteSerializer
+from plane.db.models import Intake, IntakeIssue, Issue
+
+
+class IntakeSerializer(BaseSerializer):
+ project_detail = ProjectLiteSerializer(source="project", read_only=True)
+ pending_issue_count = serializers.IntegerField(read_only=True)
+
+ class Meta:
+ model = Intake
+ fields = "__all__"
+ read_only_fields = ["project", "workspace"]
+
+
+class IntakeIssueSerializer(BaseSerializer):
+ issue = IssueIntakeSerializer(read_only=True)
+
+ class Meta:
+ model = IntakeIssue
+ fields = [
+ "id",
+ "status",
+ "duplicate_to",
+ "snoozed_till",
+ "source",
+ "issue",
+ "created_by",
+ ]
+ read_only_fields = ["project", "workspace"]
+
+ def to_representation(self, instance):
+ # Pass the annotated fields to the Issue instance if they exist
+ if hasattr(instance, "label_ids"):
+ instance.issue.label_ids = instance.label_ids
+ return super().to_representation(instance)
+
+
+class IntakeIssueDetailSerializer(BaseSerializer):
+ issue = IssueDetailSerializer(read_only=True)
+ duplicate_issue_detail = IssueIntakeSerializer(read_only=True, source="duplicate_to")
+
+ class Meta:
+ model = IntakeIssue
+ fields = [
+ "id",
+ "status",
+ "duplicate_to",
+ "snoozed_till",
+ "duplicate_issue_detail",
+ "source",
+ "issue",
+ ]
+ read_only_fields = ["project", "workspace"]
+
+ def to_representation(self, instance):
+ # Pass the annotated fields to the Issue instance if they exist
+ if hasattr(instance, "assignee_ids"):
+ instance.issue.assignee_ids = instance.assignee_ids
+ if hasattr(instance, "label_ids"):
+ instance.issue.label_ids = instance.label_ids
+
+ return super().to_representation(instance)
+
+
+class IntakeIssueLiteSerializer(BaseSerializer):
+ class Meta:
+ model = IntakeIssue
+ fields = ["id", "status", "duplicate_to", "snoozed_till", "source"]
+ read_only_fields = fields
+
+
+class IssueStateIntakeSerializer(BaseSerializer):
+ state_detail = StateLiteSerializer(read_only=True, source="state")
+ project_detail = ProjectLiteSerializer(read_only=True, source="project")
+ label_details = LabelLiteSerializer(read_only=True, source="labels", many=True)
+ assignee_details = UserLiteSerializer(read_only=True, source="assignees", many=True)
+ sub_issues_count = serializers.IntegerField(read_only=True)
+ issue_intake = IntakeIssueLiteSerializer(read_only=True, many=True)
+
+ class Meta:
+ model = Issue
+ fields = "__all__"
diff --git a/apps/api/plane/app/serializers/issue.py b/apps/api/plane/app/serializers/issue.py
new file mode 100644
index 00000000..583b62fd
--- /dev/null
+++ b/apps/api/plane/app/serializers/issue.py
@@ -0,0 +1,1001 @@
+# Django imports
+from django.utils import timezone
+from django.core.validators import URLValidator
+from django.core.exceptions import ValidationError
+from django.db import IntegrityError
+
+# Third Party imports
+from rest_framework import serializers
+
+# Module imports
+from .base import BaseSerializer, DynamicBaseSerializer
+from .user import UserLiteSerializer
+from .state import StateLiteSerializer
+from .project import ProjectLiteSerializer
+from .workspace import WorkspaceLiteSerializer
+from plane.db.models import (
+ User,
+ Issue,
+ IssueActivity,
+ IssueComment,
+ IssueUserProperty,
+ IssueAssignee,
+ IssueSubscriber,
+ IssueLabel,
+ Label,
+ CycleIssue,
+ Cycle,
+ Module,
+ ModuleIssue,
+ IssueLink,
+ FileAsset,
+ IssueReaction,
+ CommentReaction,
+ IssueVote,
+ IssueRelation,
+ State,
+ IssueVersion,
+ IssueDescriptionVersion,
+ ProjectMember,
+ EstimatePoint,
+)
+from plane.utils.content_validator import (
+ validate_html_content,
+ validate_binary_data,
+)
+
+
+class IssueFlatSerializer(BaseSerializer):
+ ## Contain only flat fields
+
+ class Meta:
+ model = Issue
+ fields = [
+ "id",
+ "name",
+ "description",
+ "description_html",
+ "priority",
+ "start_date",
+ "target_date",
+ "sequence_id",
+ "sort_order",
+ "is_draft",
+ ]
+
+
+class IssueProjectLiteSerializer(BaseSerializer):
+ project_detail = ProjectLiteSerializer(source="project", read_only=True)
+
+ class Meta:
+ model = Issue
+ fields = ["id", "project_detail", "name", "sequence_id"]
+ read_only_fields = fields
+
+
+ ## TODO: Find a better way to write this serializer
+ ## and a better approach to saving the many-to-many relations
+class IssueCreateSerializer(BaseSerializer):
+ # ids
+ state_id = serializers.PrimaryKeyRelatedField(
+ source="state", queryset=State.objects.all(), required=False, allow_null=True
+ )
+ parent_id = serializers.PrimaryKeyRelatedField(
+ source="parent", queryset=Issue.objects.all(), required=False, allow_null=True
+ )
+ label_ids = serializers.ListField(
+ child=serializers.PrimaryKeyRelatedField(queryset=Label.objects.all()),
+ write_only=True,
+ required=False,
+ )
+ assignee_ids = serializers.ListField(
+ child=serializers.PrimaryKeyRelatedField(queryset=User.objects.all()),
+ write_only=True,
+ required=False,
+ )
+ project_id = serializers.UUIDField(source="project.id", read_only=True)
+ workspace_id = serializers.UUIDField(source="workspace.id", read_only=True)
+
+ class Meta:
+ model = Issue
+ fields = "__all__"
+ read_only_fields = [
+ "workspace",
+ "project",
+ "created_by",
+ "updated_by",
+ "created_at",
+ "updated_at",
+ ]
+
+ def to_representation(self, instance):
+ data = super().to_representation(instance)
+ assignee_ids = self.initial_data.get("assignee_ids")
+ data["assignee_ids"] = assignee_ids if assignee_ids else []
+ label_ids = self.initial_data.get("label_ids")
+ data["label_ids"] = label_ids if label_ids else []
+ return data
+
+ def validate(self, attrs):
+ if (
+ attrs.get("start_date", None) is not None
+ and attrs.get("target_date", None) is not None
+ and attrs.get("start_date", None) > attrs.get("target_date", None)
+ ):
+ raise serializers.ValidationError("Start date cannot exceed target date")
+
+ # Validate description content for security
+ if "description_html" in attrs and attrs["description_html"]:
+ is_valid, error_msg, sanitized_html = validate_html_content(attrs["description_html"])
+ if not is_valid:
+ raise serializers.ValidationError({"error": "html content is not valid"})
+ # Update the attrs with sanitized HTML if available
+ if sanitized_html is not None:
+ attrs["description_html"] = sanitized_html
+
+ if "description_binary" in attrs and attrs["description_binary"]:
+ is_valid, error_msg = validate_binary_data(attrs["description_binary"])
+ if not is_valid:
+ raise serializers.ValidationError({"description_binary": "Invalid binary data"})
+
+ # Validate assignees are active project members (role >= 15, i.e. member or admin)
+ if attrs.get("assignee_ids", []):
+ attrs["assignee_ids"] = ProjectMember.objects.filter(
+ project_id=self.context["project_id"],
+ role__gte=15,
+ is_active=True,
+ member_id__in=attrs["assignee_ids"],
+ ).values_list("member_id", flat=True)
+
+ # Validate labels are from project
+ if attrs.get("label_ids"):
+ label_ids = [label.id for label in attrs["label_ids"]]
+ attrs["label_ids"] = list(
+ Label.objects.filter(
+ project_id=self.context.get("project_id"),
+ id__in=label_ids,
+ ).values_list("id", flat=True)
+ )
+
+ # Check the state belongs to the project, else raise a validation error
+ if (
+ attrs.get("state")
+ and not State.objects.filter(
+ project_id=self.context.get("project_id"),
+ pk=attrs.get("state").id,
+ ).exists()
+ ):
+ raise serializers.ValidationError("State is not valid please pass a valid state_id")
+
+ # Check the parent issue belongs to the project (the client may pass any issue id)
+ if (
+ attrs.get("parent")
+ and not Issue.objects.filter(
+ project_id=self.context.get("project_id"),
+ pk=attrs.get("parent").id,
+ ).exists()
+ ):
+ raise serializers.ValidationError("Parent is not valid issue_id please pass a valid issue_id")
+
+ if (
+ attrs.get("estimate_point")
+ and not EstimatePoint.objects.filter(
+ project_id=self.context.get("project_id"),
+ pk=attrs.get("estimate_point").id,
+ ).exists()
+ ):
+ raise serializers.ValidationError("Estimate point is not valid please pass a valid estimate_point_id")
+
+ return attrs
+
+ def create(self, validated_data):
+ assignees = validated_data.pop("assignee_ids", None)
+ labels = validated_data.pop("label_ids", None)
+
+ project_id = self.context["project_id"]
+ workspace_id = self.context["workspace_id"]
+ default_assignee_id = self.context["default_assignee_id"]
+
+ # Create Issue
+ issue = Issue.objects.create(**validated_data, project_id=project_id)
+
+ # Issue Audit Users
+ created_by_id = issue.created_by_id
+ updated_by_id = issue.updated_by_id
+
+ if assignees is not None and len(assignees):
+ try:
+ IssueAssignee.objects.bulk_create(
+ [
+ IssueAssignee(
+ assignee_id=assignee_id,
+ issue=issue,
+ project_id=project_id,
+ workspace_id=workspace_id,
+ created_by_id=created_by_id,
+ updated_by_id=updated_by_id,
+ )
+ for assignee_id in assignees
+ ],
+ batch_size=10,
+ )
+ except IntegrityError:
+ pass
+ else:
+ # Otherwise fall back to the project's default assignee, if they are a valid member
+ if (
+ default_assignee_id is not None
+ and ProjectMember.objects.filter(
+ member_id=default_assignee_id,
+ project_id=project_id,
+ role__gte=15,
+ is_active=True,
+ ).exists()
+ ):
+ try:
+ IssueAssignee.objects.create(
+ assignee_id=default_assignee_id,
+ issue=issue,
+ project_id=project_id,
+ workspace_id=workspace_id,
+ created_by_id=created_by_id,
+ updated_by_id=updated_by_id,
+ )
+ except IntegrityError:
+ pass
+
+ if labels is not None and len(labels):
+ try:
+ IssueLabel.objects.bulk_create(
+ [
+ IssueLabel(
+ label_id=label_id,
+ issue=issue,
+ project_id=project_id,
+ workspace_id=workspace_id,
+ created_by_id=created_by_id,
+ updated_by_id=updated_by_id,
+ )
+ for label_id in labels
+ ],
+ batch_size=10,
+ )
+ except IntegrityError:
+ pass
+
+ return issue
+
+ def update(self, instance, validated_data):
+ assignees = validated_data.pop("assignee_ids", None)
+ labels = validated_data.pop("label_ids", None)
+
+ # Related models
+ project_id = instance.project_id
+ workspace_id = instance.workspace_id
+ created_by_id = instance.created_by_id
+ updated_by_id = instance.updated_by_id
+
+ if assignees is not None:
+ IssueAssignee.objects.filter(issue=instance).delete()
+ try:
+ IssueAssignee.objects.bulk_create(
+ [
+ IssueAssignee(
+ assignee_id=assignee_id,
+ issue=instance,
+ project_id=project_id,
+ workspace_id=workspace_id,
+ created_by_id=created_by_id,
+ updated_by_id=updated_by_id,
+ )
+ for assignee_id in assignees
+ ],
+ batch_size=10,
+ ignore_conflicts=True,
+ )
+ except IntegrityError:
+ pass
+
+ if labels is not None:
+ IssueLabel.objects.filter(issue=instance).delete()
+ try:
+ IssueLabel.objects.bulk_create(
+ [
+ IssueLabel(
+ label_id=label_id,
+ issue=instance,
+ project_id=project_id,
+ workspace_id=workspace_id,
+ created_by_id=created_by_id,
+ updated_by_id=updated_by_id,
+ )
+ for label_id in labels
+ ],
+ batch_size=10,
+ ignore_conflicts=True,
+ )
+ except IntegrityError:
+ pass
+
+ # Touch updated_at even when only related models were updated
+ instance.updated_at = timezone.now()
+ return super().update(instance, validated_data)
+
+
+class IssueActivitySerializer(BaseSerializer):
+ actor_detail = UserLiteSerializer(read_only=True, source="actor")
+ issue_detail = IssueFlatSerializer(read_only=True, source="issue")
+ project_detail = ProjectLiteSerializer(read_only=True, source="project")
+ workspace_detail = WorkspaceLiteSerializer(read_only=True, source="workspace")
+ source_data = serializers.SerializerMethodField()
+
+ def get_source_data(self, obj):
+ if hasattr(obj, "issue") and hasattr(obj.issue, "source_data") and obj.issue.source_data:
+ return {
+ "source": obj.issue.source_data[0].source,
+ "source_email": obj.issue.source_data[0].source_email,
+ "extra": obj.issue.source_data[0].extra,
+ }
+ return None
+
+ class Meta:
+ model = IssueActivity
+ fields = "__all__"
+
+
+class IssueUserPropertySerializer(BaseSerializer):
+ class Meta:
+ model = IssueUserProperty
+ fields = "__all__"
+ read_only_fields = ["user", "workspace", "project"]
+
+
+class LabelSerializer(BaseSerializer):
+ class Meta:
+ model = Label
+ fields = [
+ "parent",
+ "name",
+ "color",
+ "id",
+ "project_id",
+ "workspace_id",
+ "sort_order",
+ ]
+ read_only_fields = ["workspace", "project"]
+
+
+class LabelLiteSerializer(BaseSerializer):
+ class Meta:
+ model = Label
+ fields = ["id", "name", "color"]
+
+
+class IssueLabelSerializer(BaseSerializer):
+ class Meta:
+ model = IssueLabel
+ fields = "__all__"
+ read_only_fields = ["workspace", "project"]
+
+
+class IssueRelationSerializer(BaseSerializer):
+ id = serializers.UUIDField(source="related_issue.id", read_only=True)
+ project_id = serializers.PrimaryKeyRelatedField(source="related_issue.project_id", read_only=True)
+ sequence_id = serializers.IntegerField(source="related_issue.sequence_id", read_only=True)
+ name = serializers.CharField(source="related_issue.name", read_only=True)
+ relation_type = serializers.CharField(read_only=True)
+ state_id = serializers.UUIDField(source="related_issue.state.id", read_only=True)
+ priority = serializers.CharField(source="related_issue.priority", read_only=True)
+ assignee_ids = serializers.ListField(
+ child=serializers.PrimaryKeyRelatedField(queryset=User.objects.all()),
+ write_only=True,
+ required=False,
+ )
+
+ class Meta:
+ model = IssueRelation
+ fields = [
+ "id",
+ "project_id",
+ "sequence_id",
+ "relation_type",
+ "name",
+ "state_id",
+ "priority",
+ "assignee_ids",
+ "created_by",
+ "created_at",
+ "updated_at",
+ "updated_by",
+ ]
+ read_only_fields = [
+ "workspace",
+ "project",
+ "created_by",
+ "created_at",
+ "updated_by",
+ "updated_at",
+ ]
+
+
+class RelatedIssueSerializer(BaseSerializer):
+ id = serializers.UUIDField(source="issue.id", read_only=True)
+ project_id = serializers.PrimaryKeyRelatedField(source="issue.project_id", read_only=True)
+ sequence_id = serializers.IntegerField(source="issue.sequence_id", read_only=True)
+ name = serializers.CharField(source="issue.name", read_only=True)
+ relation_type = serializers.CharField(read_only=True)
+ state_id = serializers.UUIDField(source="issue.state.id", read_only=True)
+ priority = serializers.CharField(source="issue.priority", read_only=True)
+ assignee_ids = serializers.ListField(
+ child=serializers.PrimaryKeyRelatedField(queryset=User.objects.all()),
+ write_only=True,
+ required=False,
+ )
+
+ class Meta:
+ model = IssueRelation
+ fields = [
+ "id",
+ "project_id",
+ "sequence_id",
+ "relation_type",
+ "name",
+ "state_id",
+ "priority",
+ "assignee_ids",
+ "created_by",
+ "created_at",
+ "updated_by",
+ "updated_at",
+ ]
+ read_only_fields = [
+ "workspace",
+ "project",
+ "created_by",
+ "created_at",
+ "updated_by",
+ "updated_at",
+ ]
+
+
+class IssueAssigneeSerializer(BaseSerializer):
+ assignee_details = UserLiteSerializer(read_only=True, source="assignee")
+
+ class Meta:
+ model = IssueAssignee
+ fields = "__all__"
+
+
+class CycleBaseSerializer(BaseSerializer):
+ class Meta:
+ model = Cycle
+ fields = "__all__"
+ read_only_fields = [
+ "workspace",
+ "project",
+ "created_by",
+ "updated_by",
+ "created_at",
+ "updated_at",
+ ]
+
+
+class IssueCycleDetailSerializer(BaseSerializer):
+ cycle_detail = CycleBaseSerializer(read_only=True, source="cycle")
+
+ class Meta:
+ model = CycleIssue
+ fields = "__all__"
+ read_only_fields = [
+ "workspace",
+ "project",
+ "created_by",
+ "updated_by",
+ "created_at",
+ "updated_at",
+ ]
+
+
+class ModuleBaseSerializer(BaseSerializer):
+ class Meta:
+ model = Module
+ fields = "__all__"
+ read_only_fields = [
+ "workspace",
+ "project",
+ "created_by",
+ "updated_by",
+ "created_at",
+ "updated_at",
+ ]
+
+
+class IssueModuleDetailSerializer(BaseSerializer):
+ module_detail = ModuleBaseSerializer(read_only=True, source="module")
+
+ class Meta:
+ model = ModuleIssue
+ fields = "__all__"
+ read_only_fields = [
+ "workspace",
+ "project",
+ "created_by",
+ "updated_by",
+ "created_at",
+ "updated_at",
+ ]
+
+
+class IssueLinkSerializer(BaseSerializer):
+ created_by_detail = UserLiteSerializer(read_only=True, source="created_by")
+
+ class Meta:
+ model = IssueLink
+ fields = "__all__"
+ read_only_fields = [
+ "workspace",
+ "project",
+ "created_by",
+ "updated_by",
+ "created_at",
+ "updated_at",
+ "issue",
+ ]
+
+ def to_internal_value(self, data):
+ # Modify the URL before validation by prepending http:// if the scheme is missing
+ url = data.get("url", "")
+ if url and not url.startswith(("http://", "https://")):
+ data["url"] = "http://" + url
+
+ return super().to_internal_value(data)
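+
+ # e.g. {"url": "example.com"} is normalized to "http://example.com" before
+ # validation; URLs that already carry a scheme pass through unchanged.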
+
+ def validate_url(self, value):
+ # Use Django's built-in URLValidator for validation
+ url_validator = URLValidator()
+ try:
+ url_validator(value)
+ except ValidationError:
+ raise serializers.ValidationError({"error": "Invalid URL format."})
+
+ return value
+
+ # Validate that the URL does not already exist for this issue
+ def create(self, validated_data):
+ if IssueLink.objects.filter(url=validated_data.get("url"), issue_id=validated_data.get("issue_id")).exists():
+ raise serializers.ValidationError({"error": "URL already exists for this Issue"})
+ return IssueLink.objects.create(**validated_data)
+
+ def update(self, instance, validated_data):
+ if (
+ IssueLink.objects.filter(url=validated_data.get("url"), issue_id=instance.issue_id)
+ .exclude(pk=instance.id)
+ .exists()
+ ):
+ raise serializers.ValidationError({"error": "URL already exists for this Issue"})
+
+ return super().update(instance, validated_data)
+
+
+class IssueLinkLiteSerializer(BaseSerializer):
+ class Meta:
+ model = IssueLink
+ fields = [
+ "id",
+ "issue_id",
+ "title",
+ "url",
+ "metadata",
+ "created_by_id",
+ "created_at",
+ ]
+ read_only_fields = fields
+
+
+class IssueAttachmentSerializer(BaseSerializer):
+ asset_url = serializers.CharField(read_only=True)
+
+ class Meta:
+ model = FileAsset
+ fields = "__all__"
+ read_only_fields = [
+ "created_by",
+ "updated_by",
+ "created_at",
+ "updated_at",
+ "workspace",
+ "project",
+ "issue",
+ ]
+
+
+class IssueAttachmentLiteSerializer(DynamicBaseSerializer):
+ class Meta:
+ model = FileAsset
+ fields = [
+ "id",
+ "asset",
+ "attributes",
+ # "issue_id",
+ "created_by",
+ "updated_at",
+ "updated_by",
+ "asset_url",
+ ]
+ read_only_fields = fields
+
+
+class IssueReactionSerializer(BaseSerializer):
+ actor_detail = UserLiteSerializer(read_only=True, source="actor")
+
+ class Meta:
+ model = IssueReaction
+ fields = "__all__"
+ read_only_fields = ["workspace", "project", "issue", "actor", "deleted_at"]
+
+
+class IssueReactionLiteSerializer(DynamicBaseSerializer):
+ display_name = serializers.CharField(source="actor.display_name", read_only=True)
+
+ class Meta:
+ model = IssueReaction
+ fields = ["id", "actor", "issue", "reaction", "display_name"]
+
+
+class CommentReactionSerializer(BaseSerializer):
+ display_name = serializers.CharField(source="actor.display_name", read_only=True)
+
+ class Meta:
+ model = CommentReaction
+ fields = [
+ "id",
+ "actor",
+ "comment",
+ "reaction",
+ "display_name",
+ "deleted_at",
+ "workspace",
+ "project",
+ "created_at",
+ "updated_at",
+ "created_by",
+ "updated_by",
+ ]
+ read_only_fields = ["workspace", "project", "comment", "actor", "deleted_at", "created_by", "updated_by"]
+
+
+class IssueVoteSerializer(BaseSerializer):
+ actor_detail = UserLiteSerializer(read_only=True, source="actor")
+
+ class Meta:
+ model = IssueVote
+ fields = ["issue", "vote", "workspace", "project", "actor", "actor_detail"]
+ read_only_fields = fields
+
+
+class IssueCommentSerializer(BaseSerializer):
+ actor_detail = UserLiteSerializer(read_only=True, source="actor")
+ issue_detail = IssueFlatSerializer(read_only=True, source="issue")
+ project_detail = ProjectLiteSerializer(read_only=True, source="project")
+ workspace_detail = WorkspaceLiteSerializer(read_only=True, source="workspace")
+ comment_reactions = CommentReactionSerializer(read_only=True, many=True)
+ is_member = serializers.BooleanField(read_only=True)
+
+ class Meta:
+ model = IssueComment
+ fields = "__all__"
+ read_only_fields = [
+ "workspace",
+ "project",
+ "issue",
+ "created_by",
+ "updated_by",
+ "created_at",
+ "updated_at",
+ ]
+
+
+class IssueStateFlatSerializer(BaseSerializer):
+ state_detail = StateLiteSerializer(read_only=True, source="state")
+ project_detail = ProjectLiteSerializer(read_only=True, source="project")
+
+ class Meta:
+ model = Issue
+ fields = ["id", "sequence_id", "name", "state_detail", "project_detail"]
+
+
+# Issue Serializer with state details
+class IssueStateSerializer(DynamicBaseSerializer):
+ label_details = LabelLiteSerializer(read_only=True, source="labels", many=True)
+ state_detail = StateLiteSerializer(read_only=True, source="state")
+ project_detail = ProjectLiteSerializer(read_only=True, source="project")
+ assignee_details = UserLiteSerializer(read_only=True, source="assignees", many=True)
+ sub_issues_count = serializers.IntegerField(read_only=True)
+ attachment_count = serializers.IntegerField(read_only=True)
+ link_count = serializers.IntegerField(read_only=True)
+
+ class Meta:
+ model = Issue
+ fields = "__all__"
+
+
+class IssueIntakeSerializer(DynamicBaseSerializer):
+ label_ids = serializers.ListField(child=serializers.UUIDField(), required=False)
+
+ class Meta:
+ model = Issue
+ fields = [
+ "id",
+ "name",
+ "priority",
+ "sequence_id",
+ "project_id",
+ "created_at",
+ "label_ids",
+ "created_by",
+ ]
+ read_only_fields = fields
+
+
+class IssueSerializer(DynamicBaseSerializer):
+ # ids
+ cycle_id = serializers.PrimaryKeyRelatedField(read_only=True)
+ module_ids = serializers.ListField(child=serializers.UUIDField(), required=False)
+
+ # Many to many
+ label_ids = serializers.ListField(child=serializers.UUIDField(), required=False)
+ assignee_ids = serializers.ListField(child=serializers.UUIDField(), required=False)
+
+ # Count items
+ sub_issues_count = serializers.IntegerField(read_only=True)
+ attachment_count = serializers.IntegerField(read_only=True)
+ link_count = serializers.IntegerField(read_only=True)
+
+ class Meta:
+ model = Issue
+ fields = [
+ "id",
+ "name",
+ "state_id",
+ "sort_order",
+ "completed_at",
+ "estimate_point",
+ "priority",
+ "start_date",
+ "target_date",
+ "sequence_id",
+ "project_id",
+ "parent_id",
+ "cycle_id",
+ "module_ids",
+ "label_ids",
+ "assignee_ids",
+ "sub_issues_count",
+ "created_at",
+ "updated_at",
+ "created_by",
+ "updated_by",
+ "attachment_count",
+ "link_count",
+ "is_draft",
+ "archived_at",
+ ]
+ read_only_fields = fields
+
+
+class IssueListDetailSerializer(serializers.Serializer):
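+    """Hand-rolled read serializer for issue list/detail payloads.
+
+    Builds the response dict directly in ``to_representation`` instead of
+    declaring DRF fields, which keeps large list responses cheap.
+    """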
+ def __init__(self, *args, **kwargs):
+ # Extract expand parameter and store it as instance variable
+ self.expand = kwargs.pop("expand", []) or []
+ # Extract fields parameter and store it as instance variable
+ self.fields = kwargs.pop("fields", []) or []
+ super().__init__(*args, **kwargs)
+
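+    # The helpers below read reverse relations directly; they assume the view
+    # prefetched issue_module, label_issue and issue_assignee, otherwise each
+    # issue costs extra queries.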
+ def get_module_ids(self, obj):
+ return [module.module_id for module in obj.issue_module.all()]
+
+ def get_label_ids(self, obj):
+ return [label.label_id for label in obj.label_issue.all()]
+
+ def get_assignee_ids(self, obj):
+ return [assignee.assignee_id for assignee in obj.issue_assignee.all()]
+
+ def to_representation(self, instance):
+ data = {
+ # Basic fields
+ "id": instance.id,
+ "name": instance.name,
+ "state_id": instance.state_id,
+ "sort_order": instance.sort_order,
+ "completed_at": instance.completed_at,
+ "estimate_point": instance.estimate_point_id,
+ "priority": instance.priority,
+ "start_date": instance.start_date,
+ "target_date": instance.target_date,
+ "sequence_id": instance.sequence_id,
+ "project_id": instance.project_id,
+ "parent_id": instance.parent_id,
+ "created_at": instance.created_at,
+ "updated_at": instance.updated_at,
+ "created_by": instance.created_by_id,
+ "updated_by": instance.updated_by_id,
+ "is_draft": instance.is_draft,
+ "archived_at": instance.archived_at,
+ # Computed fields
+ "cycle_id": instance.cycle_id,
+ "module_ids": self.get_module_ids(instance),
+ "label_ids": self.get_label_ids(instance),
+ "assignee_ids": self.get_assignee_ids(instance),
+ "sub_issues_count": instance.sub_issues_count,
+ "attachment_count": instance.attachment_count,
+ "link_count": instance.link_count,
+ }
+
+ # Handle expanded fields only when requested - using direct field access
+ if self.expand:
+ if "issue_relation" in self.expand:
+ relations = []
+ for relation in instance.issue_relation.all():
+ related_issue = relation.related_issue
+ # If the related issue is deleted, skip it
+ if not related_issue:
+ continue
+ # Add the related issue to the relations list
+ relations.append(
+ {
+ "id": related_issue.id,
+ "project_id": related_issue.project_id,
+ "sequence_id": related_issue.sequence_id,
+ "name": related_issue.name,
+ "relation_type": relation.relation_type,
+ "state_id": related_issue.state_id,
+ "priority": related_issue.priority,
+ "created_by": related_issue.created_by_id,
+ "created_at": related_issue.created_at,
+ "updated_at": related_issue.updated_at,
+ "updated_by": related_issue.updated_by_id,
+ }
+ )
+ data["issue_relation"] = relations
+
+ if "issue_related" in self.expand:
+ related = []
+ for relation in instance.issue_related.all():
+ issue = relation.issue
+                    # If the source issue is deleted, skip it
+                    if not issue:
+                        continue
+                    # Add the source issue to the related list
+ related.append(
+ {
+ "id": issue.id,
+ "project_id": issue.project_id,
+ "sequence_id": issue.sequence_id,
+ "name": issue.name,
+ "relation_type": relation.relation_type,
+ "state_id": issue.state_id,
+ "priority": issue.priority,
+ "created_by": issue.created_by_id,
+ "created_at": issue.created_at,
+ "updated_at": issue.updated_at,
+ "updated_by": issue.updated_by_id,
+ }
+ )
+ data["issue_related"] = related
+
+ return data
+
+
+class IssueLiteSerializer(DynamicBaseSerializer):
+ class Meta:
+ model = Issue
+ fields = ["id", "sequence_id", "project_id"]
+ read_only_fields = fields
+
+
+class IssueDetailSerializer(IssueSerializer):
+ description_html = serializers.CharField()
+ is_subscribed = serializers.BooleanField(read_only=True)
+ is_intake = serializers.BooleanField(read_only=True)
+
+ class Meta(IssueSerializer.Meta):
+ fields = IssueSerializer.Meta.fields + [
+ "description_html",
+ "is_subscribed",
+ "is_intake",
+ ]
+ read_only_fields = fields
+
+
+class IssuePublicSerializer(BaseSerializer):
+ project_detail = ProjectLiteSerializer(read_only=True, source="project")
+ state_detail = StateLiteSerializer(read_only=True, source="state")
+ reactions = IssueReactionSerializer(read_only=True, many=True, source="issue_reactions")
+ votes = IssueVoteSerializer(read_only=True, many=True)
+
+ class Meta:
+ model = Issue
+ fields = [
+ "id",
+ "name",
+ "description_html",
+ "sequence_id",
+ "state",
+ "state_detail",
+ "project",
+ "project_detail",
+ "workspace",
+ "priority",
+ "target_date",
+ "reactions",
+ "votes",
+ ]
+ read_only_fields = fields
+
+
+class IssueSubscriberSerializer(BaseSerializer):
+ class Meta:
+ model = IssueSubscriber
+ fields = "__all__"
+ read_only_fields = ["workspace", "project", "issue"]
+
+
+class IssueVersionDetailSerializer(BaseSerializer):
+ class Meta:
+ model = IssueVersion
+ fields = [
+ "id",
+ "workspace",
+ "project",
+ "issue",
+ "parent",
+ "state",
+ "estimate_point",
+ "name",
+ "priority",
+ "start_date",
+ "target_date",
+ "assignees",
+ "sequence_id",
+ "labels",
+ "sort_order",
+ "completed_at",
+ "archived_at",
+ "is_draft",
+ "external_source",
+ "external_id",
+ "type",
+ "cycle",
+ "modules",
+ "meta",
+ "name",
+ "last_saved_at",
+ "owned_by",
+ "created_at",
+ "updated_at",
+ "created_by",
+ "updated_by",
+ ]
+ read_only_fields = ["workspace", "project", "issue"]
+
+
+class IssueDescriptionVersionDetailSerializer(BaseSerializer):
+ class Meta:
+ model = IssueDescriptionVersion
+ fields = [
+ "id",
+ "workspace",
+ "project",
+ "issue",
+ "description_binary",
+ "description_html",
+ "description_stripped",
+ "description_json",
+ "last_saved_at",
+ "owned_by",
+ "created_at",
+ "updated_at",
+ "created_by",
+ "updated_by",
+ ]
+ read_only_fields = ["workspace", "project", "issue"]
diff --git a/apps/api/plane/app/serializers/module.py b/apps/api/plane/app/serializers/module.py
new file mode 100644
index 00000000..b5e2953c
--- /dev/null
+++ b/apps/api/plane/app/serializers/module.py
@@ -0,0 +1,276 @@
+# Third Party imports
+from rest_framework import serializers
+
+# Module imports
+from .base import BaseSerializer, DynamicBaseSerializer
+from .project import ProjectLiteSerializer
+from .issue import IssueFlatSerializer
+
+# Django imports
+from django.core.validators import URLValidator
+from django.core.exceptions import ValidationError
+
+from plane.db.models import (
+ User,
+ Module,
+ ModuleMember,
+ ModuleIssue,
+ ModuleLink,
+ ModuleUserProperties,
+)
+
+
+class ModuleWriteSerializer(BaseSerializer):
+ lead_id = serializers.PrimaryKeyRelatedField(
+ source="lead", queryset=User.objects.all(), required=False, allow_null=True
+ )
+ member_ids = serializers.ListField(
+ child=serializers.PrimaryKeyRelatedField(queryset=User.objects.all()),
+ write_only=True,
+ required=False,
+ )
+
+ class Meta:
+ model = Module
+ fields = "__all__"
+ read_only_fields = [
+ "workspace",
+ "project",
+ "created_by",
+ "updated_by",
+ "created_at",
+ "updated_at",
+ "archived_at",
+ "deleted_at",
+ ]
+
+ def to_representation(self, instance):
+ data = super().to_representation(instance)
+ data["member_ids"] = [str(member.id) for member in instance.members.all()]
+ return data
+
+ def validate(self, data):
+ if (
+ data.get("start_date", None) is not None
+ and data.get("target_date", None) is not None
+ and data.get("start_date", None) > data.get("target_date", None)
+ ):
+ raise serializers.ValidationError("Start date cannot exceed target date")
+ return data
+
+ def create(self, validated_data):
+ members = validated_data.pop("member_ids", None)
+ project = self.context["project"]
+
+ module_name = validated_data.get("name")
+ if module_name:
+            # Look up the module name in the module table for that project
+ if Module.objects.filter(name=module_name, project=project).exists():
+ raise serializers.ValidationError({"error": "Module with this name already exists"})
+
+ module = Module.objects.create(**validated_data, project=project)
+ if members is not None:
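+            # Bulk-insert memberships; ignore_conflicts skips rows that hit a
+            # unique conflict (e.g. members already attached to the module).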
+ ModuleMember.objects.bulk_create(
+ [
+ ModuleMember(
+ module=module,
+ member=member,
+ project=project,
+ workspace=project.workspace,
+ created_by=module.created_by,
+ updated_by=module.updated_by,
+ )
+ for member in members
+ ],
+ batch_size=10,
+ ignore_conflicts=True,
+ )
+
+ return module
+
+ def update(self, instance, validated_data):
+ members = validated_data.pop("member_ids", None)
+ module_name = validated_data.get("name")
+ if module_name:
+            # Look up the module name in the module table for that project
+ if Module.objects.filter(name=module_name, project=instance.project).exclude(id=instance.id).exists():
+ raise serializers.ValidationError({"error": "Module with this name already exists"})
+
+ if members is not None:
+ ModuleMember.objects.filter(module=instance).delete()
+ ModuleMember.objects.bulk_create(
+ [
+ ModuleMember(
+ module=instance,
+ member=member,
+ project=instance.project,
+ workspace=instance.project.workspace,
+ created_by=instance.created_by,
+ updated_by=instance.updated_by,
+ )
+ for member in members
+ ],
+ batch_size=10,
+ ignore_conflicts=True,
+ )
+
+ return super().update(instance, validated_data)
+
+
+class ModuleFlatSerializer(BaseSerializer):
+ class Meta:
+ model = Module
+ fields = "__all__"
+ read_only_fields = [
+ "workspace",
+ "project",
+ "created_by",
+ "updated_by",
+ "created_at",
+ "updated_at",
+ ]
+
+
+class ModuleIssueSerializer(BaseSerializer):
+ module_detail = ModuleFlatSerializer(read_only=True, source="module")
+    issue_detail = IssueFlatSerializer(read_only=True, source="issue")
+ sub_issues_count = serializers.IntegerField(read_only=True)
+
+ class Meta:
+ model = ModuleIssue
+ fields = "__all__"
+ read_only_fields = [
+ "workspace",
+ "project",
+ "created_by",
+ "updated_by",
+ "created_at",
+ "updated_at",
+ "module",
+ ]
+
+
+class ModuleLinkSerializer(BaseSerializer):
+ class Meta:
+ model = ModuleLink
+ fields = "__all__"
+ read_only_fields = [
+ "workspace",
+ "project",
+ "created_by",
+ "updated_by",
+ "created_at",
+ "updated_at",
+ "module",
+ ]
+
+ def to_internal_value(self, data):
+ # Modify the URL before validation by appending http:// if missing
+ url = data.get("url", "")
+ if url and not url.startswith(("http://", "https://")):
+ data["url"] = "http://" + url
+
+ return super().to_internal_value(data)
+
+ def validate_url(self, value):
+ # Use Django's built-in URLValidator for validation
+ url_validator = URLValidator()
+ try:
+ url_validator(value)
+ except ValidationError:
+ raise serializers.ValidationError({"error": "Invalid URL format."})
+
+ return value
+
+ def create(self, validated_data):
+ validated_data["url"] = self.validate_url(validated_data.get("url"))
+ if ModuleLink.objects.filter(url=validated_data.get("url"), module_id=validated_data.get("module_id")).exists():
+ raise serializers.ValidationError({"error": "URL already exists."})
+ return super().create(validated_data)
+
+ def update(self, instance, validated_data):
+ validated_data["url"] = self.validate_url(validated_data.get("url"))
+ if (
+ ModuleLink.objects.filter(url=validated_data.get("url"), module_id=instance.module_id)
+ .exclude(pk=instance.id)
+ .exists()
+ ):
+            raise serializers.ValidationError({"error": "URL already exists for this module"})
+
+ return super().update(instance, validated_data)
+
+
+class ModuleSerializer(DynamicBaseSerializer):
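+    # The counts and estimate-point totals below are expected to be annotated
+    # onto the queryset by the calling view.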
+ member_ids = serializers.ListField(child=serializers.UUIDField(), required=False, allow_null=True)
+ is_favorite = serializers.BooleanField(read_only=True)
+ total_issues = serializers.IntegerField(read_only=True)
+ cancelled_issues = serializers.IntegerField(read_only=True)
+ completed_issues = serializers.IntegerField(read_only=True)
+ started_issues = serializers.IntegerField(read_only=True)
+ unstarted_issues = serializers.IntegerField(read_only=True)
+ backlog_issues = serializers.IntegerField(read_only=True)
+ total_estimate_points = serializers.FloatField(read_only=True)
+ completed_estimate_points = serializers.FloatField(read_only=True)
+
+ class Meta:
+ model = Module
+ fields = [
+ # Required fields
+ "id",
+ "workspace_id",
+ "project_id",
+ # Model fields
+ "name",
+ "description",
+ "description_text",
+ "description_html",
+ "start_date",
+ "target_date",
+ "status",
+ "lead_id",
+ "member_ids",
+ "view_props",
+ "sort_order",
+ "external_source",
+ "external_id",
+ "logo_props",
+            # Computed fields
+ "total_estimate_points",
+ "completed_estimate_points",
+ "is_favorite",
+ "total_issues",
+ "cancelled_issues",
+ "completed_issues",
+ "started_issues",
+ "unstarted_issues",
+ "backlog_issues",
+ "created_at",
+ "updated_at",
+ "archived_at",
+ ]
+ read_only_fields = fields
+
+
+class ModuleDetailSerializer(ModuleSerializer):
+ link_module = ModuleLinkSerializer(read_only=True, many=True)
+ sub_issues = serializers.IntegerField(read_only=True)
+ backlog_estimate_points = serializers.FloatField(read_only=True)
+ unstarted_estimate_points = serializers.FloatField(read_only=True)
+ started_estimate_points = serializers.FloatField(read_only=True)
+ cancelled_estimate_points = serializers.FloatField(read_only=True)
+
+ class Meta(ModuleSerializer.Meta):
+ fields = ModuleSerializer.Meta.fields + [
+ "link_module",
+ "sub_issues",
+ "backlog_estimate_points",
+ "unstarted_estimate_points",
+ "started_estimate_points",
+ "cancelled_estimate_points",
+ ]
+
+
+class ModuleUserPropertiesSerializer(BaseSerializer):
+ class Meta:
+ model = ModuleUserProperties
+ fields = "__all__"
+ read_only_fields = ["workspace", "project", "module", "user"]
diff --git a/apps/api/plane/app/serializers/notification.py b/apps/api/plane/app/serializers/notification.py
new file mode 100644
index 00000000..58007ec2
--- /dev/null
+++ b/apps/api/plane/app/serializers/notification.py
@@ -0,0 +1,24 @@
+# Module imports
+from .base import BaseSerializer
+from .user import UserLiteSerializer
+from plane.db.models import Notification, UserNotificationPreference
+
+# Third Party imports
+from rest_framework import serializers
+
+
+class NotificationSerializer(BaseSerializer):
+ triggered_by_details = UserLiteSerializer(read_only=True, source="triggered_by")
+ is_inbox_issue = serializers.BooleanField(read_only=True)
+ is_intake_issue = serializers.BooleanField(read_only=True)
+ is_mentioned_notification = serializers.BooleanField(read_only=True)
+
+ class Meta:
+ model = Notification
+ fields = "__all__"
+
+
+class UserNotificationPreferenceSerializer(BaseSerializer):
+ class Meta:
+ model = UserNotificationPreference
+ fields = "__all__"
diff --git a/apps/api/plane/app/serializers/page.py b/apps/api/plane/app/serializers/page.py
new file mode 100644
index 00000000..3aecbafd
--- /dev/null
+++ b/apps/api/plane/app/serializers/page.py
@@ -0,0 +1,221 @@
+# Third party imports
+from rest_framework import serializers
+import base64
+
+# Module imports
+from .base import BaseSerializer
+from plane.utils.content_validator import (
+ validate_binary_data,
+ validate_html_content,
+)
+from plane.db.models import (
+ Page,
+ PageLabel,
+ Label,
+ ProjectPage,
+ Project,
+ PageVersion,
+)
+
+
+class PageSerializer(BaseSerializer):
+ is_favorite = serializers.BooleanField(read_only=True)
+ labels = serializers.ListField(
+ child=serializers.PrimaryKeyRelatedField(queryset=Label.objects.all()),
+ write_only=True,
+ required=False,
+ )
+ # Many to many
+ label_ids = serializers.ListField(child=serializers.UUIDField(), required=False)
+ project_ids = serializers.ListField(child=serializers.UUIDField(), required=False)
+
+ class Meta:
+ model = Page
+ fields = [
+ "id",
+ "name",
+ "owned_by",
+ "access",
+ "color",
+ "labels",
+ "parent",
+ "is_favorite",
+ "is_locked",
+ "archived_at",
+ "workspace",
+ "created_at",
+ "updated_at",
+ "created_by",
+ "updated_by",
+ "view_props",
+ "logo_props",
+ "label_ids",
+ "project_ids",
+ ]
+ read_only_fields = ["workspace", "owned_by"]
+
+ def create(self, validated_data):
+ labels = validated_data.pop("labels", None)
+ project_id = self.context["project_id"]
+ owned_by_id = self.context["owned_by_id"]
+ description = self.context["description"]
+ description_binary = self.context["description_binary"]
+ description_html = self.context["description_html"]
+
+ # Get the workspace id from the project
+ project = Project.objects.get(pk=project_id)
+
+ # Create the page
+ page = Page.objects.create(
+ **validated_data,
+ description=description,
+ description_binary=description_binary,
+ description_html=description_html,
+ owned_by_id=owned_by_id,
+ workspace_id=project.workspace_id,
+ )
+
+ # Create the project page
+ ProjectPage.objects.create(
+ workspace_id=page.workspace_id,
+ project_id=project_id,
+ page_id=page.id,
+ created_by_id=page.created_by_id,
+ updated_by_id=page.updated_by_id,
+ )
+
+ # Create page labels
+ if labels is not None:
+ PageLabel.objects.bulk_create(
+ [
+ PageLabel(
+ label=label,
+ page=page,
+ workspace_id=page.workspace_id,
+ created_by_id=page.created_by_id,
+ updated_by_id=page.updated_by_id,
+ )
+ for label in labels
+ ],
+ batch_size=10,
+ )
+ return page
+
+ def update(self, instance, validated_data):
+ labels = validated_data.pop("labels", None)
+ if labels is not None:
+ PageLabel.objects.filter(page=instance).delete()
+ PageLabel.objects.bulk_create(
+ [
+ PageLabel(
+ label=label,
+ page=instance,
+ workspace_id=instance.workspace_id,
+ created_by_id=instance.created_by_id,
+ updated_by_id=instance.updated_by_id,
+ )
+ for label in labels
+ ],
+ batch_size=10,
+ )
+
+ return super().update(instance, validated_data)
+
+
+class PageDetailSerializer(PageSerializer):
+ description_html = serializers.CharField()
+
+ class Meta(PageSerializer.Meta):
+ fields = PageSerializer.Meta.fields + ["description_html"]
+
+
+class PageVersionSerializer(BaseSerializer):
+ class Meta:
+ model = PageVersion
+ fields = [
+ "id",
+ "workspace",
+ "page",
+ "last_saved_at",
+ "owned_by",
+ "created_at",
+ "updated_at",
+ "created_by",
+ "updated_by",
+ ]
+ read_only_fields = ["workspace", "page"]
+
+
+class PageVersionDetailSerializer(BaseSerializer):
+ class Meta:
+ model = PageVersion
+ fields = [
+ "id",
+ "workspace",
+ "page",
+ "last_saved_at",
+ "description_binary",
+ "description_html",
+ "description_json",
+ "owned_by",
+ "created_at",
+ "updated_at",
+ "created_by",
+ "updated_by",
+ ]
+ read_only_fields = ["workspace", "page"]
+
+
+class PageBinaryUpdateSerializer(serializers.Serializer):
+ """Serializer for updating page binary description with validation"""
+
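+    # Illustrative payload (values are hypothetical):
+    #   {"description_binary": "<base64-encoded update>",
+    #    "description_html": "<p>Hello</p>", "description": {...}}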
+ description_binary = serializers.CharField(required=False, allow_blank=True)
+ description_html = serializers.CharField(required=False, allow_blank=True)
+ description = serializers.JSONField(required=False, allow_null=True)
+
+ def validate_description_binary(self, value):
+ """Validate the base64-encoded binary data"""
+ if not value:
+ return value
+
+ try:
+ # Decode the base64 data
+ binary_data = base64.b64decode(value)
+
+ # Validate the binary data
+ is_valid, error_message = validate_binary_data(binary_data)
+ if not is_valid:
+ raise serializers.ValidationError(f"Invalid binary data: {error_message}")
+
+ return binary_data
+ except Exception as e:
+ if isinstance(e, serializers.ValidationError):
+ raise
+ raise serializers.ValidationError("Failed to decode base64 data")
+
+ def validate_description_html(self, value):
+ """Validate the HTML content"""
+ if not value:
+ return value
+
+ # Use the validation function from utils
+ is_valid, error_message, sanitized_html = validate_html_content(value)
+ if not is_valid:
+ raise serializers.ValidationError(error_message)
+
+ # Return sanitized HTML if available, otherwise return original
+ return sanitized_html if sanitized_html is not None else value
+
+ def update(self, instance, validated_data):
+ """Update the page instance with validated data"""
+ if "description_binary" in validated_data:
+ instance.description_binary = validated_data.get("description_binary")
+
+ if "description_html" in validated_data:
+ instance.description_html = validated_data.get("description_html")
+
+ if "description" in validated_data:
+ instance.description = validated_data.get("description")
+
+ instance.save()
+ return instance
diff --git a/apps/api/plane/app/serializers/project.py b/apps/api/plane/app/serializers/project.py
new file mode 100644
index 00000000..c709093a
--- /dev/null
+++ b/apps/api/plane/app/serializers/project.py
@@ -0,0 +1,203 @@
+# Third party imports
+from rest_framework import serializers
+
+# Module imports
+from .base import BaseSerializer, DynamicBaseSerializer
+from plane.app.serializers.workspace import WorkspaceLiteSerializer
+from plane.app.serializers.user import UserLiteSerializer, UserAdminLiteSerializer
+from plane.db.models import (
+ Project,
+ ProjectMember,
+ ProjectMemberInvite,
+ ProjectIdentifier,
+ DeployBoard,
+ ProjectPublicMember,
+)
+from plane.utils.content_validator import (
+ validate_html_content,
+)
+
+
+class ProjectSerializer(BaseSerializer):
+ workspace_detail = WorkspaceLiteSerializer(source="workspace", read_only=True)
+ inbox_view = serializers.BooleanField(read_only=True, source="intake_view")
+
+ class Meta:
+ model = Project
+ fields = "__all__"
+ read_only_fields = ["workspace", "deleted_at"]
+
+ def validate_name(self, name):
+ project_id = self.instance.id if self.instance else None
+ workspace_id = self.context["workspace_id"]
+
+ project = Project.objects.filter(name=name, workspace_id=workspace_id)
+
+ if project_id:
+ project = project.exclude(id=project_id)
+
+ if project.exists():
+ raise serializers.ValidationError(
+ detail="PROJECT_NAME_ALREADY_EXIST",
+ )
+
+ return name
+
+ def validate_identifier(self, identifier):
+ project_id = self.instance.id if self.instance else None
+ workspace_id = self.context["workspace_id"]
+
+ project = Project.objects.filter(identifier=identifier, workspace_id=workspace_id)
+
+ if project_id:
+ project = project.exclude(id=project_id)
+
+ if project.exists():
+ raise serializers.ValidationError(
+ detail="PROJECT_IDENTIFIER_ALREADY_EXIST",
+ )
+
+ return identifier
+
+ def validate(self, data):
+ # Validate description content for security
+ if "description_html" in data and data["description_html"]:
+ is_valid, error_msg, sanitized_html = validate_html_content(str(data["description_html"]))
+ # Update the data with sanitized HTML if available
+ if sanitized_html is not None:
+ data["description_html"] = sanitized_html
+
+ if not is_valid:
+ raise serializers.ValidationError({"error": "html content is not valid"})
+
+ return data
+
+ def create(self, validated_data):
+ workspace_id = self.context["workspace_id"]
+
+ project = Project.objects.create(**validated_data, workspace_id=workspace_id)
+
+ ProjectIdentifier.objects.create(name=project.identifier, project=project, workspace_id=workspace_id)
+
+ return project
+
+
+class ProjectLiteSerializer(BaseSerializer):
+ class Meta:
+ model = Project
+ fields = [
+ "id",
+ "identifier",
+ "name",
+ "cover_image",
+ "cover_image_url",
+ "logo_props",
+ "description",
+ ]
+ read_only_fields = fields
+
+
+class ProjectListSerializer(DynamicBaseSerializer):
+ is_favorite = serializers.BooleanField(read_only=True)
+ sort_order = serializers.FloatField(read_only=True)
+ member_role = serializers.IntegerField(read_only=True)
+ anchor = serializers.CharField(read_only=True)
+ members = serializers.SerializerMethodField()
+ cover_image_url = serializers.CharField(read_only=True)
+ inbox_view = serializers.BooleanField(read_only=True, source="intake_view")
+
+ def get_members(self, obj):
+ project_members = getattr(obj, "members_list", None)
+ if project_members is not None:
+            # Return the ids of active, non-bot members
+ return [member.member_id for member in project_members if member.is_active and not member.member.is_bot]
+ return []
+
+ class Meta:
+ model = Project
+ fields = "__all__"
+
+
+class ProjectDetailSerializer(BaseSerializer):
+ # workspace = WorkSpaceSerializer(read_only=True)
+ default_assignee = UserLiteSerializer(read_only=True)
+ project_lead = UserLiteSerializer(read_only=True)
+ is_favorite = serializers.BooleanField(read_only=True)
+ sort_order = serializers.FloatField(read_only=True)
+ member_role = serializers.IntegerField(read_only=True)
+ anchor = serializers.CharField(read_only=True)
+
+ class Meta:
+ model = Project
+ fields = "__all__"
+
+
+class ProjectMemberSerializer(BaseSerializer):
+ workspace = WorkspaceLiteSerializer(read_only=True)
+ project = ProjectLiteSerializer(read_only=True)
+ member = UserLiteSerializer(read_only=True)
+
+ class Meta:
+ model = ProjectMember
+ fields = "__all__"
+
+
+class ProjectMemberAdminSerializer(BaseSerializer):
+ workspace = WorkspaceLiteSerializer(read_only=True)
+ project = ProjectLiteSerializer(read_only=True)
+ member = UserAdminLiteSerializer(read_only=True)
+
+ class Meta:
+ model = ProjectMember
+ fields = "__all__"
+
+
+class ProjectMemberRoleSerializer(DynamicBaseSerializer):
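+    # original_role is a read-only mirror of the persisted role value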
+ original_role = serializers.IntegerField(source="role", read_only=True)
+
+ class Meta:
+ model = ProjectMember
+ fields = ("id", "role", "member", "project", "original_role", "created_at")
+ read_only_fields = ["original_role", "created_at"]
+
+
+class ProjectMemberInviteSerializer(BaseSerializer):
+ project = ProjectLiteSerializer(read_only=True)
+ workspace = WorkspaceLiteSerializer(read_only=True)
+
+ class Meta:
+ model = ProjectMemberInvite
+ fields = "__all__"
+
+
+class ProjectIdentifierSerializer(BaseSerializer):
+ class Meta:
+ model = ProjectIdentifier
+ fields = "__all__"
+
+
+class ProjectMemberLiteSerializer(BaseSerializer):
+ member = UserLiteSerializer(read_only=True)
+ is_subscribed = serializers.BooleanField(read_only=True)
+
+ class Meta:
+ model = ProjectMember
+ fields = ["member", "id", "is_subscribed"]
+ read_only_fields = fields
+
+
+class DeployBoardSerializer(BaseSerializer):
+ project_details = ProjectLiteSerializer(read_only=True, source="project")
+ workspace_detail = WorkspaceLiteSerializer(read_only=True, source="workspace")
+
+ class Meta:
+ model = DeployBoard
+ fields = "__all__"
+ read_only_fields = ["workspace", "project", "anchor"]
+
+
+class ProjectPublicMemberSerializer(BaseSerializer):
+ class Meta:
+ model = ProjectPublicMember
+ fields = "__all__"
+ read_only_fields = ["workspace", "project", "member"]
diff --git a/apps/api/plane/app/serializers/state.py b/apps/api/plane/app/serializers/state.py
new file mode 100644
index 00000000..29d8cf30
--- /dev/null
+++ b/apps/api/plane/app/serializers/state.py
@@ -0,0 +1,32 @@
+# Module imports
+from .base import BaseSerializer
+from rest_framework import serializers
+
+from plane.db.models import State
+
+
+class StateSerializer(BaseSerializer):
+ order = serializers.FloatField(required=False)
+
+ class Meta:
+ model = State
+ fields = [
+ "id",
+ "project_id",
+ "workspace_id",
+ "name",
+ "color",
+ "group",
+ "default",
+ "description",
+ "sequence",
+ "order",
+ ]
+ read_only_fields = ["workspace", "project"]
+
+
+class StateLiteSerializer(BaseSerializer):
+ class Meta:
+ model = State
+ fields = ["id", "name", "color", "group"]
+ read_only_fields = fields
diff --git a/apps/api/plane/app/serializers/user.py b/apps/api/plane/app/serializers/user.py
new file mode 100644
index 00000000..670667a8
--- /dev/null
+++ b/apps/api/plane/app/serializers/user.py
@@ -0,0 +1,207 @@
+# Third party imports
+from rest_framework import serializers
+
+# Module import
+from plane.db.models import Account, Profile, User, Workspace, WorkspaceMemberInvite
+from plane.utils.url import contains_url
+
+from .base import BaseSerializer
+
+
+class UserSerializer(BaseSerializer):
+ def validate_first_name(self, value):
+ if contains_url(value):
+ raise serializers.ValidationError("First name cannot contain a URL.")
+ return value
+
+ def validate_last_name(self, value):
+ if contains_url(value):
+ raise serializers.ValidationError("Last name cannot contain a URL.")
+ return value
+
+ class Meta:
+ model = User
+ # Exclude password field from the serializer
+ fields = [field.name for field in User._meta.fields if field.name != "password"]
+ # Make all system fields and email read only
+ read_only_fields = [
+ "id",
+ "username",
+ "mobile_number",
+ "email",
+ "token",
+ "created_at",
+ "updated_at",
+ "is_superuser",
+ "is_staff",
+ "is_managed",
+ "last_active",
+ "last_login_time",
+ "last_logout_time",
+ "last_login_ip",
+ "last_logout_ip",
+ "last_login_uagent",
+ "last_location",
+ "last_login_medium",
+ "created_location",
+ "is_bot",
+ "is_password_autoset",
+ "is_email_verified",
+ "is_active",
+ "token_updated_at",
+ ]
+
+    # If the user has already filled in a first or last name, they are considered onboarded
+ def get_is_onboarded(self, obj):
+ return bool(obj.first_name) or bool(obj.last_name)
+
+
+class UserMeSerializer(BaseSerializer):
+ class Meta:
+ model = User
+ fields = [
+ "id",
+ "avatar",
+ "cover_image",
+ "avatar_url",
+ "cover_image_url",
+ "date_joined",
+ "display_name",
+ "email",
+ "first_name",
+ "last_name",
+ "is_active",
+ "is_bot",
+ "is_email_verified",
+ "user_timezone",
+ "username",
+ "is_password_autoset",
+ "is_email_verified",
+ "last_login_medium",
+ ]
+ read_only_fields = fields
+
+
+class UserMeSettingsSerializer(BaseSerializer):
+ workspace = serializers.SerializerMethodField()
+
+ class Meta:
+ model = User
+ fields = ["id", "email", "workspace"]
+ read_only_fields = fields
+
+ def get_workspace(self, obj):
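+        # Prefer the user's last visited workspace while they are still an
+        # active member of it; otherwise fall back to the oldest workspace
+        # with an active membership.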
+ workspace_invites = WorkspaceMemberInvite.objects.filter(email=obj.email).count()
+
+ # profile
+ profile = Profile.objects.get(user=obj)
+ if (
+ profile.last_workspace_id is not None
+ and Workspace.objects.filter(
+ pk=profile.last_workspace_id,
+ workspace_member__member=obj.id,
+ workspace_member__is_active=True,
+ ).exists()
+ ):
+ workspace = Workspace.objects.filter(
+ pk=profile.last_workspace_id,
+ workspace_member__member=obj.id,
+ workspace_member__is_active=True,
+ ).first()
+ logo_asset_url = workspace.logo_asset.asset_url if workspace.logo_asset is not None else ""
+ return {
+ "last_workspace_id": profile.last_workspace_id,
+ "last_workspace_slug": (workspace.slug if workspace is not None else ""),
+ "last_workspace_name": (workspace.name if workspace is not None else ""),
+ "last_workspace_logo": (logo_asset_url),
+ "fallback_workspace_id": profile.last_workspace_id,
+ "fallback_workspace_slug": (workspace.slug if workspace is not None else ""),
+ "invites": workspace_invites,
+ }
+ else:
+ fallback_workspace = (
+ Workspace.objects.filter(workspace_member__member_id=obj.id, workspace_member__is_active=True)
+ .order_by("created_at")
+ .first()
+ )
+ return {
+ "last_workspace_id": None,
+ "last_workspace_slug": None,
+ "fallback_workspace_id": (fallback_workspace.id if fallback_workspace is not None else None),
+ "fallback_workspace_slug": (fallback_workspace.slug if fallback_workspace is not None else None),
+ "invites": workspace_invites,
+ }
+
+
+class UserLiteSerializer(BaseSerializer):
+ class Meta:
+ model = User
+ fields = [
+ "id",
+ "first_name",
+ "last_name",
+ "avatar",
+ "avatar_url",
+ "is_bot",
+ "display_name",
+ ]
+ read_only_fields = ["id", "is_bot"]
+
+
+class UserAdminLiteSerializer(BaseSerializer):
+ class Meta:
+ model = User
+ fields = [
+ "id",
+ "first_name",
+ "last_name",
+ "avatar",
+ "avatar_url",
+ "is_bot",
+ "display_name",
+ "email",
+ "last_login_medium",
+ ]
+ read_only_fields = ["id", "is_bot"]
+
+
+class ChangePasswordSerializer(serializers.Serializer):
+    """
+    Serializer for the password change endpoint.
+    """
+
+    model = User
+
+ old_password = serializers.CharField(required=True)
+ new_password = serializers.CharField(required=True, min_length=8)
+ confirm_password = serializers.CharField(required=True, min_length=8)
+
+ def validate(self, data):
+ if data.get("old_password") == data.get("new_password"):
+            raise serializers.ValidationError({"error": "New password cannot be the same as the old password."})
+
+ if data.get("new_password") != data.get("confirm_password"):
+            raise serializers.ValidationError({"error": "Confirm password must match the new password."})
+
+ return data
+
+
+class ResetPasswordSerializer(serializers.Serializer):
+ """
+ Serializer for password change endpoint.
+ """
+
+ new_password = serializers.CharField(required=True, min_length=8)
+
+
+class ProfileSerializer(BaseSerializer):
+ class Meta:
+ model = Profile
+ fields = "__all__"
+ read_only_fields = ["user"]
+
+
+class AccountSerializer(BaseSerializer):
+ class Meta:
+ model = Account
+ fields = "__all__"
+ read_only_fields = ["user"]
diff --git a/apps/api/plane/app/serializers/view.py b/apps/api/plane/app/serializers/view.py
new file mode 100644
index 00000000..bf7ff972
--- /dev/null
+++ b/apps/api/plane/app/serializers/view.py
@@ -0,0 +1,82 @@
+# Third party imports
+from rest_framework import serializers
+
+# Module imports
+from .base import DynamicBaseSerializer
+from plane.db.models import IssueView
+from plane.utils.issue_filters import issue_filters
+
+
+class ViewIssueListSerializer(serializers.Serializer):
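+    """Read-only issue payload for view listings, built by hand in
+    to_representation (same pattern as IssueListDetailSerializer)."""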
+ def get_assignee_ids(self, instance):
+ return [assignee.assignee_id for assignee in instance.issue_assignee.all()]
+
+ def get_label_ids(self, instance):
+ return [label.label_id for label in instance.label_issue.all()]
+
+ def get_module_ids(self, instance):
+ return [module.module_id for module in instance.issue_module.all()]
+
+ def to_representation(self, instance):
+ data = {
+ "id": instance.id,
+ "name": instance.name,
+ "state_id": instance.state_id,
+ "sort_order": instance.sort_order,
+ "completed_at": instance.completed_at,
+ "estimate_point": instance.estimate_point_id,
+ "priority": instance.priority,
+ "start_date": instance.start_date,
+ "target_date": instance.target_date,
+ "sequence_id": instance.sequence_id,
+ "project_id": instance.project_id,
+ "parent_id": instance.parent_id,
+ "cycle_id": instance.cycle_id,
+ "sub_issues_count": instance.sub_issues_count,
+ "created_at": instance.created_at,
+ "updated_at": instance.updated_at,
+ "created_by": instance.created_by_id,
+ "updated_by": instance.updated_by_id,
+ "attachment_count": instance.attachment_count,
+ "link_count": instance.link_count,
+ "is_draft": instance.is_draft,
+ "archived_at": instance.archived_at,
+ "state__group": instance.state.group if instance.state else None,
+ "assignee_ids": self.get_assignee_ids(instance),
+ "label_ids": self.get_label_ids(instance),
+ "module_ids": self.get_module_ids(instance),
+ }
+ return data
+
+
+class IssueViewSerializer(DynamicBaseSerializer):
+ is_favorite = serializers.BooleanField(read_only=True)
+
+ class Meta:
+ model = IssueView
+ fields = "__all__"
+ read_only_fields = [
+ "workspace",
+ "project",
+ "query",
+ "owned_by",
+ "access",
+ "is_locked",
+ ]
+
+ def create(self, validated_data):
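+        # Convert the raw "filters" payload into the canonical query structure
+        # before persisting the view.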
+ query_params = validated_data.get("filters", {})
+ if bool(query_params):
+ validated_data["query"] = issue_filters(query_params, "POST")
+ else:
+ validated_data["query"] = {}
+ return IssueView.objects.create(**validated_data)
+
+ def update(self, instance, validated_data):
+ query_params = validated_data.get("filters", {})
+        if bool(query_params):
+            validated_data["query"] = issue_filters(query_params, "PATCH")
+        else:
+            validated_data["query"] = {}
+ return super().update(instance, validated_data)
diff --git a/apps/api/plane/app/serializers/webhook.py b/apps/api/plane/app/serializers/webhook.py
new file mode 100644
index 00000000..ef193e24
--- /dev/null
+++ b/apps/api/plane/app/serializers/webhook.py
@@ -0,0 +1,98 @@
+# Python imports
+import socket
+import ipaddress
+from urllib.parse import urlparse
+
+# Third party imports
+from rest_framework import serializers
+
+# Module imports
+from .base import DynamicBaseSerializer
+from plane.db.models import Webhook, WebhookLog
+from plane.db.models.webhook import validate_domain, validate_schema
+
+
+class WebhookSerializer(DynamicBaseSerializer):
+ url = serializers.URLField(validators=[validate_schema, validate_domain])
+
+ def create(self, validated_data):
+ url = validated_data.get("url", None)
+
+ # Extract the hostname from the URL
+ hostname = urlparse(url).hostname
+ if not hostname:
+ raise serializers.ValidationError({"url": "Invalid URL: No hostname found."})
+
+ # Resolve the hostname to IP addresses
+ try:
+ ip_addresses = socket.getaddrinfo(hostname, None)
+ except socket.gaierror:
+ raise serializers.ValidationError({"url": "Hostname could not be resolved."})
+
+ if not ip_addresses:
+ raise serializers.ValidationError({"url": "No IP addresses found for the hostname."})
+
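+        # getaddrinfo returns (family, type, proto, canonname, sockaddr)
+        # tuples; sockaddr[0] is the resolved IP address string.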
+ for addr in ip_addresses:
+ ip = ipaddress.ip_address(addr[4][0])
+ if ip.is_loopback:
+ raise serializers.ValidationError({"url": "URL resolves to a blocked IP address."})
+
+ # Additional validation for multiple request domains and their subdomains
+ request = self.context.get("request")
+ disallowed_domains = ["plane.so"] # Add your disallowed domains here
+ if request:
+ request_host = request.get_host().split(":")[0] # Remove port if present
+ disallowed_domains.append(request_host)
+
+ # Check if hostname is a subdomain or exact match of any disallowed domain
+ if any(hostname == domain or hostname.endswith("." + domain) for domain in disallowed_domains):
+ raise serializers.ValidationError({"url": "URL domain or its subdomain is not allowed."})
+
+ return Webhook.objects.create(**validated_data)
+
+ def update(self, instance, validated_data):
+ url = validated_data.get("url", None)
+ if url:
+ # Extract the hostname from the URL
+ hostname = urlparse(url).hostname
+ if not hostname:
+ raise serializers.ValidationError({"url": "Invalid URL: No hostname found."})
+
+ # Resolve the hostname to IP addresses
+ try:
+ ip_addresses = socket.getaddrinfo(hostname, None)
+ except socket.gaierror:
+ raise serializers.ValidationError({"url": "Hostname could not be resolved."})
+
+ if not ip_addresses:
+ raise serializers.ValidationError({"url": "No IP addresses found for the hostname."})
+
+ for addr in ip_addresses:
+ ip = ipaddress.ip_address(addr[4][0])
+ if ip.is_loopback:
+ raise serializers.ValidationError({"url": "URL resolves to a blocked IP address."})
+
+ # Additional validation for multiple request domains and their subdomains
+ request = self.context.get("request")
+ disallowed_domains = ["plane.so"] # Add your disallowed domains here
+ if request:
+ request_host = request.get_host().split(":")[0] # Remove port if present
+ disallowed_domains.append(request_host)
+
+ # Check if hostname is a subdomain or exact match of any disallowed domain
+ if any(hostname == domain or hostname.endswith("." + domain) for domain in disallowed_domains):
+ raise serializers.ValidationError({"url": "URL domain or its subdomain is not allowed."})
+
+ return super().update(instance, validated_data)
+
+ class Meta:
+ model = Webhook
+ fields = "__all__"
+ read_only_fields = ["workspace", "secret_key", "deleted_at"]
+
+
+class WebhookLogSerializer(DynamicBaseSerializer):
+ class Meta:
+ model = WebhookLog
+ fields = "__all__"
+ read_only_fields = ["workspace", "webhook"]
diff --git a/apps/api/plane/app/serializers/workspace.py b/apps/api/plane/app/serializers/workspace.py
new file mode 100644
index 00000000..ba59f242
--- /dev/null
+++ b/apps/api/plane/app/serializers/workspace.py
@@ -0,0 +1,329 @@
+# Third party imports
+from rest_framework import serializers
+
+# Module imports
+from .base import BaseSerializer, DynamicBaseSerializer
+from .user import UserLiteSerializer, UserAdminLiteSerializer
+
+
+from plane.db.models import (
+ Workspace,
+ WorkspaceMember,
+ WorkspaceMemberInvite,
+ WorkspaceTheme,
+ WorkspaceUserProperties,
+ WorkspaceUserLink,
+ UserRecentVisit,
+ Issue,
+ Page,
+ Project,
+ ProjectMember,
+ WorkspaceHomePreference,
+ Sticky,
+ WorkspaceUserPreference,
+)
+from plane.utils.constants import RESTRICTED_WORKSPACE_SLUGS
+from plane.utils.url import contains_url
+from plane.utils.content_validator import (
+ validate_html_content,
+ validate_binary_data,
+)
+
+# Django imports
+from django.core.validators import URLValidator
+from django.core.exceptions import ValidationError
+import re
+
+
+class WorkSpaceSerializer(DynamicBaseSerializer):
+ total_members = serializers.IntegerField(read_only=True)
+ logo_url = serializers.CharField(read_only=True)
+ role = serializers.IntegerField(read_only=True)
+
+ def validate_name(self, value):
+ # Check if the name contains a URL
+ if contains_url(value):
+ raise serializers.ValidationError("Name must not contain URLs")
+ return value
+
+ def validate_slug(self, value):
+ # Check if the slug is restricted
+ if value in RESTRICTED_WORKSPACE_SLUGS:
+ raise serializers.ValidationError("Slug is not valid")
+ # Slug should only contain alphanumeric characters, hyphens, and underscores
+ if not re.match(r"^[a-zA-Z0-9_-]+$", value):
+ raise serializers.ValidationError(
+ "Slug can only contain letters, numbers, hyphens (-), and underscores (_)"
+ )
+ return value
+
+ class Meta:
+ model = Workspace
+ fields = "__all__"
+ read_only_fields = [
+ "id",
+ "created_by",
+ "updated_by",
+ "created_at",
+ "updated_at",
+ "owner",
+ "logo_url",
+ ]
+
+
+class WorkspaceLiteSerializer(BaseSerializer):
+ class Meta:
+ model = Workspace
+ fields = ["name", "slug", "id", "logo_url"]
+ read_only_fields = fields
+
+
+class WorkSpaceMemberSerializer(DynamicBaseSerializer):
+ member = UserLiteSerializer(read_only=True)
+
+ class Meta:
+ model = WorkspaceMember
+ fields = "__all__"
+
+
+class WorkspaceMemberMeSerializer(BaseSerializer):
+ draft_issue_count = serializers.IntegerField(read_only=True)
+
+ class Meta:
+ model = WorkspaceMember
+ fields = "__all__"
+
+
+class WorkspaceMemberAdminSerializer(DynamicBaseSerializer):
+ member = UserAdminLiteSerializer(read_only=True)
+
+ class Meta:
+ model = WorkspaceMember
+ fields = "__all__"
+
+
+class WorkSpaceMemberInviteSerializer(BaseSerializer):
+ workspace = WorkspaceLiteSerializer(read_only=True)
+ invite_link = serializers.SerializerMethodField()
+
+ def get_invite_link(self, obj):
+ return f"/workspace-invitations/?invitation_id={obj.id}&email={obj.email}&slug={obj.workspace.slug}"
+
+ class Meta:
+ model = WorkspaceMemberInvite
+ fields = "__all__"
+ read_only_fields = [
+ "id",
+ "email",
+ "token",
+ "workspace",
+ "message",
+ "responded_at",
+ "created_at",
+ "updated_at",
+ "invite_link",
+ ]
+
+
+class WorkspaceThemeSerializer(BaseSerializer):
+ class Meta:
+ model = WorkspaceTheme
+ fields = "__all__"
+ read_only_fields = ["workspace", "actor"]
+
+
+class WorkspaceUserPropertiesSerializer(BaseSerializer):
+ class Meta:
+ model = WorkspaceUserProperties
+ fields = "__all__"
+ read_only_fields = ["workspace", "user"]
+
+
+class WorkspaceUserLinkSerializer(BaseSerializer):
+ class Meta:
+ model = WorkspaceUserLink
+ fields = "__all__"
+ read_only_fields = ["workspace", "owner"]
+
+ def to_internal_value(self, data):
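+        # Prepend http:// when no scheme is supplied so URLValidator accepts it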
+ url = data.get("url", "")
+ if url and not url.startswith(("http://", "https://")):
+ data["url"] = "http://" + url
+
+ return super().to_internal_value(data)
+
+ def validate_url(self, value):
+ url_validator = URLValidator()
+ try:
+ url_validator(value)
+ except ValidationError:
+ raise serializers.ValidationError({"error": "Invalid URL format."})
+
+ return value
+
+ def create(self, validated_data):
+        # Check whether a WorkspaceUserLink with the given URL already exists for this workspace and owner.
+
+ url = validated_data.get("url")
+
+ workspace_user_link = WorkspaceUserLink.objects.filter(
+ url=url,
+ workspace_id=validated_data.get("workspace_id"),
+ owner_id=validated_data.get("owner_id"),
+ )
+
+ if workspace_user_link.exists():
+ raise serializers.ValidationError({"error": "URL already exists for this workspace and owner"})
+
+ return super().create(validated_data)
+
+ def update(self, instance, validated_data):
+        # Check whether a WorkspaceUserLink with the given URL already exists for this workspace and owner.
+
+ url = validated_data.get("url")
+
+ workspace_user_link = WorkspaceUserLink.objects.filter(
+ url=url, workspace_id=instance.workspace_id, owner=instance.owner
+ )
+
+ if workspace_user_link.exclude(pk=instance.id).exists():
+ raise serializers.ValidationError({"error": "URL already exists for this workspace and owner"})
+
+ return super().update(instance, validated_data)
+
+
+class IssueRecentVisitSerializer(serializers.ModelSerializer):
+ project_identifier = serializers.SerializerMethodField()
+ assignees = serializers.SerializerMethodField()
+
+ class Meta:
+ model = Issue
+ fields = [
+ "id",
+ "name",
+ "state",
+ "priority",
+ "assignees",
+ "type",
+ "sequence_id",
+ "project_id",
+ "project_identifier",
+ ]
+
+ def get_project_identifier(self, obj):
+ project = obj.project
+ return project.identifier if project else None
+
+ def get_assignees(self, obj):
+ return list(obj.assignees.filter(issue_assignee__deleted_at__isnull=True).values_list("id", flat=True))
+
+
+class ProjectRecentVisitSerializer(serializers.ModelSerializer):
+ project_members = serializers.SerializerMethodField()
+
+ class Meta:
+ model = Project
+ fields = ["id", "name", "logo_props", "project_members", "identifier"]
+
+ def get_project_members(self, obj):
+ members = ProjectMember.objects.filter(project_id=obj.id, member__is_bot=False, is_active=True).values_list(
+ "member", flat=True
+ )
+
+ return members
+
+
+class PageRecentVisitSerializer(serializers.ModelSerializer):
+ project_id = serializers.SerializerMethodField()
+ project_identifier = serializers.SerializerMethodField()
+
+ class Meta:
+ model = Page
+ fields = [
+ "id",
+ "name",
+ "logo_props",
+ "project_id",
+ "owned_by",
+ "project_identifier",
+ ]
+
+ def get_project_id(self, obj):
+ return obj.project_id if hasattr(obj, "project_id") else obj.projects.values_list("id", flat=True).first()
+
+ def get_project_identifier(self, obj):
+ project = obj.projects.first()
+
+ return project.identifier if project else None
+
+
+def get_entity_model_and_serializer(entity_type):
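+    """Map a recent-visit entity_name to its (model, serializer) pair.
+
+    Returns (None, None) for unknown entity types.
+    """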
+ entity_map = {
+ "issue": (Issue, IssueRecentVisitSerializer),
+ "page": (Page, PageRecentVisitSerializer),
+ "project": (Project, ProjectRecentVisitSerializer),
+ }
+ return entity_map.get(entity_type, (None, None))
+
+
+class WorkspaceRecentVisitSerializer(BaseSerializer):
+ entity_data = serializers.SerializerMethodField()
+
+ class Meta:
+ model = UserRecentVisit
+ fields = ["id", "entity_name", "entity_identifier", "entity_data", "visited_at"]
+ read_only_fields = ["workspace", "owner", "created_by", "updated_by"]
+
+ def get_entity_data(self, obj):
+ entity_name = obj.entity_name
+ entity_identifier = obj.entity_identifier
+
+ entity_model, entity_serializer = get_entity_model_and_serializer(entity_name)
+
+ if entity_model and entity_serializer:
+ try:
+ entity = entity_model.objects.get(pk=entity_identifier)
+
+ return entity_serializer(entity).data
+ except entity_model.DoesNotExist:
+ return None
+ return None
+
+
+class WorkspaceHomePreferenceSerializer(BaseSerializer):
+ class Meta:
+ model = WorkspaceHomePreference
+ fields = ["key", "is_enabled", "sort_order"]
+ read_only_fields = ["workspace", "created_by", "updated_by"]
+
+
+class StickySerializer(BaseSerializer):
+ class Meta:
+ model = Sticky
+ fields = "__all__"
+ read_only_fields = ["workspace", "owner"]
+ extra_kwargs = {"name": {"required": False}}
+
+ def validate(self, data):
+ # Validate description content for security
+ if "description_html" in data and data["description_html"]:
+ is_valid, error_msg, sanitized_html = validate_html_content(data["description_html"])
+ if not is_valid:
+ raise serializers.ValidationError({"error": "html content is not valid"})
+ # Update the data with sanitized HTML if available
+ if sanitized_html is not None:
+ data["description_html"] = sanitized_html
+
+ if "description_binary" in data and data["description_binary"]:
+ is_valid, error_msg = validate_binary_data(data["description_binary"])
+ if not is_valid:
+ raise serializers.ValidationError({"description_binary": "Invalid binary data"})
+
+ return data
+
+
+class WorkspaceUserPreferenceSerializer(BaseSerializer):
+ class Meta:
+ model = WorkspaceUserPreference
+ fields = ["key", "is_pinned", "sort_order"]
+ read_only_fields = ["workspace", "created_by", "updated_by"]
diff --git a/apps/api/plane/app/urls/__init__.py b/apps/api/plane/app/urls/__init__.py
new file mode 100644
index 00000000..3feab4cb
--- /dev/null
+++ b/apps/api/plane/app/urls/__init__.py
@@ -0,0 +1,43 @@
+from .analytic import urlpatterns as analytic_urls
+from .api import urlpatterns as api_urls
+from .asset import urlpatterns as asset_urls
+from .cycle import urlpatterns as cycle_urls
+from .estimate import urlpatterns as estimate_urls
+from .external import urlpatterns as external_urls
+from .intake import urlpatterns as intake_urls
+from .issue import urlpatterns as issue_urls
+from .module import urlpatterns as module_urls
+from .notification import urlpatterns as notification_urls
+from .page import urlpatterns as page_urls
+from .project import urlpatterns as project_urls
+from .search import urlpatterns as search_urls
+from .state import urlpatterns as state_urls
+from .user import urlpatterns as user_urls
+from .views import urlpatterns as view_urls
+from .webhook import urlpatterns as webhook_urls
+from .workspace import urlpatterns as workspace_urls
+from .timezone import urlpatterns as timezone_urls
+from .exporter import urlpatterns as exporter_urls
+
+urlpatterns = [
+ *analytic_urls,
+ *asset_urls,
+ *cycle_urls,
+ *estimate_urls,
+ *external_urls,
+ *intake_urls,
+ *issue_urls,
+ *module_urls,
+ *notification_urls,
+ *page_urls,
+ *project_urls,
+ *search_urls,
+ *state_urls,
+ *user_urls,
+ *view_urls,
+ *workspace_urls,
+ *api_urls,
+ *webhook_urls,
+ *timezone_urls,
+ *exporter_urls,
+]
diff --git a/apps/api/plane/app/urls/analytic.py b/apps/api/plane/app/urls/analytic.py
new file mode 100644
index 00000000..df6ad249
--- /dev/null
+++ b/apps/api/plane/app/urls/analytic.py
@@ -0,0 +1,86 @@
+from django.urls import path
+
+
+from plane.app.views import (
+ AnalyticsEndpoint,
+ AnalyticViewViewset,
+ SavedAnalyticEndpoint,
+ ExportAnalyticsEndpoint,
+ AdvanceAnalyticsEndpoint,
+ AdvanceAnalyticsStatsEndpoint,
+ AdvanceAnalyticsChartEndpoint,
+ DefaultAnalyticsEndpoint,
+ ProjectStatsEndpoint,
+ ProjectAdvanceAnalyticsEndpoint,
+ ProjectAdvanceAnalyticsStatsEndpoint,
+ ProjectAdvanceAnalyticsChartEndpoint,
+)
+
+
+urlpatterns = [
+ path(
+ "workspaces//analytics/",
+ AnalyticsEndpoint.as_view(),
+ name="plane-analytics",
+ ),
+ path(
+ "workspaces//analytic-view/",
+ AnalyticViewViewset.as_view({"get": "list", "post": "create"}),
+ name="analytic-view",
+ ),
+ path(
+ "workspaces//analytic-view//",
+ AnalyticViewViewset.as_view({"get": "retrieve", "patch": "partial_update", "delete": "destroy"}),
+ name="analytic-view",
+ ),
+ path(
+ "workspaces//saved-analytic-view//",
+ SavedAnalyticEndpoint.as_view(),
+ name="saved-analytic-view",
+ ),
+ path(
+ "workspaces/