Initial commit: Plane
Some checks failed
Branch Build CE / Build Setup (push) Has been cancelled
Branch Build CE / Build-Push Admin Docker Image (push) Has been cancelled
Branch Build CE / Build-Push Web Docker Image (push) Has been cancelled
Branch Build CE / Build-Push Space Docker Image (push) Has been cancelled
Branch Build CE / Build-Push Live Collaboration Docker Image (push) Has been cancelled
Branch Build CE / Build-Push API Server Docker Image (push) Has been cancelled
Branch Build CE / Build-Push Proxy Docker Image (push) Has been cancelled
Branch Build CE / Build-Push AIO Docker Image (push) Has been cancelled
Branch Build CE / Upload Build Assets (push) Has been cancelled
Branch Build CE / Build Release (push) Has been cancelled
CodeQL / Analyze (javascript) (push) Has been cancelled
CodeQL / Analyze (python) (push) Has been cancelled
Codespell / Check for spelling errors (push) Has been cancelled
Sync Repositories / sync_changes (push) Has been cancelled

Synced from upstream: 8853637e981ed7d8a6cff32bd98e7afe20f54362
This commit is contained in:
chuan
2025-11-07 00:00:52 +08:00
commit 8ebde8aa05
4886 changed files with 462270 additions and 0 deletions

25
apps/api/.coveragerc Normal file
View File

@@ -0,0 +1,25 @@
[run]
source = plane
omit =
*/tests/*
*/migrations/*
*/settings/*
*/wsgi.py
*/asgi.py
*/urls.py
manage.py
*/admin.py
*/apps.py
[report]
exclude_lines =
pragma: no cover
def __repr__
if self.debug:
raise NotImplementedError
if __name__ == .__main__.
pass
raise ImportError
[html]
directory = htmlcov

72
apps/api/.env.example Normal file
View File

@@ -0,0 +1,72 @@
# Backend
# Debug flag for the API server; set it to 0 for production use
DEBUG=0
CORS_ALLOWED_ORIGINS="http://localhost:3000,http://localhost:3001,http://localhost:3002,http://localhost:3100"
# Database Settings
POSTGRES_USER="plane"
POSTGRES_PASSWORD="plane"
POSTGRES_HOST="plane-db"
POSTGRES_DB="plane"
POSTGRES_PORT=5432
DATABASE_URL=postgresql://${POSTGRES_USER}:${POSTGRES_PASSWORD}@${POSTGRES_HOST}:${POSTGRES_PORT}/${POSTGRES_DB}
# Redis Settings
REDIS_HOST="plane-redis"
REDIS_PORT="6379"
# Interpolate REDIS_PORT so host and port stay in sync (matches DATABASE_URL style)
REDIS_URL="redis://${REDIS_HOST}:${REDIS_PORT}/"
# RabbitMQ Settings
RABBITMQ_HOST="plane-mq"
RABBITMQ_PORT="5672"
RABBITMQ_USER="plane"
RABBITMQ_PASSWORD="plane"
RABBITMQ_VHOST="plane"
# AWS Settings
AWS_REGION=""
AWS_ACCESS_KEY_ID="access-key"
AWS_SECRET_ACCESS_KEY="secret-key"
AWS_S3_ENDPOINT_URL="http://localhost:9000"
# Changing this requires a corresponding change in the proxy config for uploads when using the MinIO setup
AWS_S3_BUCKET_NAME="uploads"
# Maximum file upload limit
FILE_SIZE_LIMIT=5242880
# Settings related to Docker
DOCKERIZED=1 # deprecated
# Set to 1 if using the pre-configured MinIO setup
USE_MINIO=0
# Email redirections and minio domain settings
WEB_URL="http://localhost:8000"
# Gunicorn Workers
GUNICORN_WORKERS=2
# Base URLs
ADMIN_BASE_URL="http://localhost:3001"
ADMIN_BASE_PATH="/god-mode"
SPACE_BASE_URL="http://localhost:3002"
SPACE_BASE_PATH="/spaces"
APP_BASE_URL="http://localhost:3000"
APP_BASE_PATH=""
LIVE_BASE_URL="http://localhost:3100"
LIVE_BASE_PATH="/live"
LIVE_SERVER_SECRET_KEY="secret-key"
# Hard delete files after days
HARD_DELETE_AFTER_DAYS=60
# Force HTTPS for handling SSL Termination
MINIO_ENDPOINT_SSL=0
# API key rate limit
API_KEY_RATE_LIMIT="60/minute"

58
apps/api/Dockerfile.api Normal file
View File

@@ -0,0 +1,58 @@
FROM python:3.12.10-alpine

# set environment variables
ENV PYTHONDONTWRITEBYTECODE=1
ENV PYTHONUNBUFFERED=1
ENV PIP_DISABLE_PIP_VERSION_CHECK=1
ENV INSTANCE_CHANGELOG_URL=https://sites.plane.so/pages/691ef037bcfe416a902e48cb55f59891/

# Update system packages for security
RUN apk update && apk upgrade

WORKDIR /code

# Runtime shared libraries needed by the Python deps (psycopg, lxml, xmlsec) and TLS
RUN apk add --no-cache --upgrade \
    "libpq" \
    "libxslt" \
    "xmlsec" \
    "ca-certificates" \
    "openssl"

# Copy only the dependency manifests first so this layer caches independently
# of application-code changes
COPY requirements.txt ./
COPY requirements ./requirements
RUN apk add --no-cache libffi-dev
# Build-only toolchain installed as a virtual package (.build-deps) and removed
# right after pip install to keep the final image small
RUN apk add --no-cache --virtual .build-deps \
    "bash~=5.2" \
    "g++" \
    "gcc" \
    "cargo" \
    "git" \
    "make" \
    "postgresql-dev" \
    "libc-dev" \
    "linux-headers" \
    && \
    pip install -r requirements.txt --compile --no-cache-dir \
    && \
    apk del .build-deps \
    && \
    rm -rf /var/cache/apk/*

# Add in Django deps and generate Django's static files
COPY manage.py manage.py
COPY plane plane/
COPY templates templates/
COPY package.json package.json
# bash is needed at runtime by the entrypoint scripts
RUN apk --no-cache add "bash~=5.2"
COPY ./bin ./bin/
RUN mkdir -p /code/plane/logs
RUN chmod +x ./bin/*
# NOTE(review): world-writable permissions on the whole code tree while running
# as root is a security smell — confirm whether a dedicated non-root user and
# narrower permissions could be used instead.
RUN chmod -R 777 /code

# Expose container port and run entry point script
EXPOSE 8000
CMD ["./bin/docker-entrypoint-api.sh"]

46
apps/api/Dockerfile.dev Normal file
View File

@@ -0,0 +1,46 @@
FROM python:3.12.5-alpine AS backend

# set environment variables
# (ENV key=value form; the space-separated legacy form is deprecated by Docker
# and emits build warnings)
ENV PYTHONDONTWRITEBYTECODE=1
ENV PYTHONUNBUFFERED=1
ENV PIP_DISABLE_PIP_VERSION_CHECK=1
ENV INSTANCE_CHANGELOG_URL=https://sites.plane.so/pages/691ef037bcfe416a902e48cb55f59891/

# Runtime libraries plus the build toolchain; both are kept in this dev image
# so native packages can be rebuilt on dependency changes.
# ("bash~=5.2" was previously listed twice; installed once here.)
RUN apk --no-cache add \
    "bash~=5.2" \
    "libpq" \
    "libxslt" \
    "nodejs-current" \
    "xmlsec" \
    "libffi-dev" \
    "g++" \
    "gcc" \
    "cargo" \
    "git" \
    "make" \
    "postgresql-dev" \
    "libc-dev" \
    "linux-headers"

WORKDIR /code

COPY requirements.txt ./requirements.txt
# COPY is preferred over ADD for plain local files/directories
COPY requirements ./requirements

# Install the local development settings
RUN pip install -r requirements/local.txt --compile --no-cache-dir

COPY . .

RUN mkdir -p /code/plane/logs
RUN chmod -R +x /code/bin
RUN chmod -R 777 /code

# Expose container port and run entry point script
EXPOSE 8000
CMD [ "./bin/docker-entrypoint-api-local.sh" ]

View File

@@ -0,0 +1,34 @@
#!/bin/bash
# Development entrypoint: waits for infrastructure, registers the instance,
# then serves the API via Django's runserver with local settings.
set -e

# Block until the database accepts connections
python manage.py wait_for_db
# Wait for migrations
python manage.py wait_for_migrations

# Collect system information used to derive a stable machine signature
# (the stray duplicate "#!/bin/bash" that previously sat mid-file has been removed)
HOSTNAME=$(hostname)
MAC_ADDRESS=$(ip link show | awk '/ether/ {print $2}' | head -n 1)
CPU_INFO=$(cat /proc/cpuinfo)
MEMORY_INFO=$(free -h)
DISK_INFO=$(df -h)

# Concatenate information and compute SHA-256 hash
SIGNATURE=$(echo "$HOSTNAME$MAC_ADDRESS$CPU_INFO$MEMORY_INFO$DISK_INFO" | sha256sum | awk '{print $1}')

# Export the variables
export MACHINE_SIGNATURE=$SIGNATURE

# Register instance
python manage.py register_instance "$MACHINE_SIGNATURE"
# Load the configuration variable
python manage.py configure_instance
# Create the default bucket
python manage.py create_bucket
# Clear Cache before starting to remove stale values
python manage.py clear_cache

python manage.py runserver 0.0.0.0:8000 --settings=plane.settings.local

View File

@@ -0,0 +1,35 @@
#!/bin/bash
# Production entrypoint: waits for infrastructure, registers the instance,
# then serves the API via gunicorn/uvicorn workers.
set -e

# Block until the database accepts connections
python manage.py wait_for_db
# Wait for migrations
python manage.py wait_for_migrations

# Collect system information used to derive a stable machine signature
# (the stray duplicate "#!/bin/bash" that previously sat mid-file has been removed)
HOSTNAME=$(hostname)
MAC_ADDRESS=$(ip link show | awk '/ether/ {print $2}' | head -n 1)
CPU_INFO=$(cat /proc/cpuinfo)
MEMORY_INFO=$(free -h)
DISK_INFO=$(df -h)

# Concatenate information and compute SHA-256 hash
SIGNATURE=$(echo "$HOSTNAME$MAC_ADDRESS$CPU_INFO$MEMORY_INFO$DISK_INFO" | sha256sum | awk '{print $1}')

# Export the variables
export MACHINE_SIGNATURE=$SIGNATURE

# Register instance
python manage.py register_instance "$MACHINE_SIGNATURE"
# Load the configuration variable
python manage.py configure_instance
# Create the default bucket
python manage.py create_bucket
# Clear Cache before starting to remove stale values
python manage.py clear_cache

# exec replaces the shell so gunicorn receives container signals directly
exec gunicorn -w "$GUNICORN_WORKERS" -k uvicorn.workers.UvicornWorker plane.asgi:application --bind 0.0.0.0:"${PORT:-8000}" --max-requests 1200 --max-requests-jitter 1000 --access-logfile -

View File

@@ -0,0 +1,8 @@
#!/bin/bash
# Entrypoint for the Celery beat scheduler.
set -e
# Block until the database accepts connections
python manage.py wait_for_db
# Wait for migrations
python manage.py wait_for_migrations
# Run the beat scheduler in the foreground
celery -A plane beat -l info

View File

@@ -0,0 +1,6 @@
#!/bin/bash
# Entrypoint that applies database migrations, forwarding any extra CLI
# arguments (e.g. --settings=...) to both management commands.
set -e

# "$@" expands to nothing when no arguments are given; an unquoted $1 is
# subject to word splitting and globbing.
python manage.py wait_for_db "$@"
python manage.py migrate "$@"

View File

@@ -0,0 +1,8 @@
#!/bin/bash
# Entrypoint for the Celery background worker.
set -e
# Block until the database accepts connections
python manage.py wait_for_db
# Wait for migrations
python manage.py wait_for_migrations
# Run the worker in the foreground
celery -A plane worker -l info

15
apps/api/manage.py Normal file
View File

@@ -0,0 +1,15 @@
#!/usr/bin/env python
"""Django management CLI entry point for the Plane API server."""
import os
import sys


def main():
    """Configure default settings and dispatch to Django's CLI."""
    os.environ.setdefault("DJANGO_SETTINGS_MODULE", "plane.settings.production")
    try:
        from django.core.management import execute_from_command_line
    except ImportError as exc:
        raise ImportError(
            "Couldn't import Django. Are you sure it's installed and "
            "available on your PYTHONPATH environment variable? Did you "
            "forget to activate a virtual environment?"
        ) from exc
    execute_from_command_line(sys.argv)


if __name__ == "__main__":
    main()

7
apps/api/package.json Normal file
View File

@@ -0,0 +1,7 @@
{
"name": "plane-api",
"version": "1.1.0",
"license": "AGPL-3.0",
"private": true,
"description": "API server powering Plane's backend"
}

View File

@@ -0,0 +1,3 @@
# Ensure the Celery app is loaded when the plane package is imported so it is
# available as plane.celery_app.
from .celery import app as celery_app

__all__ = ("celery_app",)

View File

View File

@@ -0,0 +1,5 @@
from django.apps import AppConfig
class AnalyticsConfig(AppConfig):
    """Django app configuration for the plane.analytics app."""

    name = "plane.analytics"

View File

View File

@@ -0,0 +1,12 @@
from django.apps import AppConfig
class ApiConfig(AppConfig):
    """Django app configuration for the public REST API app (plane.api)."""

    name = "plane.api"

    def ready(self):
        # Import authentication extensions to register them with drf-spectacular.
        # The import is side-effect only; ImportError is tolerated so the app
        # still boots when the OpenAPI tooling is not installed.
        try:
            import plane.utils.openapi.auth  # noqa
        except ImportError:
            pass

View File

@@ -0,0 +1,47 @@
# Django imports
from django.utils import timezone
from django.db.models import Q
# Third party imports
from rest_framework import authentication
from rest_framework.exceptions import AuthenticationFailed
# Module imports
from plane.db.models import APIToken
class APIKeyAuthentication(authentication.BaseAuthentication):
    """
    DRF authentication backend that authenticates requests via an API key
    supplied in the ``X-Api-Key`` header.
    """

    www_authenticate_realm = "api"
    media_type = "application/json"
    auth_header_name = "X-Api-Key"

    def get_api_token(self, request):
        # Pull the raw token string from the configured request header.
        return request.headers.get(self.auth_header_name)

    def validate_api_token(self, token):
        # A token is usable when it is active and either never expires or its
        # expiry lies in the future.
        not_expired = Q(expired_at__gt=timezone.now()) | Q(expired_at__isnull=True)
        try:
            token_obj = APIToken.objects.get(not_expired, token=token, is_active=True)
        except APIToken.DoesNotExist:
            raise AuthenticationFailed("Given API token is not valid")
        # Record when the token was last used.
        token_obj.last_used = timezone.now()
        token_obj.save(update_fields=["last_used"])
        return (token_obj.user, token_obj.token)

    def authenticate(self, request):
        raw_token = self.get_api_token(request=request)
        # No header present: defer to the next authentication class.
        if not raw_token:
            return None
        # Validate the API token and return the (user, token) pair DRF expects.
        return self.validate_api_token(raw_token)

View File

@@ -0,0 +1,87 @@
# python imports
import os
# Third party imports
from rest_framework.throttling import SimpleRateThrottle
class _HeaderApiKeyThrottle(SimpleRateThrottle):
    """
    Shared base for throttles keyed on the ``X-Api-Key`` request header.

    The original file duplicated ``get_cache_key``/``allow_request`` verbatim
    in two classes; the shared logic lives here once. Subclasses only define
    ``scope`` and ``rate``. After each allowed request, rate-limit metadata is
    attached to ``request.META`` for downstream consumers.
    """

    def get_cache_key(self, request, view):
        # Retrieve the API key from the request header
        api_key = request.headers.get("X-Api-Key")
        if not api_key:
            return None  # Allow the request if there's no API key
        # Use the API key as part of the cache key
        return f"{self.scope}:{api_key}"

    def allow_request(self, request, view):
        allowed = super().allow_request(request, view)
        if allowed:
            now = self.timer()
            # self.key is set by SimpleRateThrottle.allow_request above
            history = self.cache.get(self.key, [])
            # Drop entries older than the throttle window (history is stored
            # most-recent-first, so stale entries sit at the end)
            while history and history[-1] <= now - self.duration:
                history.pop()
            # Requests consumed within the current window
            num_requests = len(history)
            # Remaining budget for this window
            available = self.num_requests - num_requests
            # Unix timestamp for when the rate limit will reset
            reset_time = int(now + self.duration)
            # Attach rate-limit metadata for response-header construction
            request.META["X-RateLimit-Remaining"] = max(0, available)
            request.META["X-RateLimit-Reset"] = reset_time
        return allowed


class ApiKeyRateThrottle(_HeaderApiKeyThrottle):
    """Per-API-key throttle; rate is configurable via API_KEY_RATE_LIMIT."""

    scope = "api_key"
    rate = os.environ.get("API_KEY_RATE_LIMIT", "60/minute")


class ServiceTokenRateThrottle(_HeaderApiKeyThrottle):
    """Fixed higher-rate throttle for service tokens."""

    scope = "service_token"
    rate = "300/minute"

View File

@@ -0,0 +1,57 @@
from .user import UserLiteSerializer
from .workspace import WorkspaceLiteSerializer
from .project import (
ProjectSerializer,
ProjectLiteSerializer,
ProjectCreateSerializer,
ProjectUpdateSerializer,
)
from .issue import (
IssueSerializer,
LabelCreateUpdateSerializer,
LabelSerializer,
IssueLinkSerializer,
IssueCommentSerializer,
IssueAttachmentSerializer,
IssueActivitySerializer,
IssueExpandSerializer,
IssueLiteSerializer,
IssueAttachmentUploadSerializer,
IssueSearchSerializer,
IssueCommentCreateSerializer,
IssueLinkCreateSerializer,
IssueLinkUpdateSerializer,
)
from .state import StateLiteSerializer, StateSerializer
from .cycle import (
CycleSerializer,
CycleIssueSerializer,
CycleLiteSerializer,
CycleIssueRequestSerializer,
TransferCycleIssueRequestSerializer,
CycleCreateSerializer,
CycleUpdateSerializer,
)
from .module import (
ModuleSerializer,
ModuleIssueSerializer,
ModuleLiteSerializer,
ModuleIssueRequestSerializer,
ModuleCreateSerializer,
ModuleUpdateSerializer,
)
from .intake import (
IntakeIssueSerializer,
IntakeIssueCreateSerializer,
IntakeIssueUpdateSerializer,
)
from .estimate import EstimatePointSerializer
from .asset import (
UserAssetUploadSerializer,
AssetUpdateSerializer,
GenericAssetUploadSerializer,
GenericAssetUpdateSerializer,
FileAssetSerializer,
)
from .invite import WorkspaceInviteSerializer
from .member import ProjectMemberSerializer

View File

@@ -0,0 +1,119 @@
# Third party imports
from rest_framework import serializers
# Module imports
from .base import BaseSerializer
from plane.db.models import FileAsset
class UserAssetUploadSerializer(serializers.Serializer):
    """
    Serializer for user asset upload requests.

    This serializer validates the metadata required to generate a presigned URL
    for uploading user profile assets (avatar or cover image) directly to S3 storage.
    Supports JPEG, PNG, WebP, JPG, and GIF image formats with size validation.
    """

    # Original filename as supplied by the client.
    name = serializers.CharField(help_text="Original filename of the asset")
    # Accepted MIME types for profile images; defaults to JPEG.
    type = serializers.ChoiceField(
        choices=[
            ("image/jpeg", "JPEG"),
            ("image/png", "PNG"),
            ("image/webp", "WebP"),
            ("image/jpg", "JPG"),
            ("image/gif", "GIF"),
        ],
        default="image/jpeg",
        help_text="MIME type of the file",
        style={"placeholder": "image/jpeg"},
    )
    # Declared size in bytes; presumably checked against FILE_SIZE_LIMIT in the
    # upload view — confirm.
    size = serializers.IntegerField(help_text="File size in bytes")
    # Restricts uploads to the avatar/cover entity contexts.
    entity_type = serializers.ChoiceField(
        choices=[
            (FileAsset.EntityTypeContext.USER_AVATAR, "User Avatar"),
            (FileAsset.EntityTypeContext.USER_COVER, "User Cover"),
        ],
        help_text="Type of user asset",
    )
class AssetUpdateSerializer(serializers.Serializer):
    """
    Serializer for asset status updates after successful upload completion.

    Handles post-upload asset metadata updates including attribute modifications
    and upload confirmation for S3-based file storage workflows.
    """

    # Free-form JSON payload applied to the asset's attributes after upload.
    attributes = serializers.JSONField(required=False, help_text="Additional attributes to update for the asset")
class GenericAssetUploadSerializer(serializers.Serializer):
    """
    Serializer for generic asset upload requests with project association.

    Validates metadata for generating presigned URLs for workspace assets including
    project association, external system tracking, and file validation for
    document management and content storage workflows.
    """

    # Original filename as supplied by the client.
    name = serializers.CharField(help_text="Original filename of the asset")
    # MIME type is free-form here (unlike user assets, which are restricted).
    type = serializers.CharField(required=False, help_text="MIME type of the file")
    # Declared size in bytes.
    size = serializers.IntegerField(help_text="File size in bytes")
    # Optional project scoping for the asset.
    project_id = serializers.UUIDField(
        required=False,
        help_text="UUID of the project to associate with the asset",
        style={"placeholder": "123e4567-e89b-12d3-a456-426614174000"},
    )
    # External id/source allow integrations to correlate uploaded assets.
    external_id = serializers.CharField(
        required=False,
        help_text="External identifier for the asset (for integration tracking)",
    )
    external_source = serializers.CharField(
        required=False, help_text="External source system (for integration tracking)"
    )
class GenericAssetUpdateSerializer(serializers.Serializer):
    """
    Serializer for generic asset upload confirmation and status management.

    Handles post-upload status updates for workspace assets including
    upload completion marking and metadata finalization.
    """

    # Defaults to True because this endpoint is called after a successful upload.
    is_uploaded = serializers.BooleanField(default=True, help_text="Whether the asset has been successfully uploaded")
class FileAssetSerializer(BaseSerializer):
    """
    Comprehensive file asset serializer with complete metadata and URL generation.

    Provides full file asset information including storage metadata, access URLs,
    relationship data, and upload status for complete asset management workflows.
    """

    # Access URL for the asset; read-only — presumably computed on the model
    # (property/annotation), confirm against FileAsset.
    asset_url = serializers.CharField(read_only=True)

    class Meta:
        model = FileAsset
        fields = "__all__"
        # Relationship and audit fields are server-managed and never writable
        # through the API.
        read_only_fields = [
            "id",
            "created_by",
            "updated_by",
            "created_at",
            "updated_at",
            "workspace",
            "project",
            "issue",
            "comment",
            "page",
            "draft_issue",
            "user",
            "is_deleted",
            "deleted_at",
            "storage_metadata",
            "asset_url",
        ]

View File

@@ -0,0 +1,114 @@
# Third party imports
from rest_framework import serializers
class BaseSerializer(serializers.ModelSerializer):
    """
    Base serializer providing common functionality for all model serializers.

    Features field filtering, dynamic expansion of related fields, and standardized
    primary key handling for consistent API responses across the application.
    """

    # Expose the primary key as a read-only field on every serializer.
    id = serializers.PrimaryKeyRelatedField(read_only=True)

    def __init__(self, *args, **kwargs):
        # If 'fields' is provided in the arguments, remove it and store it separately.
        # This is done so as not to pass this custom argument up to the superclass.
        fields = kwargs.pop("fields", [])
        # 'expand' lists related-field names to inline in to_representation().
        self.expand = kwargs.pop("expand", []) or []
        # Call the initialization of the superclass.
        super().__init__(*args, **kwargs)
        # If 'fields' was provided, filter the fields of the serializer accordingly.
        if fields:
            self.fields = self._filter_fields(fields=fields)

    def _filter_fields(self, fields):
        """
        Adjust the serializer's fields based on the provided 'fields' list.

        :param fields: List or dictionary specifying which
        fields to include in the serializer.
        :return: The updated fields for the serializer.
        """
        # Check each field_name in the provided fields.
        for field_name in fields:
            # If the field is a dictionary (indicating nested fields),
            # loop through its keys and values.
            if isinstance(field_name, dict):
                for key, value in field_name.items():
                    # If the value of this nested field is a list,
                    # perform a recursive filter on it.
                    # NOTE(review): _filter_fields only accepts one argument
                    # ('fields'), but this call passes two positional args, so a
                    # nested dict with a list value raises TypeError here —
                    # confirm the intended nested-filtering behavior.
                    if isinstance(value, list):
                        self._filter_fields(self.fields[key], value)
        # Create a list to store allowed fields.
        allowed = []
        for item in fields:
            # If the item is a string, it directly represents a field's name.
            if isinstance(item, str):
                allowed.append(item)
            # If the item is a dictionary, it represents a nested field.
            # Add the key of this dictionary to the allowed list.
            elif isinstance(item, dict):
                allowed.append(list(item.keys())[0])
        # Convert the current serializer's fields and the allowed fields to sets.
        existing = set(self.fields)
        allowed = set(allowed)
        # Remove fields from the serializer that aren't in the 'allowed' list.
        for field_name in existing - allowed:
            self.fields.pop(field_name)
        return self.fields

    def to_representation(self, instance):
        """Serialize the instance, inlining any requested 'expand' relations."""
        response = super().to_representation(instance)
        # Ensure 'expand' is iterable before processing
        if self.expand:
            for expand in self.expand:
                if expand in self.fields:
                    # Import all the expandable serializers
                    # (imported lazily here to avoid circular imports)
                    from . import (
                        IssueSerializer,
                        IssueLiteSerializer,
                        ProjectLiteSerializer,
                        StateLiteSerializer,
                        UserLiteSerializer,
                        WorkspaceLiteSerializer,
                        EstimatePointSerializer,
                    )

                    # Expansion mapper: relation name -> serializer used to inline it
                    expansion = {
                        "user": UserLiteSerializer,
                        "workspace": WorkspaceLiteSerializer,
                        "project": ProjectLiteSerializer,
                        "default_assignee": UserLiteSerializer,
                        "project_lead": UserLiteSerializer,
                        "state": StateLiteSerializer,
                        "created_by": UserLiteSerializer,
                        "updated_by": UserLiteSerializer,
                        "issue": IssueSerializer,
                        "actor": UserLiteSerializer,
                        "owned_by": UserLiteSerializer,
                        "members": UserLiteSerializer,
                        "parent": IssueLiteSerializer,
                        "estimate_point": EstimatePointSerializer,
                    }
                    # Check if field in expansion then expand the field
                    if expand in expansion:
                        # Many-valued relations (already a list in the response)
                        # are serialized with many=True.
                        if isinstance(response.get(expand), list):
                            exp_serializer = expansion[expand](getattr(instance, expand), many=True)
                        else:
                            exp_serializer = expansion[expand](getattr(instance, expand))
                        response[expand] = exp_serializer.data
                    else:
                        # Unknown expansion: fall back to the raw foreign-key id.
                        # You might need to handle this case differently
                        response[expand] = getattr(instance, f"{expand}_id", None)
        return response

View File

@@ -0,0 +1,186 @@
# Third party imports
import pytz
from rest_framework import serializers
# Module imports
from .base import BaseSerializer
from plane.db.models import Cycle, CycleIssue, User
from plane.utils.timezone_converter import convert_to_utc
class CycleCreateSerializer(BaseSerializer):
    """
    Serializer for creating cycles with timezone handling and date validation.

    Manages cycle creation including project timezone conversion, date range validation,
    and UTC normalization for time-bound iteration planning and sprint management.
    """

    owned_by = serializers.PrimaryKeyRelatedField(
        queryset=User.objects.all(),
        required=False,
        allow_null=True,
        help_text="User who owns the cycle. If not provided, defaults to the current user.",
    )

    def __init__(self, *args, **kwargs):
        super().__init__(*args, **kwargs)
        # When a project is supplied via context, submitted start/end datetimes
        # are interpreted in that project's timezone.
        project = self.context.get("project")
        if project and project.timezone:
            project_timezone = pytz.timezone(project.timezone)
            self.fields["start_date"].timezone = project_timezone
            self.fields["end_date"].timezone = project_timezone

    class Meta:
        model = Cycle
        fields = [
            "name",
            "description",
            "start_date",
            "end_date",
            "owned_by",
            "external_source",
            "external_id",
            "timezone",
        ]
        read_only_fields = [
            "id",
            "workspace",
            "project",
            "created_by",
            "updated_by",
            "created_at",
            "updated_at",
            "deleted_at",
        ]

    def validate(self, data):
        """Validate date ordering, normalize both dates to UTC, default ownership."""
        # A cycle cannot end before it starts.
        if (
            data.get("start_date", None) is not None
            and data.get("end_date", None) is not None
            and data.get("start_date", None) > data.get("end_date", None)
        ):
            raise serializers.ValidationError("Start date cannot exceed end date")
        if data.get("start_date", None) is not None and data.get("end_date", None) is not None:
            # The project id is needed by convert_to_utc; it may come from the
            # raw payload or from the instance being updated.
            project_id = self.initial_data.get("project_id") or (
                self.instance.project_id if self.instance and hasattr(self.instance, "project_id") else None
            )
            if not project_id:
                raise serializers.ValidationError("Project ID is required")
            # NOTE(review): .date() assumes the incoming values are datetimes,
            # and is_start_date presumably selects start-of-day vs end-of-day
            # boundaries — confirm against convert_to_utc.
            data["start_date"] = convert_to_utc(
                date=str(data.get("start_date").date()),
                project_id=project_id,
                is_start_date=True,
            )
            data["end_date"] = convert_to_utc(
                date=str(data.get("end_date", None).date()),
                project_id=project_id,
            )
        # Default ownership to the requesting user.
        if not data.get("owned_by"):
            data["owned_by"] = self.context["request"].user
        return data
class CycleUpdateSerializer(CycleCreateSerializer):
    """
    Serializer for updating cycles.

    Inherits all creation-time behavior (timezone handling, date-range
    validation, UTC normalization, ownership defaulting) from
    CycleCreateSerializer.
    """

    class Meta(CycleCreateSerializer.Meta):
        model = Cycle
        # "owned_by" is already present in CycleCreateSerializer.Meta.fields,
        # so the parent's field list is reused as-is instead of appending a
        # duplicate entry as before.
        fields = CycleCreateSerializer.Meta.fields
class CycleSerializer(BaseSerializer):
    """
    Cycle serializer with comprehensive project metrics and time tracking.

    Provides cycle details including work item counts by status, progress estimates,
    and time-bound iteration data for project management and sprint planning.
    """

    # Read-only metric fields; presumably populated by queryset annotations in
    # the cycle views — confirm against the endpoints that use this serializer.
    total_issues = serializers.IntegerField(read_only=True)
    cancelled_issues = serializers.IntegerField(read_only=True)
    completed_issues = serializers.IntegerField(read_only=True)
    started_issues = serializers.IntegerField(read_only=True)
    unstarted_issues = serializers.IntegerField(read_only=True)
    backlog_issues = serializers.IntegerField(read_only=True)
    total_estimates = serializers.FloatField(read_only=True)
    completed_estimates = serializers.FloatField(read_only=True)
    started_estimates = serializers.FloatField(read_only=True)

    class Meta:
        model = Cycle
        fields = "__all__"
        # Scoping, audit, and ownership fields are server-managed.
        read_only_fields = [
            "id",
            "created_at",
            "updated_at",
            "created_by",
            "updated_by",
            "workspace",
            "project",
            "owned_by",
            "deleted_at",
        ]
class CycleIssueSerializer(BaseSerializer):
    """
    Serializer for cycle-issue relationships with sub-issue counting.

    Manages the association between cycles and work items, including
    hierarchical issue tracking for nested work item structures.
    """

    # Count of child issues; presumably supplied via queryset annotation —
    # confirm in the cycle-issue views.
    sub_issues_count = serializers.IntegerField(read_only=True)

    class Meta:
        model = CycleIssue
        fields = "__all__"
        read_only_fields = ["workspace", "project", "cycle"]
class CycleLiteSerializer(BaseSerializer):
    """
    Lightweight cycle serializer for minimal data transfer.

    Provides essential cycle information without computed metrics,
    optimized for list views and reference lookups.
    """

    class Meta:
        model = Cycle
        fields = "__all__"
class CycleIssueRequestSerializer(serializers.Serializer):
    """
    Serializer for bulk work item assignment to cycles.

    Validates work item ID lists for batch operations including
    cycle assignment and sprint planning workflows.
    """

    # UUIDs of the issues to attach to the cycle in one request.
    issues = serializers.ListField(child=serializers.UUIDField(), help_text="List of issue IDs to add to the cycle")
class TransferCycleIssueRequestSerializer(serializers.Serializer):
    """
    Serializer for transferring work items between cycles.

    Handles work item migration between cycles including validation
    and relationship updates for sprint reallocation workflows.
    """

    # UUID of the destination cycle.
    new_cycle_id = serializers.UUIDField(help_text="ID of the target cycle to transfer issues to")

View File

@@ -0,0 +1,17 @@
# Module imports
from plane.db.models import EstimatePoint
from .base import BaseSerializer
class EstimatePointSerializer(BaseSerializer):
    """
    Serializer for project estimation points and story point values.

    Handles numeric estimation data for work item sizing and sprint planning,
    providing standardized point values for project velocity calculations.
    """

    class Meta:
        model = EstimatePoint
        # Only the key/value pair is exposed, and neither is writable via the API.
        fields = ["id", "value"]
        read_only_fields = fields

View File

@@ -0,0 +1,117 @@
# Module imports
from .base import BaseSerializer
from .issue import IssueExpandSerializer
from plane.db.models import IntakeIssue, Issue
from rest_framework import serializers
class IssueForIntakeSerializer(BaseSerializer):
    """
    Serializer for work item data within intake submissions.

    Handles essential work item fields for intake processing including
    content validation and priority assignment for triage workflows.
    """

    class Meta:
        model = Issue
        # Only content fields are writable through intake; scoping and audit
        # fields are assigned server-side.
        fields = [
            "name",
            "description",
            "description_html",
            "priority",
        ]
        read_only_fields = [
            "id",
            "workspace",
            "project",
            "created_by",
            "updated_by",
            "created_at",
            "updated_at",
        ]
class IntakeIssueCreateSerializer(BaseSerializer):
    """
    Serializer for creating intake work items with embedded issue data.

    Manages intake work item creation including nested issue creation,
    status assignment, and source tracking for issue queue management.
    """

    # Nested payload for the issue created alongside the intake record.
    issue = IssueForIntakeSerializer(help_text="Issue data for the intake issue")

    class Meta:
        model = IntakeIssue
        fields = ["issue"]
class IntakeIssueSerializer(BaseSerializer):
    """
    Comprehensive serializer for intake work items with expanded issue details.

    Provides full intake work item data including embedded issue information,
    status tracking, and triage metadata for issue queue management.
    """

    # Expanded representation of the linked issue.
    issue_detail = IssueExpandSerializer(read_only=True, source="issue")
    # Exposes the intake id under the "inbox" key — presumably kept for
    # backward compatibility with the older "inbox" naming; confirm.
    inbox = serializers.UUIDField(source="intake.id", read_only=True)

    class Meta:
        model = IntakeIssue
        fields = "__all__"
        read_only_fields = [
            "id",
            "workspace",
            "project",
            "issue",
            "created_by",
            "updated_by",
            "created_at",
            "updated_at",
        ]
class IntakeIssueUpdateSerializer(BaseSerializer):
    """
    Serializer for updating intake work items and their associated issues.

    Handles intake work item modifications including status changes, triage decisions,
    and embedded issue updates for issue queue processing workflows.
    """

    # Optional nested payload to update the embedded issue in the same request.
    issue = IssueForIntakeSerializer(required=False, help_text="Issue data to update in the intake issue")

    class Meta:
        model = IntakeIssue
        # Triage-related fields plus the nested issue are writable.
        fields = [
            "status",
            "snoozed_till",
            "duplicate_to",
            "source",
            "source_email",
            "issue",
        ]
        read_only_fields = [
            "id",
            "workspace",
            "project",
            "created_by",
            "updated_by",
            "created_at",
            "updated_at",
        ]
class IssueDataSerializer(serializers.Serializer):
    """
    Serializer for nested work item data in intake request payloads.

    Validates core work item fields within intake requests including
    content formatting, priority levels, and metadata for issue creation.
    """

    # Required issue title, capped at the model's name length.
    name = serializers.CharField(max_length=255, help_text="Issue name")
    # Optional HTML body for the issue description.
    description_html = serializers.CharField(required=False, allow_null=True, help_text="Issue description HTML")
    # Priority constrained to the model's declared choices; defaults to "none".
    priority = serializers.ChoiceField(choices=Issue.PRIORITY_CHOICES, default="none", help_text="Issue priority")

View File

@@ -0,0 +1,56 @@
# Django imports
from django.core.exceptions import ValidationError
from django.core.validators import validate_email
from rest_framework import serializers
# Module imports
from plane.db.models import WorkspaceMemberInvite
from .base import BaseSerializer
from plane.app.permissions.base import ROLE
class WorkspaceInviteSerializer(BaseSerializer):
    """
    Serializer for workspace invites.

    Validates the invited email's syntax and uniqueness within the workspace,
    and restricts the assignable role to admin/member/guest.
    """

    class Meta:
        model = WorkspaceMemberInvite
        fields = [
            "id",
            "email",
            "role",
            "created_at",
            "updated_at",
            "responded_at",
            "accepted",
        ]
        read_only_fields = [
            "id",
            "workspace",
            "created_at",
            "updated_at",
            "responded_at",
            "accepted",
        ]

    def validate_email(self, value):
        # Delegate syntactic validation to Django's built-in email validator.
        try:
            validate_email(value)
        except ValidationError:
            raise serializers.ValidationError("Invalid email address", code="INVALID_EMAIL_ADDRESS")
        return value

    def validate_role(self, value):
        # Only admin, member, and guest roles may be assigned via an invite.
        allowed_roles = (ROLE.ADMIN.value, ROLE.MEMBER.value, ROLE.GUEST.value)
        if value not in allowed_roles:
            raise serializers.ValidationError("Invalid role", code="INVALID_WORKSPACE_MEMBER_ROLE")
        return value

    def validate(self, data):
        # Reject duplicate invites for the same email within this workspace.
        slug = self.context["slug"]
        email = data.get("email")
        if email and WorkspaceMemberInvite.objects.filter(email=email, workspace__slug=slug).exists():
            raise serializers.ValidationError("Email already invited", code="EMAIL_ALREADY_INVITED")
        return data

View File

@@ -0,0 +1,697 @@
# Django imports
from django.utils import timezone
from lxml import html
from django.db import IntegrityError
# Third party imports
from rest_framework import serializers
# Module imports
from plane.db.models import (
Issue,
IssueType,
IssueActivity,
IssueAssignee,
FileAsset,
IssueComment,
IssueLabel,
IssueLink,
Label,
ProjectMember,
State,
User,
EstimatePoint,
)
from plane.utils.content_validator import (
validate_html_content,
validate_binary_data,
)
from .base import BaseSerializer
from .cycle import CycleLiteSerializer, CycleSerializer
from .module import ModuleLiteSerializer, ModuleSerializer
from .state import StateLiteSerializer
from .user import UserLiteSerializer
# Django imports
from django.core.exceptions import ValidationError
from django.core.validators import URLValidator
class IssueSerializer(BaseSerializer):
    """
    Comprehensive work item serializer with full relationship management.

    Handles the complete work item lifecycle: HTML description normalization and
    sanitization, project-scoped validation of assignees/labels/state/parent/
    estimate, and creation/replacement of the related IssueAssignee and
    IssueLabel rows. ``to_representation`` honors ``self.expand`` for dynamic
    field expansion.
    """

    # Write-only list of user ids; narrowed in validate() to active project
    # members with role >= 15.
    assignees = serializers.ListField(
        child=serializers.PrimaryKeyRelatedField(queryset=User.objects.values_list("id", flat=True)),
        write_only=True,
        required=False,
    )
    # Write-only list of label ids; narrowed in validate() to this project's labels.
    labels = serializers.ListField(
        child=serializers.PrimaryKeyRelatedField(queryset=Label.objects.values_list("id", flat=True)),
        write_only=True,
        required=False,
    )
    type_id = serializers.PrimaryKeyRelatedField(
        source="type", queryset=IssueType.objects.all(), required=False, allow_null=True
    )

    class Meta:
        model = Issue
        read_only_fields = ["id", "workspace", "project", "updated_by", "updated_at"]
        exclude = ["description", "description_stripped"]

    def validate(self, data):
        """Cross-field validation: date ordering, HTML safety, project scoping."""
        if (
            data.get("start_date", None) is not None
            and data.get("target_date", None) is not None
            and data.get("start_date", None) > data.get("target_date", None)
        ):
            raise serializers.ValidationError("Start date cannot exceed target date")

        # Round-trip the HTML through lxml to ensure it is well formed.
        try:
            if data.get("description_html", None) is not None:
                parsed = html.fromstring(data["description_html"])
                parsed_str = html.tostring(parsed, encoding="unicode")
                data["description_html"] = parsed_str
        except Exception:
            raise serializers.ValidationError("Invalid HTML passed")

        # Validate description content for security
        if data.get("description_html"):
            is_valid, error_msg, sanitized_html = validate_html_content(data["description_html"])
            if not is_valid:
                raise serializers.ValidationError({"error": "html content is not valid"})
            # Update the data with sanitized HTML if available
            if sanitized_html is not None:
                data["description_html"] = sanitized_html

        if data.get("description_binary"):
            is_valid, error_msg = validate_binary_data(data["description_binary"])
            if not is_valid:
                raise serializers.ValidationError({"description_binary": "Invalid binary data"})

        # Keep only assignees that are active project members with at least
        # member-level (15) access.
        if data.get("assignees", []):
            data["assignees"] = ProjectMember.objects.filter(
                project_id=self.context.get("project_id"),
                is_active=True,
                role__gte=15,
                member_id__in=data["assignees"],
            ).values_list("member_id", flat=True)

        # Keep only labels that belong to this project.
        if data.get("labels", []):
            data["labels"] = Label.objects.filter(
                project_id=self.context.get("project_id"), id__in=data["labels"]
            ).values_list("id", flat=True)

        # Check state is from the project only else raise validation error
        if (
            data.get("state")
            and not State.objects.filter(project_id=self.context.get("project_id"), pk=data.get("state").id).exists()
        ):
            raise serializers.ValidationError("State is not valid please pass a valid state_id")

        # Check parent issue is from workspace as it can be cross workspace
        if (
            data.get("parent")
            and not Issue.objects.filter(
                workspace_id=self.context.get("workspace_id"),
                project_id=self.context.get("project_id"),
                pk=data.get("parent").id,
            ).exists()
        ):
            raise serializers.ValidationError("Parent is not valid issue_id please pass a valid issue_id")

        if (
            data.get("estimate_point")
            and not EstimatePoint.objects.filter(
                workspace_id=self.context.get("workspace_id"),
                project_id=self.context.get("project_id"),
                pk=data.get("estimate_point").id,
            ).exists()
        ):
            raise serializers.ValidationError("Estimate point is not valid please pass a valid estimate_point_id")
        return data

    def create(self, validated_data):
        """Create the issue plus its assignee/label join rows.

        Expects ``project_id``, ``workspace_id`` and ``default_assignee_id``
        in the serializer context.
        """
        assignees = validated_data.pop("assignees", None)
        labels = validated_data.pop("labels", None)

        project_id = self.context["project_id"]
        workspace_id = self.context["workspace_id"]
        default_assignee_id = self.context["default_assignee_id"]

        issue_type = validated_data.pop("type", None)
        if not issue_type:
            # Fall back to the project's default issue type when none was given.
            # (Removed a redundant `issue_type = issue_type` self-assignment.)
            issue_type = IssueType.objects.filter(
                project_issue_types__project_id=project_id, is_default=True
            ).first()

        issue = Issue.objects.create(**validated_data, project_id=project_id, type=issue_type)

        # Issue Audit Users
        created_by_id = issue.created_by_id
        updated_by_id = issue.updated_by_id

        if assignees is not None and len(assignees):
            try:
                IssueAssignee.objects.bulk_create(
                    [
                        IssueAssignee(
                            assignee_id=assignee_id,
                            issue=issue,
                            project_id=project_id,
                            workspace_id=workspace_id,
                            created_by_id=created_by_id,
                            updated_by_id=updated_by_id,
                        )
                        for assignee_id in assignees
                    ],
                    batch_size=10,
                )
            except IntegrityError:
                # Best effort: duplicate assignee rows are silently skipped.
                pass
        else:
            try:
                # Then assign it to default assignee, if it is a valid assignee
                if (
                    default_assignee_id is not None
                    and ProjectMember.objects.filter(
                        member_id=default_assignee_id,
                        project_id=project_id,
                        role__gte=15,
                        is_active=True,
                    ).exists()
                ):
                    IssueAssignee.objects.create(
                        assignee_id=default_assignee_id,
                        issue=issue,
                        project_id=project_id,
                        workspace_id=workspace_id,
                        created_by_id=created_by_id,
                        updated_by_id=updated_by_id,
                    )
            except IntegrityError:
                pass

        if labels is not None and len(labels):
            try:
                IssueLabel.objects.bulk_create(
                    [
                        IssueLabel(
                            label_id=label_id,
                            issue=issue,
                            project_id=project_id,
                            workspace_id=workspace_id,
                            created_by_id=created_by_id,
                            updated_by_id=updated_by_id,
                        )
                        for label_id in labels
                    ],
                    batch_size=10,
                )
            except IntegrityError:
                pass

        return issue

    def update(self, instance, validated_data):
        """Replace the assignee/label sets when provided, then update the issue."""
        assignees = validated_data.pop("assignees", None)
        labels = validated_data.pop("labels", None)

        # Related models
        project_id = instance.project_id
        workspace_id = instance.workspace_id
        created_by_id = instance.created_by_id
        updated_by_id = instance.updated_by_id

        if assignees is not None:
            # Full replacement semantics: drop the old rows first.
            IssueAssignee.objects.filter(issue=instance).delete()
            try:
                IssueAssignee.objects.bulk_create(
                    [
                        IssueAssignee(
                            assignee_id=assignee_id,
                            issue=instance,
                            project_id=project_id,
                            workspace_id=workspace_id,
                            created_by_id=created_by_id,
                            updated_by_id=updated_by_id,
                        )
                        for assignee_id in assignees
                    ],
                    batch_size=10,
                    ignore_conflicts=True,
                )
            except IntegrityError:
                pass

        if labels is not None:
            IssueLabel.objects.filter(issue=instance).delete()
            try:
                IssueLabel.objects.bulk_create(
                    [
                        IssueLabel(
                            label_id=label_id,
                            issue=instance,
                            project_id=project_id,
                            workspace_id=workspace_id,
                            created_by_id=created_by_id,
                            updated_by_id=updated_by_id,
                        )
                        for label_id in labels
                    ],
                    batch_size=10,
                    ignore_conflicts=True,
                )
            except IntegrityError:
                pass

        # Timestamp update occurs even when only related models are changed.
        instance.updated_at = timezone.now()
        return super().update(instance, validated_data)

    def to_representation(self, instance):
        """Render assignees/labels either expanded or as plain id strings."""
        data = super().to_representation(instance)
        if "assignees" in self.fields:
            if "assignees" in self.expand:
                from .user import UserLiteSerializer

                data["assignees"] = UserLiteSerializer(
                    User.objects.filter(
                        pk__in=IssueAssignee.objects.filter(issue=instance).values_list("assignee_id", flat=True)
                    ),
                    many=True,
                ).data
            else:
                data["assignees"] = [
                    str(assignee)
                    for assignee in IssueAssignee.objects.filter(issue=instance).values_list("assignee_id", flat=True)
                ]
        if "labels" in self.fields:
            if "labels" in self.expand:
                data["labels"] = LabelSerializer(
                    Label.objects.filter(
                        pk__in=IssueLabel.objects.filter(issue=instance).values_list("label_id", flat=True)
                    ),
                    many=True,
                ).data
            else:
                data["labels"] = [
                    str(label) for label in IssueLabel.objects.filter(issue=instance).values_list("label_id", flat=True)
                ]
        return data
class IssueLiteSerializer(BaseSerializer):
    """
    Lightweight work item serializer for minimal data transfer.

    Provides essential work item identifiers optimized for list views,
    references, and performance-critical operations.
    """

    class Meta:
        model = Issue
        # Identifier-only payload; everything is read-only.
        fields = ["id", "sequence_id", "project_id"]
        read_only_fields = fields
class LabelCreateUpdateSerializer(BaseSerializer):
    """
    Serializer for creating and updating work item labels.

    Manages label metadata including colors, descriptions, hierarchy
    (via ``parent``), and sorting for work item categorization and filtering.
    """

    class Meta:
        model = Label
        # Writable subset; ownership and audit columns are server-managed.
        fields = [
            "name",
            "color",
            "description",
            "external_source",
            "external_id",
            "parent",
            "sort_order",
        ]
        read_only_fields = [
            "id",
            "workspace",
            "project",
            "created_by",
            "updated_by",
            "created_at",
            "updated_at",
            "deleted_at",
        ]
class LabelSerializer(BaseSerializer):
    """
    Full serializer for work item labels with complete metadata.

    Exposes every model field; workspace/project scoping and audit
    columns are read-only.
    """

    class Meta:
        model = Label
        fields = "__all__"
        read_only_fields = [
            "id",
            "workspace",
            "project",
            "created_by",
            "updated_by",
            "created_at",
            "updated_at",
            "deleted_at",
        ]
class IssueLinkCreateSerializer(BaseSerializer):
    """
    Serializer for creating work item external links.

    Validates URL format and scheme, and prevents the same URL from being
    attached twice to a single work item.
    """

    class Meta:
        model = IssueLink
        fields = ["title", "url", "issue_id"]
        read_only_fields = [
            "id",
            "workspace",
            "project",
            "issue",
            "created_by",
            "updated_by",
            "created_at",
            "updated_at",
        ]

    def validate_url(self, value):
        """Ensure the value is a well-formed http(s) URL."""
        try:
            URLValidator()(value)
        except ValidationError:
            raise serializers.ValidationError("Invalid URL format.")
        # Restrict to web schemes only.
        if not value.startswith(("http://", "https://")):
            raise serializers.ValidationError("Invalid URL scheme.")
        return value

    def create(self, validated_data):
        """Create the link, rejecting a URL already attached to the same issue."""
        duplicate_exists = IssueLink.objects.filter(
            url=validated_data.get("url"), issue_id=validated_data.get("issue_id")
        ).exists()
        if duplicate_exists:
            raise serializers.ValidationError({"error": "URL already exists for this Issue"})
        return IssueLink.objects.create(**validated_data)
class IssueLinkUpdateSerializer(IssueLinkCreateSerializer):
    """
    Serializer for updating work item external links.

    Extends link creation with update-specific validation to prevent
    URL conflicts and maintain link integrity during modifications.
    """

    class Meta(IssueLinkCreateSerializer.Meta):
        model = IssueLink
        # "issue_id" is already in the create serializer's field list; reuse it
        # as-is instead of appending a duplicate entry.
        fields = IssueLinkCreateSerializer.Meta.fields
        read_only_fields = IssueLinkCreateSerializer.Meta.read_only_fields

    def update(self, instance, validated_data):
        """Update the link, rejecting a URL already used by another link of the issue."""
        if (
            IssueLink.objects.filter(url=validated_data.get("url"), issue_id=instance.issue_id)
            .exclude(pk=instance.id)
            .exists()
        ):
            raise serializers.ValidationError({"error": "URL already exists for this Issue"})
        return super().update(instance, validated_data)
class IssueLinkSerializer(BaseSerializer):
    """
    Full read serializer for work item external links.

    Exposes every model field; scoping and audit columns are read-only.
    """

    class Meta:
        model = IssueLink
        fields = "__all__"
        read_only_fields = [
            "id",
            "workspace",
            "project",
            "issue",
            "created_by",
            "updated_by",
            "created_at",
            "updated_at",
        ]
class IssueAttachmentSerializer(BaseSerializer):
    """
    Serializer for work item file attachments.

    Backed by the FileAsset model; exposes all fields with scoping and
    audit columns read-only.
    """

    class Meta:
        model = FileAsset
        fields = "__all__"
        read_only_fields = [
            "id",
            "workspace",
            "project",
            "issue",
            "updated_by",
            "updated_at",
        ]
class IssueCommentCreateSerializer(BaseSerializer):
    """
    Serializer for creating work item comments.

    Accepts JSON and HTML comment bodies, an access level, and external
    integration identifiers; authorship and audit columns are server-managed.
    """

    class Meta:
        model = IssueComment
        fields = [
            "comment_json",
            "comment_html",
            "access",
            "external_source",
            "external_id",
        ]
        read_only_fields = [
            "id",
            "workspace",
            "project",
            "issue",
            "created_by",
            "updated_by",
            "created_at",
            "updated_at",
            "deleted_at",
            "actor",
            "comment_stripped",
            "edited_at",
        ]
class IssueCommentSerializer(BaseSerializer):
    """
    Full serializer for work item comments with membership context.

    Adds an ``is_member`` flag supplied by the view's queryset annotation and
    normalizes the HTML body before saving.
    """

    is_member = serializers.BooleanField(read_only=True)

    class Meta:
        model = IssueComment
        read_only_fields = [
            "id",
            "workspace",
            "project",
            "issue",
            "created_by",
            "updated_by",
            "created_at",
            "updated_at",
        ]
        exclude = ["comment_stripped", "comment_json"]

    def validate(self, data):
        """Normalize comment_html by round-tripping it through lxml."""
        comment_html = data.get("comment_html", None)
        if comment_html is None:
            return data
        try:
            data["comment_html"] = html.tostring(html.fromstring(comment_html), encoding="unicode")
        except Exception:
            raise serializers.ValidationError("Invalid HTML passed")
        return data
class IssueActivitySerializer(BaseSerializer):
    """
    Serializer for work item activity and change history.

    Represents work item modifications, state changes, and user interactions
    for audit trails and activity feeds.
    """

    class Meta:
        model = IssueActivity
        # Audit authorship columns are not exposed in the payload.
        exclude = ["created_by", "updated_by"]
class CycleIssueSerializer(BaseSerializer):
    """
    Serializer for work items within cycles.

    Provides cycle context for a work item via the nested read-only
    ``cycle`` representation.
    """

    cycle = CycleSerializer(read_only=True)

    class Meta:
        # NOTE(review): no `model` is declared here, so this serializer cannot
        # perform ModelSerializer create/update — presumably it is used only for
        # read/schema purposes; confirm this is intentional.
        fields = ["cycle"]
class ModuleIssueSerializer(BaseSerializer):
    """
    Serializer for work items within modules.

    Provides module context for a work item via the nested read-only
    ``module`` representation.
    """

    module = ModuleSerializer(read_only=True)

    class Meta:
        # NOTE(review): no `model` is declared here, so this serializer cannot
        # perform ModelSerializer create/update — presumably read/schema only;
        # confirm this is intentional.
        fields = ["module"]
class LabelLiteSerializer(BaseSerializer):
    """
    Lightweight label serializer for minimal data transfer.

    Provides essential label information with visual properties,
    optimized for UI display and performance-critical operations.
    """

    class Meta:
        model = Label
        fields = ["id", "name", "color"]
class IssueExpandSerializer(BaseSerializer):
    """
    Extended work item serializer with full relationship expansion.

    Renders cycle, module, and state through nested lite serializers, and
    switches labels/assignees between expanded objects and bare ids based on
    the ``expand`` list in the serializer context.
    """

    cycle = CycleLiteSerializer(source="issue_cycle.cycle", read_only=True)
    module = ModuleLiteSerializer(source="issue_module.module", read_only=True)
    labels = serializers.SerializerMethodField()
    assignees = serializers.SerializerMethodField()
    state = StateLiteSerializer(read_only=True)

    def get_labels(self, obj):
        """Return expanded label objects, or bare label ids when not expanded."""
        # Relies on the view prefetching `label_issue` to avoid N+1 queries.
        label_links = obj.label_issue.all()
        if "labels" in self.context.get("expand", []):
            return LabelLiteSerializer([link.label for link in label_links], many=True).data
        return [link.label_id for link in label_links]

    def get_assignees(self, obj):
        """Return expanded user objects, or bare user ids when not expanded."""
        assignee_links = obj.issue_assignee.all()
        if "assignees" in self.context.get("expand", []):
            return UserLiteSerializer([link.assignee for link in assignee_links], many=True).data
        return [link.assignee_id for link in assignee_links]

    class Meta:
        model = Issue
        fields = "__all__"
        read_only_fields = [
            "id",
            "workspace",
            "project",
            "created_by",
            "updated_by",
            "created_at",
            "updated_at",
        ]
class IssueAttachmentUploadSerializer(serializers.Serializer):
    """
    Serializer for work item attachment upload request validation.

    Validates upload metadata (filename, MIME type, size) plus optional
    external integration identifiers; it does not handle the file bytes.
    """

    name = serializers.CharField(help_text="Original filename of the asset")
    type = serializers.CharField(required=False, help_text="MIME type of the file")
    size = serializers.IntegerField(help_text="File size in bytes")
    external_id = serializers.CharField(
        required=False,
        help_text="External identifier for the asset (for integration tracking)",
    )
    external_source = serializers.CharField(
        required=False, help_text="External source system (for integration tracking)"
    )
class IssueSearchSerializer(serializers.Serializer):
    """
    Serializer for work item search result data formatting.

    Provides a flat, standardized search result structure including work item
    identifiers, project context, and workspace information. Field names with
    double underscores mirror the queryset's ``values()`` lookups.
    """

    id = serializers.CharField(required=True, help_text="Issue ID")
    name = serializers.CharField(required=True, help_text="Issue name")
    sequence_id = serializers.CharField(required=True, help_text="Issue sequence ID")
    project__identifier = serializers.CharField(required=True, help_text="Project identifier")
    project_id = serializers.CharField(required=True, help_text="Project ID")
    workspace__slug = serializers.CharField(required=True, help_text="Workspace slug")

View File

@@ -0,0 +1,39 @@
# Third party imports
from rest_framework import serializers
# Module imports
from plane.db.models import ProjectMember, WorkspaceMember
from .base import BaseSerializer
from plane.db.models import User
from plane.utils.permissions import ROLE
class ProjectMemberSerializer(BaseSerializer):
    """
    Serializer for project members.

    Validates that the member belongs to the workspace identified by
    ``context['slug']`` and that the role is one of the supported values.
    """

    member = serializers.PrimaryKeyRelatedField(
        queryset=User.objects.all(),
        required=True,
    )

    def validate_member(self, value):
        """The member must exist and belong to the context workspace."""
        workspace_slug = self.context.get("slug")
        if not workspace_slug:
            raise serializers.ValidationError("Slug is required", code="INVALID_SLUG")
        if not value:
            raise serializers.ValidationError("Member is required", code="INVALID_MEMBER")
        in_workspace = WorkspaceMember.objects.filter(
            workspace__slug=workspace_slug, member=value
        ).exists()
        if not in_workspace:
            raise serializers.ValidationError("Member not found in workspace", code="INVALID_MEMBER")
        return value

    def validate_role(self, value):
        """Only admin, member and guest roles are accepted."""
        allowed_roles = {ROLE.ADMIN.value, ROLE.MEMBER.value, ROLE.GUEST.value}
        if value not in allowed_roles:
            raise serializers.ValidationError("Invalid role", code="INVALID_ROLE")
        return value

    class Meta:
        model = ProjectMember
        fields = ["id", "member", "role"]
        read_only_fields = ["id"]

View File

@@ -0,0 +1,272 @@
# Third party imports
from rest_framework import serializers
# Module imports
from .base import BaseSerializer
from plane.db.models import (
User,
Module,
ModuleLink,
ModuleMember,
ModuleIssue,
ProjectMember,
)
class ModuleCreateSerializer(BaseSerializer):
    """
    Serializer for creating modules with member validation and date checking.

    Handles module creation including member assignment validation, date range
    verification, and duplicate name prevention for feature-based
    project organization setup.
    """

    # Write-only list of user ids; narrowed to project members in validate().
    members = serializers.ListField(
        child=serializers.PrimaryKeyRelatedField(queryset=User.objects.all()),
        write_only=True,
        required=False,
    )

    class Meta:
        model = Module
        fields = [
            "name",
            "description",
            "start_date",
            "target_date",
            "status",
            "lead",
            "members",
            "external_source",
            "external_id",
        ]
        read_only_fields = [
            "id",
            "workspace",
            "project",
            "created_by",
            "updated_by",
            "created_at",
            "updated_at",
            "deleted_at",
        ]

    def validate(self, data):
        # Start date must not be later than the target date.
        if (
            data.get("start_date", None) is not None
            and data.get("target_date", None) is not None
            and data.get("start_date", None) > data.get("target_date", None)
        ):
            raise serializers.ValidationError("Start date cannot exceed target date")
        # Keep only members that actually belong to the project.
        if data.get("members", []):
            data["members"] = ProjectMember.objects.filter(
                project_id=self.context.get("project_id"), member_id__in=data["members"]
            ).values_list("member_id", flat=True)
        return data

    def create(self, validated_data):
        # Expects "project_id" and "workspace_id" in the serializer context.
        members = validated_data.pop("members", None)
        project_id = self.context["project_id"]
        workspace_id = self.context["workspace_id"]

        module_name = validated_data.get("name")
        if module_name:
            # Lookup for the module name in the module table for that project
            module = Module.objects.filter(name=module_name, project_id=project_id).first()
            if module:
                # Surface the existing module's id so API clients can link to it.
                raise serializers.ValidationError(
                    {
                        "id": str(module.id),
                        "code": "MODULE_NAME_ALREADY_EXISTS",
                        "error": "Module with this name already exists",
                        "message": "Module with this name already exists",
                    }
                )

        module = Module.objects.create(**validated_data, project_id=project_id)

        if members is not None:
            # Duplicate membership rows are silently skipped via ignore_conflicts.
            ModuleMember.objects.bulk_create(
                [
                    ModuleMember(
                        module=module,
                        member_id=str(member),
                        project_id=project_id,
                        workspace_id=workspace_id,
                        created_by=module.created_by,
                        updated_by=module.updated_by,
                    )
                    for member in members
                ],
                batch_size=10,
                ignore_conflicts=True,
            )
        return module
class ModuleUpdateSerializer(ModuleCreateSerializer):
    """
    Serializer for updating modules with enhanced validation and member management.

    Extends module creation with update-specific validations: name conflict
    checking against sibling modules and full replacement of the member set
    when one is provided.
    """

    class Meta(ModuleCreateSerializer.Meta):
        model = Module
        # "members" is already in the create serializer's field list; reuse it
        # as-is instead of appending a duplicate entry.
        fields = ModuleCreateSerializer.Meta.fields
        read_only_fields = ModuleCreateSerializer.Meta.read_only_fields

    def update(self, instance, validated_data):
        """Update the module, rejecting a name already used by a sibling module."""
        members = validated_data.pop("members", None)

        module_name = validated_data.get("name")
        if module_name:
            # Lookup for the module name in the module table for that project
            if Module.objects.filter(name=module_name, project=instance.project).exclude(id=instance.id).exists():
                raise serializers.ValidationError({"error": "Module with this name already exists"})

        if members is not None:
            # Full replacement semantics: drop the old membership rows first.
            ModuleMember.objects.filter(module=instance).delete()
            ModuleMember.objects.bulk_create(
                [
                    ModuleMember(
                        module=instance,
                        member_id=str(member),
                        project=instance.project,
                        workspace=instance.project.workspace,
                        created_by=instance.created_by,
                        updated_by=instance.updated_by,
                    )
                    for member in members
                ],
                batch_size=10,
                ignore_conflicts=True,
            )
        return super().update(instance, validated_data)
class ModuleSerializer(BaseSerializer):
    """
    Comprehensive module serializer with work item metrics and member management.

    Exposes issue counts by status (supplied by the view's queryset
    annotations) and serializes member ids in the output payload.
    """

    # Write-only member ids for input; output members come from to_representation().
    members = serializers.ListField(
        child=serializers.PrimaryKeyRelatedField(queryset=User.objects.all()),
        write_only=True,
        required=False,
    )
    # Read-only counters; expected to be annotated onto the queryset by the view.
    total_issues = serializers.IntegerField(read_only=True)
    cancelled_issues = serializers.IntegerField(read_only=True)
    completed_issues = serializers.IntegerField(read_only=True)
    started_issues = serializers.IntegerField(read_only=True)
    unstarted_issues = serializers.IntegerField(read_only=True)
    backlog_issues = serializers.IntegerField(read_only=True)

    class Meta:
        model = Module
        fields = "__all__"
        read_only_fields = [
            "id",
            "workspace",
            "project",
            "created_by",
            "updated_by",
            "created_at",
            "updated_at",
            "deleted_at",
        ]

    def to_representation(self, instance):
        # Render members as a list of id strings on output.
        data = super().to_representation(instance)
        data["members"] = [str(member.id) for member in instance.members.all()]
        return data
class ModuleIssueSerializer(BaseSerializer):
    """
    Serializer for module-work item relationships with sub-item counting.

    Manages the association between modules and work items; ``sub_issues_count``
    is expected to be annotated onto the queryset by the view.
    """

    sub_issues_count = serializers.IntegerField(read_only=True)

    class Meta:
        model = ModuleIssue
        fields = "__all__"
        read_only_fields = [
            "workspace",
            "project",
            "created_by",
            "updated_by",
            "created_at",
            "updated_at",
            "module",
        ]
class ModuleLinkSerializer(BaseSerializer):
    """
    Serializer for module external links.

    Handles external resource associations with modules including
    duplicate prevention for reference management.
    """

    class Meta:
        model = ModuleLink
        fields = "__all__"
        read_only_fields = [
            "workspace",
            "project",
            "created_by",
            "updated_by",
            "created_at",
            "updated_at",
            "module",
        ]

    def create(self, validated_data):
        """Create the link, rejecting a URL already attached to the same module."""
        if ModuleLink.objects.filter(url=validated_data.get("url"), module_id=validated_data.get("module_id")).exists():
            # Bug fix: the message previously said "Issue" although this check
            # guards module links.
            raise serializers.ValidationError({"error": "URL already exists for this Module"})
        return ModuleLink.objects.create(**validated_data)
class ModuleLiteSerializer(BaseSerializer):
    """
    Lightweight module serializer for minimal data transfer.

    Provides module fields without the computed issue-count metrics of
    ModuleSerializer, for list views and reference lookups.
    """

    class Meta:
        model = Module
        fields = "__all__"
class ModuleIssueRequestSerializer(serializers.Serializer):
    """
    Serializer for bulk work item assignment to modules.

    Validates a list of work item UUIDs for batch module-assignment requests.
    """

    issues = serializers.ListField(
        child=serializers.UUIDField(),
        help_text="List of issue IDs to add to the module",
    )

View File

@@ -0,0 +1,285 @@
# Third party imports
import random
from rest_framework import serializers
# Module imports
from plane.db.models import (
Project,
ProjectIdentifier,
WorkspaceMember,
State,
Estimate,
)
from plane.utils.content_validator import (
validate_html_content,
)
from .base import BaseSerializer
class ProjectCreateSerializer(BaseSerializer):
    """
    Serializer for creating projects with workspace validation.

    Handles project creation including identifier validation, member
    verification, and workspace association. When no logo is supplied, a
    random icon/color pair is chosen from the defaults below.
    """

    # Candidate colors for the randomly generated default project icon.
    PROJECT_ICON_DEFAULT_COLORS = [
        "#95999f",
        "#6d7b8a",
        "#5e6ad2",
        "#02b5ed",
        "#02b55c",
        "#f2be02",
        "#e57a00",
        "#f38e82",
    ]
    # Candidate icon names for the randomly generated default project icon.
    PROJECT_ICON_DEFAULT_ICONS = [
        "home",
        "apps",
        "settings",
        "star",
        "favorite",
        "done",
        "check_circle",
        "add_task",
        "create_new_folder",
        "dataset",
        "terminal",
        "key",
        "rocket",
        "public",
        "quiz",
        "mood",
        "gavel",
        "eco",
        "diamond",
        "forest",
        "bolt",
        "sync",
        "cached",
        "library_add",
        "view_timeline",
        "view_kanban",
        "empty_dashboard",
        "cycle",
    ]

    class Meta:
        model = Project
        fields = [
            "name",
            "description",
            "project_lead",
            "default_assignee",
            "identifier",
            "icon_prop",
            "emoji",
            "cover_image",
            "module_view",
            "cycle_view",
            "issue_views_view",
            "page_view",
            "intake_view",
            "guest_view_all_features",
            "archive_in",
            "close_in",
            "timezone",
            "external_source",
            "external_id",
            "is_issue_type_enabled",
            "is_time_tracking_enabled",
        ]
        read_only_fields = [
            "id",
            "workspace",
            "created_at",
            "updated_at",
            "created_by",
            "updated_by",
            "logo_props",
        ]

    def validate(self, data):
        # Expects "workspace_id" in the serializer context.
        if data.get("project_lead", None) is not None:
            # Check if the project lead is a member of the workspace
            if not WorkspaceMember.objects.filter(
                workspace_id=self.context["workspace_id"],
                member_id=data.get("project_lead"),
            ).exists():
                raise serializers.ValidationError("Project lead should be a user in the workspace")

        if data.get("default_assignee", None) is not None:
            # Check if the default assignee is a member of the workspace
            if not WorkspaceMember.objects.filter(
                workspace_id=self.context["workspace_id"],
                member_id=data.get("default_assignee"),
            ).exists():
                raise serializers.ValidationError("Default assignee should be a user in the workspace")
        return data

    def create(self, validated_data):
        # The identifier is normalized to upper case and must be unique per workspace.
        identifier = validated_data.get("identifier", "").strip().upper()
        if identifier == "":
            raise serializers.ValidationError(detail="Project Identifier is required")

        if ProjectIdentifier.objects.filter(name=identifier, workspace_id=self.context["workspace_id"]).exists():
            raise serializers.ValidationError(detail="Project Identifier is taken")

        if validated_data.get("logo_props", None) is None:
            # Generate a random icon and color for the project icon
            validated_data["logo_props"] = {
                "in_use": "icon",
                "icon": {
                    "name": random.choice(self.PROJECT_ICON_DEFAULT_ICONS),
                    "color": random.choice(self.PROJECT_ICON_DEFAULT_COLORS),
                },
            }

        project = Project.objects.create(**validated_data, workspace_id=self.context["workspace_id"])
        return project
class ProjectUpdateSerializer(ProjectCreateSerializer):
    """
    Serializer for updating projects with state and estimation management.

    Extends project creation with update-specific validations: the default
    state and the estimate must both belong to the project being updated.
    """

    class Meta(ProjectCreateSerializer.Meta):
        model = Project
        fields = ProjectCreateSerializer.Meta.fields + [
            "default_state",
            "estimate",
        ]
        read_only_fields = ProjectCreateSerializer.Meta.read_only_fields

    def update(self, instance, validated_data):
        """Update a project after scoping checks on default_state and estimate."""
        default_state = validated_data.get("default_state", None)
        if default_state is not None and not State.objects.filter(project=instance, id=default_state).exists():
            # The default state must be one of this project's states.
            raise serializers.ValidationError("Default state should be a state in the project")

        estimate = validated_data.get("estimate", None)
        if estimate is not None and not Estimate.objects.filter(project=instance, id=estimate).exists():
            # The estimate must be one of this project's estimates.
            raise serializers.ValidationError("Estimate should be a estimate in the project")

        return super().update(instance, validated_data)
class ProjectSerializer(BaseSerializer):
    """
    Comprehensive project serializer with metrics and member context.

    Exposes member/cycle/module totals, deployment status, and user-specific
    context (``is_member``, ``member_role``) — all expected to be annotated
    onto the queryset by the view.
    """

    # Read-only annotated metrics supplied by the view's queryset.
    total_members = serializers.IntegerField(read_only=True)
    total_cycles = serializers.IntegerField(read_only=True)
    total_modules = serializers.IntegerField(read_only=True)
    is_member = serializers.BooleanField(read_only=True)
    sort_order = serializers.FloatField(read_only=True)
    member_role = serializers.IntegerField(read_only=True)
    is_deployed = serializers.BooleanField(read_only=True)
    cover_image_url = serializers.CharField(read_only=True)

    class Meta:
        model = Project
        fields = "__all__"
        read_only_fields = [
            "id",
            "emoji",
            "workspace",
            "created_at",
            "updated_at",
            "created_by",
            "updated_by",
            "deleted_at",
            "cover_image_url",
        ]

    def validate(self, data):
        # Expects "workspace_id" in the serializer context.
        # Check project lead should be a member of the workspace
        if (
            data.get("project_lead", None) is not None
            and not WorkspaceMember.objects.filter(
                workspace_id=self.context["workspace_id"],
                member_id=data.get("project_lead"),
            ).exists()
        ):
            raise serializers.ValidationError("Project lead should be a user in the workspace")

        # Check default assignee should be a member of the workspace
        if (
            data.get("default_assignee", None) is not None
            and not WorkspaceMember.objects.filter(
                workspace_id=self.context["workspace_id"],
                member_id=data.get("default_assignee"),
            ).exists()
        ):
            raise serializers.ValidationError("Default assignee should be a user in the workspace")

        # Validate description content for security
        if "description_html" in data and data["description_html"]:
            # NOTE(review): sanitization only runs when description_html is a dict;
            # plain string values bypass validate_html_content entirely — confirm
            # this branch condition is intentional.
            if isinstance(data["description_html"], dict):
                is_valid, error_msg, sanitized_html = validate_html_content(str(data["description_html"]))

                # Update the data with sanitized HTML if available
                if sanitized_html is not None:
                    data["description_html"] = sanitized_html

                if not is_valid:
                    raise serializers.ValidationError({"error": "html content is not valid"})
        return data

    def create(self, validated_data):
        # The identifier is normalized to upper case and must be unique per workspace.
        identifier = validated_data.get("identifier", "").strip().upper()
        if identifier == "":
            raise serializers.ValidationError(detail="Project Identifier is required")

        if ProjectIdentifier.objects.filter(name=identifier, workspace_id=self.context["workspace_id"]).exists():
            raise serializers.ValidationError(detail="Project Identifier is taken")

        project = Project.objects.create(**validated_data, workspace_id=self.context["workspace_id"])
        # Record the identifier so future projects cannot reuse it.
        _ = ProjectIdentifier.objects.create(
            name=project.identifier,
            project=project,
            workspace_id=self.context["workspace_id"],
        )
        return project
class ProjectLiteSerializer(BaseSerializer):
    """
    Lightweight project serializer for minimal data transfer.

    Provides essential project information including identifiers, visual
    properties, and basic metadata optimized for list views and references.
    """

    cover_image_url = serializers.CharField(read_only=True)

    class Meta:
        model = Project
        fields = [
            "id",
            "identifier",
            "name",
            "cover_image",
            "icon_prop",
            "emoji",
            "description",
            "cover_image_url",
        ]
        read_only_fields = fields

View File

@@ -0,0 +1,47 @@
# Module imports
from .base import BaseSerializer
from plane.db.models import State
class StateSerializer(BaseSerializer):
    """
    Serializer for work item states with default state management.

    When a state is marked as default, every other state of the project is
    demoted so only a single default remains.
    """

    def validate(self, data):
        """Keep a single default state per project before saving this one."""
        wants_default = data.get("default", False)
        if wants_default:
            # Side effect: demote all existing states of the project so the
            # state being saved becomes the sole default.
            State.objects.filter(project_id=self.context.get("project_id")).update(default=False)
        return data

    class Meta:
        model = State
        fields = "__all__"
        read_only_fields = [
            "id",
            "created_by",
            "updated_by",
            "created_at",
            "updated_at",
            "workspace",
            "project",
            "deleted_at",
            "slug",
        ]
class StateLiteSerializer(BaseSerializer):
    """
    Lightweight state serializer for minimal data transfer.

    Provides essential state information including visual properties
    and grouping data optimized for UI display and filtering.
    """

    class Meta:
        model = State
        fields = ["id", "name", "color", "group"]
        read_only_fields = fields

View File

@@ -0,0 +1,34 @@
from rest_framework import serializers
# Module imports
from plane.db.models import User
from .base import BaseSerializer
class UserLiteSerializer(BaseSerializer):
    """
    Lightweight user serializer for minimal data transfer.
    Provides essential user information including names, avatar, and contact details
    optimized for member lists, assignee displays, and user references.
    """
    # Resolved avatar URL; read-only because it is derived, not client-supplied.
    avatar_url = serializers.CharField(
        help_text="Avatar URL",
        read_only=True,
    )
    class Meta:
        model = User
        # Fix: "email" was listed twice; the duplicate entry is removed.
        # NOTE(review): DRF de-duplicates field names when building the
        # serializer, so dropping the repeat does not change the output.
        fields = [
            "id",
            "first_name",
            "last_name",
            "email",
            "avatar",
            "avatar_url",
            "display_name",
        ]
        # Entire payload is read-only; this serializer is never used for writes.
        read_only_fields = fields

View File

@@ -0,0 +1,17 @@
# Module imports
from plane.db.models import Workspace
from .base import BaseSerializer
class WorkspaceLiteSerializer(BaseSerializer):
    """
    Lightweight workspace serializer for minimal data transfer.
    Provides essential workspace identifiers including name, slug, and ID
    optimized for navigation, references, and performance-critical operations.
    """
    class Meta:
        model = Workspace
        # Just enough to identify and link to a workspace.
        fields = ["name", "slug", "id"]
        # Entire payload is read-only; this serializer is never used for writes.
        read_only_fields = fields

View File

@@ -0,0 +1,25 @@
# Aggregated URL configuration for the API package.
# Each sub-module exposes its own ``urlpatterns`` list; they are flattened
# below into the single ``urlpatterns`` list Django resolves against.
from .asset import urlpatterns as asset_patterns
from .cycle import urlpatterns as cycle_patterns
from .intake import urlpatterns as intake_patterns
from .label import urlpatterns as label_patterns
from .member import urlpatterns as member_patterns
from .module import urlpatterns as module_patterns
from .project import urlpatterns as project_patterns
from .state import urlpatterns as state_patterns
from .user import urlpatterns as user_patterns
from .work_item import urlpatterns as work_item_patterns
from .invite import urlpatterns as invite_patterns
# Registration order is preserved left-to-right.
urlpatterns = [
    *asset_patterns,
    *cycle_patterns,
    *intake_patterns,
    *label_patterns,
    *member_patterns,
    *module_patterns,
    *project_patterns,
    *state_patterns,
    *user_patterns,
    *work_item_patterns,
    *invite_patterns,
]

View File

@@ -0,0 +1,40 @@
# URL routes for file-asset endpoints: user profile assets (client and
# server-credential variants) and workspace-scoped generic assets.
from django.urls import path
from plane.api.views import (
    UserAssetEndpoint,
    UserServerAssetEndpoint,
    GenericAssetEndpoint,
)
urlpatterns = [
    # Presigned-upload creation for user avatar/cover assets.
    path(
        "assets/user-assets/",
        UserAssetEndpoint.as_view(http_method_names=["post"]),
        name="user-assets",
    ),
    path(
        "assets/user-assets/<uuid:asset_id>/",
        UserAssetEndpoint.as_view(http_method_names=["patch", "delete"]),
        name="user-assets-detail",
    ),
    # Same operations performed with server-side S3 credentials.
    path(
        "assets/user-assets/server/",
        UserServerAssetEndpoint.as_view(http_method_names=["post"]),
        name="user-server-assets",
    ),
    path(
        "assets/user-assets/<uuid:asset_id>/server/",
        UserServerAssetEndpoint.as_view(http_method_names=["patch", "delete"]),
        name="user-server-assets-detail",
    ),
    # Workspace-scoped generic assets that can later be bound to entities.
    path(
        "workspaces/<str:slug>/assets/",
        GenericAssetEndpoint.as_view(http_method_names=["post"]),
        name="generic-asset",
    ),
    path(
        "workspaces/<str:slug>/assets/<uuid:asset_id>/",
        GenericAssetEndpoint.as_view(http_method_names=["get", "patch"]),
        name="generic-asset-detail",
    ),
]

View File

@@ -0,0 +1,53 @@
# URL routes for cycles: CRUD, cycle-issue membership, issue transfer,
# and archive/unarchive operations.
from django.urls import path
from plane.api.views.cycle import (
    CycleListCreateAPIEndpoint,
    CycleDetailAPIEndpoint,
    CycleIssueListCreateAPIEndpoint,
    CycleIssueDetailAPIEndpoint,
    TransferCycleIssueAPIEndpoint,
    CycleArchiveUnarchiveAPIEndpoint,
)
# NOTE(review): several routes share the same ``name`` (e.g. "cycles",
# "cycle-issues", "cycle-archive-unarchive"); ``reverse()`` on a duplicated
# name resolves to only one of them — confirm this is intended.
urlpatterns = [
    path(
        "workspaces/<str:slug>/projects/<uuid:project_id>/cycles/",
        CycleListCreateAPIEndpoint.as_view(http_method_names=["get", "post"]),
        name="cycles",
    ),
    path(
        "workspaces/<str:slug>/projects/<uuid:project_id>/cycles/<uuid:pk>/",
        CycleDetailAPIEndpoint.as_view(http_method_names=["get", "patch", "delete"]),
        name="cycles",
    ),
    path(
        "workspaces/<str:slug>/projects/<uuid:project_id>/cycles/<uuid:cycle_id>/cycle-issues/",
        CycleIssueListCreateAPIEndpoint.as_view(http_method_names=["get", "post"]),
        name="cycle-issues",
    ),
    path(
        "workspaces/<str:slug>/projects/<uuid:project_id>/cycles/<uuid:cycle_id>/cycle-issues/<uuid:issue_id>/",
        CycleIssueDetailAPIEndpoint.as_view(http_method_names=["get", "delete"]),
        name="cycle-issues",
    ),
    path(
        "workspaces/<str:slug>/projects/<uuid:project_id>/cycles/<uuid:cycle_id>/transfer-issues/",
        TransferCycleIssueAPIEndpoint.as_view(http_method_names=["post"]),
        name="transfer-issues",
    ),
    # Archive lifecycle: POST archives, GET lists archived, DELETE unarchives.
    path(
        "workspaces/<str:slug>/projects/<uuid:project_id>/cycles/<uuid:cycle_id>/archive/",
        CycleArchiveUnarchiveAPIEndpoint.as_view(http_method_names=["post"]),
        name="cycle-archive-unarchive",
    ),
    path(
        "workspaces/<str:slug>/projects/<uuid:project_id>/archived-cycles/",
        CycleArchiveUnarchiveAPIEndpoint.as_view(http_method_names=["get"]),
        name="cycle-archive-unarchive",
    ),
    path(
        "workspaces/<str:slug>/projects/<uuid:project_id>/archived-cycles/<uuid:cycle_id>/unarchive/",
        CycleArchiveUnarchiveAPIEndpoint.as_view(http_method_names=["delete"]),
        name="cycle-archive-unarchive",
    ),
]

View File

@@ -0,0 +1,20 @@
# URL routes for project intake (triage) issues: list/create and detail.
from django.urls import path
from plane.api.views import (
    IntakeIssueListCreateAPIEndpoint,
    IntakeIssueDetailAPIEndpoint,
)
urlpatterns = [
    path(
        "workspaces/<str:slug>/projects/<uuid:project_id>/intake-issues/",
        IntakeIssueListCreateAPIEndpoint.as_view(http_method_names=["get", "post"]),
        name="intake-issue",
    ),
    path(
        "workspaces/<str:slug>/projects/<uuid:project_id>/intake-issues/<uuid:issue_id>/",
        IntakeIssueDetailAPIEndpoint.as_view(http_method_names=["get", "patch", "delete"]),
        name="intake-issue",
    ),
]

View File

@@ -0,0 +1,18 @@
# Django imports
from django.urls import path, include
# Third party imports
from rest_framework.routers import DefaultRouter
# Module imports
from plane.api.views import WorkspaceInvitationsViewset
# Create router with just the invitations prefix (no workspace slug);
# the slug is applied once when the router URLs are included below.
router = DefaultRouter()
router.register(r"invitations", WorkspaceInvitationsViewset, basename="workspace-invitations")
# Wrap the router URLs with the workspace slug path, producing
# workspaces/<slug>/invitations/... routes.
urlpatterns = [
    path("workspaces/<str:slug>/", include(router.urls)),
]

View File

@@ -0,0 +1,17 @@
# URL routes for project labels: list/create and detail operations.
from django.urls import path
from plane.api.views import LabelListCreateAPIEndpoint, LabelDetailAPIEndpoint
urlpatterns = [
    path(
        "workspaces/<str:slug>/projects/<uuid:project_id>/labels/",
        LabelListCreateAPIEndpoint.as_view(http_method_names=["get", "post"]),
        name="label",
    ),
    path(
        "workspaces/<str:slug>/projects/<uuid:project_id>/labels/<uuid:pk>/",
        LabelDetailAPIEndpoint.as_view(http_method_names=["get", "patch", "delete"]),
        name="label",
    ),
]

View File

@@ -0,0 +1,32 @@
# URL routes for project and workspace membership.
from django.urls import path
from plane.api.views import ProjectMemberListCreateAPIEndpoint, ProjectMemberDetailAPIEndpoint, WorkspaceMemberAPIEndpoint
# NOTE(review): the "members/" and "project-members/" prefixes register the
# same views under the same route names — presumably one is a legacy alias;
# confirm which prefix is the deprecated one.
urlpatterns = [
    # Project members
    path(
        "workspaces/<str:slug>/projects/<uuid:project_id>/members/",
        ProjectMemberListCreateAPIEndpoint.as_view(http_method_names=["get", "post"]),
        name="project-members",
    ),
    path(
        "workspaces/<str:slug>/projects/<uuid:project_id>/members/<uuid:pk>/",
        ProjectMemberDetailAPIEndpoint.as_view(http_method_names=["patch", "delete", "get"]),
        name="project-member",
    ),
    path(
        "workspaces/<str:slug>/projects/<uuid:project_id>/project-members/",
        ProjectMemberListCreateAPIEndpoint.as_view(http_method_names=["get", "post"]),
        name="project-members",
    ),
    path(
        "workspaces/<str:slug>/projects/<uuid:project_id>/project-members/<uuid:pk>/",
        ProjectMemberDetailAPIEndpoint.as_view(http_method_names=["patch", "delete", "get"]),
        name="project-member",
    ),
    # Read-only listing of workspace members.
    path(
        "workspaces/<str:slug>/members/",
        WorkspaceMemberAPIEndpoint.as_view(http_method_names=["get"]),
        name="workspace-members",
    ),
]

View File

@@ -0,0 +1,47 @@
# URL routes for modules: CRUD, module-issue membership, and
# archive/unarchive operations.
from django.urls import path
from plane.api.views import (
    ModuleListCreateAPIEndpoint,
    ModuleDetailAPIEndpoint,
    ModuleIssueListCreateAPIEndpoint,
    ModuleIssueDetailAPIEndpoint,
    ModuleArchiveUnarchiveAPIEndpoint,
)
urlpatterns = [
    path(
        "workspaces/<str:slug>/projects/<uuid:project_id>/modules/",
        ModuleListCreateAPIEndpoint.as_view(http_method_names=["get", "post"]),
        name="modules",
    ),
    path(
        "workspaces/<str:slug>/projects/<uuid:project_id>/modules/<uuid:pk>/",
        ModuleDetailAPIEndpoint.as_view(http_method_names=["get", "patch", "delete"]),
        name="modules-detail",
    ),
    path(
        "workspaces/<str:slug>/projects/<uuid:project_id>/modules/<uuid:module_id>/module-issues/",
        ModuleIssueListCreateAPIEndpoint.as_view(http_method_names=["get", "post"]),
        name="module-issues",
    ),
    path(
        "workspaces/<str:slug>/projects/<uuid:project_id>/modules/<uuid:module_id>/module-issues/<uuid:issue_id>/",
        ModuleIssueDetailAPIEndpoint.as_view(http_method_names=["delete"]),
        name="module-issues-detail",
    ),
    # Archive lifecycle: POST archives, GET lists archived, DELETE unarchives.
    path(
        "workspaces/<str:slug>/projects/<uuid:project_id>/modules/<uuid:pk>/archive/",
        ModuleArchiveUnarchiveAPIEndpoint.as_view(http_method_names=["post"]),
        name="module-archive",
    ),
    path(
        "workspaces/<str:slug>/projects/<uuid:project_id>/archived-modules/",
        ModuleArchiveUnarchiveAPIEndpoint.as_view(http_method_names=["get"]),
        name="module-archive-list",
    ),
    path(
        "workspaces/<str:slug>/projects/<uuid:project_id>/archived-modules/<uuid:pk>/unarchive/",
        ModuleArchiveUnarchiveAPIEndpoint.as_view(http_method_names=["delete"]),
        name="module-unarchive",
    ),
]

View File

@@ -0,0 +1,25 @@
# URL routes for projects: list/create, detail, and archive/unarchive.
from django.urls import path
from plane.api.views import (
    ProjectListCreateAPIEndpoint,
    ProjectDetailAPIEndpoint,
    ProjectArchiveUnarchiveAPIEndpoint,
)
urlpatterns = [
    path(
        "workspaces/<str:slug>/projects/",
        ProjectListCreateAPIEndpoint.as_view(http_method_names=["get", "post"]),
        name="project",
    ),
    path(
        "workspaces/<str:slug>/projects/<uuid:pk>/",
        ProjectDetailAPIEndpoint.as_view(http_method_names=["get", "patch", "delete"]),
        name="project",
    ),
    # POST archives the project; DELETE unarchives it.
    path(
        "workspaces/<str:slug>/projects/<uuid:project_id>/archive/",
        ProjectArchiveUnarchiveAPIEndpoint.as_view(http_method_names=["post", "delete"]),
        name="project-archive-unarchive",
    ),
]

View File

@@ -0,0 +1,20 @@
# URL routes for the OpenAPI schema and its interactive viewers
# (Swagger UI and ReDoc), served by drf-spectacular.
from drf_spectacular.views import (
    SpectacularAPIView,
    SpectacularRedocView,
    SpectacularSwaggerView,
)
from django.urls import path
urlpatterns = [
    # Raw machine-readable schema; both UIs below reference it by url_name.
    path("schema/", SpectacularAPIView.as_view(), name="schema"),
    path(
        "schema/swagger-ui/",
        SpectacularSwaggerView.as_view(url_name="schema"),
        name="swagger-ui",
    ),
    path(
        "schema/redoc/",
        SpectacularRedocView.as_view(url_name="schema"),
        name="redoc",
    ),
]

View File

@@ -0,0 +1,19 @@
# URL routes for work item states: list/create and detail operations.
from django.urls import path
from plane.api.views import (
    StateListCreateAPIEndpoint,
    StateDetailAPIEndpoint,
)
urlpatterns = [
    path(
        "workspaces/<str:slug>/projects/<uuid:project_id>/states/",
        StateListCreateAPIEndpoint.as_view(http_method_names=["get", "post"]),
        name="states",
    ),
    path(
        "workspaces/<str:slug>/projects/<uuid:project_id>/states/<uuid:state_id>/",
        StateDetailAPIEndpoint.as_view(http_method_names=["get", "patch", "delete"]),
        name="states",
    ),
]

View File

@@ -0,0 +1,11 @@
# URL route for the current authenticated user's profile (read-only).
from django.urls import path
from plane.api.views import UserEndpoint
urlpatterns = [
    path(
        "users/me/",
        UserEndpoint.as_view(http_method_names=["get"]),
        name="users",
    ),
]

View File

@@ -0,0 +1,146 @@
# URL routes for work items (issues). Two parallel sets are registered:
# the legacy "issues/" prefix (kept for backward compatibility) and the
# current "work-items/" prefix. Both sets map to the same view classes.
from django.urls import path
from plane.api.views import (
    IssueListCreateAPIEndpoint,
    IssueDetailAPIEndpoint,
    IssueLinkListCreateAPIEndpoint,
    IssueLinkDetailAPIEndpoint,
    IssueCommentListCreateAPIEndpoint,
    IssueCommentDetailAPIEndpoint,
    IssueActivityListAPIEndpoint,
    IssueActivityDetailAPIEndpoint,
    IssueAttachmentListCreateAPIEndpoint,
    IssueAttachmentDetailAPIEndpoint,
    WorkspaceIssueAPIEndpoint,
    IssueSearchEndpoint,
)
# Deprecated url patterns
old_url_patterns = [
    path(
        "workspaces/<str:slug>/issues/search/",
        IssueSearchEndpoint.as_view(http_method_names=["get"]),
        name="issue-search",
    ),
    # Lookup by human-readable identifier, e.g. PROJ-123.
    path(
        "workspaces/<str:slug>/issues/<str:project_identifier>-<str:issue_identifier>/",
        WorkspaceIssueAPIEndpoint.as_view(http_method_names=["get"]),
        name="issue-by-identifier",
    ),
    path(
        "workspaces/<str:slug>/projects/<uuid:project_id>/issues/",
        IssueListCreateAPIEndpoint.as_view(http_method_names=["get", "post"]),
        name="issue",
    ),
    path(
        "workspaces/<str:slug>/projects/<uuid:project_id>/issues/<uuid:pk>/",
        IssueDetailAPIEndpoint.as_view(http_method_names=["get", "patch", "delete"]),
        name="issue",
    ),
    path(
        "workspaces/<str:slug>/projects/<uuid:project_id>/issues/<uuid:issue_id>/links/",
        IssueLinkListCreateAPIEndpoint.as_view(http_method_names=["get", "post"]),
        name="link",
    ),
    path(
        "workspaces/<str:slug>/projects/<uuid:project_id>/issues/<uuid:issue_id>/links/<uuid:pk>/",
        IssueLinkDetailAPIEndpoint.as_view(http_method_names=["get", "patch", "delete"]),
        name="link",
    ),
    path(
        "workspaces/<str:slug>/projects/<uuid:project_id>/issues/<uuid:issue_id>/comments/",
        IssueCommentListCreateAPIEndpoint.as_view(http_method_names=["get", "post"]),
        name="comment",
    ),
    path(
        "workspaces/<str:slug>/projects/<uuid:project_id>/issues/<uuid:issue_id>/comments/<uuid:pk>/",
        IssueCommentDetailAPIEndpoint.as_view(http_method_names=["get", "patch", "delete"]),
        name="comment",
    ),
    path(
        "workspaces/<str:slug>/projects/<uuid:project_id>/issues/<uuid:issue_id>/activities/",
        IssueActivityListAPIEndpoint.as_view(http_method_names=["get"]),
        name="activity",
    ),
    path(
        "workspaces/<str:slug>/projects/<uuid:project_id>/issues/<uuid:issue_id>/activities/<uuid:pk>/",
        IssueActivityDetailAPIEndpoint.as_view(http_method_names=["get"]),
        name="activity",
    ),
    path(
        "workspaces/<str:slug>/projects/<uuid:project_id>/issues/<uuid:issue_id>/issue-attachments/",
        IssueAttachmentListCreateAPIEndpoint.as_view(http_method_names=["get", "post"]),
        name="attachment",
    ),
    path(
        "workspaces/<str:slug>/projects/<uuid:project_id>/issues/<uuid:issue_id>/issue-attachments/<uuid:pk>/",
        IssueAttachmentDetailAPIEndpoint.as_view(http_method_names=["get", "patch", "delete"]),
        name="issue-attachment",
    ),
]
# New url patterns with work-items as the prefix
new_url_patterns = [
    path(
        "workspaces/<str:slug>/work-items/search/",
        IssueSearchEndpoint.as_view(http_method_names=["get"]),
        name="work-item-search",
    ),
    # Lookup by human-readable identifier, e.g. PROJ-123.
    path(
        "workspaces/<str:slug>/work-items/<str:project_identifier>-<str:issue_identifier>/",
        WorkspaceIssueAPIEndpoint.as_view(http_method_names=["get"]),
        name="work-item-by-identifier",
    ),
    path(
        "workspaces/<str:slug>/projects/<uuid:project_id>/work-items/",
        IssueListCreateAPIEndpoint.as_view(http_method_names=["get", "post"]),
        name="work-item-list",
    ),
    path(
        "workspaces/<str:slug>/projects/<uuid:project_id>/work-items/<uuid:pk>/",
        IssueDetailAPIEndpoint.as_view(http_method_names=["get", "patch", "delete"]),
        name="work-item-detail",
    ),
    path(
        "workspaces/<str:slug>/projects/<uuid:project_id>/work-items/<uuid:issue_id>/links/",
        IssueLinkListCreateAPIEndpoint.as_view(http_method_names=["get", "post"]),
        name="work-item-link-list",
    ),
    path(
        "workspaces/<str:slug>/projects/<uuid:project_id>/work-items/<uuid:issue_id>/links/<uuid:pk>/",
        IssueLinkDetailAPIEndpoint.as_view(http_method_names=["get", "patch", "delete"]),
        name="work-item-link-detail",
    ),
    path(
        "workspaces/<str:slug>/projects/<uuid:project_id>/work-items/<uuid:issue_id>/comments/",
        IssueCommentListCreateAPIEndpoint.as_view(http_method_names=["get", "post"]),
        name="work-item-comment-list",
    ),
    path(
        "workspaces/<str:slug>/projects/<uuid:project_id>/work-items/<uuid:issue_id>/comments/<uuid:pk>/",
        IssueCommentDetailAPIEndpoint.as_view(http_method_names=["get", "patch", "delete"]),
        name="work-item-comment-detail",
    ),
    path(
        "workspaces/<str:slug>/projects/<uuid:project_id>/work-items/<uuid:issue_id>/activities/",
        IssueActivityListAPIEndpoint.as_view(http_method_names=["get"]),
        name="work-item-activity-list",
    ),
    path(
        "workspaces/<str:slug>/projects/<uuid:project_id>/work-items/<uuid:issue_id>/activities/<uuid:pk>/",
        IssueActivityDetailAPIEndpoint.as_view(http_method_names=["get"]),
        name="work-item-activity-detail",
    ),
    path(
        "workspaces/<str:slug>/projects/<uuid:project_id>/work-items/<uuid:issue_id>/attachments/",
        IssueAttachmentListCreateAPIEndpoint.as_view(http_method_names=["get", "post"]),
        name="work-item-attachment-list",
    ),
    path(
        "workspaces/<str:slug>/projects/<uuid:project_id>/work-items/<uuid:issue_id>/attachments/<uuid:pk>/",
        IssueAttachmentDetailAPIEndpoint.as_view(http_method_names=["get", "patch", "delete"]),
        name="work-item-attachment-detail",
    ),
]
# Legacy routes first; both remain resolvable since the prefixes differ.
urlpatterns = old_url_patterns + new_url_patterns

View File

@@ -0,0 +1,57 @@
from .project import (
ProjectListCreateAPIEndpoint,
ProjectDetailAPIEndpoint,
ProjectArchiveUnarchiveAPIEndpoint,
)
from .state import (
StateListCreateAPIEndpoint,
StateDetailAPIEndpoint,
)
from .issue import (
WorkspaceIssueAPIEndpoint,
IssueListCreateAPIEndpoint,
IssueDetailAPIEndpoint,
LabelListCreateAPIEndpoint,
LabelDetailAPIEndpoint,
IssueLinkListCreateAPIEndpoint,
IssueLinkDetailAPIEndpoint,
IssueCommentListCreateAPIEndpoint,
IssueCommentDetailAPIEndpoint,
IssueActivityListAPIEndpoint,
IssueActivityDetailAPIEndpoint,
IssueAttachmentListCreateAPIEndpoint,
IssueAttachmentDetailAPIEndpoint,
IssueSearchEndpoint,
)
from .cycle import (
CycleListCreateAPIEndpoint,
CycleDetailAPIEndpoint,
CycleIssueListCreateAPIEndpoint,
CycleIssueDetailAPIEndpoint,
TransferCycleIssueAPIEndpoint,
CycleArchiveUnarchiveAPIEndpoint,
)
from .module import (
ModuleListCreateAPIEndpoint,
ModuleDetailAPIEndpoint,
ModuleIssueListCreateAPIEndpoint,
ModuleIssueDetailAPIEndpoint,
ModuleArchiveUnarchiveAPIEndpoint,
)
from .member import ProjectMemberListCreateAPIEndpoint, ProjectMemberDetailAPIEndpoint, WorkspaceMemberAPIEndpoint
from .intake import (
IntakeIssueListCreateAPIEndpoint,
IntakeIssueDetailAPIEndpoint,
)
from .asset import UserAssetEndpoint, UserServerAssetEndpoint, GenericAssetEndpoint
from .user import UserEndpoint
from .invite import WorkspaceInvitationsViewset

View File

@@ -0,0 +1,613 @@
# Python Imports
import uuid
# Django Imports
from django.utils import timezone
from django.conf import settings
# Third party imports
from rest_framework import status
from rest_framework.response import Response
from drf_spectacular.utils import OpenApiExample, OpenApiRequest
# Module Imports
from plane.bgtasks.storage_metadata_task import get_asset_object_metadata
from plane.settings.storage import S3Storage
from plane.db.models import FileAsset, User, Workspace
from plane.api.views.base import BaseAPIView
from plane.api.serializers import (
UserAssetUploadSerializer,
AssetUpdateSerializer,
GenericAssetUploadSerializer,
GenericAssetUpdateSerializer,
)
from plane.utils.openapi import (
ASSET_ID_PARAMETER,
WORKSPACE_SLUG_PARAMETER,
PRESIGNED_URL_SUCCESS_RESPONSE,
GENERIC_ASSET_UPLOAD_SUCCESS_RESPONSE,
GENERIC_ASSET_VALIDATION_ERROR_RESPONSE,
ASSET_CONFLICT_RESPONSE,
ASSET_DOWNLOAD_SUCCESS_RESPONSE,
ASSET_DOWNLOAD_ERROR_RESPONSE,
ASSET_UPDATED_RESPONSE,
ASSET_DELETED_RESPONSE,
VALIDATION_ERROR_RESPONSE,
ASSET_NOT_FOUND_RESPONSE,
NOT_FOUND_RESPONSE,
UNAUTHORIZED_RESPONSE,
asset_docs,
)
from plane.utils.exception_logger import log_exception
class UserAssetEndpoint(BaseAPIView):
    """Issue presigned S3 upload URLs and manage the lifecycle of user
    profile assets (avatar and cover image)."""

    def asset_delete(self, asset_id):
        # Soft-delete an asset by id (no-op when the asset does not exist).
        # NOTE(review): not referenced within this class — confirm external
        # callers before removing.
        asset = FileAsset.objects.filter(id=asset_id).first()
        if asset is None:
            return
        asset.is_deleted = True
        asset.deleted_at = timezone.now()
        asset.save(update_fields=["is_deleted", "deleted_at"])
        return

    def entity_asset_delete(self, entity_type, asset, request):
        # Clear the user's reference to the asset being removed so the
        # profile no longer points at a deleted file.
        # User Avatar
        if entity_type == FileAsset.EntityTypeContext.USER_AVATAR:
            user = User.objects.get(id=asset.user_id)
            user.avatar_asset_id = None
            user.save()
            return
        # User Cover
        if entity_type == FileAsset.EntityTypeContext.USER_COVER:
            user = User.objects.get(id=asset.user_id)
            user.cover_image_asset_id = None
            user.save()
            return
        return

    @asset_docs(
        operation_id="create_user_asset_upload",
        summary="Generate presigned URL for user asset upload",
        description="Generate presigned URL for user asset upload",
        request=OpenApiRequest(
            request=UserAssetUploadSerializer,
            examples=[
                OpenApiExample(
                    "User Avatar Upload",
                    value={
                        "name": "profile.jpg",
                        "type": "image/jpeg",
                        "size": 1024000,
                        "entity_type": "USER_AVATAR",
                    },
                    description="Example request for uploading a user avatar",
                ),
                OpenApiExample(
                    "User Cover Upload",
                    value={
                        "name": "cover.jpg",
                        "type": "image/jpeg",
                        "size": 1024000,
                        "entity_type": "USER_COVER",
                    },
                    description="Example request for uploading a user cover",
                ),
            ],
        ),
        responses={
            200: PRESIGNED_URL_SUCCESS_RESPONSE,
            400: VALIDATION_ERROR_RESPONSE,
            401: UNAUTHORIZED_RESPONSE,
        },
    )
    def post(self, request):
        """Generate presigned URL for user asset upload.
        Create a presigned URL for uploading user profile assets (avatar or cover image).
        This endpoint generates the necessary credentials for direct S3 upload.
        """
        # get the asset key
        name = request.data.get("name")
        type = request.data.get("type", "image/jpeg")
        # Fix: a non-numeric "size" previously raised ValueError (HTTP 500);
        # reject it with the documented 400 instead.
        try:
            size = int(request.data.get("size", settings.FILE_SIZE_LIMIT))
        except (TypeError, ValueError):
            return Response(
                {"error": "Invalid size.", "status": False},
                status=status.HTTP_400_BAD_REQUEST,
            )
        entity_type = request.data.get("entity_type", False)
        # Never trust the client-declared size beyond the configured cap
        size_limit = min(size, settings.FILE_SIZE_LIMIT)
        # Check if the entity type is allowed
        if not entity_type or entity_type not in ["USER_AVATAR", "USER_COVER"]:
            return Response(
                {"error": "Invalid entity type.", "status": False},
                status=status.HTTP_400_BAD_REQUEST,
            )
        # Check if the file type is allowed
        allowed_types = [
            "image/jpeg",
            "image/png",
            "image/webp",
            "image/jpg",
            "image/gif",
        ]
        if type not in allowed_types:
            # Fix: the message previously claimed only JPEG/PNG were allowed,
            # contradicting the allow-list above.
            return Response(
                {
                    "error": "Invalid file type. Only JPEG, PNG, WebP and GIF files are allowed.",
                    "status": False,
                },
                status=status.HTTP_400_BAD_REQUEST,
            )
        # Random hex prefix avoids key collisions between same-named files
        asset_key = f"{uuid.uuid4().hex}-{name}"
        # Create a File Asset
        asset = FileAsset.objects.create(
            attributes={"name": name, "type": type, "size": size_limit},
            asset=asset_key,
            size=size_limit,
            user=request.user,
            created_by=request.user,
            entity_type=entity_type,
        )
        # Get the presigned URL
        storage = S3Storage(request=request)
        # Generate a presigned URL to share an S3 object
        presigned_url = storage.generate_presigned_post(object_name=asset_key, file_type=type, file_size=size_limit)
        # Return the presigned URL
        return Response(
            {
                "upload_data": presigned_url,
                "asset_id": str(asset.id),
                "asset_url": asset.asset_url,
            },
            status=status.HTTP_200_OK,
        )

    @asset_docs(
        operation_id="update_user_asset",
        summary="Mark user asset as uploaded",
        description="Mark user asset as uploaded",
        parameters=[ASSET_ID_PARAMETER],
        request=OpenApiRequest(
            request=AssetUpdateSerializer,
            examples=[
                OpenApiExample(
                    "Update Asset Attributes",
                    value={
                        "attributes": {
                            "name": "updated_profile.jpg",
                            "type": "image/jpeg",
                            "size": 1024000,
                        },
                        "entity_type": "USER_AVATAR",
                    },
                    description="Example request for updating asset attributes",
                ),
            ],
        ),
        responses={
            204: ASSET_UPDATED_RESPONSE,
            404: NOT_FOUND_RESPONSE,
        },
    )
    def patch(self, request, asset_id):
        """Update user asset after upload completion.
        Update the asset status and attributes after the file has been uploaded to S3.
        This endpoint should be called after completing the S3 upload to mark the asset as uploaded.
        """
        # Fix: an unknown asset id previously raised FileAsset.DoesNotExist
        # (HTTP 500); return the documented 404 instead.
        try:
            asset = FileAsset.objects.get(id=asset_id, user_id=request.user.id)
        except FileAsset.DoesNotExist:
            return Response({"error": "Asset not found"}, status=status.HTTP_404_NOT_FOUND)
        asset.is_uploaded = True
        # Fetch storage metadata asynchronously if it is not populated yet
        if not asset.storage_metadata:
            get_asset_object_metadata.delay(asset_id=str(asset_id))
        # update the attributes
        asset.attributes = request.data.get("attributes", asset.attributes)
        # save the asset
        asset.save(update_fields=["is_uploaded", "attributes"])
        return Response(status=status.HTTP_204_NO_CONTENT)

    @asset_docs(
        operation_id="delete_user_asset",
        summary="Delete user asset",
        parameters=[ASSET_ID_PARAMETER],
        responses={
            204: ASSET_DELETED_RESPONSE,
            404: NOT_FOUND_RESPONSE,
        },
    )
    def delete(self, request, asset_id):
        """Delete user asset.
        Delete a user profile asset (avatar or cover image) and remove its reference from the user profile.
        This performs a soft delete by marking the asset as deleted and updating the user's profile.
        """
        # Fix: an unknown asset id previously raised FileAsset.DoesNotExist
        # (HTTP 500); return the documented 404 instead.
        try:
            asset = FileAsset.objects.get(id=asset_id, user_id=request.user.id)
        except FileAsset.DoesNotExist:
            return Response({"error": "Asset not found"}, status=status.HTTP_404_NOT_FOUND)
        asset.is_deleted = True
        asset.deleted_at = timezone.now()
        # Detach the asset from the user's profile before soft-deleting it
        self.entity_asset_delete(entity_type=asset.entity_type, asset=asset, request=request)
        asset.save(update_fields=["is_deleted", "deleted_at"])
        return Response(status=status.HTTP_204_NO_CONTENT)
class UserServerAssetEndpoint(BaseAPIView):
    """Server-credential variant of the user profile asset endpoint: issues
    presigned S3 upload URLs and manages avatar/cover asset lifecycle."""

    def asset_delete(self, asset_id):
        # Soft-delete an asset by id (no-op when the asset does not exist).
        # NOTE(review): not referenced within this class — confirm external
        # callers before removing.
        asset = FileAsset.objects.filter(id=asset_id).first()
        if asset is None:
            return
        asset.is_deleted = True
        asset.deleted_at = timezone.now()
        asset.save(update_fields=["is_deleted", "deleted_at"])
        return

    def entity_asset_delete(self, entity_type, asset, request):
        # Clear the user's reference to the asset being removed so the
        # profile no longer points at a deleted file.
        # User Avatar
        if entity_type == FileAsset.EntityTypeContext.USER_AVATAR:
            user = User.objects.get(id=asset.user_id)
            user.avatar_asset_id = None
            user.save()
            return
        # User Cover
        if entity_type == FileAsset.EntityTypeContext.USER_COVER:
            user = User.objects.get(id=asset.user_id)
            user.cover_image_asset_id = None
            user.save()
            return
        return

    @asset_docs(
        operation_id="create_user_server_asset_upload",
        summary="Generate presigned URL for user server asset upload",
        request=UserAssetUploadSerializer,
        responses={
            200: PRESIGNED_URL_SUCCESS_RESPONSE,
            400: VALIDATION_ERROR_RESPONSE,
        },
    )
    def post(self, request):
        """Generate presigned URL for user server asset upload.
        Create a presigned URL for uploading user profile assets
        (avatar or cover image) using server credentials. This endpoint generates the
        necessary credentials for direct S3 upload with server-side authentication.
        """
        # get the asset key
        name = request.data.get("name")
        type = request.data.get("type", "image/jpeg")
        # Fix: a non-numeric "size" previously raised ValueError (HTTP 500);
        # reject it with the documented 400 instead.
        try:
            size = int(request.data.get("size", settings.FILE_SIZE_LIMIT))
        except (TypeError, ValueError):
            return Response(
                {"error": "Invalid size.", "status": False},
                status=status.HTTP_400_BAD_REQUEST,
            )
        entity_type = request.data.get("entity_type", False)
        # Never trust the client-declared size beyond the configured cap
        size_limit = min(size, settings.FILE_SIZE_LIMIT)
        # Check if the entity type is allowed
        if not entity_type or entity_type not in ["USER_AVATAR", "USER_COVER"]:
            return Response(
                {"error": "Invalid entity type.", "status": False},
                status=status.HTTP_400_BAD_REQUEST,
            )
        # Check if the file type is allowed
        allowed_types = [
            "image/jpeg",
            "image/png",
            "image/webp",
            "image/jpg",
            "image/gif",
        ]
        if type not in allowed_types:
            # Fix: the message previously claimed only JPEG/PNG were allowed,
            # contradicting the allow-list above.
            return Response(
                {
                    "error": "Invalid file type. Only JPEG, PNG, WebP and GIF files are allowed.",
                    "status": False,
                },
                status=status.HTTP_400_BAD_REQUEST,
            )
        # Random hex prefix avoids key collisions between same-named files
        asset_key = f"{uuid.uuid4().hex}-{name}"
        # Create a File Asset
        asset = FileAsset.objects.create(
            attributes={"name": name, "type": type, "size": size_limit},
            asset=asset_key,
            size=size_limit,
            user=request.user,
            created_by=request.user,
            entity_type=entity_type,
        )
        # Server-side credentials are used for the presigned POST here
        storage = S3Storage(request=request, is_server=True)
        # Generate a presigned URL to share an S3 object
        presigned_url = storage.generate_presigned_post(object_name=asset_key, file_type=type, file_size=size_limit)
        # Return the presigned URL
        return Response(
            {
                "upload_data": presigned_url,
                "asset_id": str(asset.id),
                "asset_url": asset.asset_url,
            },
            status=status.HTTP_200_OK,
        )

    @asset_docs(
        operation_id="update_user_server_asset",
        summary="Mark user server asset as uploaded",
        parameters=[ASSET_ID_PARAMETER],
        request=AssetUpdateSerializer,
        responses={
            204: ASSET_UPDATED_RESPONSE,
            404: NOT_FOUND_RESPONSE,
        },
    )
    def patch(self, request, asset_id):
        """Update user server asset after upload completion.
        Update the asset status and attributes after the file has been uploaded to S3 using server credentials.
        This endpoint should be called after completing the S3 upload to mark the asset as uploaded.
        """
        # Fix: an unknown asset id previously raised FileAsset.DoesNotExist
        # (HTTP 500); return the documented 404 instead.
        try:
            asset = FileAsset.objects.get(id=asset_id, user_id=request.user.id)
        except FileAsset.DoesNotExist:
            return Response({"error": "Asset not found"}, status=status.HTTP_404_NOT_FOUND)
        asset.is_uploaded = True
        # Fetch storage metadata asynchronously if it is not populated yet
        if not asset.storage_metadata:
            get_asset_object_metadata.delay(asset_id=str(asset_id))
        # update the attributes
        asset.attributes = request.data.get("attributes", asset.attributes)
        # save the asset
        asset.save(update_fields=["is_uploaded", "attributes"])
        return Response(status=status.HTTP_204_NO_CONTENT)

    @asset_docs(
        operation_id="delete_user_server_asset",
        summary="Delete user server asset",
        parameters=[ASSET_ID_PARAMETER],
        responses={
            204: ASSET_DELETED_RESPONSE,
            404: NOT_FOUND_RESPONSE,
        },
    )
    def delete(self, request, asset_id):
        """Delete user server asset.
        Delete a user profile asset (avatar or cover image) using server credentials and
        remove its reference from the user profile. This performs a soft delete by marking the
        asset as deleted and updating the user's profile.
        """
        # Fix: an unknown asset id previously raised FileAsset.DoesNotExist
        # (HTTP 500); return the documented 404 instead.
        try:
            asset = FileAsset.objects.get(id=asset_id, user_id=request.user.id)
        except FileAsset.DoesNotExist:
            return Response({"error": "Asset not found"}, status=status.HTTP_404_NOT_FOUND)
        asset.is_deleted = True
        asset.deleted_at = timezone.now()
        # Detach the asset from the user's profile before soft-deleting it
        self.entity_asset_delete(entity_type=asset.entity_type, asset=asset, request=request)
        asset.save(update_fields=["is_deleted", "deleted_at"])
        return Response(status=status.HTTP_204_NO_CONTENT)
class GenericAssetEndpoint(BaseAPIView):
"""This endpoint is used to upload generic assets that can be later bound to entities."""
use_read_replica = True
@asset_docs(
operation_id="get_generic_asset",
summary="Get presigned URL for asset download",
description="Get presigned URL for asset download",
parameters=[WORKSPACE_SLUG_PARAMETER],
responses={
200: ASSET_DOWNLOAD_SUCCESS_RESPONSE,
400: ASSET_DOWNLOAD_ERROR_RESPONSE,
404: ASSET_NOT_FOUND_RESPONSE,
},
)
def get(self, request, slug, asset_id):
"""Get presigned URL for asset download.
Generate a presigned URL for downloading a generic asset.
The asset must be uploaded and associated with the specified workspace.
"""
try:
# Get the workspace
workspace = Workspace.objects.get(slug=slug)
# Get the asset
asset = FileAsset.objects.get(id=asset_id, workspace_id=workspace.id, is_deleted=False)
# Check if the asset exists and is uploaded
if not asset.is_uploaded:
return Response(
{"error": "Asset not yet uploaded"},
status=status.HTTP_400_BAD_REQUEST,
)
# Generate presigned URL for GET
storage = S3Storage(request=request, is_server=True)
presigned_url = storage.generate_presigned_url(
object_name=asset.asset.name, filename=asset.attributes.get("name")
)
return Response(
{
"asset_id": str(asset.id),
"asset_url": presigned_url,
"asset_name": asset.attributes.get("name", ""),
"asset_type": asset.attributes.get("type", ""),
},
status=status.HTTP_200_OK,
)
except Workspace.DoesNotExist:
return Response({"error": "Workspace not found"}, status=status.HTTP_404_NOT_FOUND)
except FileAsset.DoesNotExist:
return Response({"error": "Asset not found"}, status=status.HTTP_404_NOT_FOUND)
except Exception as e:
log_exception(e)
return Response(
{"error": "Internal server error"},
status=status.HTTP_500_INTERNAL_SERVER_ERROR,
)
@asset_docs(
operation_id="create_generic_asset_upload",
summary="Generate presigned URL for generic asset upload",
description="Generate presigned URL for generic asset upload",
parameters=[WORKSPACE_SLUG_PARAMETER],
request=OpenApiRequest(
request=GenericAssetUploadSerializer,
examples=[
OpenApiExample(
"GenericAssetUploadSerializer",
value={
"name": "image.jpg",
"type": "image/jpeg",
"size": 1024000,
"project_id": "123e4567-e89b-12d3-a456-426614174000",
"external_id": "1234567890",
"external_source": "github",
},
description="Example request for uploading a generic asset",
),
],
),
responses={
200: GENERIC_ASSET_UPLOAD_SUCCESS_RESPONSE,
400: GENERIC_ASSET_VALIDATION_ERROR_RESPONSE,
404: NOT_FOUND_RESPONSE,
409: ASSET_CONFLICT_RESPONSE,
},
)
def post(self, request, slug):
    """Generate a presigned URL for a generic asset upload.

    Validates the requested file name, size and MIME type, deduplicates
    against any existing asset with the same external id/source pair,
    creates the FileAsset record and returns S3 presigned-POST data the
    client uses to complete the upload.
    """
    name = request.data.get("name")
    # Renamed from `type` to avoid shadowing the builtin
    file_type = request.data.get("type")
    project_id = request.data.get("project_id")
    external_id = request.data.get("external_id")
    external_source = request.data.get("external_source")
    # Parse the size defensively: a malformed value previously raised an
    # unhandled ValueError (500); surface it as a 400 instead.
    try:
        size = int(request.data.get("size", settings.FILE_SIZE_LIMIT))
    except (TypeError, ValueError):
        return Response(
            {"error": "Invalid size.", "status": False},
            status=status.HTTP_400_BAD_REQUEST,
        )
    # Check if the request is valid
    if not name or not size:
        return Response(
            {"error": "Name and size are required fields.", "status": False},
            status=status.HTTP_400_BAD_REQUEST,
        )
    # Cap the stored size at the configured limit
    size_limit = min(size, settings.FILE_SIZE_LIMIT)
    # Check if the file type is allowed
    if not file_type or file_type not in settings.ATTACHMENT_MIME_TYPES:
        return Response(
            {"error": "Invalid file type.", "status": False},
            status=status.HTTP_400_BAD_REQUEST,
        )
    # Get the workspace (DoesNotExist bubbles to handle_exception -> 404)
    workspace = Workspace.objects.get(slug=slug)
    # Random hex prefix prevents key collisions for identical filenames
    asset_key = f"{workspace.id}/{uuid.uuid4().hex}-{name}"
    # Check for existing asset with same external details if provided
    if external_id and external_source:
        existing_asset = FileAsset.objects.filter(
            workspace__slug=slug,
            external_source=external_source,
            external_id=external_id,
            is_deleted=False,
        ).first()
        if existing_asset:
            return Response(
                {
                    "message": "Asset with same external id and source already exists",
                    "asset_id": str(existing_asset.id),
                    "asset_url": existing_asset.asset_url,
                },
                status=status.HTTP_409_CONFLICT,
            )
    # Create a File Asset
    asset = FileAsset.objects.create(
        attributes={"name": name, "type": file_type, "size": size_limit},
        asset=asset_key,
        size=size_limit,
        workspace_id=workspace.id,
        project_id=project_id,
        created_by=request.user,
        external_id=external_id,
        external_source=external_source,
        entity_type=FileAsset.EntityTypeContext.ISSUE_ATTACHMENT,  # Using ISSUE_ATTACHMENT since we'll bind it to issues # noqa: E501
    )
    # Get the presigned URL
    storage = S3Storage(request=request, is_server=True)
    presigned_url = storage.generate_presigned_post(object_name=asset_key, file_type=file_type, file_size=size_limit)
    return Response(
        {
            "upload_data": presigned_url,
            "asset_id": str(asset.id),
            "asset_url": asset.asset_url,
        },
        status=status.HTTP_200_OK,
    )
@asset_docs(
operation_id="update_generic_asset",
summary="Update generic asset after upload completion",
description="Update generic asset after upload completion",
parameters=[WORKSPACE_SLUG_PARAMETER, ASSET_ID_PARAMETER],
request=OpenApiRequest(
request=GenericAssetUpdateSerializer,
examples=[
OpenApiExample(
"GenericAssetUpdateSerializer",
value={"is_uploaded": True},
description="Example request for updating a generic asset",
)
],
),
responses={
204: ASSET_UPDATED_RESPONSE,
404: ASSET_NOT_FOUND_RESPONSE,
},
)
def patch(self, request, slug, asset_id):
    """Mark a generic asset as uploaded once the S3 transfer completes.

    Should be called after the client finishes the presigned upload; it
    updates the ``is_uploaded`` flag and schedules storage-metadata
    extraction when that metadata is still missing.
    """
    try:
        file_asset = FileAsset.objects.get(id=asset_id, workspace__slug=slug, is_deleted=False)
    except FileAsset.DoesNotExist:
        return Response({"error": "Asset not found"}, status=status.HTTP_404_NOT_FOUND)
    # Preserve the current flag when the payload does not include one
    file_asset.is_uploaded = request.data.get("is_uploaded", file_asset.is_uploaded)
    # Queue a background fetch of the S3 object metadata if we have none yet
    if not file_asset.storage_metadata:
        get_asset_object_metadata.delay(asset_id=str(asset_id))
    file_asset.save(update_fields=["is_uploaded"])
    return Response(status=status.HTTP_204_NO_CONTENT)

View File

@@ -0,0 +1,277 @@
# Python imports
import zoneinfo
import logging
# Django imports
from django.conf import settings
from django.core.exceptions import ObjectDoesNotExist, ValidationError
from django.db import IntegrityError
from django.urls import resolve
from django.utils import timezone
# Third party imports
from rest_framework import status
from rest_framework.permissions import IsAuthenticated
from rest_framework.response import Response
from django_filters.rest_framework import DjangoFilterBackend
from rest_framework.filters import SearchFilter
from rest_framework.viewsets import ModelViewSet
from rest_framework.exceptions import APIException
from rest_framework.generics import GenericAPIView
# Module imports
from plane.db.models.api import APIToken
from plane.api.middleware.api_authentication import APIKeyAuthentication
from plane.api.rate_limit import ApiKeyRateThrottle, ServiceTokenRateThrottle
from plane.utils.exception_logger import log_exception
from plane.utils.paginator import BasePaginator
from plane.utils.core.mixins import ReadReplicaControlMixin
logger = logging.getLogger("plane.api")
class TimezoneMixin:
    """Activate the requesting user's timezone for the request lifecycle.

    Anonymous requests fall back to the deactivated (default) timezone.
    """

    def initial(self, request, *args, **kwargs):
        super().initial(request, *args, **kwargs)
        user = request.user
        if not user.is_authenticated:
            timezone.deactivate()
            return
        timezone.activate(zoneinfo.ZoneInfo(user.user_timezone))
class BaseAPIView(TimezoneMixin, GenericAPIView, ReadReplicaControlMixin, BasePaginator):
    """Base class for public-API function-style views.

    Layers API-key authentication, per-token rate limiting, user-timezone
    activation, read-replica routing and uniform JSON error handling on
    top of DRF's ``GenericAPIView``.
    """

    authentication_classes = [APIKeyAuthentication]
    permission_classes = [IsAuthenticated]

    # When True, reads are served from the replica database
    # (see ReadReplicaControlMixin).
    use_read_replica = False

    def filter_queryset(self, queryset):
        """Run the queryset through every configured filter backend."""
        for backend in list(self.filter_backends):
            queryset = backend().filter_queryset(self.request, queryset, self)
        return queryset

    def get_throttles(self):
        """Pick the throttle bucket for this request.

        Service tokens (``is_service=True``) get the service-token
        throttle; every other request uses the standard API-key throttle.
        """
        throttle_classes = []
        api_key = self.request.headers.get("X-Api-Key")
        if api_key:
            service_token = APIToken.objects.filter(token=api_key, is_service=True).first()
            if service_token:
                throttle_classes.append(ServiceTokenRateThrottle())
                return throttle_classes
        throttle_classes.append(ApiKeyRateThrottle())
        return throttle_classes

    def handle_exception(self, exc):
        """
        Handle any exception that occurs, by returning an appropriate response,
        or re-raising the error.
        """
        try:
            response = super().handle_exception(exc)
            return response
        except Exception as e:
            if isinstance(e, IntegrityError):
                return Response(
                    {"error": "The payload is not valid"},
                    status=status.HTTP_400_BAD_REQUEST,
                )
            if isinstance(e, ValidationError):
                return Response(
                    {"error": "Please provide valid detail"},
                    status=status.HTTP_400_BAD_REQUEST,
                )
            if isinstance(e, ObjectDoesNotExist):
                return Response(
                    {"error": "The requested resource does not exist."},
                    status=status.HTTP_404_NOT_FOUND,
                )
            if isinstance(e, KeyError):
                return Response(
                    {"error": "The required key does not exist."},
                    status=status.HTTP_400_BAD_REQUEST,
                )
            # Anything unclassified is logged and reported as a 500
            log_exception(e)
            return Response(
                {"error": "Something went wrong please try again later"},
                status=status.HTTP_500_INTERNAL_SERVER_ERROR,
            )

    def dispatch(self, request, *args, **kwargs):
        """Dispatch the request, logging query counts in DEBUG mode."""
        try:
            response = super().dispatch(request, *args, **kwargs)
            if settings.DEBUG:
                from django.db import connection

                print(f"{request.method} - {request.get_full_path()} of Queries: {len(connection.queries)}")
            return response
        except Exception as exc:
            response = self.handle_exception(exc)
            # BUGFIX: previously returned `exc` (the exception object),
            # which Django cannot serialize into an HTTP response; return
            # the handled Response instead (matches BaseViewSet.dispatch).
            return response

    def finalize_response(self, request, response, *args, **kwargs):
        # Call super to get the default response
        response = super().finalize_response(request, response, *args, **kwargs)
        # Propagate rate-limit counters set by the throttles into headers
        ratelimit_remaining = request.META.get("X-RateLimit-Remaining")
        if ratelimit_remaining is not None:
            response["X-RateLimit-Remaining"] = ratelimit_remaining
        ratelimit_reset = request.META.get("X-RateLimit-Reset")
        if ratelimit_reset is not None:
            response["X-RateLimit-Reset"] = ratelimit_reset
        return response

    @property
    def workspace_slug(self):
        # Workspace slug from the URL kwargs, if present
        return self.kwargs.get("slug", None)

    @property
    def project_id(self):
        # Project id from the URL kwargs; for the "project" route the
        # primary key kwarg doubles as the project id
        project_id = self.kwargs.get("project_id", None)
        if project_id:
            return project_id
        if resolve(self.request.path_info).url_name == "project":
            return self.kwargs.get("pk", None)

    @property
    def fields(self):
        # Comma-separated ?fields= query param -> list, or None when absent
        fields = [field for field in self.request.GET.get("fields", "").split(",") if field]
        return fields if fields else None

    @property
    def expand(self):
        # Comma-separated ?expand= query param -> list, or None when absent
        expand = [expand for expand in self.request.GET.get("expand", "").split(",") if expand]
        return expand if expand else None
class BaseViewSet(TimezoneMixin, ReadReplicaControlMixin, ModelViewSet, BasePaginator):
    """Base ModelViewSet for the public API.

    Combines API-key authentication, user-timezone activation,
    read-replica routing and pagination helpers, and normalizes common
    exceptions into stable JSON error responses.
    """

    # Subclasses set this to the Django model the viewset serves.
    model = None
    authentication_classes = [APIKeyAuthentication]
    permission_classes = [
        IsAuthenticated,
    ]
    # When True, reads are served from the replica database
    # (see ReadReplicaControlMixin).
    use_read_replica = False

    def get_queryset(self):
        # Default queryset is every row of `model`; a misconfigured
        # subclass (model=None) surfaces as an APIException instead of a
        # bare AttributeError.
        try:
            return self.model.objects.all()
        except Exception as e:
            log_exception(e)
            # NOTE(review): APIException's second positional argument is a
            # `code` string, not an HTTP status, so this still responds
            # with APIException's default 500 — confirm whether a 400 was
            # intended here.
            raise APIException("Please check the view", status.HTTP_400_BAD_REQUEST)

    def handle_exception(self, exc):
        """
        Handle any exception that occurs, by returning an appropriate response,
        or re-raising the error.
        """
        try:
            response = super().handle_exception(exc)
            return response
        except Exception as e:
            # Constraint violations (duplicate keys, broken FKs) -> 400
            if isinstance(e, IntegrityError):
                log_exception(e)
                return Response(
                    {"error": "The payload is not valid"},
                    status=status.HTTP_400_BAD_REQUEST,
                )
            if isinstance(e, ValidationError):
                logger.warning(
                    "Validation Error",
                    extra={
                        "error_code": "VALIDATION_ERROR",
                        "error_message": str(e),
                    },
                )
                return Response(
                    {"error": "Please provide valid detail"},
                    status=status.HTTP_400_BAD_REQUEST,
                )
            if isinstance(e, ObjectDoesNotExist):
                logger.warning(
                    "Object Does Not Exist",
                    extra={
                        "error_code": "OBJECT_DOES_NOT_EXIST",
                        "error_message": str(e),
                    },
                )
                return Response(
                    {"error": "The required object does not exist."},
                    status=status.HTTP_404_NOT_FOUND,
                )
            if isinstance(e, KeyError):
                logger.error(
                    "Key Error",
                    extra={
                        "error_code": "KEY_ERROR",
                        "error_message": str(e),
                    },
                )
                return Response(
                    {"error": "The required key does not exist."},
                    status=status.HTTP_400_BAD_REQUEST,
                )
            # Anything unclassified is logged and reported as a 500
            log_exception(e)
            return Response(
                {"error": "Something went wrong please try again later"},
                status=status.HTTP_500_INTERNAL_SERVER_ERROR,
            )

    def dispatch(self, request, *args, **kwargs):
        # Wrap dispatch so even errors raised outside the handler pipeline
        # go through handle_exception; in DEBUG mode print the number of
        # SQL queries the request issued.
        try:
            response = super().dispatch(request, *args, **kwargs)
            if settings.DEBUG:
                from django.db import connection

                print(f"{request.method} - {request.get_full_path()} of Queries: {len(connection.queries)}")
            return response
        except Exception as exc:
            response = self.handle_exception(exc)
            return response

    @property
    def workspace_slug(self):
        # Workspace slug from the URL kwargs, if present
        return self.kwargs.get("slug", None)

    @property
    def project_id(self):
        # Project id from the URL kwargs; for the "project" route the
        # primary key kwarg doubles as the project id
        project_id = self.kwargs.get("project_id", None)
        if project_id:
            return project_id
        if resolve(self.request.path_info).url_name == "project":
            return self.kwargs.get("pk", None)

    @property
    def fields(self):
        # Comma-separated ?fields= query param -> list, or None when absent
        fields = [field for field in self.request.GET.get("fields", "").split(",") if field]
        return fields if fields else None

    @property
    def expand(self):
        # Comma-separated ?expand= query param -> list, or None when absent
        expand = [expand for expand in self.request.GET.get("expand", "").split(",") if expand]
        return expand if expand else None

File diff suppressed because it is too large Load Diff

View File

@@ -0,0 +1,483 @@
# Python imports
import json
# Django imports
from django.core.serializers.json import DjangoJSONEncoder
from django.utils import timezone
from django.db.models import Q, Value, UUIDField
from django.db.models.functions import Coalesce
from django.contrib.postgres.aggregates import ArrayAgg
from django.contrib.postgres.fields import ArrayField
# Third party imports
from rest_framework import status
from rest_framework.response import Response
from drf_spectacular.utils import OpenApiResponse, OpenApiRequest
# Module imports
from plane.api.serializers import (
IntakeIssueSerializer,
IssueSerializer,
IntakeIssueCreateSerializer,
IntakeIssueUpdateSerializer,
)
from plane.app.permissions import ProjectLitePermission
from plane.bgtasks.issue_activities_task import issue_activity
from plane.db.models import Intake, IntakeIssue, Issue, Project, ProjectMember, State
from plane.utils.host import base_host
from .base import BaseAPIView
from plane.db.models.intake import SourceType
from plane.utils.openapi import (
intake_docs,
WORKSPACE_SLUG_PARAMETER,
PROJECT_ID_PARAMETER,
ISSUE_ID_PARAMETER,
CURSOR_PARAMETER,
PER_PAGE_PARAMETER,
FIELDS_PARAMETER,
EXPAND_PARAMETER,
create_paginated_response,
# Request Examples
INTAKE_ISSUE_CREATE_EXAMPLE,
INTAKE_ISSUE_UPDATE_EXAMPLE,
# Response Examples
INTAKE_ISSUE_EXAMPLE,
INVALID_REQUEST_RESPONSE,
DELETED_RESPONSE,
)
class IntakeIssueListCreateAPIEndpoint(BaseAPIView):
    """Intake Work Item List and Create Endpoint"""

    serializer_class = IntakeIssueSerializer
    model = Intake
    permission_classes = [ProjectLitePermission]
    use_read_replica = True

    def get_queryset(self):
        """Return the project's intake issues, or an empty queryset when
        the project has no intake record or intake is disabled."""
        intake = Intake.objects.filter(
            workspace__slug=self.kwargs.get("slug"),
            project_id=self.kwargs.get("project_id"),
        ).first()
        project = Project.objects.get(workspace__slug=self.kwargs.get("slug"), pk=self.kwargs.get("project_id"))
        if intake is None or not project.intake_view:
            return IntakeIssue.objects.none()
        return (
            IntakeIssue.objects.filter(
                Q(snoozed_till__gte=timezone.now()) | Q(snoozed_till__isnull=True),
                workspace__slug=self.kwargs.get("slug"),
                project_id=self.kwargs.get("project_id"),
                intake_id=intake.id,
            )
            .select_related("issue", "workspace", "project")
            .order_by(self.kwargs.get("order_by", "-created_at"))
        )

    @intake_docs(
        operation_id="get_intake_work_items_list",
        summary="List intake work items",
        description="Retrieve all work items in the project's intake queue. Returns paginated results when listing all intake work items.",  # noqa: E501
        parameters=[
            WORKSPACE_SLUG_PARAMETER,
            PROJECT_ID_PARAMETER,
            CURSOR_PARAMETER,
            PER_PAGE_PARAMETER,
            FIELDS_PARAMETER,
            EXPAND_PARAMETER,
        ],
        responses={
            200: create_paginated_response(
                IntakeIssueSerializer,
                "PaginatedIntakeIssueResponse",
                "Paginated list of intake work items",
                "Paginated Intake Work Items",
            ),
        },
    )
    def get(self, request, slug, project_id):
        """List intake work items
        Retrieve all work items in the project's intake queue.
        Returns paginated results when listing all intake work items.
        """
        issue_queryset = self.get_queryset()
        return self.paginate(
            request=request,
            queryset=(issue_queryset),
            on_results=lambda intake_issues: IntakeIssueSerializer(
                intake_issues, many=True, fields=self.fields, expand=self.expand
            ).data,
        )

    @intake_docs(
        operation_id="create_intake_work_item",
        summary="Create intake work item",
        description="Submit a new work item to the project's intake queue for review and triage. Automatically creates the work item with default triage state and tracks activity.",  # noqa: E501
        parameters=[
            WORKSPACE_SLUG_PARAMETER,
            PROJECT_ID_PARAMETER,
        ],
        request=OpenApiRequest(
            request=IntakeIssueCreateSerializer,
            examples=[INTAKE_ISSUE_CREATE_EXAMPLE],
        ),
        responses={
            201: OpenApiResponse(
                description="Intake work item created",
                response=IntakeIssueSerializer,
                examples=[INTAKE_ISSUE_EXAMPLE],
            ),
            400: INVALID_REQUEST_RESPONSE,
        },
    )
    def post(self, request, slug, project_id):
        """Create intake work item
        Submit a new work item to the project's intake queue for review and triage.
        Automatically creates the work item with default triage state and tracks activity.
        """
        if not request.data.get("issue", {}).get("name", False):
            return Response({"error": "Name is required"}, status=status.HTTP_400_BAD_REQUEST)
        intake = Intake.objects.filter(workspace__slug=slug, project_id=project_id).first()
        project = Project.objects.get(workspace__slug=slug, pk=project_id)
        # Intake must exist AND be enabled for the project.
        # BUGFIX: use `or` (matching get_queryset) — the previous `and` let
        # requests through when `intake` was None, crashing on `intake.id`
        # below with an AttributeError (500).
        if intake is None or not project.intake_view:
            return Response(
                {"error": "Intake is not enabled for this project enable it through the project's api"},
                status=status.HTTP_400_BAD_REQUEST,
            )
        # Check for valid priority
        if request.data.get("issue", {}).get("priority", "none") not in [
            "low",
            "medium",
            "high",
            "urgent",
            "none",
        ]:
            return Response({"error": "Invalid priority"}, status=status.HTTP_400_BAD_REQUEST)
        # create an issue
        issue = Issue.objects.create(
            name=request.data.get("issue", {}).get("name"),
            description=request.data.get("issue", {}).get("description", {}),
            description_html=request.data.get("issue", {}).get("description_html", "<p></p>"),
            priority=request.data.get("issue", {}).get("priority", "none"),
            project_id=project_id,
        )
        # create an intake issue
        intake_issue = IntakeIssue.objects.create(
            intake_id=intake.id,
            project_id=project_id,
            issue=issue,
            source=SourceType.IN_APP,
        )
        # Create an Issue Activity
        issue_activity.delay(
            type="issue.activity.created",
            requested_data=json.dumps(request.data, cls=DjangoJSONEncoder),
            actor_id=str(request.user.id),
            issue_id=str(issue.id),
            project_id=str(project_id),
            current_instance=None,
            epoch=int(timezone.now().timestamp()),
            intake=str(intake_issue.id),
        )
        serializer = IntakeIssueSerializer(intake_issue)
        return Response(serializer.data, status=status.HTTP_201_CREATED)
class IntakeIssueDetailAPIEndpoint(BaseAPIView):
    """Intake Issue API Endpoint"""

    permission_classes = [ProjectLitePermission]
    serializer_class = IntakeIssueSerializer
    model = IntakeIssue
    use_read_replica = True
    filterset_fields = ["status"]

    def get_queryset(self):
        # Resolve the project's intake record (at most one per project).
        intake = Intake.objects.filter(
            workspace__slug=self.kwargs.get("slug"),
            project_id=self.kwargs.get("project_id"),
        ).first()
        project = Project.objects.get(workspace__slug=self.kwargs.get("slug"), pk=self.kwargs.get("project_id"))
        # No intake record, or the feature is disabled -> nothing to show.
        if intake is None or not project.intake_view:
            return IntakeIssue.objects.none()
        # Includes items that were never snoozed or whose snooze date is
        # still in the future (snoozed_till >= now).
        return (
            IntakeIssue.objects.filter(
                Q(snoozed_till__gte=timezone.now()) | Q(snoozed_till__isnull=True),
                workspace__slug=self.kwargs.get("slug"),
                project_id=self.kwargs.get("project_id"),
                intake_id=intake.id,
            )
            .select_related("issue", "workspace", "project")
            .order_by(self.kwargs.get("order_by", "-created_at"))
        )

    @intake_docs(
        operation_id="retrieve_intake_work_item",
        summary="Retrieve intake work item",
        description="Retrieve details of a specific intake work item.",
        parameters=[
            WORKSPACE_SLUG_PARAMETER,
            PROJECT_ID_PARAMETER,
            ISSUE_ID_PARAMETER,
        ],
        responses={
            200: OpenApiResponse(
                description="Intake work item",
                response=IntakeIssueSerializer,
                examples=[INTAKE_ISSUE_EXAMPLE],
            ),
        },
    )
    def get(self, request, slug, project_id, issue_id):
        """Retrieve intake work item
        Retrieve details of a specific intake work item.
        """
        # .get() on the filtered queryset; a miss raises DoesNotExist,
        # which BaseAPIView.handle_exception maps to a 404.
        intake_issue_queryset = self.get_queryset().get(issue_id=issue_id)
        intake_issue_data = IntakeIssueSerializer(intake_issue_queryset, fields=self.fields, expand=self.expand).data
        return Response(intake_issue_data, status=status.HTTP_200_OK)

    @intake_docs(
        operation_id="update_intake_work_item",
        summary="Update intake work item",
        description="Modify an existing intake work item's properties or status for triage processing. Supports status changes like accept, reject, or mark as duplicate.",  # noqa: E501
        parameters=[
            WORKSPACE_SLUG_PARAMETER,
            PROJECT_ID_PARAMETER,
            ISSUE_ID_PARAMETER,
        ],
        request=OpenApiRequest(
            request=IntakeIssueUpdateSerializer,
            examples=[INTAKE_ISSUE_UPDATE_EXAMPLE],
        ),
        responses={
            200: OpenApiResponse(
                description="Intake work item updated",
                response=IntakeIssueSerializer,
                examples=[INTAKE_ISSUE_EXAMPLE],
            ),
            400: INVALID_REQUEST_RESPONSE,
        },
    )
    def patch(self, request, slug, project_id, issue_id):
        """Update intake work item
        Modify an existing intake work item's properties or status for triage processing.
        Supports status changes like accept, reject, or mark as duplicate.
        """
        intake = Intake.objects.filter(workspace__slug=slug, project_id=project_id).first()
        project = Project.objects.get(workspace__slug=slug, pk=project_id)
        # Intake view
        # NOTE(review): get_queryset uses `or` for this guard; with `and`,
        # `intake.id` below raises AttributeError when intake is None but
        # intake_view is enabled — confirm whether `or` was intended.
        if intake is None and not project.intake_view:
            return Response(
                {"error": "Intake is not enabled for this project enable it through the project's api"},
                status=status.HTTP_400_BAD_REQUEST,
            )
        # Get the intake issue
        intake_issue = IntakeIssue.objects.get(
            issue_id=issue_id,
            workspace__slug=slug,
            project_id=project_id,
            intake_id=intake.id,
        )
        # Get the project member
        project_member = ProjectMember.objects.get(
            workspace__slug=slug,
            project_id=project_id,
            member=request.user,
            is_active=True,
        )
        # Only project members admins and created_by users can access this endpoint
        # (role <= 5 is the guest tier; guests may only touch their own items)
        if project_member.role <= 5 and str(intake_issue.created_by_id) != str(request.user.id):
            return Response(
                {"error": "You cannot edit intake work items"},
                status=status.HTTP_400_BAD_REQUEST,
            )
        # Get issue data (popped so the remainder of request.data only
        # carries intake-issue attributes for the serializer below)
        issue_data = request.data.pop("issue", False)
        if bool(issue_data):
            # Annotate label/assignee id arrays so the serializer snapshot
            # logged in the activity reflects the current relations.
            issue = Issue.objects.annotate(
                label_ids=Coalesce(
                    ArrayAgg(
                        "labels__id",
                        distinct=True,
                        filter=Q(~Q(labels__id__isnull=True) & Q(label_issue__deleted_at__isnull=True)),
                    ),
                    Value([], output_field=ArrayField(UUIDField())),
                ),
                assignee_ids=Coalesce(
                    ArrayAgg(
                        "assignees__id",
                        distinct=True,
                        filter=Q(
                            ~Q(assignees__id__isnull=True)
                            & Q(assignees__member_project__is_active=True)
                            & Q(issue_assignee__deleted_at__isnull=True)
                        ),
                    ),
                    Value([], output_field=ArrayField(UUIDField())),
                ),
            ).get(pk=issue_id, workspace__slug=slug, project_id=project_id)
            # Only allow guests to edit name and description
            if project_member.role <= 5:
                issue_data = {
                    "name": issue_data.get("name", issue.name),
                    "description_html": issue_data.get("description_html", issue.description_html),
                    "description": issue_data.get("description", issue.description),
                }
            issue_serializer = IssueSerializer(issue, data=issue_data, partial=True)
            if issue_serializer.is_valid():
                current_instance = issue
                # Log all the updates (pre-save snapshot goes into the activity)
                requested_data = json.dumps(issue_data, cls=DjangoJSONEncoder)
                if issue is not None:
                    issue_activity.delay(
                        type="issue.activity.updated",
                        requested_data=requested_data,
                        actor_id=str(request.user.id),
                        issue_id=str(issue_id),
                        project_id=str(project_id),
                        current_instance=json.dumps(
                            IssueSerializer(current_instance).data,
                            cls=DjangoJSONEncoder,
                        ),
                        epoch=int(timezone.now().timestamp()),
                        # NOTE(review): other activity calls here pass
                        # str(intake_issue.id); this passes the raw UUID —
                        # confirm it is serialized correctly downstream.
                        intake=(intake_issue.id),
                    )
                issue_serializer.save()
            else:
                return Response(issue_serializer.errors, status=status.HTTP_400_BAD_REQUEST)
        # Only project admins and members can edit intake issue attributes
        # (role > 15 covers member/admin tiers)
        if project_member.role > 15:
            serializer = IntakeIssueUpdateSerializer(intake_issue, data=request.data, partial=True)
            # Snapshot taken before save so the activity logs the prior state
            current_instance = json.dumps(IntakeIssueSerializer(intake_issue).data, cls=DjangoJSONEncoder)
            if serializer.is_valid():
                serializer.save()
                # Update the issue state if the issue is rejected (-1) or
                # marked as duplicate (2)
                if serializer.data["status"] in [-1, 2]:
                    issue = Issue.objects.get(pk=issue_id, workspace__slug=slug, project_id=project_id)
                    state = State.objects.filter(group="cancelled", workspace__slug=slug, project_id=project_id).first()
                    if state is not None:
                        issue.state = state
                        issue.save()
                # Update the issue state if it is accepted (1)
                if serializer.data["status"] in [1]:
                    issue = Issue.objects.get(pk=issue_id, workspace__slug=slug, project_id=project_id)
                    # Update the issue state only if it is in triage state
                    if issue.state.is_triage:
                        # Move to default state
                        state = State.objects.filter(workspace__slug=slug, project_id=project_id, default=True).first()
                        if state is not None:
                            issue.state = state
                            issue.save()
                # create a activity for status change
                issue_activity.delay(
                    type="intake.activity.created",
                    requested_data=json.dumps(request.data, cls=DjangoJSONEncoder),
                    actor_id=str(request.user.id),
                    issue_id=str(issue_id),
                    project_id=str(project_id),
                    current_instance=current_instance,
                    epoch=int(timezone.now().timestamp()),
                    notification=False,
                    origin=base_host(request=request, is_app=True),
                    intake=str(intake_issue.id),
                )
                serializer = IntakeIssueSerializer(intake_issue)
                return Response(serializer.data, status=status.HTTP_200_OK)
            return Response(serializer.errors, status=status.HTTP_400_BAD_REQUEST)
        else:
            # Lower roles: issue fields may have been updated above, but
            # intake attributes are returned unchanged.
            return Response(IntakeIssueSerializer(intake_issue).data, status=status.HTTP_200_OK)

    @intake_docs(
        operation_id="delete_intake_work_item",
        summary="Delete intake work item",
        description="Permanently remove an intake work item from the triage queue. Also deletes the underlying work item if it hasn't been accepted yet.",  # noqa: E501
        parameters=[
            WORKSPACE_SLUG_PARAMETER,
            PROJECT_ID_PARAMETER,
            ISSUE_ID_PARAMETER,
        ],
        responses={
            204: DELETED_RESPONSE,
        },
    )
    def delete(self, request, slug, project_id, issue_id):
        """Delete intake work item
        Permanently remove an intake work item from the triage queue.
        Also deletes the underlying work item if it hasn't been accepted yet.
        """
        intake = Intake.objects.filter(workspace__slug=slug, project_id=project_id).first()
        project = Project.objects.get(workspace__slug=slug, pk=project_id)
        # Intake view
        # NOTE(review): same `and`-vs-`or` question as in patch() above;
        # intake.id below assumes intake is not None.
        if intake is None and not project.intake_view:
            return Response(
                {"error": "Intake is not enabled for this project enable it through the project's api"},
                status=status.HTTP_400_BAD_REQUEST,
            )
        # Get the intake issue
        intake_issue = IntakeIssue.objects.get(
            issue_id=issue_id,
            workspace__slug=slug,
            project_id=project_id,
            intake_id=intake.id,
        )
        # Check the issue status: anything not yet accepted (pending,
        # rejected, snoozed, duplicate) also deletes the underlying issue
        if intake_issue.status in [-2, -1, 0, 2]:
            # Delete the issue also
            # NOTE(review): filter().first() may return None if the issue
            # row is already gone, which would raise on .created_by_id —
            # confirm whether a guard is needed.
            issue = Issue.objects.filter(workspace__slug=slug, project_id=project_id, pk=issue_id).first()
            if issue.created_by_id != request.user.id and (
                not ProjectMember.objects.filter(
                    workspace__slug=slug,
                    member=request.user,
                    role=20,
                    project_id=project_id,
                    is_active=True,
                ).exists()
            ):
                return Response(
                    {"error": "Only admin or creator can delete the work item"},
                    status=status.HTTP_403_FORBIDDEN,
                )
            issue.delete()
        intake_issue.delete()
        return Response(status=status.HTTP_204_NO_CONTENT)

View File

@@ -0,0 +1,150 @@
# Third party imports
from rest_framework.response import Response
from rest_framework import status
from drf_spectacular.utils import (
extend_schema,
OpenApiResponse,
OpenApiRequest,
OpenApiParameter,
OpenApiTypes,
)
# Module imports
from plane.api.views.base import BaseViewSet
from plane.db.models import WorkspaceMemberInvite, Workspace
from plane.api.serializers import WorkspaceInviteSerializer
from plane.utils.permissions import WorkspaceOwnerPermission
from plane.utils.openapi.parameters import WORKSPACE_SLUG_PARAMETER
class WorkspaceInvitationsViewset(BaseViewSet):
    """
    CRUD operations for workspace invitations, scoped to the workspace
    identified by the ``slug`` URL kwarg. Only workspace owners may use
    these endpoints.
    """

    serializer_class = WorkspaceInviteSerializer
    model = WorkspaceMemberInvite
    permission_classes = [
        WorkspaceOwnerPermission,
    ]

    def get_queryset(self):
        # Restrict the base queryset to invites of the workspace in the URL
        scoped = super().get_queryset().filter(workspace__slug=self.kwargs.get("slug"))
        return self.filter_queryset(scoped)

    def get_object(self):
        invite_pk = self.kwargs.get("pk")
        return self.get_queryset().get(pk=invite_pk)

    @extend_schema(
        summary="List workspace invites",
        description="List all workspace invites for a workspace",
        responses={
            200: OpenApiResponse(
                description="Workspace invites",
                response=WorkspaceInviteSerializer(many=True),
            )
        },
        parameters=[
            WORKSPACE_SLUG_PARAMETER,
        ],
    )
    def list(self, request, slug):
        payload = WorkspaceInviteSerializer(self.get_queryset(), many=True).data
        return Response(payload, status=status.HTTP_200_OK)

    @extend_schema(
        summary="Get workspace invite",
        description="Get a workspace invite by ID",
        responses={200: OpenApiResponse(description="Workspace invite", response=WorkspaceInviteSerializer)},
        parameters=[
            WORKSPACE_SLUG_PARAMETER,
            OpenApiParameter(
                name="pk",
                description="Workspace invite ID",
                required=True,
                type=OpenApiTypes.UUID,
                location=OpenApiParameter.PATH,
            ),
        ],
    )
    def retrieve(self, request, slug, pk):
        invite = self.get_object()
        return Response(WorkspaceInviteSerializer(invite).data, status=status.HTTP_200_OK)

    @extend_schema(
        summary="Create workspace invite",
        description="Create a workspace invite",
        responses={201: OpenApiResponse(description="Workspace invite", response=WorkspaceInviteSerializer)},
        request=OpenApiRequest(request=WorkspaceInviteSerializer),
        parameters=[
            WORKSPACE_SLUG_PARAMETER,
        ],
    )
    def create(self, request, slug):
        workspace = Workspace.objects.get(slug=slug)
        invite_serializer = WorkspaceInviteSerializer(data=request.data, context={"slug": slug})
        invite_serializer.is_valid(raise_exception=True)
        invite_serializer.save(workspace=workspace, created_by=request.user)
        return Response(invite_serializer.data, status=status.HTTP_201_CREATED)

    @extend_schema(
        summary="Update workspace invite",
        description="Update a workspace invite",
        responses={200: OpenApiResponse(description="Workspace invite", response=WorkspaceInviteSerializer)},
        request=OpenApiRequest(request=WorkspaceInviteSerializer),
        parameters=[
            WORKSPACE_SLUG_PARAMETER,
            OpenApiParameter(
                name="pk",
                description="Workspace invite ID",
                required=True,
                type=OpenApiTypes.UUID,
                location=OpenApiParameter.PATH,
            ),
        ],
    )
    def partial_update(self, request, slug, pk):
        invite = self.get_object()
        # The invite email is immutable once the invite exists
        if request.data.get("email"):
            return Response(
                status=status.HTTP_400_BAD_REQUEST,
                data={"error": "Email cannot be updated after invite is created.", "code": "EMAIL_CANNOT_BE_UPDATED"},
            )
        invite_serializer = WorkspaceInviteSerializer(invite, data=request.data, partial=True, context={"slug": slug})
        invite_serializer.is_valid(raise_exception=True)
        invite_serializer.save()
        return Response(invite_serializer.data, status=status.HTTP_200_OK)

    @extend_schema(
        summary="Delete workspace invite",
        description="Delete a workspace invite",
        responses={204: OpenApiResponse(description="Workspace invite deleted")},
        parameters=[
            WORKSPACE_SLUG_PARAMETER,
            OpenApiParameter(
                name="pk",
                description="Workspace invite ID",
                required=True,
                type=OpenApiTypes.UUID,
                location=OpenApiParameter.PATH,
            ),
        ],
    )
    def destroy(self, request, slug, pk):
        invite = self.get_object()
        # Invites that have already been acted upon cannot be withdrawn
        if invite.accepted:
            return Response(
                status=status.HTTP_400_BAD_REQUEST,
                data={"error": "Invite already accepted", "code": "INVITE_ALREADY_ACCEPTED"},
            )
        if invite.responded_at:
            return Response(
                status=status.HTTP_400_BAD_REQUEST,
                data={"error": "Invite already responded", "code": "INVITE_ALREADY_RESPONDED"},
            )
        invite.delete()
        return Response(status=status.HTTP_204_NO_CONTENT)

File diff suppressed because it is too large Load Diff

View File

@@ -0,0 +1,219 @@
# Third Party imports
from rest_framework.response import Response
from rest_framework import status
from drf_spectacular.utils import (
extend_schema,
OpenApiResponse,
OpenApiRequest,
)
# Module imports
from .base import BaseAPIView
from plane.api.serializers import UserLiteSerializer, ProjectMemberSerializer
from plane.db.models import User, Workspace, WorkspaceMember, ProjectMember
from plane.utils.permissions import ProjectMemberPermission, WorkSpaceAdminPermission, ProjectAdminPermission
from plane.utils.openapi import (
WORKSPACE_SLUG_PARAMETER,
PROJECT_ID_PARAMETER,
UNAUTHORIZED_RESPONSE,
FORBIDDEN_RESPONSE,
WORKSPACE_NOT_FOUND_RESPONSE,
PROJECT_NOT_FOUND_RESPONSE,
WORKSPACE_MEMBER_EXAMPLE,
PROJECT_MEMBER_EXAMPLE,
)
class WorkspaceMemberAPIEndpoint(BaseAPIView):
    """Read-only roster of the users belonging to a workspace."""

    permission_classes = [WorkSpaceAdminPermission]
    use_read_replica = True

    @extend_schema(
        operation_id="get_workspace_members",
        summary="List workspace members",
        description="Retrieve all users who are members of the specified workspace.",
        tags=["Members"],
        parameters=[WORKSPACE_SLUG_PARAMETER],
        responses={
            200: OpenApiResponse(
                description="List of workspace members with their roles",
                response={
                    "type": "array",
                    "items": {
                        "allOf": [
                            {"$ref": "#/components/schemas/UserLite"},
                            {
                                "type": "object",
                                "properties": {
                                    "role": {
                                        "type": "integer",
                                        "description": "Member role in the workspace",
                                    }
                                },
                            },
                        ]
                    },
                },
                examples=[WORKSPACE_MEMBER_EXAMPLE],
            ),
            401: UNAUTHORIZED_RESPONSE,
            403: FORBIDDEN_RESPONSE,
            404: WORKSPACE_NOT_FOUND_RESPONSE,
        },
    )
    def get(self, request, slug):
        """List workspace members
        Retrieve all users who are members of the specified workspace.
        Returns user profiles with their respective workspace roles and permissions.
        """
        # Reject unknown workspaces up front
        if not Workspace.objects.filter(slug=slug).exists():
            return Response(
                {"error": "Provided workspace does not exist"},
                status=status.HTTP_400_BAD_REQUEST,
            )
        memberships = WorkspaceMember.objects.filter(workspace__slug=slug).select_related("member")
        # Serialize each member and attach their workspace role
        roster = [{**UserLiteSerializer(membership.member).data, "role": membership.role} for membership in memberships]
        return Response(roster, status=status.HTTP_200_OK)
class ProjectMemberListCreateAPIEndpoint(BaseAPIView):
    """List the members of a project and add new members to it."""

    permission_classes = [ProjectMemberPermission]
    use_read_replica = True

    def get_permissions(self):
        # Reads are open to project members; mutations require admin rights
        is_read = self.request.method == "GET"
        return [ProjectMemberPermission()] if is_read else [ProjectAdminPermission()]

    @extend_schema(
        operation_id="get_project_members",
        summary="List project members",
        description="Retrieve all users who are members of the specified project.",
        tags=["Members"],
        parameters=[WORKSPACE_SLUG_PARAMETER, PROJECT_ID_PARAMETER],
        responses={
            200: OpenApiResponse(
                description="List of project members with their roles",
                response=UserLiteSerializer,
                examples=[PROJECT_MEMBER_EXAMPLE],
            ),
            401: UNAUTHORIZED_RESPONSE,
            403: FORBIDDEN_RESPONSE,
            404: PROJECT_NOT_FOUND_RESPONSE,
        },
    )
    def get(self, request, slug, project_id):
        """List project members
        Retrieve all users who are members of the specified project.
        Returns user profiles with their project-specific roles and access levels.
        """
        # Reject unknown workspaces up front
        if not Workspace.objects.filter(slug=slug).exists():
            return Response(
                {"error": "Provided workspace does not exist"},
                status=status.HTTP_400_BAD_REQUEST,
            )
        # Collect the ids of every member of this project
        member_ids = ProjectMember.objects.filter(project_id=project_id, workspace__slug=slug).values_list(
            "member_id", flat=True
        )
        serialized = UserLiteSerializer(User.objects.filter(id__in=member_ids), many=True)
        return Response(serialized.data, status=status.HTTP_200_OK)

    @extend_schema(
        operation_id="create_project_member",
        summary="Create project member",
        description="Create a new project member",
        tags=["Members"],
        parameters=[WORKSPACE_SLUG_PARAMETER, PROJECT_ID_PARAMETER],
        responses={201: OpenApiResponse(description="Project member created", response=ProjectMemberSerializer)},
        request=OpenApiRequest(request=ProjectMemberSerializer),
    )
    def post(self, request, slug, project_id):
        """Add a user to the project; validation errors raise to a 400."""
        member_serializer = ProjectMemberSerializer(data=request.data, context={"slug": slug})
        member_serializer.is_valid(raise_exception=True)
        member_serializer.save(project_id=project_id)
        return Response(member_serializer.data, status=status.HTTP_201_CREATED)
# API endpoint to retrieve, update and deactivate a single project member
class ProjectMemberDetailAPIEndpoint(ProjectMemberListCreateAPIEndpoint):
    """Retrieve, update or deactivate one project membership by its primary key."""

    @extend_schema(
        operation_id="get_project_member",
        summary="Get project member",
        description="Retrieve a project member by ID.",
        tags=["Members"],
        parameters=[WORKSPACE_SLUG_PARAMETER, PROJECT_ID_PARAMETER],
        responses={
            200: OpenApiResponse(description="Project member", response=ProjectMemberSerializer),
            401: UNAUTHORIZED_RESPONSE,
            403: FORBIDDEN_RESPONSE,
            404: PROJECT_NOT_FOUND_RESPONSE,
        },
    )
    def get(self, request, slug, project_id, pk):
        """Get project member

        Retrieve a project member by ID and return the linked user's
        lightweight profile.
        """
        # Reject unknown workspace slugs up front.
        if not Workspace.objects.filter(slug=slug).exists():
            return Response(
                {"error": "Provided workspace does not exist"},
                status=status.HTTP_400_BAD_REQUEST,
            )
        # Resolve the membership row, then serialize the linked user profile.
        membership = ProjectMember.objects.get(project_id=project_id, workspace__slug=slug, pk=pk)
        profile = UserLiteSerializer(User.objects.get(id=membership.member_id)).data
        return Response(profile, status=status.HTTP_200_OK)

    @extend_schema(
        operation_id="update_project_member",
        summary="Update project member",
        description="Update a project member",
        tags=["Members"],
        parameters=[WORKSPACE_SLUG_PARAMETER, PROJECT_ID_PARAMETER],
        responses={200: OpenApiResponse(description="Project member updated", response=ProjectMemberSerializer)},
        request=OpenApiRequest(request=ProjectMemberSerializer),
    )
    def patch(self, request, slug, project_id, pk):
        """Partially update a project membership (for example, its role)."""
        membership = ProjectMember.objects.get(project_id=project_id, workspace__slug=slug, pk=pk)
        serializer = ProjectMemberSerializer(membership, data=request.data, partial=True, context={"slug": slug})
        serializer.is_valid(raise_exception=True)
        serializer.save()
        return Response(serializer.data, status=status.HTTP_200_OK)

    @extend_schema(
        operation_id="delete_project_member",
        summary="Delete project member",
        description="Delete a project member",
        tags=["Members"],
        parameters=[WORKSPACE_SLUG_PARAMETER, PROJECT_ID_PARAMETER],
        responses={204: OpenApiResponse(description="Project member deleted")},
    )
    def delete(self, request, slug, project_id, pk):
        """Soft-remove a member by flagging the membership inactive."""
        membership = ProjectMember.objects.get(project_id=project_id, workspace__slug=slug, pk=pk)
        # Memberships are deactivated rather than deleted so history is preserved.
        membership.is_active = False
        membership.save()
        return Response(status=status.HTTP_204_NO_CONTENT)

File diff suppressed because it is too large Load Diff

View File

@@ -0,0 +1,586 @@
# Python imports
import json
# Django imports
from django.db import IntegrityError
from django.db.models import Exists, F, Func, OuterRef, Prefetch, Q, Subquery
from django.utils import timezone
from django.core.serializers.json import DjangoJSONEncoder
# Third party imports
from rest_framework import status
from rest_framework.response import Response
from rest_framework.serializers import ValidationError
from drf_spectacular.utils import OpenApiResponse, OpenApiRequest
# Module imports
from plane.db.models import (
Cycle,
Intake,
IssueUserProperty,
Module,
Project,
DeployBoard,
ProjectMember,
State,
Workspace,
UserFavorite,
)
from plane.bgtasks.webhook_task import model_activity, webhook_activity
from .base import BaseAPIView
from plane.utils.host import base_host
from plane.api.serializers import (
ProjectSerializer,
ProjectCreateSerializer,
ProjectUpdateSerializer,
)
from plane.app.permissions import ProjectBasePermission
from plane.utils.openapi import (
project_docs,
PROJECT_ID_PARAMETER,
PROJECT_PK_PARAMETER,
CURSOR_PARAMETER,
PER_PAGE_PARAMETER,
ORDER_BY_PARAMETER,
FIELDS_PARAMETER,
EXPAND_PARAMETER,
create_paginated_response,
# Request Examples
PROJECT_CREATE_EXAMPLE,
PROJECT_UPDATE_EXAMPLE,
# Response Examples
PROJECT_EXAMPLE,
PROJECT_NOT_FOUND_RESPONSE,
WORKSPACE_NOT_FOUND_RESPONSE,
PROJECT_NAME_TAKEN_RESPONSE,
DELETED_RESPONSE,
ARCHIVED_RESPONSE,
UNARCHIVED_RESPONSE,
)
class ProjectListCreateAPIEndpoint(BaseAPIView):
    """Project List and Create Endpoint"""
    # Wiring consumed by BaseAPIView helpers (serialization, webhook dispatch,
    # read-replica routing).
    serializer_class = ProjectSerializer
    model = Project
    webhook_event = "project"
    permission_classes = [ProjectBasePermission]
    use_read_replica = True
    def get_queryset(self):
        # Projects visible to the requester: projects they are an active member
        # of, plus any project with network=2 (open within the workspace).
        return (
            Project.objects.filter(workspace__slug=self.kwargs.get("slug"))
            .filter(
                Q(
                    project_projectmember__member=self.request.user,
                    project_projectmember__is_active=True,
                )
                | Q(network=2)
            )
            .select_related("project_lead")
            # Whether the requester is an active member of each project.
            .annotate(
                is_member=Exists(
                    ProjectMember.objects.filter(
                        member=self.request.user,
                        project_id=OuterRef("pk"),
                        workspace__slug=self.kwargs.get("slug"),
                        is_active=True,
                    )
                )
            )
            # Subquery counts: active non-bot members, cycles and modules.
            .annotate(
                total_members=ProjectMember.objects.filter(
                    project_id=OuterRef("id"), member__is_bot=False, is_active=True
                )
                .order_by()
                .annotate(count=Func(F("id"), function="Count"))
                .values("count")
            )
            .annotate(
                total_cycles=Cycle.objects.filter(project_id=OuterRef("id"))
                .order_by()
                .annotate(count=Func(F("id"), function="Count"))
                .values("count")
            )
            .annotate(
                total_modules=Module.objects.filter(project_id=OuterRef("id"))
                .order_by()
                .annotate(count=Func(F("id"), function="Count"))
                .values("count")
            )
            # The requester's role in each project (empty for non-members).
            .annotate(
                member_role=ProjectMember.objects.filter(
                    project_id=OuterRef("pk"),
                    member_id=self.request.user.id,
                    is_active=True,
                ).values("role")
            )
            # Whether the project has a public deploy board.
            .annotate(
                is_deployed=Exists(
                    DeployBoard.objects.filter(
                        project_id=OuterRef("pk"),
                        workspace__slug=self.kwargs.get("slug"),
                    )
                )
            )
            .order_by(self.kwargs.get("order_by", "-created_at"))
            .distinct()
        )
    @project_docs(
        operation_id="list_projects",
        summary="List or retrieve projects",
        description="Retrieve all projects in a workspace or get details of a specific project.",
        parameters=[
            CURSOR_PARAMETER,
            PER_PAGE_PARAMETER,
            ORDER_BY_PARAMETER,
            FIELDS_PARAMETER,
            EXPAND_PARAMETER,
        ],
        responses={
            200: create_paginated_response(
                ProjectSerializer,
                "PaginatedProjectResponse",
                "Paginated list of projects",
                "Paginated Projects",
            ),
            404: PROJECT_NOT_FOUND_RESPONSE,
        },
    )
    def get(self, request, slug):
        """List projects
        Retrieve all projects in a workspace or get details of a specific project.
        Returns projects ordered by user's custom sort order with member information.
        """
        # Per-user sort order stored on the requester's own membership row.
        sort_order_query = ProjectMember.objects.filter(
            member=request.user,
            project_id=OuterRef("pk"),
            workspace__slug=self.kwargs.get("slug"),
            is_active=True,
        ).values("sort_order")
        projects = (
            self.get_queryset()
            .annotate(sort_order=Subquery(sort_order_query))
            # Prefetch active memberships with their users for serialization.
            .prefetch_related(
                Prefetch(
                    "project_projectmember",
                    queryset=ProjectMember.objects.filter(workspace__slug=slug, is_active=True).select_related(
                        "member"
                    ),
                )
            )
            # `order_by` query param overrides the default per-user sort order.
            .order_by(request.GET.get("order_by", "sort_order"))
        )
        return self.paginate(
            request=request,
            queryset=(projects),
            on_results=lambda projects: ProjectSerializer(
                projects, many=True, fields=self.fields, expand=self.expand
            ).data,
        )
    @project_docs(
        operation_id="create_project",
        summary="Create project",
        description="Create a new project in the workspace with default states and member assignments.",
        request=OpenApiRequest(
            request=ProjectCreateSerializer,
            examples=[PROJECT_CREATE_EXAMPLE],
        ),
        responses={
            201: OpenApiResponse(
                description="Project created successfully",
                response=ProjectSerializer,
                examples=[PROJECT_EXAMPLE],
            ),
            404: WORKSPACE_NOT_FOUND_RESPONSE,
            409: PROJECT_NAME_TAKEN_RESPONSE,
        },
    )
    def post(self, request, slug):
        """Create project
        Create a new project in the workspace with default states and member assignments.
        Automatically adds the creator as admin and sets up default workflow states.
        """
        try:
            workspace = Workspace.objects.get(slug=slug)
            serializer = ProjectCreateSerializer(data={**request.data}, context={"workspace_id": workspace.id})
            if serializer.is_valid():
                serializer.save()
                # Add the user as Administrator (role=20) to the project
                _ = ProjectMember.objects.create(project_id=serializer.instance.id, member=request.user, role=20)
                # Also create the issue property for the user
                _ = IssueUserProperty.objects.create(project_id=serializer.instance.id, user=request.user)
                # A distinct project lead also becomes an admin member.
                if serializer.instance.project_lead is not None and str(serializer.instance.project_lead) != str(
                    request.user.id
                ):
                    ProjectMember.objects.create(
                        project_id=serializer.instance.id,
                        member_id=serializer.instance.project_lead,
                        role=20,
                    )
                    # Also create the issue property for the user
                    IssueUserProperty.objects.create(
                        project_id=serializer.instance.id,
                        user_id=serializer.instance.project_lead,
                    )
                # Default states seeded into every new project (one per group).
                states = [
                    {
                        "name": "Backlog",
                        "color": "#60646C",
                        "sequence": 15000,
                        "group": "backlog",
                        "default": True,
                    },
                    {
                        "name": "Todo",
                        "color": "#60646C",
                        "sequence": 25000,
                        "group": "unstarted",
                    },
                    {
                        "name": "In Progress",
                        "color": "#F59E0B",
                        "sequence": 35000,
                        "group": "started",
                    },
                    {
                        "name": "Done",
                        "color": "#46A758",
                        "sequence": 45000,
                        "group": "completed",
                    },
                    {
                        "name": "Cancelled",
                        "color": "#9AA4BC",
                        "sequence": 55000,
                        "group": "cancelled",
                    },
                ]
                State.objects.bulk_create(
                    [
                        State(
                            name=state["name"],
                            color=state["color"],
                            project=serializer.instance,
                            sequence=state["sequence"],
                            workspace=serializer.instance.workspace,
                            group=state["group"],
                            default=state.get("default", False),
                            created_by=request.user,
                        )
                        for state in states
                    ]
                )
                # Re-fetch through get_queryset() so the response carries the
                # annotated fields (is_member, counts, member_role, ...).
                project = self.get_queryset().filter(pk=serializer.instance.id).first()
                # Model activity
                model_activity.delay(
                    model_name="project",
                    model_id=str(project.id),
                    requested_data=request.data,
                    current_instance=None,
                    actor_id=request.user.id,
                    slug=slug,
                    origin=base_host(request=request, is_app=True),
                )
                serializer = ProjectSerializer(project)
                return Response(serializer.data, status=status.HTTP_201_CREATED)
            return Response(serializer.errors, status=status.HTTP_400_BAD_REQUEST)
        except IntegrityError as e:
            if "already exists" in str(e):
                return Response(
                    {"name": "The project name is already taken"},
                    status=status.HTTP_409_CONFLICT,
                )
            # NOTE(review): when the IntegrityError message does not contain
            # "already exists" this handler falls through and the view returns
            # None, which surfaces as a server error — consider re-raising.
        except Workspace.DoesNotExist:
            return Response({"error": "Workspace does not exist"}, status=status.HTTP_404_NOT_FOUND)
        except ValidationError:
            return Response(
                {"identifier": "The project identifier is already taken"},
                status=status.HTTP_409_CONFLICT,
            )
class ProjectDetailAPIEndpoint(BaseAPIView):
    """Project Endpoints to update, retrieve and delete endpoint"""
    # Wiring consumed by BaseAPIView helpers (serialization, webhook dispatch,
    # read-replica routing).
    serializer_class = ProjectSerializer
    model = Project
    webhook_event = "project"
    permission_classes = [ProjectBasePermission]
    use_read_replica = True
    def get_queryset(self):
        # Projects visible to the requester: projects they are an active member
        # of, plus any project with network=2 (open within the workspace).
        return (
            Project.objects.filter(workspace__slug=self.kwargs.get("slug"))
            .filter(
                Q(
                    project_projectmember__member=self.request.user,
                    project_projectmember__is_active=True,
                )
                | Q(network=2)
            )
            .select_related("workspace", "workspace__owner", "default_assignee", "project_lead")
            # Whether the requester is an active member of the project.
            .annotate(
                is_member=Exists(
                    ProjectMember.objects.filter(
                        member=self.request.user,
                        project_id=OuterRef("pk"),
                        workspace__slug=self.kwargs.get("slug"),
                        is_active=True,
                    )
                )
            )
            # Subquery counts: active non-bot members, cycles and modules.
            .annotate(
                total_members=ProjectMember.objects.filter(
                    project_id=OuterRef("id"), member__is_bot=False, is_active=True
                )
                .order_by()
                .annotate(count=Func(F("id"), function="Count"))
                .values("count")
            )
            .annotate(
                total_cycles=Cycle.objects.filter(project_id=OuterRef("id"))
                .order_by()
                .annotate(count=Func(F("id"), function="Count"))
                .values("count")
            )
            .annotate(
                total_modules=Module.objects.filter(project_id=OuterRef("id"))
                .order_by()
                .annotate(count=Func(F("id"), function="Count"))
                .values("count")
            )
            # The requester's role in the project (empty for non-members).
            .annotate(
                member_role=ProjectMember.objects.filter(
                    project_id=OuterRef("pk"),
                    member_id=self.request.user.id,
                    is_active=True,
                ).values("role")
            )
            # Whether the project has a public deploy board.
            .annotate(
                is_deployed=Exists(
                    DeployBoard.objects.filter(
                        project_id=OuterRef("pk"),
                        workspace__slug=self.kwargs.get("slug"),
                    )
                )
            )
            .order_by(self.kwargs.get("order_by", "-created_at"))
            .distinct()
        )
    @project_docs(
        operation_id="retrieve_project",
        summary="Retrieve project",
        description="Retrieve details of a specific project.",
        parameters=[
            PROJECT_PK_PARAMETER,
        ],
        responses={
            200: OpenApiResponse(
                description="Project details",
                response=ProjectSerializer,
                examples=[PROJECT_EXAMPLE],
            ),
            404: PROJECT_NOT_FOUND_RESPONSE,
        },
    )
    def get(self, request, slug, pk):
        """Retrieve project
        Retrieve details of a specific project.
        """
        # Annotated queryset so the response carries counts and role fields.
        project = self.get_queryset().get(workspace__slug=slug, pk=pk)
        serializer = ProjectSerializer(project, fields=self.fields, expand=self.expand)
        return Response(serializer.data, status=status.HTTP_200_OK)
    @project_docs(
        operation_id="update_project",
        summary="Update project",
        description="Partially update an existing project's properties like name, description, or settings.",
        parameters=[
            PROJECT_PK_PARAMETER,
        ],
        request=OpenApiRequest(
            request=ProjectUpdateSerializer,
            examples=[PROJECT_UPDATE_EXAMPLE],
        ),
        responses={
            200: OpenApiResponse(
                description="Project updated successfully",
                response=ProjectSerializer,
                examples=[PROJECT_EXAMPLE],
            ),
            404: PROJECT_NOT_FOUND_RESPONSE,
            409: PROJECT_NAME_TAKEN_RESPONSE,
        },
    )
    def patch(self, request, slug, pk):
        """Update project
        Partially update an existing project's properties like name, description, or settings.
        Tracks changes in model activity logs for audit purposes.
        """
        try:
            workspace = Workspace.objects.get(slug=slug)
            project = Project.objects.get(pk=pk)
            # Snapshot the pre-update state for the activity log.
            current_instance = json.dumps(ProjectSerializer(project).data, cls=DjangoJSONEncoder)
            # Preserve the existing intake_view flag unless explicitly changed.
            intake_view = request.data.get("intake_view", project.intake_view)
            if project.archived_at:
                return Response(
                    {"error": "Archived project cannot be updated"},
                    status=status.HTTP_400_BAD_REQUEST,
                )
            serializer = ProjectUpdateSerializer(
                project,
                data={**request.data, "intake_view": intake_view},
                context={"workspace_id": workspace.id},
                partial=True,
            )
            if serializer.is_valid():
                serializer.save()
                # Enabling intake lazily creates the project's default intake.
                if serializer.data["intake_view"]:
                    intake = Intake.objects.filter(project=project, is_default=True).first()
                    if not intake:
                        Intake.objects.create(
                            name=f"{project.name} Intake",
                            project=project,
                            is_default=True,
                        )
                # Re-fetch through get_queryset() for the annotated response.
                project = self.get_queryset().filter(pk=serializer.instance.id).first()
                model_activity.delay(
                    model_name="project",
                    model_id=str(project.id),
                    requested_data=request.data,
                    current_instance=current_instance,
                    actor_id=request.user.id,
                    slug=slug,
                    origin=base_host(request=request, is_app=True),
                )
                serializer = ProjectSerializer(project)
                return Response(serializer.data, status=status.HTTP_200_OK)
            return Response(serializer.errors, status=status.HTTP_400_BAD_REQUEST)
        except IntegrityError as e:
            if "already exists" in str(e):
                return Response(
                    {"name": "The project name is already taken"},
                    status=status.HTTP_409_CONFLICT,
                )
            # NOTE(review): when the IntegrityError message does not contain
            # "already exists" this handler falls through and the view returns
            # None, which surfaces as a server error — consider re-raising.
        except (Project.DoesNotExist, Workspace.DoesNotExist):
            return Response({"error": "Project does not exist"}, status=status.HTTP_404_NOT_FOUND)
        except ValidationError:
            return Response(
                {"identifier": "The project identifier is already taken"},
                status=status.HTTP_409_CONFLICT,
            )
    @project_docs(
        operation_id="delete_project",
        summary="Delete project",
        description="Permanently remove a project and all its associated data from the workspace.",
        parameters=[
            PROJECT_PK_PARAMETER,
        ],
        responses={
            204: DELETED_RESPONSE,
        },
    )
    def delete(self, request, slug, pk):
        """Delete project
        Permanently remove a project and all its associated data from the workspace.
        Only admins can delete projects and the action cannot be undone.
        """
        project = Project.objects.get(pk=pk, workspace__slug=slug)
        # Delete the user favorite cycle
        UserFavorite.objects.filter(entity_type="project", entity_identifier=pk, project_id=pk).delete()
        project.delete()
        # `project.id` is still readable from the in-memory instance after delete().
        webhook_activity.delay(
            event="project",
            verb="deleted",
            field=None,
            old_value=None,
            new_value=None,
            actor_id=request.user.id,
            slug=slug,
            current_site=base_host(request=request, is_app=True),
            event_id=project.id,
            old_identifier=None,
            new_identifier=None,
        )
        return Response(status=status.HTTP_204_NO_CONTENT)
class ProjectArchiveUnarchiveAPIEndpoint(BaseAPIView):
    """Project Archive and Unarchive Endpoint"""

    permission_classes = [ProjectBasePermission]

    @project_docs(
        operation_id="archive_project",
        summary="Archive project",
        description="Move a project to archived status, hiding it from active project lists.",
        parameters=[
            PROJECT_ID_PARAMETER,
        ],
        request={},
        responses={
            204: ARCHIVED_RESPONSE,
        },
    )
    def post(self, request, slug, project_id):
        """Archive project

        Stamp the project with the current time so it drops out of active
        listings, and remove any user favorites pointing at it.
        """
        target = Project.objects.get(pk=project_id, workspace__slug=slug)
        target.archived_at = timezone.now()
        target.save()
        # Favorites of an archived project are no longer meaningful.
        UserFavorite.objects.filter(workspace__slug=slug, project=project_id).delete()
        return Response(status=status.HTTP_204_NO_CONTENT)

    @project_docs(
        operation_id="unarchive_project",
        summary="Unarchive project",
        description="Restore an archived project to active status, making it available in regular workflows.",
        parameters=[
            PROJECT_ID_PARAMETER,
        ],
        request={},
        responses={
            204: UNARCHIVED_RESPONSE,
        },
    )
    def delete(self, request, slug, project_id):
        """Unarchive project

        Clear the archive timestamp so the project reappears in active
        project lists and becomes fully functional again.
        """
        target = Project.objects.get(pk=project_id, workspace__slug=slug)
        target.archived_at = None
        target.save()
        return Response(status=status.HTTP_204_NO_CONTENT)

View File

@@ -0,0 +1,296 @@
# Django imports
from django.db import IntegrityError
# Third party imports
from rest_framework import status
from rest_framework.response import Response
from drf_spectacular.utils import OpenApiResponse, OpenApiRequest
# Module imports
from plane.api.serializers import StateSerializer
from plane.app.permissions import ProjectEntityPermission
from plane.db.models import Issue, State
from .base import BaseAPIView
from plane.utils.openapi import (
state_docs,
STATE_ID_PARAMETER,
CURSOR_PARAMETER,
PER_PAGE_PARAMETER,
FIELDS_PARAMETER,
EXPAND_PARAMETER,
create_paginated_response,
# Request Examples
STATE_CREATE_EXAMPLE,
STATE_UPDATE_EXAMPLE,
# Response Examples
STATE_EXAMPLE,
INVALID_REQUEST_RESPONSE,
STATE_NAME_EXISTS_RESPONSE,
DELETED_RESPONSE,
STATE_CANNOT_DELETE_RESPONSE,
EXTERNAL_ID_EXISTS_RESPONSE,
)
class StateListCreateAPIEndpoint(BaseAPIView):
    """State List and Create Endpoint"""
    serializer_class = StateSerializer
    model = State
    permission_classes = [ProjectEntityPermission]
    use_read_replica = True
    def get_queryset(self):
        # States visible to the requester: scoped to this workspace/project,
        # restricted to active project members, excluding triage states and
        # states that belong to archived projects.
        return (
            State.objects.filter(workspace__slug=self.kwargs.get("slug"))
            .filter(project_id=self.kwargs.get("project_id"))
            .filter(
                project__project_projectmember__member=self.request.user,
                project__project_projectmember__is_active=True,
            )
            .filter(is_triage=False)
            .filter(project__archived_at__isnull=True)
            .select_related("project")
            .select_related("workspace")
            .distinct()
        )
    @state_docs(
        operation_id="create_state",
        summary="Create state",
        description="Create a new workflow state for a project with specified name, color, and group.",
        request=OpenApiRequest(
            request=StateSerializer,
            examples=[STATE_CREATE_EXAMPLE],
        ),
        responses={
            200: OpenApiResponse(
                description="State created",
                response=StateSerializer,
                examples=[STATE_EXAMPLE],
            ),
            400: INVALID_REQUEST_RESPONSE,
            409: STATE_NAME_EXISTS_RESPONSE,
        },
    )
    def post(self, request, slug, project_id):
        """Create state
        Create a new workflow state for a project with specified name, color, and group.
        Supports external ID tracking for integration purposes.
        """
        try:
            serializer = StateSerializer(data=request.data, context={"project_id": project_id})
            if serializer.is_valid():
                # Reject duplicates coming from the same external integration
                # (same external_source + external_id pair in this project).
                if (
                    request.data.get("external_id")
                    and request.data.get("external_source")
                    and State.objects.filter(
                        project_id=project_id,
                        workspace__slug=slug,
                        external_source=request.data.get("external_source"),
                        external_id=request.data.get("external_id"),
                    ).exists()
                ):
                    state = State.objects.filter(
                        workspace__slug=slug,
                        project_id=project_id,
                        external_id=request.data.get("external_id"),
                        external_source=request.data.get("external_source"),
                    ).first()
                    return Response(
                        {
                            "error": "State with the same external id and external source already exists",
                            "id": str(state.id),
                        },
                        status=status.HTTP_409_CONFLICT,
                    )
                serializer.save(project_id=project_id)
                # NOTE(review): creation responds with 200 (not 201); this
                # matches the documented schema above, so callers rely on it.
                return Response(serializer.data, status=status.HTTP_200_OK)
            return Response(serializer.errors, status=status.HTTP_400_BAD_REQUEST)
        except IntegrityError:
            # Unique-name collision within the project.
            state = State.objects.filter(
                workspace__slug=slug,
                project_id=project_id,
                name=request.data.get("name"),
            ).first()
            # NOTE(review): `state` can be None when the conflicting row is not
            # found by name (e.g. a different unique constraint fired), in
            # which case `state.id` below raises AttributeError.
            return Response(
                {
                    "error": "State with the same name already exists in the project",
                    "id": str(state.id),
                },
                status=status.HTTP_409_CONFLICT,
            )
    @state_docs(
        operation_id="list_states",
        summary="List states",
        description="Retrieve all workflow states for a project.",
        parameters=[
            CURSOR_PARAMETER,
            PER_PAGE_PARAMETER,
            FIELDS_PARAMETER,
            EXPAND_PARAMETER,
        ],
        responses={
            200: create_paginated_response(
                StateSerializer,
                "PaginatedStateResponse",
                "Paginated list of states",
                "Paginated States",
            ),
        },
    )
    def get(self, request, slug, project_id):
        """List states
        Retrieve all workflow states for a project.
        Returns paginated results when listing all states.
        """
        return self.paginate(
            request=request,
            queryset=(self.get_queryset()),
            on_results=lambda states: StateSerializer(states, many=True, fields=self.fields, expand=self.expand).data,
        )
class StateDetailAPIEndpoint(BaseAPIView):
    """State Detail Endpoint"""

    serializer_class = StateSerializer
    model = State
    permission_classes = [ProjectEntityPermission]
    use_read_replica = True

    def get_queryset(self):
        # Visible states: in this workspace/project, for active project
        # members, excluding triage states and archived projects. All joined
        # conditions except the membership pair are on single-valued relations,
        # so a single filter() call is equivalent to the chained form.
        return (
            State.objects.filter(
                workspace__slug=self.kwargs.get("slug"),
                project_id=self.kwargs.get("project_id"),
                project__project_projectmember__member=self.request.user,
                project__project_projectmember__is_active=True,
                is_triage=False,
                project__archived_at__isnull=True,
            )
            .select_related("project", "workspace")
            .distinct()
        )

    @state_docs(
        operation_id="retrieve_state",
        summary="Retrieve state",
        description="Retrieve details of a specific state.",
        parameters=[
            STATE_ID_PARAMETER,
        ],
        responses={
            200: OpenApiResponse(
                description="State retrieved",
                response=StateSerializer,
                examples=[STATE_EXAMPLE],
            ),
        },
    )
    def get(self, request, slug, project_id, state_id):
        """Retrieve state

        Return the details of one workflow state visible to the requester.
        """
        instance = self.get_queryset().get(pk=state_id)
        payload = StateSerializer(instance, fields=self.fields, expand=self.expand).data
        return Response(payload, status=status.HTTP_200_OK)

    @state_docs(
        operation_id="delete_state",
        summary="Delete state",
        description="Permanently remove a workflow state from a project. Default states and states with existing work items cannot be deleted.",  # noqa: E501
        parameters=[
            STATE_ID_PARAMETER,
        ],
        responses={
            204: DELETED_RESPONSE,
            400: STATE_CANNOT_DELETE_RESPONSE,
        },
    )
    def delete(self, request, slug, project_id, state_id):
        """Delete state

        Permanently remove a workflow state. Default states and states that
        still carry work items are refused.
        """
        target = State.objects.get(is_triage=False, pk=state_id, project_id=project_id, workspace__slug=slug)
        # Default states anchor the workflow and must never be removed.
        if target.default:
            return Response(
                {"error": "Default state cannot be deleted"},
                status=status.HTTP_400_BAD_REQUEST,
            )
        # Refuse deletion while any issue still sits in this state.
        if Issue.objects.filter(state=state_id).exists():
            return Response(
                {"error": "The state is not empty, only empty states can be deleted"},
                status=status.HTTP_400_BAD_REQUEST,
            )
        target.delete()
        return Response(status=status.HTTP_204_NO_CONTENT)

    @state_docs(
        operation_id="update_state",
        summary="Update state",
        description="Partially update an existing workflow state's properties like name, color, or group.",
        parameters=[
            STATE_ID_PARAMETER,
        ],
        request=OpenApiRequest(
            request=StateSerializer,
            examples=[STATE_UPDATE_EXAMPLE],
        ),
        responses={
            200: OpenApiResponse(
                description="State updated",
                response=StateSerializer,
                examples=[STATE_EXAMPLE],
            ),
            400: INVALID_REQUEST_RESPONSE,
            409: EXTERNAL_ID_EXISTS_RESPONSE,
        },
    )
    def patch(self, request, slug, project_id, state_id):
        """Update state

        Partially update a workflow state's properties, refusing external-id
        changes that would collide with another state from the same source.
        """
        current = State.objects.get(workspace__slug=slug, project_id=project_id, pk=state_id)
        serializer = StateSerializer(current, data=request.data, partial=True)
        if not serializer.is_valid():
            return Response(serializer.errors, status=status.HTTP_400_BAD_REQUEST)
        incoming_external_id = request.data.get("external_id")
        if incoming_external_id and current.external_id != str(incoming_external_id):
            # Guard against colliding with another state from the same source.
            duplicate = State.objects.filter(
                project_id=project_id,
                workspace__slug=slug,
                external_source=request.data.get("external_source", current.external_source),
                external_id=incoming_external_id,
            ).exists()
            if duplicate:
                return Response(
                    {
                        "error": "State with the same external id and external source already exists",
                        "id": str(current.id),
                    },
                    status=status.HTTP_409_CONFLICT,
                )
        serializer.save()
        return Response(serializer.data, status=status.HTTP_200_OK)

View File

@@ -0,0 +1,37 @@
# Third party imports
from rest_framework import status
from rest_framework.response import Response
from drf_spectacular.utils import OpenApiResponse
# Module imports
from plane.api.serializers import UserLiteSerializer
from plane.api.views.base import BaseAPIView
from plane.db.models import User
from plane.utils.openapi.decorators import user_docs
from plane.utils.openapi import USER_EXAMPLE
class UserEndpoint(BaseAPIView):
    """Expose the authenticated user's own lightweight profile."""

    serializer_class = UserLiteSerializer
    model = User

    @user_docs(
        operation_id="get_current_user",
        summary="Get current user",
        description="Retrieve the authenticated user's profile information including basic details.",
        responses={
            200: OpenApiResponse(
                description="Current user profile",
                response=UserLiteSerializer,
                examples=[USER_EXAMPLE],
            ),
        },
    )
    def get(self, request):
        """Get current user

        Serialize and return the profile of whoever owns the current
        authentication context.
        """
        profile = UserLiteSerializer(request.user).data
        return Response(profile, status=status.HTTP_200_OK)

View File

View File

@@ -0,0 +1,5 @@
from django.apps import AppConfig
class AppApiConfig(AppConfig):
    """Django application configuration for the ``plane.app`` package."""
    # Dotted module path Django uses to register this app.
    name = "plane.app"

View File

@@ -0,0 +1,47 @@
# Django imports
from django.utils import timezone
from django.db.models import Q
# Third party imports
from rest_framework import authentication
from rest_framework.exceptions import AuthenticationFailed
# Module imports
from plane.db.models import APIToken
class APIKeyAuthentication(authentication.BaseAuthentication):
    """
    Authentication with an API Key
    """

    www_authenticate_realm = "api"
    media_type = "application/json"
    auth_header_name = "X-Api-Key"

    def get_api_token(self, request):
        # Read the raw key from the configured request header.
        return request.headers.get(self.auth_header_name)

    def validate_api_token(self, token):
        # Accept only active tokens that are unexpired or never expire.
        unexpired = Q(expired_at__gt=timezone.now()) | Q(expired_at__isnull=True)
        try:
            api_token = APIToken.objects.get(unexpired, token=token, is_active=True)
        except APIToken.DoesNotExist:
            raise AuthenticationFailed("Given API token is not valid")
        # save api token last used
        api_token.last_used = timezone.now()
        api_token.save(update_fields=["last_used"])
        return (api_token.user, api_token.token)

    def authenticate(self, request):
        raw_token = self.get_api_token(request=request)
        if not raw_token:
            # No header present: let other authenticators have a go.
            return None
        # Validate the API token and return the (user, token) pair.
        return self.validate_api_token(raw_token)

View File

@@ -0,0 +1,17 @@
from .workspace import (
WorkSpaceBasePermission,
WorkspaceOwnerPermission,
WorkSpaceAdminPermission,
WorkspaceEntityPermission,
WorkspaceViewerPermission,
WorkspaceUserPermission,
)
from .project import (
ProjectBasePermission,
ProjectEntityPermission,
ProjectMemberPermission,
ProjectLitePermission,
ProjectAdminPermission,
)
from .base import allow_permission, ROLE
from .page import ProjectPagePermission

View File

@@ -0,0 +1,73 @@
from plane.db.models import WorkspaceMember, ProjectMember
from functools import wraps
from rest_framework.response import Response
from rest_framework import status
from enum import Enum
class ROLE(Enum):
    """Numeric permission levels shared by workspace and project memberships."""
    # Higher values grant broader access; these integers are stored on the
    # membership rows and compared via `role__in` filters.
    ADMIN = 20
    MEMBER = 15
    GUEST = 5
def allow_permission(allowed_roles, level="PROJECT", creator=False, model=None):
    """View-method decorator enforcing role-based access.

    Args:
        allowed_roles: iterable of ROLE members (or raw integer role values).
        level: "WORKSPACE" checks workspace membership (needs a ``slug``
            kwarg); any other value checks project membership (needs ``slug``
            and ``project_id`` kwargs).
        creator: when True together with ``model``, the object's creator
            (looked up via ``kwargs["pk"]``) bypasses the role check.
        model: model class used for the creator lookup.

    Returns a 403 response when none of the checks pass.
    """
    def decorator(view_func):
        @wraps(view_func)
        def _wrapped_view(instance, request, *args, **kwargs):
            # Check for creator if required
            if creator and model:
                # NOTE(review): assumes the route provides a "pk" kwarg when
                # creator=True — a missing key raises KeyError, not 403.
                obj = model.objects.filter(id=kwargs["pk"], created_by=request.user).exists()
                if obj:
                    return view_func(instance, request, *args, **kwargs)
            # Convert allowed_roles to their values if they are enum members
            allowed_role_values = [role.value if isinstance(role, ROLE) else role for role in allowed_roles]
            # Check role permissions
            if level == "WORKSPACE":
                if WorkspaceMember.objects.filter(
                    member=request.user,
                    workspace__slug=kwargs["slug"],
                    role__in=allowed_role_values,
                    is_active=True,
                ).exists():
                    return view_func(instance, request, *args, **kwargs)
            else:
                is_user_has_allowed_role = ProjectMember.objects.filter(
                    member=request.user,
                    workspace__slug=kwargs["slug"],
                    project_id=kwargs["project_id"],
                    role__in=allowed_role_values,
                    is_active=True,
                ).exists()
                # Return if the user has the allowed role else if they are workspace admin and part of the project regardless of the role # noqa: E501
                if is_user_has_allowed_role:
                    return view_func(instance, request, *args, **kwargs)
                elif (
                    ProjectMember.objects.filter(
                        member=request.user,
                        workspace__slug=kwargs["slug"],
                        project_id=kwargs["project_id"],
                        is_active=True,
                    ).exists()
                    and WorkspaceMember.objects.filter(
                        member=request.user,
                        workspace__slug=kwargs["slug"],
                        role=ROLE.ADMIN.value,
                        is_active=True,
                    ).exists()
                ):
                    return view_func(instance, request, *args, **kwargs)
            # Return permission denied if no conditions are met
            return Response(
                {"error": "You don't have the required permissions."},
                status=status.HTTP_403_FORBIDDEN,
            )
        return _wrapped_view
    return decorator

View File

@@ -0,0 +1,121 @@
from plane.db.models import ProjectMember, Page
from plane.app.permissions import ROLE
from rest_framework.permissions import BasePermission, SAFE_METHODS
# Permission Mappings for workspace members
ADMIN = ROLE.ADMIN.value
MEMBER = ROLE.MEMBER.value
GUEST = ROLE.GUEST.value
class ProjectPagePermission(BasePermission):
    """
    Custom permission to control access to pages within a workspace
    based on user roles, page visibility (public/private), and feature flags.
    """

    def has_permission(self, request, view):
        """
        Check basic project-level permissions before checking object-level permissions.

        Returns True when the request may proceed, False otherwise.
        """
        if request.user.is_anonymous:
            return False

        user_id = request.user.id
        slug = view.kwargs.get("slug")
        page_id = view.kwargs.get("page_id")
        project_id = view.kwargs.get("project_id")

        # Hook for extended validation
        extended_access, role = self._check_access_and_get_role(request, slug, project_id)
        if extended_access is False:
            return False

        if page_id:
            # Deny (403) instead of crashing with an unhandled DoesNotExist
            # (500) when the page id is stale or belongs to another workspace.
            try:
                page = Page.objects.get(id=page_id, workspace__slug=slug)
            except Page.DoesNotExist:
                return False
            # Allow access if the user is the owner of the page
            if page.owned_by_id == user_id:
                return True
            # Handle private page access
            if page.access == Page.PRIVATE_ACCESS:
                return self._has_private_page_action_access(request, slug, page, project_id)

        # Handle public page access
        return self._has_public_page_action_access(request, role)

    def _check_project_member_access(self, request, slug, project_id):
        """
        Check if the user is a project member.

        Returns the member's role value, or None when no active membership exists.
        """
        return (
            ProjectMember.objects.filter(
                member=request.user,
                workspace__slug=slug,
                is_active=True,
                project_id=project_id,
            )
            .values_list("role", flat=True)
            .first()
        )

    def _check_access_and_get_role(self, request, slug, project_id):
        """
        Hook for extended access checking.

        Returns a (allowed, role) tuple: (False, None) when the user has no
        active project membership, otherwise (True, role).
        """
        role = self._check_project_member_access(request, slug, project_id)
        if not role:
            return False, None
        return True, role

    def _has_private_page_action_access(self, request, slug, page, project_id):
        """
        Check access to private pages. Override for feature flag logic.
        """
        # Base implementation: only owner can access private pages
        # (the owner short-circuit happens in has_permission).
        return False

    def _check_project_action_access(self, request, role):
        """Map the HTTP verb to the minimum project role allowed to perform it."""
        method = request.method
        # Only admins and members can create (POST) pages
        if method == "POST":
            if role in [ADMIN, MEMBER]:
                return True
            return False
        # Safe methods (GET, HEAD, OPTIONS) allowed for all active roles
        if method in SAFE_METHODS:
            if role in [ADMIN, MEMBER, GUEST]:
                return True
            return False
        # PUT/PATCH: Admins and members can update
        if method in ["PUT", "PATCH"]:
            if role in [ADMIN, MEMBER]:
                return True
            return False
        # DELETE: Only admins can delete
        if method == "DELETE":
            if role in [ADMIN]:
                return True
            return False
        # Deny by default
        return False

    def _has_public_page_action_access(self, request, role):
        """
        Check if the user has permission to access a public page
        and can perform operations on the page.
        """
        project_member_exists = self._check_project_action_access(request, role)
        if not project_member_exists:
            return False
        return True

View File

@@ -0,0 +1,139 @@
# Third Party imports
from rest_framework.permissions import SAFE_METHODS, BasePermission
# Module import
from plane.db.models import ProjectMember, WorkspaceMember
from plane.db.models.project import ROLE
class ProjectBasePermission(BasePermission):
    """Project-level access: reads for any active workspace member, project
    creation for workspace admins/members, and other writes for project
    admins (or workspace admins who belong to the project)."""

    def has_permission(self, request, view):
        user = request.user
        if user.is_anonymous:
            return False

        # Read-only requests: any active workspace member may pass; the
        # queryset handles the finer-grained filtering.
        if request.method in SAFE_METHODS:
            return WorkspaceMember.objects.filter(
                workspace__slug=view.workspace_slug, member=user, is_active=True
            ).exists()

        # Project creation is decided at the workspace level.
        if request.method == "POST":
            return WorkspaceMember.objects.filter(
                workspace__slug=view.workspace_slug,
                member=user,
                role__in=[ROLE.ADMIN.value, ROLE.MEMBER.value],
                is_active=True,
            ).exists()

        # Remaining methods (update/delete): require a project admin role,
        # or any active project membership combined with workspace admin.
        membership = ProjectMember.objects.filter(
            workspace__slug=view.workspace_slug,
            member=user,
            project_id=view.project_id,
            is_active=True,
        )
        if membership.filter(role=ROLE.ADMIN.value).exists():
            return True
        return (
            membership.exists()
            and WorkspaceMember.objects.filter(
                member=user,
                workspace__slug=view.workspace_slug,
                role=ROLE.ADMIN.value,
                is_active=True,
            ).exists()
        )
class ProjectMemberPermission(BasePermission):
    """Reads: any active project membership in the workspace. Creation:
    workspace admins/members. Other writes: project admins/members."""

    def has_permission(self, request, view):
        user = request.user
        if user.is_anonymous:
            return False

        # Read-only access for anyone holding an active project membership
        # somewhere in this workspace; queryset filtering narrows the data.
        if request.method in SAFE_METHODS:
            return ProjectMember.objects.filter(
                workspace__slug=view.workspace_slug, member=user, is_active=True
            ).exists()

        # Creation is a workspace-level decision (admins and members only).
        if request.method == "POST":
            return WorkspaceMember.objects.filter(
                workspace__slug=view.workspace_slug,
                member=user,
                is_active=True,
                role__in=[ROLE.ADMIN.value, ROLE.MEMBER.value],
            ).exists()

        # Updates/deletes require an admin or member role on this project.
        return ProjectMember.objects.filter(
            workspace__slug=view.workspace_slug,
            member=user,
            project_id=view.project_id,
            is_active=True,
            role__in=[ROLE.ADMIN.value, ROLE.MEMBER.value],
        ).exists()
class ProjectEntityPermission(BasePermission):
    """Entity-level project access: reads for any active project member,
    writes for project admins/members. Supports views addressed either by
    project id or by project identifier."""

    def has_permission(self, request, view):
        user = request.user
        if user.is_anonymous:
            return False

        read_only = request.method in SAFE_METHODS

        # Views routed by project identifier instead of project id only
        # short-circuit for read-only requests; writes fall through to the
        # project_id based check below.
        if getattr(view, "project_identifier", None) and read_only:
            return ProjectMember.objects.filter(
                workspace__slug=view.workspace_slug,
                member=user,
                project__identifier=view.project_identifier,
                is_active=True,
            ).exists()

        # Read-only: membership alone suffices, queryset handles filtering.
        if read_only:
            return ProjectMember.objects.filter(
                workspace__slug=view.workspace_slug,
                member=user,
                project_id=view.project_id,
                is_active=True,
            ).exists()

        # Writes: admin or member role required on this project.
        return ProjectMember.objects.filter(
            workspace__slug=view.workspace_slug,
            member=user,
            role__in=[ROLE.ADMIN.value, ROLE.MEMBER.value],
            project_id=view.project_id,
            is_active=True,
        ).exists()
class ProjectAdminPermission(BasePermission):
    """Grants access exclusively to active admins of the target project."""

    def has_permission(self, request, view):
        if request.user.is_anonymous:
            return False
        admin_membership = ProjectMember.objects.filter(
            member=request.user,
            workspace__slug=view.workspace_slug,
            project_id=view.project_id,
            role=ROLE.ADMIN.value,
            is_active=True,
        )
        return admin_membership.exists()
class ProjectLitePermission(BasePermission):
    """Lightest project gate: any active membership in the project passes,
    regardless of role."""

    def has_permission(self, request, view):
        if request.user.is_anonymous:
            return False
        membership = ProjectMember.objects.filter(
            member=request.user,
            workspace__slug=view.workspace_slug,
            project_id=view.project_id,
            is_active=True,
        )
        return membership.exists()

View File

@@ -0,0 +1,106 @@
# Third Party imports
from rest_framework.permissions import BasePermission, SAFE_METHODS
# Module imports
from plane.db.models import WorkspaceMember
# Numeric workspace role levels stored on WorkspaceMember.role.
Admin = 20
Member = 15
Guest = 5

# TODO: Move the below logic to python match - python v3.10
class WorkSpaceBasePermission(BasePermission):
    """Workspace-level access: anyone authenticated may create a workspace,
    reads are open to authenticated users (querysets do the scoping),
    updates need admin/member role, deletes need admin role."""

    def has_permission(self, request, view):
        if request.user.is_anonymous:
            return False

        # allow any authenticated user to create a workspace
        if request.method == "POST":
            return True

        ## Safe Methods -> filtering is handled in the queryset
        if request.method in SAFE_METHODS:
            return True

        # allow only admins and members to update the workspace settings
        if request.method in ["PUT", "PATCH"]:
            return WorkspaceMember.objects.filter(
                member=request.user,
                workspace__slug=view.workspace_slug,
                role__in=[Admin, Member],
                is_active=True,
            ).exists()

        # allow only admins to delete the workspace
        if request.method == "DELETE":
            return WorkspaceMember.objects.filter(
                member=request.user,
                workspace__slug=view.workspace_slug,
                role=Admin,
                is_active=True,
            ).exists()

        # Any other verb: deny explicitly instead of implicitly returning
        # None (DRF treats both as a denial, but an explicit False is clearer).
        return False
class WorkspaceOwnerPermission(BasePermission):
    """Grants access only to active workspace admins (owners)."""

    def has_permission(self, request, view):
        if request.user.is_anonymous:
            return False
        # is_active=True keeps deactivated admins out — consistent with every
        # other permission class in this module, which all filter on it.
        return WorkspaceMember.objects.filter(
            workspace__slug=view.workspace_slug,
            member=request.user,
            role=Admin,
            is_active=True,
        ).exists()
class WorkSpaceAdminPermission(BasePermission):
    """Grants access to active workspace admins and members."""

    def has_permission(self, request, view):
        user = request.user
        if user.is_anonymous:
            return False
        elevated_membership = WorkspaceMember.objects.filter(
            workspace__slug=view.workspace_slug,
            member=user,
            is_active=True,
            role__in=[Admin, Member],
        )
        return elevated_membership.exists()
class WorkspaceEntityPermission(BasePermission):
    """Reads require any active workspace membership; writes additionally
    require an admin or member role."""

    def has_permission(self, request, view):
        user = request.user
        if user.is_anonymous:
            return False

        membership = WorkspaceMember.objects.filter(
            workspace__slug=view.workspace_slug, member=user, is_active=True
        )
        # Safe methods: membership alone is enough; querysets do the scoping.
        if request.method in SAFE_METHODS:
            return membership.exists()
        # Writes: narrow the same membership query down to elevated roles.
        return membership.filter(role__in=[Admin, Member]).exists()
class WorkspaceViewerPermission(BasePermission):
    """Grants access to any active member of the workspace, any role."""

    def has_permission(self, request, view):
        if request.user.is_anonymous:
            return False
        active_membership = WorkspaceMember.objects.filter(
            workspace__slug=view.workspace_slug,
            member=request.user,
            is_active=True,
        )
        return active_membership.exists()
class WorkspaceUserPermission(BasePermission):
    """Grants access to any active workspace member.

    NOTE(review): behaviorally identical to WorkspaceViewerPermission in the
    original file — kept as a separate class to preserve the public API.
    """

    def has_permission(self, request, view):
        if request.user.is_anonymous:
            return False
        return (
            WorkspaceMember.objects.filter(
                workspace__slug=view.workspace_slug,
                member=request.user,
                is_active=True,
            )
            .exists()
        )

View File

@@ -0,0 +1,130 @@
from .base import BaseSerializer
from .user import (
UserSerializer,
UserLiteSerializer,
ChangePasswordSerializer,
ResetPasswordSerializer,
UserAdminLiteSerializer,
UserMeSerializer,
UserMeSettingsSerializer,
ProfileSerializer,
AccountSerializer,
)
from .workspace import (
WorkSpaceSerializer,
WorkSpaceMemberSerializer,
WorkSpaceMemberInviteSerializer,
WorkspaceLiteSerializer,
WorkspaceThemeSerializer,
WorkspaceMemberAdminSerializer,
WorkspaceMemberMeSerializer,
WorkspaceUserPropertiesSerializer,
WorkspaceUserLinkSerializer,
WorkspaceRecentVisitSerializer,
WorkspaceHomePreferenceSerializer,
StickySerializer,
)
from .project import (
ProjectSerializer,
ProjectListSerializer,
ProjectDetailSerializer,
ProjectMemberSerializer,
ProjectMemberInviteSerializer,
ProjectIdentifierSerializer,
ProjectLiteSerializer,
ProjectMemberLiteSerializer,
DeployBoardSerializer,
ProjectMemberAdminSerializer,
ProjectPublicMemberSerializer,
ProjectMemberRoleSerializer,
)
from .state import StateSerializer, StateLiteSerializer
from .view import IssueViewSerializer, ViewIssueListSerializer
from .cycle import (
CycleSerializer,
CycleIssueSerializer,
CycleWriteSerializer,
CycleUserPropertiesSerializer,
)
from .asset import FileAssetSerializer
from .issue import (
IssueCreateSerializer,
IssueActivitySerializer,
IssueCommentSerializer,
IssueUserPropertySerializer,
IssueAssigneeSerializer,
LabelSerializer,
IssueSerializer,
IssueFlatSerializer,
IssueStateSerializer,
IssueLinkSerializer,
IssueIntakeSerializer,
IssueLiteSerializer,
IssueAttachmentSerializer,
IssueSubscriberSerializer,
IssueReactionSerializer,
CommentReactionSerializer,
IssueVoteSerializer,
IssueRelationSerializer,
RelatedIssueSerializer,
IssuePublicSerializer,
IssueDetailSerializer,
IssueReactionLiteSerializer,
IssueAttachmentLiteSerializer,
IssueLinkLiteSerializer,
IssueVersionDetailSerializer,
IssueDescriptionVersionDetailSerializer,
IssueListDetailSerializer,
)
from .module import (
ModuleDetailSerializer,
ModuleWriteSerializer,
ModuleSerializer,
ModuleIssueSerializer,
ModuleLinkSerializer,
ModuleUserPropertiesSerializer,
)
from .api import APITokenSerializer, APITokenReadSerializer
from .importer import ImporterSerializer
from .page import (
PageSerializer,
PageDetailSerializer,
PageVersionSerializer,
PageBinaryUpdateSerializer,
PageVersionDetailSerializer,
)
from .estimate import (
EstimateSerializer,
EstimatePointSerializer,
EstimateReadSerializer,
WorkspaceEstimateSerializer,
)
from .intake import (
IntakeSerializer,
IntakeIssueSerializer,
IssueStateIntakeSerializer,
IntakeIssueLiteSerializer,
IntakeIssueDetailSerializer,
)
from .analytic import AnalyticViewSerializer
from .notification import NotificationSerializer, UserNotificationPreferenceSerializer
from .exporter import ExporterHistorySerializer
from .webhook import WebhookSerializer, WebhookLogSerializer
from .favorite import UserFavoriteSerializer
from .draft import (
DraftIssueCreateSerializer,
DraftIssueSerializer,
DraftIssueDetailSerializer,
)

View File

@@ -0,0 +1,27 @@
from .base import BaseSerializer
from plane.db.models import AnalyticView
from plane.utils.issue_filters import issue_filters
class AnalyticViewSerializer(BaseSerializer):
    """Serializer for AnalyticView; derives the stored `query` filter from
    the raw filter parameters sent by the client."""

    class Meta:
        model = AnalyticView
        fields = "__all__"
        read_only_fields = ["workspace", "query"]

    def create(self, validated_data):
        # Build the stored query from the raw "query_dict" payload.
        query_params = validated_data.get("query_dict", {})
        if bool(query_params):
            validated_data["query"] = issue_filters(query_params, "POST")
        else:
            validated_data["query"] = {}
        return AnalyticView.objects.create(**validated_data)

    def update(self, instance, validated_data):
        # NOTE(review): this reads "query_data" while create() reads
        # "query_dict" — confirm which key clients actually send.
        query_params = validated_data.get("query_data", {})
        # The original if/else that pre-set validated_data["query"] here was
        # dead code: its result was unconditionally overwritten by the
        # assignment below, which is kept as the sole behavior.
        validated_data["query"] = issue_filters(query_params, "PATCH")
        return super().update(instance, validated_data)

View File

@@ -0,0 +1,37 @@
from .base import BaseSerializer
from plane.db.models import APIToken, APIActivityLog
from rest_framework import serializers
from django.utils import timezone
class APITokenSerializer(BaseSerializer):
    """Write-side serializer for APIToken; server-managed fields are locked."""

    class Meta:
        model = APIToken
        fields = "__all__"
        # Token value, expiry, timestamps and ownership are assigned by the
        # server, never by the client.
        read_only_fields = [
            "token",
            "expired_at",
            "created_at",
            "updated_at",
            "workspace",
            "user",
        ]
class APITokenReadSerializer(BaseSerializer):
    """Read-side serializer for APIToken that hides the secret token value."""

    # Computed flag: True when the token never expires or has not yet expired.
    is_active = serializers.SerializerMethodField()

    class Meta:
        model = APIToken
        exclude = ("token",)

    def get_is_active(self, obj: APIToken) -> bool:
        """A token is active when it has no expiry, or the expiry is in the future."""
        expiry = obj.expired_at
        return expiry is None or timezone.now() < expiry
class APIActivityLogSerializer(BaseSerializer):
    """Full-field serializer for API activity log entries."""

    class Meta:
        model = APIActivityLog
        fields = "__all__"

View File

@@ -0,0 +1,9 @@
from .base import BaseSerializer
from plane.db.models import FileAsset
class FileAssetSerializer(BaseSerializer):
    """Full-field serializer for uploaded file assets."""

    class Meta:
        model = FileAsset
        fields = "__all__"
        # Audit fields are populated by the server.
        read_only_fields = ["created_by", "updated_by", "created_at", "updated_at"]

View File

@@ -0,0 +1,197 @@
from rest_framework import serializers
class BaseSerializer(serializers.ModelSerializer):
    """Common base for all app serializers: exposes ``id`` as read-only."""

    # Primary key is never writable through the API.
    id = serializers.PrimaryKeyRelatedField(read_only=True)
class DynamicBaseSerializer(BaseSerializer):
    """Serializer that can restrict its field set (``fields=...``) and inline
    related objects (``expand=...``) at instantiation time."""

    def __init__(self, *args, **kwargs):
        # If 'fields' is provided in the arguments, remove it and store it separately.
        # This is done so as not to pass this custom argument up to the superclass.
        fields = kwargs.pop("fields", [])
        self.expand = kwargs.pop("expand", []) or []
        # NOTE(review): this overwrites the popped "fields" value with
        # "expand", so the `fields=` kwarg is effectively ignored and the
        # field filter operates on the expand list — confirm intent.
        fields = self.expand
        # Call the initialization of the superclass.
        super().__init__(*args, **kwargs)
        # If 'fields' was provided, filter the fields of the serializer accordingly.
        if fields is not None:
            self.fields = self._filter_fields(fields)

    def _filter_fields(self, fields):
        """
        Adjust the serializer's fields based on the provided 'fields' list.

        :param fields: List or dictionary specifying which fields to include in the serializer.
        :return: The updated fields for the serializer.
        """
        # Check each field_name in the provided fields.
        for field_name in fields:
            # If the field is a dictionary (indicating nested fields),
            # loop through its keys and values.
            if isinstance(field_name, dict):
                for key, value in field_name.items():
                    # If the value of this nested field is a list,
                    # perform a recursive filter on it.
                    if isinstance(value, list):
                        # NOTE(review): passes two positional arguments, but
                        # _filter_fields(self, fields) accepts only one — this
                        # call would raise TypeError if ever reached; confirm.
                        self._filter_fields(self.fields[key], value)

        # Create a list to store allowed fields.
        allowed = []
        for item in fields:
            # If the item is a string, it directly represents a field's name.
            if isinstance(item, str):
                allowed.append(item)
            # If the item is a dictionary, it represents a nested field.
            # Add the key of this dictionary to the allowed list.
            elif isinstance(item, dict):
                allowed.append(list(item.keys())[0])

        for field in allowed:
            if field not in self.fields:
                # Imported locally to avoid circular imports between the
                # serializer modules.
                from . import (
                    WorkspaceLiteSerializer,
                    ProjectLiteSerializer,
                    UserLiteSerializer,
                    StateLiteSerializer,
                    IssueSerializer,
                    LabelSerializer,
                    CycleIssueSerializer,
                    IssueLiteSerializer,
                    IssueRelationSerializer,
                    IntakeIssueLiteSerializer,
                    IssueReactionLiteSerializer,
                    IssueLinkLiteSerializer,
                    RelatedIssueSerializer,
                )

                # Expansion mapper
                expansion = {
                    "user": UserLiteSerializer,
                    "workspace": WorkspaceLiteSerializer,
                    "project": ProjectLiteSerializer,
                    "default_assignee": UserLiteSerializer,
                    "project_lead": UserLiteSerializer,
                    "state": StateLiteSerializer,
                    "created_by": UserLiteSerializer,
                    "issue": IssueSerializer,
                    "actor": UserLiteSerializer,
                    "owned_by": UserLiteSerializer,
                    "members": UserLiteSerializer,
                    "assignees": UserLiteSerializer,
                    "labels": LabelSerializer,
                    "issue_cycle": CycleIssueSerializer,
                    "parent": IssueLiteSerializer,
                    "issue_relation": IssueRelationSerializer,
                    "issue_intake": IntakeIssueLiteSerializer,
                    "issue_related": RelatedIssueSerializer,
                    "issue_reactions": IssueReactionLiteSerializer,
                    "issue_link": IssueLinkLiteSerializer,
                    "sub_issues": IssueLiteSerializer,
                }

                # Register the nested serializer; many=True for relations that
                # serialize to a list.
                if field not in self.fields and field in expansion:
                    self.fields[field] = expansion[field](
                        many=(
                            True
                            if field
                            in [
                                "members",
                                "assignees",
                                "labels",
                                "issue_cycle",
                                "issue_relation",
                                "issue_intake",
                                "issue_reactions",
                                "issue_attachment",
                                "issue_link",
                                "sub_issues",
                                "issue_related",
                            ]
                            else False
                        )
                    )

        return self.fields

    def to_representation(self, instance):
        """Render the instance, inlining each relation named in ``self.expand``."""
        response = super().to_representation(instance)

        # Ensure 'expand' is iterable before processing
        if self.expand:
            for expand in self.expand:
                if expand in self.fields:
                    # Import all the expandable serializers
                    from . import (
                        WorkspaceLiteSerializer,
                        ProjectLiteSerializer,
                        UserLiteSerializer,
                        StateLiteSerializer,
                        IssueSerializer,
                        LabelSerializer,
                        CycleIssueSerializer,
                        IssueRelationSerializer,
                        IntakeIssueLiteSerializer,
                        IssueLiteSerializer,
                        IssueReactionLiteSerializer,
                        IssueAttachmentLiteSerializer,
                        IssueLinkLiteSerializer,
                        RelatedIssueSerializer,
                    )

                    # Expansion mapper
                    expansion = {
                        "user": UserLiteSerializer,
                        "workspace": WorkspaceLiteSerializer,
                        "project": ProjectLiteSerializer,
                        "default_assignee": UserLiteSerializer,
                        "project_lead": UserLiteSerializer,
                        "state": StateLiteSerializer,
                        "created_by": UserLiteSerializer,
                        "issue": IssueSerializer,
                        "actor": UserLiteSerializer,
                        "owned_by": UserLiteSerializer,
                        "members": UserLiteSerializer,
                        "assignees": UserLiteSerializer,
                        "labels": LabelSerializer,
                        "issue_cycle": CycleIssueSerializer,
                        "parent": IssueLiteSerializer,
                        "issue_relation": IssueRelationSerializer,
                        "issue_intake": IntakeIssueLiteSerializer,
                        "issue_related": RelatedIssueSerializer,
                        "issue_reactions": IssueReactionLiteSerializer,
                        "issue_attachment": IssueAttachmentLiteSerializer,
                        "issue_link": IssueLinkLiteSerializer,
                        "sub_issues": IssueLiteSerializer,
                    }

                    # Check if field in expansion then expand the field
                    if expand in expansion:
                        # A list value in the rendered output means a to-many
                        # relation, so serialize with many=True.
                        if isinstance(response.get(expand), list):
                            exp_serializer = expansion[expand](getattr(instance, expand), many=True)
                        else:
                            exp_serializer = expansion[expand](getattr(instance, expand))
                        response[expand] = exp_serializer.data
                    else:
                        # You might need to handle this case differently:
                        # fall back to exposing the raw foreign-key id.
                        response[expand] = getattr(instance, f"{expand}_id", None)

        # Check if issue_attachments is in fields or expand
        # NOTE(review): IssueAttachmentLiteSerializer below is only imported
        # inside the expand loop above — if "issue_attachments" is in
        # self.fields while self.expand is empty, this would raise NameError;
        # confirm against the callers.
        if "issue_attachments" in self.fields or "issue_attachments" in self.expand:
            # Import the model here to avoid circular imports
            from plane.db.models import FileAsset

            issue_id = getattr(instance, "id", None)
            if issue_id:
                # Fetch related issue_attachments
                issue_attachments = FileAsset.objects.filter(
                    issue_id=issue_id,
                    entity_type=FileAsset.EntityTypeContext.ISSUE_ATTACHMENT,
                )
                # Serialize issue_attachments and add them to the response
                response["issue_attachments"] = IssueAttachmentLiteSerializer(issue_attachments, many=True).data
            else:
                response["issue_attachments"] = []

        return response

View File

@@ -0,0 +1,102 @@
# Third party imports
from rest_framework import serializers
# Module imports
from .base import BaseSerializer
from .issue import IssueStateSerializer
from plane.db.models import Cycle, CycleIssue, CycleUserProperties
from plane.utils.timezone_converter import convert_to_utc
class CycleWriteSerializer(BaseSerializer):
    """Write serializer for cycles: validates the date range and normalizes
    both dates to UTC via the project's timezone."""

    def validate(self, data):
        start = data.get("start_date", None)
        end = data.get("end_date", None)

        if start is not None and end is not None:
            # Reject inverted ranges before any conversion work.
            if start > end:
                raise serializers.ValidationError("Start date cannot exceed end date")

            # Resolve the project id from the payload, the instance being
            # updated, or the serializer context — in that order.
            project_id = (
                self.initial_data.get("project_id", None)
                or (self.instance and self.instance.project_id)
                or self.context.get("project_id", None)
            )

            data["start_date"] = convert_to_utc(
                date=str(start.date()),
                project_id=project_id,
                is_start_date=True,
            )
            data["end_date"] = convert_to_utc(
                date=str(end.date()),
                project_id=project_id,
            )

        return data

    class Meta:
        model = Cycle
        fields = "__all__"
        read_only_fields = ["workspace", "project", "owned_by", "archived_at"]
class CycleSerializer(BaseSerializer):
    """Read-only cycle payload including per-state issue counts and status.

    The integer/char fields declared below are read-only and presumably
    annotated onto the queryset by the view — TODO confirm.
    """

    # favorite
    is_favorite = serializers.BooleanField(read_only=True)
    total_issues = serializers.IntegerField(read_only=True)
    # state group wise distribution
    cancelled_issues = serializers.IntegerField(read_only=True)
    completed_issues = serializers.IntegerField(read_only=True)
    started_issues = serializers.IntegerField(read_only=True)
    unstarted_issues = serializers.IntegerField(read_only=True)
    backlog_issues = serializers.IntegerField(read_only=True)
    # active | draft | upcoming | completed
    status = serializers.CharField(read_only=True)

    class Meta:
        model = Cycle
        fields = [
            # necessary fields
            "id",
            "workspace_id",
            "project_id",
            # model fields
            "name",
            "description",
            "start_date",
            "end_date",
            "owned_by_id",
            "view_props",
            "sort_order",
            "external_source",
            "external_id",
            "progress_snapshot",
            "logo_props",
            # meta fields
            "is_favorite",
            "total_issues",
            "cancelled_issues",
            "completed_issues",
            "started_issues",
            "unstarted_issues",
            "backlog_issues",
            "status",
        ]
        # Entire payload is read-only.
        read_only_fields = fields
class CycleIssueSerializer(BaseSerializer):
    """Cycle-to-issue link with a nested snapshot of the linked issue."""

    # Nested read-only view of the related issue.
    issue_detail = IssueStateSerializer(read_only=True, source="issue")
    # Read-only count; presumably annotated on the queryset — TODO confirm.
    sub_issues_count = serializers.IntegerField(read_only=True)

    class Meta:
        model = CycleIssue
        fields = "__all__"
        read_only_fields = ["workspace", "project", "cycle"]
class CycleUserPropertiesSerializer(BaseSerializer):
    """Per-user display properties for a cycle; scoping fields are locked."""

    class Meta:
        model = CycleUserProperties
        fields = "__all__"
        read_only_fields = ["workspace", "project", "cycle", "user"]

View File

@@ -0,0 +1,338 @@
# Django imports
from django.utils import timezone
# Third Party imports
from rest_framework import serializers
# Module imports
from .base import BaseSerializer
from plane.db.models import (
User,
Issue,
Label,
State,
DraftIssue,
DraftIssueAssignee,
DraftIssueLabel,
DraftIssueCycle,
DraftIssueModule,
ProjectMember,
EstimatePoint,
)
from plane.utils.content_validator import (
validate_html_content,
validate_binary_data,
)
from plane.app.permissions import ROLE
class DraftIssueCreateSerializer(BaseSerializer):
    """Create/update serializer for draft issues.

    Accepts related ids (state, parent, labels, assignees) via declared
    fields, and cycle/module ids via ``initial_data``; synchronizes the
    corresponding through-model rows in create()/update().
    """

    # ids
    state_id = serializers.PrimaryKeyRelatedField(
        source="state", queryset=State.objects.all(), required=False, allow_null=True
    )
    parent_id = serializers.PrimaryKeyRelatedField(
        source="parent", queryset=Issue.objects.all(), required=False, allow_null=True
    )
    label_ids = serializers.ListField(
        child=serializers.PrimaryKeyRelatedField(queryset=Label.objects.all()),
        write_only=True,
        required=False,
    )
    assignee_ids = serializers.ListField(
        child=serializers.PrimaryKeyRelatedField(queryset=User.objects.all()),
        write_only=True,
        required=False,
    )

    class Meta:
        model = DraftIssue
        fields = "__all__"
        read_only_fields = [
            "workspace",
            "created_by",
            "updated_by",
            "created_at",
            "updated_at",
        ]

    def to_representation(self, instance):
        """Echo the submitted assignee/label ids back into the output
        (write_only fields are otherwise dropped by DRF)."""
        data = super().to_representation(instance)
        assignee_ids = self.initial_data.get("assignee_ids")
        data["assignee_ids"] = assignee_ids if assignee_ids else []
        label_ids = self.initial_data.get("label_ids")
        data["label_ids"] = label_ids if label_ids else []
        return data

    def validate(self, attrs):
        """Cross-field validation: date ordering, description content safety,
        and project-scoping of related objects."""
        if (
            attrs.get("start_date", None) is not None
            and attrs.get("target_date", None) is not None
            and attrs.get("start_date", None) > attrs.get("target_date", None)
        ):
            raise serializers.ValidationError("Start date cannot exceed target date")

        # Validate description content for security
        if "description_html" in attrs and attrs["description_html"]:
            is_valid, error_msg, sanitized_html = validate_html_content(attrs["description_html"])
            if not is_valid:
                raise serializers.ValidationError({"error": "html content is not valid"})
            # Update the attrs with sanitized HTML if available
            if sanitized_html is not None:
                attrs["description_html"] = sanitized_html

        if "description_binary" in attrs and attrs["description_binary"]:
            is_valid, error_msg = validate_binary_data(attrs["description_binary"])
            if not is_valid:
                raise serializers.ValidationError({"description_binary": "Invalid binary data"})

        # Validate assignees are from project
        # NOTE(review): assignees outside the project or below MEMBER role are
        # silently dropped rather than rejected — confirm that is intended.
        if attrs.get("assignee_ids", []):
            attrs["assignee_ids"] = ProjectMember.objects.filter(
                project_id=self.context["project_id"],
                role__gte=ROLE.MEMBER.value,
                is_active=True,
                member_id__in=attrs["assignee_ids"],
            ).values_list("member_id", flat=True)

        # Validate labels are from project
        # NOTE(review): labels outside the project are likewise silently
        # filtered out.
        if attrs.get("label_ids"):
            label_ids = [label.id for label in attrs["label_ids"]]
            attrs["label_ids"] = list(
                Label.objects.filter(project_id=self.context.get("project_id"), id__in=label_ids).values_list(
                    "id", flat=True
                )
            )

        # # Check state is from the project only else raise validation error
        if (
            attrs.get("state")
            and not State.objects.filter(
                project_id=self.context.get("project_id"),
                pk=attrs.get("state").id,
            ).exists()
        ):
            raise serializers.ValidationError("State is not valid please pass a valid state_id")

        # # Check parent issue is from workspace as it can be cross workspace
        if (
            attrs.get("parent")
            and not Issue.objects.filter(
                project_id=self.context.get("project_id"),
                pk=attrs.get("parent").id,
            ).exists()
        ):
            raise serializers.ValidationError("Parent is not valid issue_id please pass a valid issue_id")

        if (
            attrs.get("estimate_point")
            and not EstimatePoint.objects.filter(
                project_id=self.context.get("project_id"),
                pk=attrs.get("estimate_point").id,
            ).exists()
        ):
            raise serializers.ValidationError("Estimate point is not valid please pass a valid estimate_point_id")
        return attrs

    def create(self, validated_data):
        """Create the draft issue plus its assignee/label/cycle/module links."""
        assignees = validated_data.pop("assignee_ids", None)
        labels = validated_data.pop("label_ids", None)
        modules = validated_data.pop("module_ids", None)
        cycle_id = self.initial_data.get("cycle_id", None)
        # NOTE(review): immediately overwrites the value popped above — the
        # module ids used below always come from the raw payload; confirm.
        modules = self.initial_data.get("module_ids", None)
        workspace_id = self.context["workspace_id"]
        project_id = self.context["project_id"]

        # Create Issue
        issue = DraftIssue.objects.create(**validated_data, workspace_id=workspace_id, project_id=project_id)

        # Issue Audit Users
        created_by_id = issue.created_by_id
        updated_by_id = issue.updated_by_id

        if assignees is not None and len(assignees):
            DraftIssueAssignee.objects.bulk_create(
                [
                    DraftIssueAssignee(
                        assignee_id=assignee_id,
                        draft_issue=issue,
                        workspace_id=workspace_id,
                        project_id=project_id,
                        created_by_id=created_by_id,
                        updated_by_id=updated_by_id,
                    )
                    for assignee_id in assignees
                ],
                batch_size=10,
            )

        if labels is not None and len(labels):
            DraftIssueLabel.objects.bulk_create(
                [
                    DraftIssueLabel(
                        label_id=label_id,
                        draft_issue=issue,
                        project_id=project_id,
                        workspace_id=workspace_id,
                        created_by_id=created_by_id,
                        updated_by_id=updated_by_id,
                    )
                    for label_id in labels
                ],
                batch_size=10,
            )

        if cycle_id is not None:
            DraftIssueCycle.objects.create(
                cycle_id=cycle_id,
                draft_issue=issue,
                project_id=project_id,
                workspace_id=workspace_id,
                created_by_id=created_by_id,
                updated_by_id=updated_by_id,
            )

        if modules is not None and len(modules):
            DraftIssueModule.objects.bulk_create(
                [
                    DraftIssueModule(
                        module_id=module_id,
                        draft_issue=issue,
                        project_id=project_id,
                        workspace_id=workspace_id,
                        created_by_id=created_by_id,
                        updated_by_id=updated_by_id,
                    )
                    for module_id in modules
                ],
                batch_size=10,
            )

        return issue

    def update(self, instance, validated_data):
        """Update the draft issue; provided relation lists fully replace the
        existing assignee/label/cycle/module links."""
        assignees = validated_data.pop("assignee_ids", None)
        labels = validated_data.pop("label_ids", None)
        cycle_id = self.context.get("cycle_id", None)
        modules = self.initial_data.get("module_ids", None)

        # Related models
        workspace_id = instance.workspace_id
        project_id = instance.project_id
        created_by_id = instance.created_by_id
        updated_by_id = instance.updated_by_id

        if assignees is not None:
            # Delete-then-recreate replacement of assignee links.
            DraftIssueAssignee.objects.filter(draft_issue=instance).delete()
            DraftIssueAssignee.objects.bulk_create(
                [
                    DraftIssueAssignee(
                        assignee_id=assignee_id,
                        draft_issue=instance,
                        workspace_id=workspace_id,
                        project_id=project_id,
                        created_by_id=created_by_id,
                        updated_by_id=updated_by_id,
                    )
                    for assignee_id in assignees
                ],
                batch_size=10,
            )

        if labels is not None:
            DraftIssueLabel.objects.filter(draft_issue=instance).delete()
            DraftIssueLabel.objects.bulk_create(
                [
                    DraftIssueLabel(
                        label_id=label,
                        draft_issue=instance,
                        workspace_id=workspace_id,
                        project_id=project_id,
                        created_by_id=created_by_id,
                        updated_by_id=updated_by_id,
                    )
                    for label in labels
                ],
                batch_size=10,
            )

        # Sentinel: a context value of "not_provided" means "leave the cycle
        # link untouched"; None (or a missing key) clears it, an id replaces it.
        if cycle_id != "not_provided":
            DraftIssueCycle.objects.filter(draft_issue=instance).delete()
            if cycle_id:
                DraftIssueCycle.objects.create(
                    cycle_id=cycle_id,
                    draft_issue=instance,
                    workspace_id=workspace_id,
                    project_id=project_id,
                    created_by_id=created_by_id,
                    updated_by_id=updated_by_id,
                )

        if modules is not None:
            DraftIssueModule.objects.filter(draft_issue=instance).delete()
            DraftIssueModule.objects.bulk_create(
                [
                    DraftIssueModule(
                        module_id=module_id,
                        draft_issue=instance,
                        workspace_id=workspace_id,
                        project_id=project_id,
                        created_by_id=created_by_id,
                        updated_by_id=updated_by_id,
                    )
                    for module_id in modules
                ],
                batch_size=10,
            )

        # Time updation occurs even when other related models are updated
        instance.updated_at = timezone.now()
        return super().update(instance, validated_data)
class DraftIssueSerializer(BaseSerializer):
    """Read-only list payload for draft issues, with related ids flattened."""

    # ids
    cycle_id = serializers.PrimaryKeyRelatedField(read_only=True)
    # presumably annotated/prefetched id lists — TODO confirm in the view
    module_ids = serializers.ListField(child=serializers.UUIDField(), required=False)
    # Many to many
    label_ids = serializers.ListField(child=serializers.UUIDField(), required=False)
    assignee_ids = serializers.ListField(child=serializers.UUIDField(), required=False)

    class Meta:
        model = DraftIssue
        fields = [
            "id",
            "name",
            "state_id",
            "sort_order",
            "completed_at",
            "estimate_point",
            "priority",
            "start_date",
            "target_date",
            "project_id",
            "parent_id",
            "cycle_id",
            "module_ids",
            "label_ids",
            "assignee_ids",
            "created_at",
            "updated_at",
            "created_by",
            "updated_by",
            "type_id",
            "description_html",
        ]
        # Entire payload is read-only.
        read_only_fields = fields
class DraftIssueDetailSerializer(DraftIssueSerializer):
    """Draft issue detail payload: the base field set with the HTML
    description declared explicitly as a CharField."""

    description_html = serializers.CharField()

    class Meta(DraftIssueSerializer.Meta):
        # De-duplicate: the base field list already contains
        # "description_html", so naively appending it would list the same
        # field name twice. dict.fromkeys preserves order while deduping.
        fields = list(dict.fromkeys(DraftIssueSerializer.Meta.fields + ["description_html"]))
        read_only_fields = fields

View File

@@ -0,0 +1,46 @@
# Module imports
from .base import BaseSerializer
from plane.db.models import Estimate, EstimatePoint
from rest_framework import serializers
class EstimateSerializer(BaseSerializer):
    """Write serializer for estimates; scoping fields are server-assigned."""

    class Meta:
        model = Estimate
        fields = "__all__"
        read_only_fields = ["workspace", "project"]
class EstimatePointSerializer(BaseSerializer):
    """Serializer for individual estimate points with basic payload checks."""

    def validate(self, data):
        """Reject empty payloads and over-long point values."""
        if not data:
            raise serializers.ValidationError("Estimate points are required")
        point_value = data.get("value")
        if point_value and len(point_value) > 20:
            raise serializers.ValidationError("Value can't be more than 20 characters")
        return data

    class Meta:
        model = EstimatePoint
        fields = "__all__"
        read_only_fields = ["estimate", "workspace", "project"]
class EstimateReadSerializer(BaseSerializer):
    """Read serializer for estimates including their nested points."""

    # Nested read-only list of the estimate's points.
    points = EstimatePointSerializer(read_only=True, many=True)

    class Meta:
        model = Estimate
        fields = "__all__"
        read_only_fields = ["points", "name", "description"]
class WorkspaceEstimateSerializer(BaseSerializer):
    """Workspace-scoped read serializer for estimates with nested points.

    NOTE(review): currently identical to EstimateReadSerializer — confirm
    whether the duplication is intentional.
    """

    # Nested read-only list of the estimate's points.
    points = EstimatePointSerializer(read_only=True, many=True)

    class Meta:
        model = Estimate
        fields = "__all__"
        read_only_fields = ["points", "name", "description"]

View File

@@ -0,0 +1,26 @@
# Module imports
from .base import BaseSerializer
from plane.db.models import ExporterHistory
from .user import UserLiteSerializer
class ExporterHistorySerializer(BaseSerializer):
    """Read-only payload describing one export run and who initiated it."""

    # Expanded initiator details alongside the raw initiated_by id.
    initiated_by_detail = UserLiteSerializer(source="initiated_by", read_only=True)

    class Meta:
        model = ExporterHistory
        fields = [
            "id",
            "created_at",
            "updated_at",
            "project",
            "provider",
            "status",
            "url",
            "initiated_by",
            "initiated_by_detail",
            # NOTE(review): "token" is serialized to clients — confirm it is
            # meant to be exposed in this payload.
            "token",
            "created_by",
            "updated_by",
        ]
        read_only_fields = fields

View File

@@ -0,0 +1,85 @@
from rest_framework import serializers
from plane.db.models import UserFavorite, Cycle, Module, Issue, IssueView, Page, Project
class ProjectFavoriteLiteSerializer(serializers.ModelSerializer):
    """Minimal project payload embedded in a user-favorite entry."""

    class Meta:
        model = Project
        fields = ["id", "name", "logo_props"]
class PageFavoriteLiteSerializer(serializers.ModelSerializer):
    """Minimal page payload for favorites, including a derived project id."""

    project_id = serializers.SerializerMethodField()

    class Meta:
        model = Page
        fields = ["id", "name", "logo_props", "project_id"]

    def get_project_id(self, obj):
        """Return the id of the first project linked to the page, or None."""
        first_project = obj.projects.first()
        if first_project is None:
            return None
        return first_project.id
class CycleFavoriteLiteSerializer(serializers.ModelSerializer):
    """Minimal cycle payload embedded in a user-favorite entry."""

    class Meta:
        model = Cycle
        fields = ["id", "name", "logo_props", "project_id"]
class ModuleFavoriteLiteSerializer(serializers.ModelSerializer):
    """Minimal module payload embedded in a user-favorite entry."""

    class Meta:
        model = Module
        fields = ["id", "name", "logo_props", "project_id"]
class ViewFavoriteSerializer(serializers.ModelSerializer):
    """Minimal issue-view payload embedded in a user-favorite entry."""

    class Meta:
        model = IssueView
        fields = ["id", "name", "logo_props", "project_id"]
def get_entity_model_and_serializer(entity_type):
    """Map a favorite entity_type string to its (model, lite serializer) pair.

    Types without a serializer ("issue", "folder") and unknown types resolve
    to None entries, which callers treat as "nothing to embed".
    """
    if entity_type == "cycle":
        return (Cycle, CycleFavoriteLiteSerializer)
    if entity_type == "issue":
        return (Issue, None)
    if entity_type == "module":
        return (Module, ModuleFavoriteLiteSerializer)
    if entity_type == "view":
        return (IssueView, ViewFavoriteSerializer)
    if entity_type == "page":
        return (Page, PageFavoriteLiteSerializer)
    if entity_type == "project":
        return (Project, ProjectFavoriteLiteSerializer)
    # "folder" and any unrecognized type fall through here.
    return (None, None)
class UserFavoriteSerializer(serializers.ModelSerializer):
    """Serializer for user favorites with the favorited entity embedded.

    The entity payload is resolved dynamically from entity_type via
    get_entity_model_and_serializer.
    """

    entity_data = serializers.SerializerMethodField()

    class Meta:
        model = UserFavorite
        fields = [
            "id",
            "entity_type",
            "entity_identifier",
            "entity_data",
            "name",
            "is_folder",
            "sequence",
            "parent",
            "workspace_id",
            "project_id",
        ]
        read_only_fields = ["workspace", "created_by", "updated_by"]

    def get_entity_data(self, obj):
        """Serialize the favorited entity, or None when it cannot be resolved."""
        model, serializer_cls = get_entity_model_and_serializer(obj.entity_type)
        # Entity types without a model/serializer pair have nothing to embed.
        if model is None or serializer_cls is None:
            return None
        try:
            entity = model.objects.get(pk=obj.entity_identifier)
        except model.DoesNotExist:
            # Stale favorite pointing at a deleted entity.
            return None
        return serializer_cls(entity).data

View File

@@ -0,0 +1,16 @@
# Module imports
from .base import BaseSerializer
from .user import UserLiteSerializer
from .project import ProjectLiteSerializer
from .workspace import WorkspaceLiteSerializer
from plane.db.models import Importer
class ImporterSerializer(BaseSerializer):
    """Serializer for import jobs with expanded user/project/workspace details."""

    initiated_by_detail = UserLiteSerializer(source="initiated_by", read_only=True)
    project_detail = ProjectLiteSerializer(source="project", read_only=True)
    workspace_detail = WorkspaceLiteSerializer(source="workspace", read_only=True)
    class Meta:
        model = Importer
        fields = "__all__"

View File

@@ -0,0 +1,90 @@
# Third party frameworks
from rest_framework import serializers
# Module imports
from .base import BaseSerializer
from .issue import IssueIntakeSerializer, LabelLiteSerializer, IssueDetailSerializer
from .project import ProjectLiteSerializer
from .state import StateLiteSerializer
from .user import UserLiteSerializer
from plane.db.models import Intake, IntakeIssue, Issue
class IntakeSerializer(BaseSerializer):
    """Serializer for an intake (inbox) with its pending-issue count."""

    project_detail = ProjectLiteSerializer(source="project", read_only=True)
    # Populated by a queryset annotation in the view.
    pending_issue_count = serializers.IntegerField(read_only=True)
    class Meta:
        model = Intake
        fields = "__all__"
        read_only_fields = ["project", "workspace"]
class IntakeIssueSerializer(BaseSerializer):
    """Serializer for an intake-issue row with the nested issue embedded."""

    issue = IssueIntakeSerializer(read_only=True)
    class Meta:
        model = IntakeIssue
        fields = [
            "id",
            "status",
            "duplicate_to",
            "snoozed_till",
            "source",
            "issue",
            "created_by",
        ]
        read_only_fields = ["project", "workspace"]
    def to_representation(self, instance):
        """Forward queryset annotations onto the nested issue before serializing."""
        # Pass the annotated fields to the Issue instance if they exist
        if hasattr(instance, "label_ids"):
            instance.issue.label_ids = instance.label_ids
        return super().to_representation(instance)
class IntakeIssueDetailSerializer(BaseSerializer):
    """Detailed intake-issue payload with the full issue and duplicate target."""

    issue = IssueDetailSerializer(read_only=True)
    duplicate_issue_detail = IssueIntakeSerializer(read_only=True, source="duplicate_to")
    class Meta:
        model = IntakeIssue
        fields = [
            "id",
            "status",
            "duplicate_to",
            "snoozed_till",
            "duplicate_issue_detail",
            "source",
            "issue",
        ]
        read_only_fields = ["project", "workspace"]
    def to_representation(self, instance):
        """Forward queryset annotations onto the nested issue before serializing."""
        # Pass the annotated fields to the Issue instance if they exist
        if hasattr(instance, "assignee_ids"):
            instance.issue.assignee_ids = instance.assignee_ids
        if hasattr(instance, "label_ids"):
            instance.issue.label_ids = instance.label_ids
        return super().to_representation(instance)
class IntakeIssueLiteSerializer(BaseSerializer):
    """Minimal, read-only intake-issue payload (no nested issue)."""

    class Meta:
        model = IntakeIssue
        fields = ["id", "status", "duplicate_to", "snoozed_till", "source"]
        read_only_fields = fields
class IssueStateIntakeSerializer(BaseSerializer):
    """Issue payload enriched with state/project/label/assignee details and
    its intake entries, for intake listings."""

    state_detail = StateLiteSerializer(read_only=True, source="state")
    project_detail = ProjectLiteSerializer(read_only=True, source="project")
    label_details = LabelLiteSerializer(read_only=True, source="labels", many=True)
    assignee_details = UserLiteSerializer(read_only=True, source="assignees", many=True)
    # Populated by a queryset annotation in the view.
    sub_issues_count = serializers.IntegerField(read_only=True)
    issue_intake = IntakeIssueLiteSerializer(read_only=True, many=True)
    class Meta:
        model = Issue
        fields = "__all__"

File diff suppressed because it is too large Load Diff

View File

@@ -0,0 +1,276 @@
# Third Party imports
from rest_framework import serializers
# Module imports
from .base import BaseSerializer, DynamicBaseSerializer
from .project import ProjectLiteSerializer
# Django imports
from django.core.validators import URLValidator
from django.core.exceptions import ValidationError
from plane.db.models import (
User,
Module,
ModuleMember,
ModuleIssue,
ModuleLink,
ModuleUserProperties,
)
class ModuleWriteSerializer(BaseSerializer):
    """Write serializer for modules.

    Accepts lead_id and write-only member_ids, enforces per-project name
    uniqueness and date ordering, and keeps the ModuleMember join table in
    sync on create/update. Expects "project" in the serializer context.
    """

    lead_id = serializers.PrimaryKeyRelatedField(
        source="lead", queryset=User.objects.all(), required=False, allow_null=True
    )
    member_ids = serializers.ListField(
        child=serializers.PrimaryKeyRelatedField(queryset=User.objects.all()),
        write_only=True,
        required=False,
    )
    class Meta:
        model = Module
        fields = "__all__"
        read_only_fields = [
            "workspace",
            "project",
            "created_by",
            "updated_by",
            "created_at",
            "updated_at",
            "archived_at",
            "deleted_at",
        ]
    def to_representation(self, instance):
        """Add the current member ids (as strings) to the serialized output."""
        data = super().to_representation(instance)
        data["member_ids"] = [str(member.id) for member in instance.members.all()]
        return data
    def validate(self, data):
        """Reject payloads where the start date falls after the target date."""
        if (
            data.get("start_date", None) is not None
            and data.get("target_date", None) is not None
            and data.get("start_date", None) > data.get("target_date", None)
        ):
            raise serializers.ValidationError("Start date cannot exceed target date")
        return data
    def create(self, validated_data):
        """Create the module and bulk-create its member rows.

        Raises a validation error when a module with the same name already
        exists in the context project.
        """
        members = validated_data.pop("member_ids", None)
        project = self.context["project"]
        module_name = validated_data.get("name")
        if module_name:
            # Lookup for the module name in the module table for that project
            if Module.objects.filter(name=module_name, project=project).exists():
                raise serializers.ValidationError({"error": "Module with this name already exists"})
        module = Module.objects.create(**validated_data, project=project)
        if members is not None:
            ModuleMember.objects.bulk_create(
                [
                    ModuleMember(
                        module=module,
                        member=member,
                        project=project,
                        workspace=project.workspace,
                        created_by=module.created_by,
                        updated_by=module.updated_by,
                    )
                    for member in members
                ],
                batch_size=10,
                # Duplicate member rows are silently skipped.
                ignore_conflicts=True,
            )
        return module
    def update(self, instance, validated_data):
        """Update the module; when member_ids is given, replace all member rows."""
        members = validated_data.pop("member_ids", None)
        module_name = validated_data.get("name")
        if module_name:
            # Lookup for the module name in the module table for that project
            if Module.objects.filter(name=module_name, project=instance.project).exclude(id=instance.id).exists():
                raise serializers.ValidationError({"error": "Module with this name already exists"})
        if members is not None:
            # Full replacement: delete existing rows, then recreate from the payload.
            ModuleMember.objects.filter(module=instance).delete()
            ModuleMember.objects.bulk_create(
                [
                    ModuleMember(
                        module=instance,
                        member=member,
                        project=instance.project,
                        workspace=instance.project.workspace,
                        created_by=instance.created_by,
                        updated_by=instance.updated_by,
                    )
                    for member in members
                ],
                batch_size=10,
                ignore_conflicts=True,
            )
        return super().update(instance, validated_data)
class ModuleFlatSerializer(BaseSerializer):
    """Flat module payload (all model fields, no computed annotations)."""

    class Meta:
        model = Module
        fields = "__all__"
        read_only_fields = [
            "workspace",
            "project",
            "created_by",
            "updated_by",
            "created_at",
            "updated_at",
        ]
class ModuleIssueSerializer(BaseSerializer):
    """Serializer for module-issue links with expanded module details."""

    module_detail = ModuleFlatSerializer(read_only=True, source="module")
    # NOTE(review): issue_detail uses ProjectLiteSerializer on source="issue" —
    # this looks like the wrong serializer for an Issue instance; confirm
    # against the issue serializers module before relying on this field.
    issue_detail = ProjectLiteSerializer(read_only=True, source="issue")
    # Populated by a queryset annotation in the view.
    sub_issues_count = serializers.IntegerField(read_only=True)
    class Meta:
        model = ModuleIssue
        fields = "__all__"
        read_only_fields = [
            "workspace",
            "project",
            "created_by",
            "updated_by",
            "created_at",
            "updated_at",
            "module",
        ]
class ModuleLinkSerializer(BaseSerializer):
    """Serializer for module links.

    Normalizes bare URLs by prefixing "http://", validates them with Django's
    URLValidator, and enforces per-module URL uniqueness on create and update.
    """

    class Meta:
        model = ModuleLink
        fields = "__all__"
        read_only_fields = [
            "workspace",
            "project",
            "created_by",
            "updated_by",
            "created_at",
            "updated_at",
            "module",
        ]

    def to_internal_value(self, data):
        """Prefix scheme-less URLs with http:// before field validation runs."""
        url = data.get("url", "")
        if url and not url.startswith(("http://", "https://")):
            data["url"] = "http://" + url
        return super().to_internal_value(data)

    def validate_url(self, value):
        """Validate a URL with Django's built-in URLValidator.

        Raises:
            serializers.ValidationError: when the value is not a valid URL.
        """
        url_validator = URLValidator()
        try:
            url_validator(value)
        except ValidationError:
            raise serializers.ValidationError({"error": "Invalid URL format."})
        return value

    def create(self, validated_data):
        """Create a link, rejecting URLs already attached to the same module."""
        validated_data["url"] = self.validate_url(validated_data.get("url"))
        if ModuleLink.objects.filter(url=validated_data.get("url"), module_id=validated_data.get("module_id")).exists():
            raise serializers.ValidationError({"error": "URL already exists."})
        return super().create(validated_data)

    def update(self, instance, validated_data):
        """Update a link, re-validating the URL only when one is supplied.

        Bug fix: the previous implementation always called
        validate_url(validated_data.get("url")), so a partial update without a
        "url" key failed validation on None and clobbered the stored URL.
        """
        url = validated_data.get("url")
        if url is not None:
            validated_data["url"] = self.validate_url(url)
            if (
                ModuleLink.objects.filter(url=validated_data["url"], module_id=instance.module_id)
                .exclude(pk=instance.id)
                .exists()
            ):
                raise serializers.ValidationError({"error": "URL already exists for this Issue"})
        return super().update(instance, validated_data)
class ModuleSerializer(DynamicBaseSerializer):
    """Read-only module payload including queryset-annotated issue and
    estimate-point statistics."""

    member_ids = serializers.ListField(child=serializers.UUIDField(), required=False, allow_null=True)
    # All fields below are populated by queryset annotations in the view.
    is_favorite = serializers.BooleanField(read_only=True)
    total_issues = serializers.IntegerField(read_only=True)
    cancelled_issues = serializers.IntegerField(read_only=True)
    completed_issues = serializers.IntegerField(read_only=True)
    started_issues = serializers.IntegerField(read_only=True)
    unstarted_issues = serializers.IntegerField(read_only=True)
    backlog_issues = serializers.IntegerField(read_only=True)
    total_estimate_points = serializers.FloatField(read_only=True)
    completed_estimate_points = serializers.FloatField(read_only=True)
    class Meta:
        model = Module
        fields = [
            # Required fields
            "id",
            "workspace_id",
            "project_id",
            # Model fields
            "name",
            "description",
            "description_text",
            "description_html",
            "start_date",
            "target_date",
            "status",
            "lead_id",
            "member_ids",
            "view_props",
            "sort_order",
            "external_source",
            "external_id",
            "logo_props",
            # computed fields
            "total_estimate_points",
            "completed_estimate_points",
            "is_favorite",
            "total_issues",
            "cancelled_issues",
            "completed_issues",
            "started_issues",
            "unstarted_issues",
            "backlog_issues",
            "created_at",
            "updated_at",
            "archived_at",
        ]
        read_only_fields = fields
class ModuleDetailSerializer(ModuleSerializer):
    """Module payload extended with links and per-state estimate-point totals."""

    link_module = ModuleLinkSerializer(read_only=True, many=True)
    # Populated by queryset annotations in the view.
    sub_issues = serializers.IntegerField(read_only=True)
    backlog_estimate_points = serializers.FloatField(read_only=True)
    unstarted_estimate_points = serializers.FloatField(read_only=True)
    started_estimate_points = serializers.FloatField(read_only=True)
    cancelled_estimate_points = serializers.FloatField(read_only=True)
    class Meta(ModuleSerializer.Meta):
        fields = ModuleSerializer.Meta.fields + [
            "link_module",
            "sub_issues",
            "backlog_estimate_points",
            "unstarted_estimate_points",
            "started_estimate_points",
            "cancelled_estimate_points",
        ]
class ModuleUserPropertiesSerializer(BaseSerializer):
    """Serializer for per-user display preferences on a module."""

    class Meta:
        model = ModuleUserProperties
        fields = "__all__"
        read_only_fields = ["workspace", "project", "module", "user"]

View File

@@ -0,0 +1,24 @@
# Module imports
from .base import BaseSerializer
from .user import UserLiteSerializer
from plane.db.models import Notification, UserNotificationPreference
# Third Party imports
from rest_framework import serializers
class NotificationSerializer(BaseSerializer):
    """Serializer for notifications with triggering-user details and
    annotation-derived classification flags."""

    triggered_by_details = UserLiteSerializer(read_only=True, source="triggered_by")
    # Flags populated by queryset annotations in the view.
    is_inbox_issue = serializers.BooleanField(read_only=True)
    is_intake_issue = serializers.BooleanField(read_only=True)
    is_mentioned_notification = serializers.BooleanField(read_only=True)
    class Meta:
        model = Notification
        fields = "__all__"
class UserNotificationPreferenceSerializer(BaseSerializer):
    """Serializer exposing a user's notification preference settings."""

    class Meta:
        model = UserNotificationPreference
        fields = "__all__"

View File

@@ -0,0 +1,221 @@
# Third party imports
from rest_framework import serializers
import base64
# Module imports
from .base import BaseSerializer
from plane.utils.content_validator import (
validate_binary_data,
validate_html_content,
)
from plane.db.models import (
Page,
PageLabel,
Label,
ProjectPage,
Project,
PageVersion,
)
class PageSerializer(BaseSerializer):
    """Serializer for pages.

    Creation expects project_id, owned_by_id and the three description
    variants in the serializer context; it also creates the ProjectPage join
    row and any PageLabel rows. Updates replace labels wholesale when a
    "labels" list is supplied.
    """

    is_favorite = serializers.BooleanField(read_only=True)
    # Write-only label objects; read side uses label_ids below.
    labels = serializers.ListField(
        child=serializers.PrimaryKeyRelatedField(queryset=Label.objects.all()),
        write_only=True,
        required=False,
    )
    # Many to many
    label_ids = serializers.ListField(child=serializers.UUIDField(), required=False)
    project_ids = serializers.ListField(child=serializers.UUIDField(), required=False)
    class Meta:
        model = Page
        fields = [
            "id",
            "name",
            "owned_by",
            "access",
            "color",
            "labels",
            "parent",
            "is_favorite",
            "is_locked",
            "archived_at",
            "workspace",
            "created_at",
            "updated_at",
            "created_by",
            "updated_by",
            "view_props",
            "logo_props",
            "label_ids",
            "project_ids",
        ]
        read_only_fields = ["workspace", "owned_by"]
    def create(self, validated_data):
        """Create the page plus its ProjectPage row and optional label rows."""
        labels = validated_data.pop("labels", None)
        project_id = self.context["project_id"]
        owned_by_id = self.context["owned_by_id"]
        description = self.context["description"]
        description_binary = self.context["description_binary"]
        description_html = self.context["description_html"]
        # Get the workspace id from the project
        project = Project.objects.get(pk=project_id)
        # Create the page
        page = Page.objects.create(
            **validated_data,
            description=description,
            description_binary=description_binary,
            description_html=description_html,
            owned_by_id=owned_by_id,
            workspace_id=project.workspace_id,
        )
        # Create the project page
        ProjectPage.objects.create(
            workspace_id=page.workspace_id,
            project_id=project_id,
            page_id=page.id,
            created_by_id=page.created_by_id,
            updated_by_id=page.updated_by_id,
        )
        # Create page labels
        if labels is not None:
            PageLabel.objects.bulk_create(
                [
                    PageLabel(
                        label=label,
                        page=page,
                        workspace_id=page.workspace_id,
                        created_by_id=page.created_by_id,
                        updated_by_id=page.updated_by_id,
                    )
                    for label in labels
                ],
                batch_size=10,
            )
        return page
    def update(self, instance, validated_data):
        """Update the page; a supplied "labels" list replaces all label rows."""
        labels = validated_data.pop("labels", None)
        if labels is not None:
            # Full replacement: delete existing rows, then recreate from payload.
            PageLabel.objects.filter(page=instance).delete()
            PageLabel.objects.bulk_create(
                [
                    PageLabel(
                        label=label,
                        page=instance,
                        workspace_id=instance.workspace_id,
                        created_by_id=instance.created_by_id,
                        updated_by_id=instance.updated_by_id,
                    )
                    for label in labels
                ],
                batch_size=10,
            )
        return super().update(instance, validated_data)
class PageDetailSerializer(PageSerializer):
    """Page payload extended with the raw HTML description body."""

    description_html = serializers.CharField()
    class Meta(PageSerializer.Meta):
        fields = PageSerializer.Meta.fields + ["description_html"]
class PageVersionSerializer(BaseSerializer):
    """List payload for page versions (metadata only, no description bodies)."""

    class Meta:
        model = PageVersion
        fields = [
            "id",
            "workspace",
            "page",
            "last_saved_at",
            "owned_by",
            "created_at",
            "updated_at",
            "created_by",
            "updated_by",
        ]
        read_only_fields = ["workspace", "page"]
class PageVersionDetailSerializer(BaseSerializer):
    """Detail payload for a page version, including all description variants."""

    class Meta:
        model = PageVersion
        fields = [
            "id",
            "workspace",
            "page",
            "last_saved_at",
            "description_binary",
            "description_html",
            "description_json",
            "owned_by",
            "created_at",
            "updated_at",
            "created_by",
            "updated_by",
        ]
        read_only_fields = ["workspace", "page"]
class PageBinaryUpdateSerializer(serializers.Serializer):
    """Serializer for updating page binary description with validation"""

    description_binary = serializers.CharField(required=False, allow_blank=True)
    description_html = serializers.CharField(required=False, allow_blank=True)
    description = serializers.JSONField(required=False, allow_null=True)
    def validate_description_binary(self, value):
        """Validate the base64-encoded binary data.

        NOTE(review): on success this returns the decoded bytes even though the
        field is declared as CharField — downstream code appears to store raw
        bytes on the model; confirm against the Page model's field type.
        """
        if not value:
            return value
        try:
            # Decode the base64 data
            binary_data = base64.b64decode(value)
            # Validate the binary data
            is_valid, error_message = validate_binary_data(binary_data)
            if not is_valid:
                raise serializers.ValidationError(f"Invalid binary data: {error_message}")
            return binary_data
        except Exception as e:
            # Re-raise our own validation errors; wrap decode failures.
            if isinstance(e, serializers.ValidationError):
                raise
            raise serializers.ValidationError("Failed to decode base64 data")
    def validate_description_html(self, value):
        """Validate the HTML content"""
        if not value:
            return value
        # Use the validation function from utils
        is_valid, error_message, sanitized_html = validate_html_content(value)
        if not is_valid:
            raise serializers.ValidationError(error_message)
        # Return sanitized HTML if available, otherwise return original
        return sanitized_html if sanitized_html is not None else value
    def update(self, instance, validated_data):
        """Update the page instance with validated data"""
        # Only fields present in the payload are written; absent keys leave
        # the current values untouched.
        if "description_binary" in validated_data:
            instance.description_binary = validated_data.get("description_binary")
        if "description_html" in validated_data:
            instance.description_html = validated_data.get("description_html")
        if "description" in validated_data:
            instance.description = validated_data.get("description")
        instance.save()
        return instance

View File

@@ -0,0 +1,203 @@
# Third party imports
from rest_framework import serializers
# Module imports
from .base import BaseSerializer, DynamicBaseSerializer
from plane.app.serializers.workspace import WorkspaceLiteSerializer
from plane.app.serializers.user import UserLiteSerializer, UserAdminLiteSerializer
from plane.db.models import (
Project,
ProjectMember,
ProjectMemberInvite,
ProjectIdentifier,
DeployBoard,
ProjectPublicMember,
)
from plane.utils.content_validator import (
validate_html_content,
)
class ProjectSerializer(BaseSerializer):
    """Write serializer for projects.

    Enforces workspace-scoped uniqueness of name and identifier, sanitizes
    HTML descriptions, and creates the ProjectIdentifier row on create.
    Expects "workspace_id" in the serializer context.
    """

    workspace_detail = WorkspaceLiteSerializer(source="workspace", read_only=True)
    # Legacy alias: exposes the model's intake_view flag under its old name.
    inbox_view = serializers.BooleanField(read_only=True, source="intake_view")
    class Meta:
        model = Project
        fields = "__all__"
        read_only_fields = ["workspace", "deleted_at"]
    def validate_name(self, name):
        """Reject names already used by another project in the workspace."""
        project_id = self.instance.id if self.instance else None
        workspace_id = self.context["workspace_id"]
        project = Project.objects.filter(name=name, workspace_id=workspace_id)
        if project_id:
            # Exclude self when updating.
            project = project.exclude(id=project_id)
        if project.exists():
            raise serializers.ValidationError(
                detail="PROJECT_NAME_ALREADY_EXIST",
            )
        return name
    def validate_identifier(self, identifier):
        """Reject identifiers already used by another project in the workspace."""
        project_id = self.instance.id if self.instance else None
        workspace_id = self.context["workspace_id"]
        project = Project.objects.filter(identifier=identifier, workspace_id=workspace_id)
        if project_id:
            # Exclude self when updating.
            project = project.exclude(id=project_id)
        if project.exists():
            raise serializers.ValidationError(
                detail="PROJECT_IDENTIFIER_ALREADY_EXIST",
            )
        return identifier
    def validate(self, data):
        """Sanitize and validate the HTML description, if present."""
        # Validate description content for security
        if "description_html" in data and data["description_html"]:
            is_valid, error_msg, sanitized_html = validate_html_content(str(data["description_html"]))
            # Update the data with sanitized HTML if available
            if sanitized_html is not None:
                data["description_html"] = sanitized_html
            if not is_valid:
                raise serializers.ValidationError({"error": "html content is not valid"})
        return data
    def create(self, validated_data):
        """Create the project and its identifier row in the context workspace."""
        workspace_id = self.context["workspace_id"]
        project = Project.objects.create(**validated_data, workspace_id=workspace_id)
        ProjectIdentifier.objects.create(name=project.identifier, project=project, workspace_id=workspace_id)
        return project
class ProjectLiteSerializer(BaseSerializer):
    """Minimal read-only project payload for embedding in other serializers."""

    class Meta:
        model = Project
        fields = [
            "id",
            "identifier",
            "name",
            "cover_image",
            "cover_image_url",
            "logo_props",
            "description",
        ]
        read_only_fields = fields
class ProjectListSerializer(DynamicBaseSerializer):
    """List payload for projects with per-user annotations and member ids."""

    # Fields populated by queryset annotations in the view.
    is_favorite = serializers.BooleanField(read_only=True)
    sort_order = serializers.FloatField(read_only=True)
    member_role = serializers.IntegerField(read_only=True)
    anchor = serializers.CharField(read_only=True)
    members = serializers.SerializerMethodField()
    cover_image_url = serializers.CharField(read_only=True)
    # Legacy alias: exposes the model's intake_view flag under its old name.
    inbox_view = serializers.BooleanField(read_only=True, source="intake_view")
    def get_members(self, obj):
        """Return ids of active, non-bot members from the prefetched members_list."""
        project_members = getattr(obj, "members_list", None)
        if project_members is not None:
            # Filter members by the project ID
            return [member.member_id for member in project_members if member.is_active and not member.member.is_bot]
        return []
    class Meta:
        model = Project
        fields = "__all__"
class ProjectDetailSerializer(BaseSerializer):
    """Detail payload for a project with expanded lead/assignee users and
    per-user annotations."""

    # workspace = WorkSpaceSerializer(read_only=True)
    default_assignee = UserLiteSerializer(read_only=True)
    project_lead = UserLiteSerializer(read_only=True)
    # Fields populated by queryset annotations in the view.
    is_favorite = serializers.BooleanField(read_only=True)
    sort_order = serializers.FloatField(read_only=True)
    member_role = serializers.IntegerField(read_only=True)
    anchor = serializers.CharField(read_only=True)
    class Meta:
        model = Project
        fields = "__all__"
class ProjectMemberSerializer(BaseSerializer):
    """Project membership payload with nested workspace, project and member."""

    workspace = WorkspaceLiteSerializer(read_only=True)
    project = ProjectLiteSerializer(read_only=True)
    member = UserLiteSerializer(read_only=True)
    class Meta:
        model = ProjectMember
        fields = "__all__"
class ProjectMemberAdminSerializer(BaseSerializer):
    """Admin variant of the membership payload; exposes the member's email
    via UserAdminLiteSerializer."""

    workspace = WorkspaceLiteSerializer(read_only=True)
    project = ProjectLiteSerializer(read_only=True)
    member = UserAdminLiteSerializer(read_only=True)
    class Meta:
        model = ProjectMember
        fields = "__all__"
class ProjectMemberRoleSerializer(DynamicBaseSerializer):
    """Compact membership payload exposing the stored role alongside a
    read-only original_role copy."""

    original_role = serializers.IntegerField(source="role", read_only=True)
    class Meta:
        model = ProjectMember
        fields = ("id", "role", "member", "project", "original_role", "created_at")
        read_only_fields = ["original_role", "created_at"]
class ProjectMemberInviteSerializer(BaseSerializer):
    """Project invitation payload with nested project and workspace."""

    project = ProjectLiteSerializer(read_only=True)
    workspace = WorkspaceLiteSerializer(read_only=True)
    class Meta:
        model = ProjectMemberInvite
        fields = "__all__"
class ProjectIdentifierSerializer(BaseSerializer):
    """Serializer exposing the full ProjectIdentifier model payload."""

    class Meta:
        model = ProjectIdentifier
        fields = "__all__"
class ProjectMemberLiteSerializer(BaseSerializer):
    """Minimal membership payload with the member and a subscription flag."""

    member = UserLiteSerializer(read_only=True)
    # Populated by a queryset annotation in the view.
    is_subscribed = serializers.BooleanField(read_only=True)
    class Meta:
        model = ProjectMember
        fields = ["member", "id", "is_subscribed"]
        read_only_fields = fields
class DeployBoardSerializer(BaseSerializer):
    """Serializer for publicly deployed boards with project/workspace details."""

    project_details = ProjectLiteSerializer(read_only=True, source="project")
    workspace_detail = WorkspaceLiteSerializer(read_only=True, source="workspace")
    class Meta:
        model = DeployBoard
        fields = "__all__"
        read_only_fields = ["workspace", "project", "anchor"]
class ProjectPublicMemberSerializer(BaseSerializer):
    """Serializer for members of a publicly deployed project board."""

    class Meta:
        model = ProjectPublicMember
        fields = "__all__"
        read_only_fields = ["workspace", "project", "member"]

View File

@@ -0,0 +1,32 @@
# Module imports
from .base import BaseSerializer
from rest_framework import serializers
from plane.db.models import State
class StateSerializer(BaseSerializer):
order = serializers.FloatField(required=False)
class Meta:
model = State
fields = [
"id",
"project_id",
"workspace_id",
"name",
"color",
"group",
"default",
"description",
"sequence",
"order",
]
read_only_fields = ["workspace", "project"]
class StateLiteSerializer(BaseSerializer):
class Meta:
model = State
fields = ["id", "name", "color", "group"]
read_only_fields = fields

View File

@@ -0,0 +1,207 @@
# Third party imports
from rest_framework import serializers
# Module import
from plane.db.models import Account, Profile, User, Workspace, WorkspaceMemberInvite
from plane.utils.url import contains_url
from .base import BaseSerializer
class UserSerializer(BaseSerializer):
    """Full user payload (password excluded) with name-content validation."""

    def validate_first_name(self, value):
        """Reject first names that embed a URL (spam/phishing guard)."""
        if contains_url(value):
            raise serializers.ValidationError("First name cannot contain a URL.")
        return value
    def validate_last_name(self, value):
        """Reject last names that embed a URL (spam/phishing guard)."""
        if contains_url(value):
            raise serializers.ValidationError("Last name cannot contain a URL.")
        return value
    class Meta:
        model = User
        # Exclude password field from the serializer
        fields = [field.name for field in User._meta.fields if field.name != "password"]
        # Make all system fields and email read only
        read_only_fields = [
            "id",
            "username",
            "mobile_number",
            "email",
            "token",
            "created_at",
            "updated_at",
            "is_superuser",
            "is_staff",
            "is_managed",
            "last_active",
            "last_login_time",
            "last_logout_time",
            "last_login_ip",
            "last_logout_ip",
            "last_login_uagent",
            "last_location",
            "last_login_medium",
            "created_location",
            "is_bot",
            "is_password_autoset",
            "is_email_verified",
            "is_active",
            "token_updated_at",
        ]
    # If the user has already filled first name or last name then he is onboarded
    # NOTE(review): no matching SerializerMethodField is declared and
    # "is_onboarded" is not in fields, so this method appears unused here —
    # confirm whether a subclass or view calls it before removing.
    def get_is_onboarded(self, obj):
        return bool(obj.first_name) or bool(obj.last_name)
class UserMeSerializer(BaseSerializer):
    """Read-only payload for the authenticated user's own profile.

    Bug fix: "is_email_verified" was listed twice in the fields list; the
    duplicate is removed (serialized output is unchanged).
    """

    class Meta:
        model = User
        fields = [
            "id",
            "avatar",
            "cover_image",
            "avatar_url",
            "cover_image_url",
            "date_joined",
            "display_name",
            "email",
            "first_name",
            "last_name",
            "is_active",
            "is_bot",
            "is_email_verified",
            "user_timezone",
            "username",
            "is_password_autoset",
            "last_login_medium",
        ]
        read_only_fields = fields
class UserMeSettingsSerializer(BaseSerializer):
    """Payload exposing the user's workspace context: last-visited workspace
    when still accessible, otherwise the oldest workspace as a fallback."""

    workspace = serializers.SerializerMethodField()

    class Meta:
        model = User
        fields = ["id", "email", "workspace"]
        read_only_fields = fields

    def get_workspace(self, obj):
        """Resolve last-visited/fallback workspace details and invite count.

        Improvement: the previous implementation ran the same filtered query
        twice (exists() then first()); a single first() with a None check
        halves the queries, and the now-dead `workspace is not None` ternaries
        are dropped.
        """
        # Pending invitations are surfaced regardless of membership.
        workspace_invites = WorkspaceMemberInvite.objects.filter(email=obj.email).count()
        # profile
        profile = Profile.objects.get(user=obj)
        workspace = None
        if profile.last_workspace_id is not None:
            workspace = Workspace.objects.filter(
                pk=profile.last_workspace_id,
                workspace_member__member=obj.id,
                workspace_member__is_active=True,
            ).first()
        if workspace is not None:
            logo_asset_url = workspace.logo_asset.asset_url if workspace.logo_asset is not None else ""
            return {
                "last_workspace_id": profile.last_workspace_id,
                "last_workspace_slug": workspace.slug,
                "last_workspace_name": workspace.name,
                "last_workspace_logo": logo_asset_url,
                "fallback_workspace_id": profile.last_workspace_id,
                "fallback_workspace_slug": workspace.slug,
                "invites": workspace_invites,
            }
        # Last workspace unset or no longer accessible: fall back to the
        # oldest workspace the user is still an active member of.
        fallback_workspace = (
            Workspace.objects.filter(workspace_member__member_id=obj.id, workspace_member__is_active=True)
            .order_by("created_at")
            .first()
        )
        return {
            "last_workspace_id": None,
            "last_workspace_slug": None,
            "fallback_workspace_id": (fallback_workspace.id if fallback_workspace is not None else None),
            "fallback_workspace_slug": (fallback_workspace.slug if fallback_workspace is not None else None),
            "invites": workspace_invites,
        }
class UserLiteSerializer(BaseSerializer):
    """Minimal user payload for embedding in other serializers (no email)."""

    class Meta:
        model = User
        fields = [
            "id",
            "first_name",
            "last_name",
            "avatar",
            "avatar_url",
            "is_bot",
            "display_name",
        ]
        read_only_fields = ["id", "is_bot"]
class UserAdminLiteSerializer(BaseSerializer):
    """Lite user payload for admin contexts; additionally exposes email and
    last login medium."""

    class Meta:
        model = User
        fields = [
            "id",
            "first_name",
            "last_name",
            "avatar",
            "avatar_url",
            "is_bot",
            "display_name",
            "email",
            "last_login_medium",
        ]
        read_only_fields = ["id", "is_bot"]
class ChangePasswordSerializer(serializers.Serializer):
    """Serializer for the password change endpoint.

    Fix: the class description was a triple-quoted string placed after
    `model = User`, making it a discarded expression rather than a docstring;
    it is now a real docstring.
    """

    # NOTE(review): `model` has no effect on a plain Serializer — looks like a
    # ModelSerializer copy-paste leftover; kept in case external code reads it.
    model = User

    old_password = serializers.CharField(required=True)
    new_password = serializers.CharField(required=True, min_length=8)
    confirm_password = serializers.CharField(required=True, min_length=8)

    def validate(self, data):
        """Cross-field checks: new password must differ from old and match
        its confirmation."""
        if data.get("old_password") == data.get("new_password"):
            raise serializers.ValidationError({"error": "New password cannot be same as old password."})
        if data.get("new_password") != data.get("confirm_password"):
            raise serializers.ValidationError({"error": "Confirm password should be same as the new password."})
        return data
class ResetPasswordSerializer(serializers.Serializer):
    """
    Serializer for the password reset endpoint.
    """

    new_password = serializers.CharField(required=True, min_length=8)
class ProfileSerializer(BaseSerializer):
    """Serializer exposing the full user Profile payload."""

    class Meta:
        model = Profile
        fields = "__all__"
        read_only_fields = ["user"]
class AccountSerializer(BaseSerializer):
    """Serializer exposing a user's linked external Account payload."""

    class Meta:
        model = Account
        fields = "__all__"
        read_only_fields = ["user"]

View File

@@ -0,0 +1,82 @@
# Third party imports
from rest_framework import serializers
# Module imports
from .base import DynamicBaseSerializer
from plane.db.models import IssueView
from plane.utils.issue_filters import issue_filters
class ViewIssueListSerializer(serializers.Serializer):
    """Flat, read-only issue representation for view listings.

    Built by hand in to_representation to avoid per-field serializer
    overhead; assignee/label/module ids come from prefetched relations.
    """

    def get_assignee_ids(self, instance):
        """Collect assignee ids from the prefetched issue_assignee relation."""
        return [record.assignee_id for record in instance.issue_assignee.all()]

    def get_label_ids(self, instance):
        """Collect label ids from the prefetched label_issue relation."""
        return [record.label_id for record in instance.label_issue.all()]

    def get_module_ids(self, instance):
        """Collect module ids from the prefetched issue_module relation."""
        return [record.module_id for record in instance.issue_module.all()]

    def to_representation(self, instance):
        """Serialize the issue into a plain dict of scalars plus id lists."""
        state = instance.state
        payload = {
            "id": instance.id,
            "name": instance.name,
            "state_id": instance.state_id,
            "sort_order": instance.sort_order,
            "completed_at": instance.completed_at,
            "estimate_point": instance.estimate_point_id,
            "priority": instance.priority,
            "start_date": instance.start_date,
            "target_date": instance.target_date,
            "sequence_id": instance.sequence_id,
            "project_id": instance.project_id,
            "parent_id": instance.parent_id,
            "cycle_id": instance.cycle_id,
            "sub_issues_count": instance.sub_issues_count,
            "created_at": instance.created_at,
            "updated_at": instance.updated_at,
            "created_by": instance.created_by_id,
            "updated_by": instance.updated_by_id,
            "attachment_count": instance.attachment_count,
            "link_count": instance.link_count,
            "is_draft": instance.is_draft,
            "archived_at": instance.archived_at,
            "state__group": state.group if state else None,
            "assignee_ids": self.get_assignee_ids(instance),
            "label_ids": self.get_label_ids(instance),
            "module_ids": self.get_module_ids(instance),
        }
        return payload
class IssueViewSerializer(DynamicBaseSerializer):
    """Serializer for saved issue views.

    The stored, read-only `query` is derived from the submitted `filters` via
    issue_filters on both create and update.
    """

    # Populated by a queryset annotation in the view.
    is_favorite = serializers.BooleanField(read_only=True)

    class Meta:
        model = IssueView
        fields = "__all__"
        read_only_fields = [
            "workspace",
            "project",
            "query",
            "owned_by",
            "access",
            "is_locked",
        ]

    def create(self, validated_data):
        """Create a view, translating submitted filters into the stored query."""
        query_params = validated_data.get("filters", {})
        validated_data["query"] = issue_filters(query_params, "POST") if query_params else {}
        return IssueView.objects.create(**validated_data)

    def update(self, instance, validated_data):
        """Update a view, recomputing the stored query from submitted filters.

        Bug fix: the previous implementation computed the query in an if/else
        and then unconditionally overwrote it with issue_filters(...), making
        the branch dead code and storing a derived query even when no filters
        were supplied. It now mirrors create(): empty filters store {}.
        """
        query_params = validated_data.get("filters", {})
        validated_data["query"] = issue_filters(query_params, "PATCH") if query_params else {}
        return super().update(instance, validated_data)

View File

@@ -0,0 +1,98 @@
# Python imports
import socket
import ipaddress
from urllib.parse import urlparse
# Third party imports
from rest_framework import serializers
# Module imports
from .base import DynamicBaseSerializer
from plane.db.models import Webhook, WebhookLog
from plane.db.models.webhook import validate_domain, validate_schema
class WebhookSerializer(DynamicBaseSerializer):
    """Serializer for outgoing webhooks.

    Rejects target URLs that resolve to loopback addresses or that point at
    the serving domain (or a subdomain of it) — basic SSRF protection.
    """

    url = serializers.URLField(validators=[validate_schema, validate_domain])

    def _validate_url_target(self, url):
        """Validate that ``url`` points at an allowed external host.

        Checks, in order: the URL has a hostname, the hostname resolves,
        none of the resolved addresses is a loopback address, and the
        hostname is not a disallowed domain (or subdomain thereof).

        Raises:
            serializers.ValidationError: keyed on "url" when any check fails.

        NOTE(review): only loopback addresses are blocked; private and
        link-local ranges (e.g. 10.0.0.0/8, 169.254.0.0/16) still pass —
        confirm whether that is intended.
        """
        # Extract the hostname from the URL
        hostname = urlparse(url).hostname
        if not hostname:
            raise serializers.ValidationError({"url": "Invalid URL: No hostname found."})
        # Resolve the hostname to IP addresses
        try:
            ip_addresses = socket.getaddrinfo(hostname, None)
        except socket.gaierror:
            raise serializers.ValidationError({"url": "Hostname could not be resolved."})
        if not ip_addresses:
            raise serializers.ValidationError({"url": "No IP addresses found for the hostname."})
        for addr in ip_addresses:
            ip = ipaddress.ip_address(addr[4][0])
            if ip.is_loopback:
                raise serializers.ValidationError({"url": "URL resolves to a blocked IP address."})
        # Additional validation for multiple request domains and their subdomains
        request = self.context.get("request")
        disallowed_domains = ["plane.so"]  # Add your disallowed domains here
        if request:
            request_host = request.get_host().split(":")[0]  # Remove port if present
            disallowed_domains.append(request_host)
        # Check if hostname is a subdomain or exact match of any disallowed domain
        if any(hostname == domain or hostname.endswith("." + domain) for domain in disallowed_domains):
            raise serializers.ValidationError({"url": "URL domain or its subdomain is not allowed."})

    def create(self, validated_data):
        # "url" is a required field, so it is always validated on create.
        url = validated_data.get("url", None)
        self._validate_url_target(url)
        return Webhook.objects.create(**validated_data)

    def update(self, instance, validated_data):
        # On partial update the URL may be absent; only validate when given.
        url = validated_data.get("url", None)
        if url:
            self._validate_url_target(url)
        return super().update(instance, validated_data)

    class Meta:
        model = Webhook
        fields = "__all__"
        read_only_fields = ["workspace", "secret_key", "deleted_at"]
class WebhookLogSerializer(DynamicBaseSerializer):
    """Read-only serializer for webhook delivery log entries.

    The workspace and webhook relations are set by the view, never by the
    client payload.
    """

    class Meta:
        model = WebhookLog
        fields = "__all__"
        read_only_fields = ["workspace", "webhook"]

View File

@@ -0,0 +1,329 @@
# Third party imports
from rest_framework import serializers
# Module imports
from .base import BaseSerializer, DynamicBaseSerializer
from .user import UserLiteSerializer, UserAdminLiteSerializer
from plane.db.models import (
Workspace,
WorkspaceMember,
WorkspaceMemberInvite,
WorkspaceTheme,
WorkspaceUserProperties,
WorkspaceUserLink,
UserRecentVisit,
Issue,
Page,
Project,
ProjectMember,
WorkspaceHomePreference,
Sticky,
WorkspaceUserPreference,
)
from plane.utils.constants import RESTRICTED_WORKSPACE_SLUGS
from plane.utils.url import contains_url
from plane.utils.content_validator import (
validate_html_content,
validate_binary_data,
)
# Django imports
from django.core.validators import URLValidator
from django.core.exceptions import ValidationError
import re
class WorkSpaceSerializer(DynamicBaseSerializer):
    """Workspace serializer with read-only membership/role annotations."""

    # Annotated by the view's queryset; not writable by clients.
    total_members = serializers.IntegerField(read_only=True)
    logo_url = serializers.CharField(read_only=True)
    role = serializers.IntegerField(read_only=True)

    def validate_name(self, value):
        """Reject workspace names that embed a URL."""
        if contains_url(value):
            raise serializers.ValidationError("Name must not contain URLs")
        return value

    def validate_slug(self, value):
        """Reject reserved slugs and slugs with disallowed characters."""
        if value in RESTRICTED_WORKSPACE_SLUGS:
            raise serializers.ValidationError("Slug is not valid")
        # Only alphanumerics, hyphens and underscores are permitted.
        if re.match(r"^[a-zA-Z0-9_-]+$", value) is None:
            raise serializers.ValidationError(
                "Slug can only contain letters, numbers, hyphens (-), and underscores (_)"
            )
        return value

    class Meta:
        model = Workspace
        fields = "__all__"
        read_only_fields = [
            "id",
            "created_by",
            "updated_by",
            "created_at",
            "updated_at",
            "owner",
            "logo_url",
        ]
class WorkspaceLiteSerializer(BaseSerializer):
    """Minimal, fully read-only workspace payload for embedding in other
    serializers."""

    class Meta:
        model = Workspace
        fields = ["name", "slug", "id", "logo_url"]
        read_only_fields = fields
class WorkSpaceMemberSerializer(DynamicBaseSerializer):
    """Workspace membership with the member expanded as a lite user."""

    member = UserLiteSerializer(read_only=True)

    class Meta:
        model = WorkspaceMember
        fields = "__all__"
class WorkspaceMemberMeSerializer(BaseSerializer):
    """The requesting user's own workspace membership record."""

    # Annotated by the view's queryset (count of the user's draft issues).
    draft_issue_count = serializers.IntegerField(read_only=True)

    class Meta:
        model = WorkspaceMember
        fields = "__all__"
class WorkspaceMemberAdminSerializer(DynamicBaseSerializer):
    """Workspace membership for admin listings; exposes the richer
    admin-facing user payload."""

    member = UserAdminLiteSerializer(read_only=True)

    class Meta:
        model = WorkspaceMember
        fields = "__all__"
class WorkSpaceMemberInviteSerializer(BaseSerializer):
    """Pending workspace invitation, including a relative invite link."""

    workspace = WorkspaceLiteSerializer(read_only=True)
    invite_link = serializers.SerializerMethodField()

    def get_invite_link(self, obj):
        # Relative URL the frontend uses to accept the invitation.
        # NOTE(review): query values are interpolated verbatim (no URL
        # encoding) — presumably the consumer tolerates raw emails; verify.
        return "/workspace-invitations/?invitation_id={id}&email={email}&slug={slug}".format(
            id=obj.id, email=obj.email, slug=obj.workspace.slug
        )

    class Meta:
        model = WorkspaceMemberInvite
        fields = "__all__"
        read_only_fields = [
            "id",
            "email",
            "token",
            "workspace",
            "message",
            "responded_at",
            "created_at",
            "updated_at",
            "invite_link",
        ]
class WorkspaceThemeSerializer(BaseSerializer):
    """Per-user theme settings for a workspace; workspace and actor are
    assigned by the view."""

    class Meta:
        model = WorkspaceTheme
        fields = "__all__"
        read_only_fields = ["workspace", "actor"]
class WorkspaceUserPropertiesSerializer(BaseSerializer):
    """Per-user display properties (filters, layout, etc.) scoped to a
    workspace; workspace and user are assigned by the view."""

    class Meta:
        model = WorkspaceUserProperties
        fields = "__all__"
        read_only_fields = ["workspace", "user"]
class WorkspaceUserLinkSerializer(BaseSerializer):
    """Quick links a user saves in a workspace.

    URLs are scheme-normalized on input and deduplicated per
    (workspace, owner) pair on create and update.
    """

    class Meta:
        model = WorkspaceUserLink
        fields = "__all__"
        read_only_fields = ["workspace", "owner"]

    def to_internal_value(self, data):
        """Default to http:// when the submitted URL has no scheme.

        Bug fix: work on a copy of the payload instead of mutating it in
        place — the incoming ``data`` may be an immutable QueryDict, and
        mutating the caller's dict is a side effect callers don't expect.
        """
        url = data.get("url", "")
        if url and not url.startswith(("http://", "https://")):
            data = data.copy()
            data["url"] = "http://" + url
        return super().to_internal_value(data)

    def validate_url(self, value):
        """Validate the (already scheme-normalized) URL format."""
        url_validator = URLValidator()
        try:
            url_validator(value)
        except ValidationError:
            raise serializers.ValidationError({"error": "Invalid URL format."})
        return value

    def create(self, validated_data):
        # Reject a duplicate of the same URL for this workspace and owner.
        url = validated_data.get("url")
        duplicates = WorkspaceUserLink.objects.filter(
            url=url,
            workspace_id=validated_data.get("workspace_id"),
            owner_id=validated_data.get("owner_id"),
        )
        if duplicates.exists():
            raise serializers.ValidationError({"error": "URL already exists for this workspace and owner"})
        return super().create(validated_data)

    def update(self, instance, validated_data):
        # Same duplicate check as create, excluding the row being updated.
        url = validated_data.get("url")
        duplicates = WorkspaceUserLink.objects.filter(
            url=url, workspace_id=instance.workspace_id, owner=instance.owner
        ).exclude(pk=instance.id)
        if duplicates.exists():
            raise serializers.ValidationError({"error": "URL already exists for this workspace and owner"})
        return super().update(instance, validated_data)
class IssueRecentVisitSerializer(serializers.ModelSerializer):
    """Compact issue payload for the "recently visited" feed."""

    project_identifier = serializers.SerializerMethodField()
    assignees = serializers.SerializerMethodField()

    class Meta:
        model = Issue
        fields = [
            "id",
            "name",
            "state",
            "priority",
            "assignees",
            "type",
            "sequence_id",
            "project_id",
            "project_identifier",
        ]

    def get_project_identifier(self, obj):
        """Identifier of the parent project, or None when unset."""
        return obj.project.identifier if obj.project else None

    def get_assignees(self, obj):
        """IDs of assignees whose assignment has not been soft-deleted."""
        active_assignees = obj.assignees.filter(issue_assignee__deleted_at__isnull=True)
        return list(active_assignees.values_list("id", flat=True))
class ProjectRecentVisitSerializer(serializers.ModelSerializer):
    """Compact project payload for the "recently visited" feed."""

    project_members = serializers.SerializerMethodField()

    class Meta:
        model = Project
        fields = ["id", "name", "logo_props", "project_members", "identifier"]

    def get_project_members(self, obj):
        """Member IDs of active, non-bot project members.

        Materialized to a list so the serialized value is a plain JSON
        array — consistent with IssueRecentVisitSerializer.get_assignees,
        which also returns a list (the original returned a lazy queryset).
        """
        members = ProjectMember.objects.filter(project_id=obj.id, member__is_bot=False, is_active=True).values_list(
            "member", flat=True
        )
        return list(members)
class PageRecentVisitSerializer(serializers.ModelSerializer):
    """Compact page payload for the "recently visited" feed."""

    project_id = serializers.SerializerMethodField()
    project_identifier = serializers.SerializerMethodField()

    class Meta:
        model = Page
        fields = [
            "id",
            "name",
            "logo_props",
            "project_id",
            "owned_by",
            "project_identifier",
        ]

    def get_project_id(self, obj):
        # Prefer a denormalized/annotated project_id when present; fall back
        # to the first related project's id.
        return obj.project_id if hasattr(obj, "project_id") else obj.projects.values_list("id", flat=True).first()

    def get_project_identifier(self, obj):
        # NOTE(review): always reads projects.first(), which may differ from
        # the project_id chosen above when the page has an annotated
        # project_id — confirm these two stay in sync.
        project = obj.projects.first()
        return project.identifier if project else None
def get_entity_model_and_serializer(entity_type):
    """Map a recent-visit entity type to its (model, serializer) pair.

    Returns:
        tuple: (model class, serializer class) for "issue", "page" or
        "project"; (None, None) for any other entity type.
    """
    if entity_type == "issue":
        return (Issue, IssueRecentVisitSerializer)
    if entity_type == "page":
        return (Page, PageRecentVisitSerializer)
    if entity_type == "project":
        return (Project, ProjectRecentVisitSerializer)
    return (None, None)
class WorkspaceRecentVisitSerializer(BaseSerializer):
    """A recent-visit entry enriched with the visited entity's payload."""

    entity_data = serializers.SerializerMethodField()

    class Meta:
        model = UserRecentVisit
        fields = ["id", "entity_name", "entity_identifier", "entity_data", "visited_at"]
        read_only_fields = ["workspace", "owner", "created_by", "updated_by"]

    def get_entity_data(self, obj):
        """Serialize the visited entity; None when the entity type is
        unknown or the record no longer exists."""
        model, serializer_cls = get_entity_model_and_serializer(obj.entity_name)
        if model is None or serializer_cls is None:
            return None
        try:
            entity = model.objects.get(pk=obj.entity_identifier)
        except model.DoesNotExist:
            # The visited record was hard-deleted since the visit.
            return None
        return serializer_cls(entity).data
class WorkspaceHomePreferenceSerializer(BaseSerializer):
    """Per-user visibility/ordering preference for a workspace home widget."""

    class Meta:
        model = WorkspaceHomePreference
        fields = ["key", "is_enabled", "sort_order"]
        read_only_fields = ["workspace", "created_by", "updated_by"]
class StickySerializer(BaseSerializer):
    """Sticky-note serializer; sanitizes HTML and checks binary payloads."""

    class Meta:
        model = Sticky
        fields = "__all__"
        read_only_fields = ["workspace", "owner"]
        extra_kwargs = {"name": {"required": False}}

    def validate(self, data):
        """Validate and sanitize the description content before saving.

        Raises:
            serializers.ValidationError: when the HTML or binary
                description fails content validation.
        """
        # Validate description content for security
        if "description_html" in data and data["description_html"]:
            is_valid, error_msg, sanitized_html = validate_html_content(data["description_html"])
            if not is_valid:
                # Surface the validator's message when it provides one; the
                # original computed error_msg and silently discarded it.
                raise serializers.ValidationError({"error": error_msg or "html content is not valid"})
            # Update the data with sanitized HTML if available
            if sanitized_html is not None:
                data["description_html"] = sanitized_html
        if "description_binary" in data and data["description_binary"]:
            is_valid, error_msg = validate_binary_data(data["description_binary"])
            if not is_valid:
                raise serializers.ValidationError({"description_binary": error_msg or "Invalid binary data"})
        return data
class WorkspaceUserPreferenceSerializer(BaseSerializer):
    """Per-user pin/ordering preference for a workspace sidebar item."""

    class Meta:
        model = WorkspaceUserPreference
        fields = ["key", "is_pinned", "sort_order"]
        read_only_fields = ["workspace", "created_by", "updated_by"]

View File

@@ -0,0 +1,43 @@
from .analytic import urlpatterns as analytic_urls
from .api import urlpatterns as api_urls
from .asset import urlpatterns as asset_urls
from .cycle import urlpatterns as cycle_urls
from .estimate import urlpatterns as estimate_urls
from .external import urlpatterns as external_urls
from .intake import urlpatterns as intake_urls
from .issue import urlpatterns as issue_urls
from .module import urlpatterns as module_urls
from .notification import urlpatterns as notification_urls
from .page import urlpatterns as page_urls
from .project import urlpatterns as project_urls
from .search import urlpatterns as search_urls
from .state import urlpatterns as state_urls
from .user import urlpatterns as user_urls
from .views import urlpatterns as view_urls
from .webhook import urlpatterns as webhook_urls
from .workspace import urlpatterns as workspace_urls
from .timezone import urlpatterns as timezone_urls
from .exporter import urlpatterns as exporter_urls
# Aggregated router for the app API: one urlpatterns list per feature
# module, flattened into a single list for inclusion by the project urls.
urlpatterns = [
    *analytic_urls,
    *asset_urls,
    *cycle_urls,
    *estimate_urls,
    *external_urls,
    *intake_urls,
    *issue_urls,
    *module_urls,
    *notification_urls,
    *page_urls,
    *project_urls,
    *search_urls,
    *state_urls,
    *user_urls,
    *view_urls,
    *workspace_urls,
    *api_urls,
    *webhook_urls,
    *timezone_urls,
    *exporter_urls,
]

View File

@@ -0,0 +1,86 @@
from django.urls import path
from plane.app.views import (
AnalyticsEndpoint,
AnalyticViewViewset,
SavedAnalyticEndpoint,
ExportAnalyticsEndpoint,
AdvanceAnalyticsEndpoint,
AdvanceAnalyticsStatsEndpoint,
AdvanceAnalyticsChartEndpoint,
DefaultAnalyticsEndpoint,
ProjectStatsEndpoint,
ProjectAdvanceAnalyticsEndpoint,
ProjectAdvanceAnalyticsStatsEndpoint,
ProjectAdvanceAnalyticsChartEndpoint,
)
# Workspace- and project-scoped analytics routes.
urlpatterns = [
    path(
        "workspaces/<str:slug>/analytics/",
        AnalyticsEndpoint.as_view(),
        name="plane-analytics",
    ),
    # Saved analytic views (list/create and detail).
    # NOTE(review): both routes share name="analytic-view"; reverse() will
    # resolve to the last one registered — confirm this is intended.
    path(
        "workspaces/<str:slug>/analytic-view/",
        AnalyticViewViewset.as_view({"get": "list", "post": "create"}),
        name="analytic-view",
    ),
    path(
        "workspaces/<str:slug>/analytic-view/<uuid:pk>/",
        AnalyticViewViewset.as_view({"get": "retrieve", "patch": "partial_update", "delete": "destroy"}),
        name="analytic-view",
    ),
    path(
        "workspaces/<str:slug>/saved-analytic-view/<uuid:analytic_id>/",
        SavedAnalyticEndpoint.as_view(),
        name="saved-analytic-view",
    ),
    path(
        "workspaces/<str:slug>/export-analytics/",
        ExportAnalyticsEndpoint.as_view(),
        name="export-analytics",
    ),
    path(
        "workspaces/<str:slug>/default-analytics/",
        DefaultAnalyticsEndpoint.as_view(),
        name="default-analytics",
    ),
    path(
        "workspaces/<str:slug>/project-stats/",
        ProjectStatsEndpoint.as_view(),
        name="project-analytics",
    ),
    # Workspace-level advanced analytics.
    path(
        "workspaces/<str:slug>/advance-analytics/",
        AdvanceAnalyticsEndpoint.as_view(),
        name="advance-analytics",
    ),
    path(
        "workspaces/<str:slug>/advance-analytics-stats/",
        AdvanceAnalyticsStatsEndpoint.as_view(),
        name="advance-analytics-stats",
    ),
    path(
        "workspaces/<str:slug>/advance-analytics-charts/",
        AdvanceAnalyticsChartEndpoint.as_view(),
        name="advance-analytics-chart",
    ),
    # Project-level advanced analytics.
    path(
        "workspaces/<str:slug>/projects/<uuid:project_id>/advance-analytics/",
        ProjectAdvanceAnalyticsEndpoint.as_view(),
        name="project-advance-analytics",
    ),
    path(
        "workspaces/<str:slug>/projects/<uuid:project_id>/advance-analytics-stats/",
        ProjectAdvanceAnalyticsStatsEndpoint.as_view(),
        name="project-advance-analytics-stats",
    ),
    path(
        "workspaces/<str:slug>/projects/<uuid:project_id>/advance-analytics-charts/",
        ProjectAdvanceAnalyticsChartEndpoint.as_view(),
        name="project-advance-analytics-chart",
    ),
]

View File

@@ -0,0 +1,22 @@
from django.urls import path
from plane.app.views import ApiTokenEndpoint, ServiceApiTokenEndpoint
# Personal and workspace-scoped API token routes.
urlpatterns = [
    # API Tokens
    path(
        "users/api-tokens/",
        ApiTokenEndpoint.as_view(),
        name="api-tokens",
    ),
    # Single token detail (retrieve/delete by primary key).
    path(
        "users/api-tokens/<uuid:pk>/",
        ApiTokenEndpoint.as_view(),
        name="api-tokens-details",
    ),
    # Workspace-scoped service account tokens.
    path(
        "workspaces/<str:slug>/service-api-tokens/",
        ServiceApiTokenEndpoint.as_view(),
        name="service-api-tokens",
    ),
    ## End API Tokens
]

View File

@@ -0,0 +1,104 @@
from django.urls import path
from plane.app.views import (
FileAssetEndpoint,
UserAssetsEndpoint,
FileAssetViewSet,
# V2 Endpoints
WorkspaceFileAssetEndpoint,
UserAssetsV2Endpoint,
StaticFileAssetEndpoint,
AssetRestoreEndpoint,
ProjectAssetEndpoint,
ProjectBulkAssetEndpoint,
AssetCheckEndpoint,
WorkspaceAssetDownloadEndpoint,
ProjectAssetDownloadEndpoint,
)
# File-asset routes: legacy v1 endpoints plus the v2 asset API.
urlpatterns = [
    # Legacy (v1) workspace/user file assets.
    path(
        "workspaces/<str:slug>/file-assets/",
        FileAssetEndpoint.as_view(),
        name="file-assets",
    ),
    path(
        "workspaces/file-assets/<uuid:workspace_id>/<str:asset_key>/",
        FileAssetEndpoint.as_view(),
        name="file-assets",
    ),
    path("users/file-assets/", UserAssetsEndpoint.as_view(), name="user-file-assets"),
    path(
        "users/file-assets/<str:asset_key>/",
        UserAssetsEndpoint.as_view(),
        name="user-file-assets",
    ),
    path(
        "workspaces/file-assets/<uuid:workspace_id>/<str:asset_key>/restore/",
        FileAssetViewSet.as_view({"post": "restore"}),
        name="file-assets-restore",
    ),
    # V2 Endpoints
    path(
        "assets/v2/workspaces/<str:slug>/",
        WorkspaceFileAssetEndpoint.as_view(),
        name="workspace-file-assets",
    ),
    path(
        "assets/v2/workspaces/<str:slug>/<uuid:asset_id>/",
        WorkspaceFileAssetEndpoint.as_view(),
        name="workspace-file-assets",
    ),
    path(
        "assets/v2/user-assets/",
        UserAssetsV2Endpoint.as_view(),
        name="user-file-assets",
    ),
    path(
        "assets/v2/user-assets/<uuid:asset_id>/",
        UserAssetsV2Endpoint.as_view(),
        name="user-file-assets",
    ),
    path(
        "assets/v2/workspaces/<str:slug>/restore/<uuid:asset_id>/",
        AssetRestoreEndpoint.as_view(),
        name="asset-restore",
    ),
    path(
        "assets/v2/static/<uuid:asset_id>/",
        StaticFileAssetEndpoint.as_view(),
        name="static-file-asset",
    ),
    # Project-scoped assets and bulk operations.
    path(
        "assets/v2/workspaces/<str:slug>/projects/<uuid:project_id>/",
        ProjectAssetEndpoint.as_view(),
        name="bulk-asset-update",
    ),
    path(
        "assets/v2/workspaces/<str:slug>/projects/<uuid:project_id>/<uuid:pk>/",
        ProjectAssetEndpoint.as_view(),
        name="bulk-asset-update",
    ),
    path(
        "assets/v2/workspaces/<str:slug>/projects/<uuid:project_id>/<uuid:entity_id>/bulk/",
        ProjectBulkAssetEndpoint.as_view(),
        name="bulk-asset-update",
    ),
    path(
        "assets/v2/workspaces/<str:slug>/check/<uuid:asset_id>/",
        AssetCheckEndpoint.as_view(),
        name="asset-check",
    ),
    # Signed download endpoints.
    path(
        "assets/v2/workspaces/<str:slug>/download/<uuid:asset_id>/",
        WorkspaceAssetDownloadEndpoint.as_view(),
        name="workspace-asset-download",
    ),
    path(
        "assets/v2/workspaces/<str:slug>/projects/<uuid:project_id>/download/<uuid:asset_id>/",
        ProjectAssetDownloadEndpoint.as_view(),
        name="project-asset-download",
    ),
]

View File

@@ -0,0 +1,102 @@
from django.urls import path
from plane.app.views import (
CycleViewSet,
CycleIssueViewSet,
CycleDateCheckEndpoint,
CycleFavoriteViewSet,
CycleProgressEndpoint,
CycleAnalyticsEndpoint,
TransferCycleIssueEndpoint,
CycleUserPropertiesEndpoint,
CycleArchiveUnarchiveEndpoint,
)
# Cycle routes: CRUD, cycle-issue membership, favorites, transfer,
# archiving and per-cycle analytics.
urlpatterns = [
    path(
        "workspaces/<str:slug>/projects/<uuid:project_id>/cycles/",
        CycleViewSet.as_view({"get": "list", "post": "create"}),
        name="project-cycle",
    ),
    path(
        "workspaces/<str:slug>/projects/<uuid:project_id>/cycles/<uuid:pk>/",
        CycleViewSet.as_view(
            {
                "get": "retrieve",
                "put": "update",
                "patch": "partial_update",
                "delete": "destroy",
            }
        ),
        name="project-cycle",
    ),
    # Issues inside a cycle.
    path(
        "workspaces/<str:slug>/projects/<uuid:project_id>/cycles/<uuid:cycle_id>/cycle-issues/",
        CycleIssueViewSet.as_view({"get": "list", "post": "create"}),
        name="project-issue-cycle",
    ),
    path(
        "workspaces/<str:slug>/projects/<uuid:project_id>/cycles/<uuid:cycle_id>/cycle-issues/<uuid:issue_id>/",
        CycleIssueViewSet.as_view(
            {
                "get": "retrieve",
                "put": "update",
                "patch": "partial_update",
                "delete": "destroy",
            }
        ),
        name="project-issue-cycle",
    ),
    # Validates proposed start/end dates against overlapping cycles.
    path(
        "workspaces/<str:slug>/projects/<uuid:project_id>/cycles/date-check/",
        CycleDateCheckEndpoint.as_view(),
        name="project-cycle-date",
    ),
    path(
        "workspaces/<str:slug>/projects/<uuid:project_id>/user-favorite-cycles/",
        CycleFavoriteViewSet.as_view({"get": "list", "post": "create"}),
        name="user-favorite-cycle",
    ),
    path(
        "workspaces/<str:slug>/projects/<uuid:project_id>/user-favorite-cycles/<uuid:cycle_id>/",
        CycleFavoriteViewSet.as_view({"delete": "destroy"}),
        name="user-favorite-cycle",
    ),
    # Moves incomplete issues from one cycle to another.
    path(
        "workspaces/<str:slug>/projects/<uuid:project_id>/cycles/<uuid:cycle_id>/transfer-issues/",
        TransferCycleIssueEndpoint.as_view(),
        name="transfer-issues",
    ),
    path(
        "workspaces/<str:slug>/projects/<uuid:project_id>/cycles/<uuid:cycle_id>/user-properties/",
        CycleUserPropertiesEndpoint.as_view(),
        name="cycle-user-filters",
    ),
    path(
        "workspaces/<str:slug>/projects/<uuid:project_id>/cycles/<uuid:cycle_id>/archive/",
        CycleArchiveUnarchiveEndpoint.as_view(),
        name="cycle-archive-unarchive",
    ),
    path(
        "workspaces/<str:slug>/projects/<uuid:project_id>/archived-cycles/",
        CycleArchiveUnarchiveEndpoint.as_view(),
        name="cycle-archive-unarchive",
    ),
    path(
        "workspaces/<str:slug>/projects/<uuid:project_id>/archived-cycles/<uuid:pk>/",
        CycleArchiveUnarchiveEndpoint.as_view(),
        name="cycle-archive-unarchive",
    ),
    # NOTE(review): the two routes below reuse name="project-cycle", which
    # already names the CRUD routes above — reverse() by name is ambiguous;
    # confirm whether distinct names were intended.
    path(
        "workspaces/<str:slug>/projects/<uuid:project_id>/cycles/<uuid:cycle_id>/progress/",
        CycleProgressEndpoint.as_view(),
        name="project-cycle",
    ),
    path(
        "workspaces/<str:slug>/projects/<uuid:project_id>/cycles/<uuid:cycle_id>/analytics/",
        CycleAnalyticsEndpoint.as_view(),
        name="project-cycle",
    ),
]

View File

@@ -0,0 +1,37 @@
from django.urls import path
from plane.app.views import (
ProjectEstimatePointEndpoint,
BulkEstimatePointEndpoint,
EstimatePointEndpoint,
)
# Estimate routes: project estimates, bulk estimate CRUD and
# individual estimate points.
urlpatterns = [
    path(
        "workspaces/<str:slug>/projects/<uuid:project_id>/project-estimates/",
        ProjectEstimatePointEndpoint.as_view(),
        name="project-estimate-points",
    ),
    path(
        "workspaces/<str:slug>/projects/<uuid:project_id>/estimates/",
        BulkEstimatePointEndpoint.as_view({"get": "list", "post": "create"}),
        name="bulk-create-estimate-points",
    ),
    path(
        "workspaces/<str:slug>/projects/<uuid:project_id>/estimates/<uuid:estimate_id>/",
        BulkEstimatePointEndpoint.as_view({"get": "retrieve", "patch": "partial_update", "delete": "destroy"}),
        name="bulk-create-estimate-points",
    ),
    path(
        "workspaces/<str:slug>/projects/<uuid:project_id>/estimates/<uuid:estimate_id>/estimate-points/",
        EstimatePointEndpoint.as_view({"post": "create"}),
        name="estimate-points",
    ),
    # NOTE(review): <estimate_point_id> uses the default str converter while
    # every sibling id uses <uuid:...> — confirm whether uuid was intended.
    path(
        "workspaces/<str:slug>/projects/<uuid:project_id>/estimates/<uuid:estimate_id>/estimate-points/<estimate_point_id>/",
        EstimatePointEndpoint.as_view({"patch": "partial_update", "delete": "destroy"}),
        name="estimate-points",
    ),
]

View File

@@ -0,0 +1,12 @@
from django.urls import path
from plane.app.views import ExportIssuesEndpoint
# Workspace issue-export route.
urlpatterns = [
    path(
        "workspaces/<str:slug>/export-issues/",
        ExportIssuesEndpoint.as_view(),
        name="export-issues",
    ),
]

View File

@@ -0,0 +1,20 @@
from django.urls import path
from plane.app.views import UnsplashEndpoint
from plane.app.views import GPTIntegrationEndpoint, WorkspaceGPTIntegrationEndpoint
# External-integration routes: Unsplash image search and AI assistant.
urlpatterns = [
    path("unsplash/", UnsplashEndpoint.as_view(), name="unsplash"),
    # NOTE(review): both AI-assistant routes are named "importer", which
    # looks like a stale copy-paste; renaming would break reverse("importer")
    # callers, so confirm before changing.
    path(
        "workspaces/<str:slug>/projects/<uuid:project_id>/ai-assistant/",
        GPTIntegrationEndpoint.as_view(),
        name="importer",
    ),
    path(
        "workspaces/<str:slug>/ai-assistant/",
        WorkspaceGPTIntegrationEndpoint.as_view(),
        name="importer",
    ),
]

View File

@@ -0,0 +1,62 @@
from django.urls import path
from plane.app.views import (
IntakeViewSet,
IntakeIssueViewSet,
IntakeWorkItemDescriptionVersionEndpoint,
)
# Intake routes, plus legacy "inbox" aliases that map the old URL names
# onto the same viewsets.
urlpatterns = [
    path(
        "workspaces/<str:slug>/projects/<uuid:project_id>/intakes/",
        IntakeViewSet.as_view({"get": "list", "post": "create"}),
        name="intake",
    ),
    path(
        "workspaces/<str:slug>/projects/<uuid:project_id>/intakes/<uuid:pk>/",
        IntakeViewSet.as_view({"get": "retrieve", "patch": "partial_update", "delete": "destroy"}),
        name="intake",
    ),
    path(
        "workspaces/<str:slug>/projects/<uuid:project_id>/intake-issues/",
        IntakeIssueViewSet.as_view({"get": "list", "post": "create"}),
        name="intake-issue",
    ),
    path(
        "workspaces/<str:slug>/projects/<uuid:project_id>/intake-issues/<uuid:pk>/",
        IntakeIssueViewSet.as_view({"get": "retrieve", "patch": "partial_update", "delete": "destroy"}),
        name="intake-issue",
    ),
    # Legacy "inbox" aliases — same views as the intake routes above.
    path(
        "workspaces/<str:slug>/projects/<uuid:project_id>/inboxes/",
        IntakeViewSet.as_view({"get": "list", "post": "create"}),
        name="inbox",
    ),
    path(
        "workspaces/<str:slug>/projects/<uuid:project_id>/inboxes/<uuid:pk>/",
        IntakeViewSet.as_view({"get": "retrieve", "patch": "partial_update", "delete": "destroy"}),
        name="inbox",
    ),
    path(
        "workspaces/<str:slug>/projects/<uuid:project_id>/inbox-issues/",
        IntakeIssueViewSet.as_view({"get": "list", "post": "create"}),
        name="inbox-issue",
    ),
    path(
        "workspaces/<str:slug>/projects/<uuid:project_id>/inbox-issues/<uuid:pk>/",
        IntakeIssueViewSet.as_view({"get": "retrieve", "patch": "partial_update", "delete": "destroy"}),
        name="inbox-issue",
    ),
    # Description version history for intake work items.
    path(
        "workspaces/<str:slug>/projects/<uuid:project_id>/intake-work-items/<uuid:work_item_id>/description-versions/",
        IntakeWorkItemDescriptionVersionEndpoint.as_view(),
        name="intake-work-item-versions",
    ),
    path(
        "workspaces/<str:slug>/projects/<uuid:project_id>/intake-work-items/<uuid:work_item_id>/description-versions/<uuid:pk>/",
        IntakeWorkItemDescriptionVersionEndpoint.as_view(),
        name="intake-work-item-versions",
    ),
]

View File

@@ -0,0 +1,282 @@
from django.urls import path
from plane.app.views import (
BulkCreateIssueLabelsEndpoint,
BulkDeleteIssuesEndpoint,
SubIssuesEndpoint,
IssueLinkViewSet,
IssueAttachmentEndpoint,
CommentReactionViewSet,
IssueActivityEndpoint,
IssueArchiveViewSet,
IssueCommentViewSet,
IssueListEndpoint,
IssueReactionViewSet,
IssueRelationViewSet,
IssueSubscriberViewSet,
IssueUserDisplayPropertyEndpoint,
IssueViewSet,
LabelViewSet,
BulkArchiveIssuesEndpoint,
DeletedIssuesListViewSet,
IssuePaginatedViewSet,
IssueDetailEndpoint,
IssueAttachmentV2Endpoint,
IssueBulkUpdateDateEndpoint,
IssueVersionEndpoint,
WorkItemDescriptionVersionEndpoint,
IssueMetaEndpoint,
IssueDetailIdentifierEndpoint,
)
# Work-item (issue) routes: CRUD and listing, labels, bulk operations,
# sub-issues, links, attachments, activity, comments, subscribers,
# reactions, display properties, archives, relations, versions and metadata.
urlpatterns = [
    path(
        "workspaces/<str:slug>/projects/<uuid:project_id>/issues/list/",
        IssueListEndpoint.as_view(),
        name="project-issue",
    ),
    path(
        "workspaces/<str:slug>/projects/<uuid:project_id>/issues/",
        IssueViewSet.as_view({"get": "list", "post": "create"}),
        name="project-issue",
    ),
    path(
        "workspaces/<str:slug>/projects/<uuid:project_id>/issues-detail/",
        IssueDetailEndpoint.as_view(),
        name="project-issue-detail",
    ),
    # Paginated issue listing (v2).
    path(
        "workspaces/<str:slug>/projects/<uuid:project_id>/v2/issues/",
        IssuePaginatedViewSet.as_view({"get": "list"}),
        name="project-issues-paginated",
    ),
    path(
        "workspaces/<str:slug>/projects/<uuid:project_id>/issues/<uuid:pk>/",
        IssueViewSet.as_view(
            {
                "get": "retrieve",
                "put": "update",
                "patch": "partial_update",
                "delete": "destroy",
            }
        ),
        name="project-issue",
    ),
    # Labels
    path(
        "workspaces/<str:slug>/projects/<uuid:project_id>/issue-labels/",
        LabelViewSet.as_view({"get": "list", "post": "create"}),
        name="project-issue-labels",
    ),
    path(
        "workspaces/<str:slug>/projects/<uuid:project_id>/issue-labels/<uuid:pk>/",
        LabelViewSet.as_view(
            {
                "get": "retrieve",
                "put": "update",
                "patch": "partial_update",
                "delete": "destroy",
            }
        ),
        name="project-issue-labels",
    ),
    path(
        "workspaces/<str:slug>/projects/<uuid:project_id>/bulk-create-labels/",
        BulkCreateIssueLabelsEndpoint.as_view(),
        name="project-bulk-labels",
    ),
    # Bulk issue operations
    path(
        "workspaces/<str:slug>/projects/<uuid:project_id>/bulk-delete-issues/",
        BulkDeleteIssuesEndpoint.as_view(),
        name="project-issues-bulk",
    ),
    path(
        "workspaces/<str:slug>/projects/<uuid:project_id>/bulk-archive-issues/",
        BulkArchiveIssuesEndpoint.as_view(),
        name="bulk-archive-issues",
    ),
    # Sub-issues and links
    path(
        "workspaces/<str:slug>/projects/<uuid:project_id>/issues/<uuid:issue_id>/sub-issues/",
        SubIssuesEndpoint.as_view(),
        name="sub-issues",
    ),
    path(
        "workspaces/<str:slug>/projects/<uuid:project_id>/issues/<uuid:issue_id>/issue-links/",
        IssueLinkViewSet.as_view({"get": "list", "post": "create"}),
        name="project-issue-links",
    ),
    path(
        "workspaces/<str:slug>/projects/<uuid:project_id>/issues/<uuid:issue_id>/issue-links/<uuid:pk>/",
        IssueLinkViewSet.as_view(
            {
                "get": "retrieve",
                "put": "update",
                "patch": "partial_update",
                "delete": "destroy",
            }
        ),
        name="project-issue-links",
    ),
    path(
        "workspaces/<str:slug>/projects/<uuid:project_id>/issues/<uuid:issue_id>/issue-attachments/",
        IssueAttachmentEndpoint.as_view(),
        name="project-issue-attachments",
    ),
    path(
        "workspaces/<str:slug>/projects/<uuid:project_id>/issues/<uuid:issue_id>/issue-attachments/<uuid:pk>/",
        IssueAttachmentEndpoint.as_view(),
        name="project-issue-attachments",
    ),
    # V2 Attachments
    path(
        "assets/v2/workspaces/<str:slug>/projects/<uuid:project_id>/issues/<uuid:issue_id>/attachments/",
        IssueAttachmentV2Endpoint.as_view(),
        name="project-issue-attachments",
    ),
    path(
        "assets/v2/workspaces/<str:slug>/projects/<uuid:project_id>/issues/<uuid:issue_id>/attachments/<uuid:pk>/",
        IssueAttachmentV2Endpoint.as_view(),
        name="project-issue-attachments",
    ),
    ## End Issues
    ## Issue Activity
    path(
        "workspaces/<str:slug>/projects/<uuid:project_id>/issues/<uuid:issue_id>/history/",
        IssueActivityEndpoint.as_view(),
        name="project-issue-history",
    ),
    ## End Issue Activity
    ## IssueComments
    path(
        "workspaces/<str:slug>/projects/<uuid:project_id>/issues/<uuid:issue_id>/comments/",
        IssueCommentViewSet.as_view({"get": "list", "post": "create"}),
        name="project-issue-comment",
    ),
    path(
        "workspaces/<str:slug>/projects/<uuid:project_id>/issues/<uuid:issue_id>/comments/<uuid:pk>/",
        IssueCommentViewSet.as_view(
            {
                "get": "retrieve",
                "put": "update",
                "patch": "partial_update",
                "delete": "destroy",
            }
        ),
        name="project-issue-comment",
    ),
    ## End IssueComments
    # Issue Subscribers
    path(
        "workspaces/<str:slug>/projects/<uuid:project_id>/issues/<uuid:issue_id>/issue-subscribers/",
        IssueSubscriberViewSet.as_view({"get": "list", "post": "create"}),
        name="project-issue-subscribers",
    ),
    path(
        "workspaces/<str:slug>/projects/<uuid:project_id>/issues/<uuid:issue_id>/issue-subscribers/<uuid:subscriber_id>/",
        IssueSubscriberViewSet.as_view({"delete": "destroy"}),
        name="project-issue-subscribers",
    ),
    # The requesting user's own subscription to an issue.
    path(
        "workspaces/<str:slug>/projects/<uuid:project_id>/issues/<uuid:issue_id>/subscribe/",
        IssueSubscriberViewSet.as_view({"get": "subscription_status", "post": "subscribe", "delete": "unsubscribe"}),
        name="project-issue-subscribers",
    ),
    ## End Issue Subscribers
    # Issue Reactions
    path(
        "workspaces/<str:slug>/projects/<uuid:project_id>/issues/<uuid:issue_id>/reactions/",
        IssueReactionViewSet.as_view({"get": "list", "post": "create"}),
        name="project-issue-reactions",
    ),
    path(
        "workspaces/<str:slug>/projects/<uuid:project_id>/issues/<uuid:issue_id>/reactions/<str:reaction_code>/",
        IssueReactionViewSet.as_view({"delete": "destroy"}),
        name="project-issue-reactions",
    ),
    ## End Issue Reactions
    # Comment Reactions
    path(
        "workspaces/<str:slug>/projects/<uuid:project_id>/comments/<uuid:comment_id>/reactions/",
        CommentReactionViewSet.as_view({"get": "list", "post": "create"}),
        name="project-issue-comment-reactions",
    ),
    path(
        "workspaces/<str:slug>/projects/<uuid:project_id>/comments/<uuid:comment_id>/reactions/<str:reaction_code>/",
        CommentReactionViewSet.as_view({"delete": "destroy"}),
        name="project-issue-comment-reactions",
    ),
    ## End Comment Reactions
    ## IssueUserProperty
    path(
        "workspaces/<str:slug>/projects/<uuid:project_id>/user-properties/",
        IssueUserDisplayPropertyEndpoint.as_view(),
        name="project-issue-display-properties",
    ),
    ## IssueUserProperty End
    ## Issue Archives
    path(
        "workspaces/<str:slug>/projects/<uuid:project_id>/archived-issues/",
        IssueArchiveViewSet.as_view({"get": "list"}),
        name="project-issue-archive",
    ),
    path(
        "workspaces/<str:slug>/projects/<uuid:project_id>/issues/<uuid:pk>/archive/",
        IssueArchiveViewSet.as_view({"get": "retrieve", "post": "archive", "delete": "unarchive"}),
        name="project-issue-archive-unarchive",
    ),
    ## End Issue Archives
    ## Issue Relation
    path(
        "workspaces/<str:slug>/projects/<uuid:project_id>/issues/<uuid:issue_id>/issue-relation/",
        IssueRelationViewSet.as_view({"get": "list", "post": "create"}),
        name="issue-relation",
    ),
    path(
        "workspaces/<str:slug>/projects/<uuid:project_id>/issues/<uuid:issue_id>/remove-relation/",
        IssueRelationViewSet.as_view({"post": "remove_relation"}),
        name="issue-relation",
    ),
    ## End Issue Relation
    path(
        "workspaces/<str:slug>/projects/<uuid:project_id>/deleted-issues/",
        DeletedIssuesListViewSet.as_view(),
        name="deleted-issues",
    ),
    # Bulk start/target date updates.
    path(
        "workspaces/<str:slug>/projects/<uuid:project_id>/issue-dates/",
        IssueBulkUpdateDateEndpoint.as_view(),
        name="project-issue-dates",
    ),
    # Version history
    path(
        "workspaces/<str:slug>/projects/<uuid:project_id>/issues/<uuid:issue_id>/versions/",
        IssueVersionEndpoint.as_view(),
        name="issue-versions",
    ),
    path(
        "workspaces/<str:slug>/projects/<uuid:project_id>/issues/<uuid:issue_id>/versions/<uuid:pk>/",
        IssueVersionEndpoint.as_view(),
        name="issue-versions",
    ),
    path(
        "workspaces/<str:slug>/projects/<uuid:project_id>/work-items/<uuid:work_item_id>/description-versions/",
        WorkItemDescriptionVersionEndpoint.as_view(),
        name="work-item-versions",
    ),
    path(
        "workspaces/<str:slug>/projects/<uuid:project_id>/work-items/<uuid:work_item_id>/description-versions/<uuid:pk>/",
        WorkItemDescriptionVersionEndpoint.as_view(),
        name="work-item-versions",
    ),
    path(
        "workspaces/<str:slug>/projects/<uuid:project_id>/issues/<uuid:issue_id>/meta/",
        IssueMetaEndpoint.as_view(),
        name="issue-meta",
    ),
    # Lookup by human-readable identifier, e.g. PROJ-123.
    path(
        "workspaces/<str:slug>/work-items/<str:project_identifier>-<str:issue_identifier>/",
        IssueDetailIdentifierEndpoint.as_view(),
        name="issue-detail-identifier",
    ),
]

View File

@@ -0,0 +1,101 @@
from django.urls import path
from plane.app.views import (
ModuleViewSet,
ModuleIssueViewSet,
ModuleLinkViewSet,
ModuleFavoriteViewSet,
ModuleUserPropertiesEndpoint,
ModuleArchiveUnarchiveEndpoint,
)
# URL routes for project modules: module CRUD, module-issue linking,
# module links, user favorites, per-module user properties, and
# archive/unarchive. All routes are scoped under a workspace slug and
# project id.
urlpatterns = [
    # List all modules in a project / create a new module.
    path(
        "workspaces/<str:slug>/projects/<uuid:project_id>/modules/",
        ModuleViewSet.as_view({"get": "list", "post": "create"}),
        name="project-modules",
    ),
    # Retrieve, update (full or partial), or delete a single module by pk.
    path(
        "workspaces/<str:slug>/projects/<uuid:project_id>/modules/<uuid:pk>/",
        ModuleViewSet.as_view(
            {
                "get": "retrieve",
                "put": "update",
                "patch": "partial_update",
                "delete": "destroy",
            }
        ),
        name="project-modules",
    ),
    # Issue-centric route: attach a given issue to one or more modules.
    path(
        "workspaces/<str:slug>/projects/<uuid:project_id>/issues/<uuid:issue_id>/modules/",
        ModuleIssueViewSet.as_view({"post": "create_issue_modules"}),
        name="issue-module",
    ),
    # Module-centric route: list issues in a module / add issues to it.
    path(
        "workspaces/<str:slug>/projects/<uuid:project_id>/modules/<uuid:module_id>/issues/",
        ModuleIssueViewSet.as_view({"post": "create_module_issues", "get": "list"}),
        name="project-module-issues",
    ),
    # Manage a single issue's membership in a module (retrieve/update/remove).
    path(
        "workspaces/<str:slug>/projects/<uuid:project_id>/modules/<uuid:module_id>/issues/<uuid:issue_id>/",
        ModuleIssueViewSet.as_view(
            {
                "get": "retrieve",
                "put": "update",
                "patch": "partial_update",
                "delete": "destroy",
            }
        ),
        name="project-module-issues",
    ),
    # List/create external links attached to a module.
    path(
        "workspaces/<str:slug>/projects/<uuid:project_id>/modules/<uuid:module_id>/module-links/",
        ModuleLinkViewSet.as_view({"get": "list", "post": "create"}),
        name="project-issue-module-links",
    ),
    # Retrieve/update/delete a single module link by pk.
    path(
        "workspaces/<str:slug>/projects/<uuid:project_id>/modules/<uuid:module_id>/module-links/<uuid:pk>/",
        ModuleLinkViewSet.as_view(
            {
                "get": "retrieve",
                "put": "update",
                "patch": "partial_update",
                "delete": "destroy",
            }
        ),
        name="project-issue-module-links",
    ),
    # List the current user's favorite modules / mark a module as favorite.
    path(
        "workspaces/<str:slug>/projects/<uuid:project_id>/user-favorite-modules/",
        ModuleFavoriteViewSet.as_view({"get": "list", "post": "create"}),
        name="user-favorite-module",
    ),
    # Remove a module from the current user's favorites.
    path(
        "workspaces/<str:slug>/projects/<uuid:project_id>/user-favorite-modules/<uuid:module_id>/",
        ModuleFavoriteViewSet.as_view({"delete": "destroy"}),
        name="user-favorite-module",
    ),
    # Per-user display properties (filters, layout, etc.) for a module.
    # NOTE(review): the reverse name "cycle-user-filters" looks like a
    # copy-paste from the cycle urls ("module-user-filters" seems intended);
    # left unchanged since reverse() callers may depend on it — confirm
    # before renaming.
    path(
        "workspaces/<str:slug>/projects/<uuid:project_id>/modules/<uuid:module_id>/user-properties/",
        ModuleUserPropertiesEndpoint.as_view(),
        name="cycle-user-filters",
    ),
    # Archive or unarchive a specific module.
    path(
        "workspaces/<str:slug>/projects/<uuid:project_id>/modules/<uuid:module_id>/archive/",
        ModuleArchiveUnarchiveEndpoint.as_view(),
        name="module-archive-unarchive",
    ),
    # List all archived modules in a project.
    path(
        "workspaces/<str:slug>/projects/<uuid:project_id>/archived-modules/",
        ModuleArchiveUnarchiveEndpoint.as_view(),
        name="module-archive-unarchive",
    ),
    # Operate on a single archived module by pk.
    path(
        "workspaces/<str:slug>/projects/<uuid:project_id>/archived-modules/<uuid:pk>/",
        ModuleArchiveUnarchiveEndpoint.as_view(),
        name="module-archive-unarchive",
    ),
]

Some files were not shown because too many files have changed in this diff Show More