Compare commits: v1.0.1...feature/me (135 commits)
Commit SHA1s:

c1067d87fe, 97feef5982, 76c92b1cc6, 67c62fc69b, 14d8f43001, 3b59a8560b, 819252cedd, 1dade05f54,
34613e5e0c, a8b20680e5, fc30c1854e, c19e06d902, 82155cab8d, f9858fbd4b, f3a7fd8be5, 49bdd00dac,
2e985bd051, 8e74f1ddb5, 2a3f87cac1, 217a6941a1, 753e3be83f, ebd0f66768, a07584b3af, 0d08634c78,
d333104f43, a724247aec, 2e36a477ce, 6892de487f, 7b04821ef1, f8a216fb6e, 86b5d94ff8, fb3a505c22,
72d372b685, 536c1c37b1, 40caea7d79, 33c15ac138, 05ab135ed2, 19deef9298, c4837e7e5c, b73ef12eac,
c52b223c59, ffc9101030, b5c5225867, 407b3c5ba7, 528db06cd8, 0e1314d183, 7ac35bfdbc, cc6d647f5a,
fba1adda35, fe2518d53c, 62548e5c37, faa5d26601, ba90fa1274, 51767ebbdb, ad91148616, 38df66044d,
1676fc1314, aaaaec6f36, e0b2fa2d6f, 4a2393881b, 583fec04d7, e7ff9b645b, 2b82ff699e, d1136a549a,
ec4d3e738a, c8380e1c30, cabc97afc0, 349f0ecfec, 890ad5e969, 0fc79d9ae5, d60ac2246d, 5d04718394,
4bece298c1, 469901ab88, 13c7015b1c, 21ab2f8a82, a368b979d5, a5b881c609, 9dbab2c5d3, 8f913b0f4e,
31ac184107, 23558b8efc, 4bb3b42c76, 0c5eff7121, 136530adf1, 6128c64c31, a2bfdb682b, f7582173ed,
24dc99a19a, 907075f51d, b48ac45085, b115d0a772, ad0cdfe1d9, a50ef47a52, db4240d99b, bf1d48709c,
c3c0c288a8, 79e8bb3734, a6884440a0, b9e5f396fd, fc40ebcaba, 54642037d3, 0778089ff3, ac2afd6949,
8162ce65cb, 6e4bdea1c2, daf8ec2e0a, 2a3ae31e4e, 9c27c41a5e, a328da679c, 122496513b, 7363c4c692,
012e683240, b3a37df4be, cb94b19e61, 50c4f1a6ac, 1d02313585, 04ffc049b0, ca3db02ce8, ad7c2b20a2,
89e39520ba, 6efaeb3ff6, e42819ef64, 8825b0410a, f583291d8a, 03c0068991, 2fb47c4c1d, 136dd5c42f,
276d24143d, a8e9ad68bf, 12c04d0798, eb39dc363f, 5cce1f2d2c, 1b669f76b8, a6b6eca4d9
**.github/workflows/build-images.yaml** (vendored, new file, +50)

@@ -0,0 +1,50 @@

```yaml
name: Publish Docker Images

on:
  push:
    branches:
      - 'main'
  release:
    types: [published]

jobs:
  build:
    runs-on: ubuntu-latest
    permissions:
      contents: read
      packages: write
    strategy:
      matrix:
        service:
          - name: frontend
            context: ./worklenz-frontend
            container_name: worklenz_frontend
          - name: backend
            context: ./worklenz-backend
            container_name: worklenz_backend
    steps:
      - name: Checkout the codebase
        uses: actions/checkout@v4

      - name: Login to the Container registry
        uses: docker/login-action@v3
        with:
          registry: ghcr.io
          username: ${{ github.actor }}
          password: ${{ secrets.GITHUB_TOKEN }}

      - name: Extract metadata (tags, labels) for Docker
        id: meta
        uses: docker/metadata-action@v5
        with:
          images: ghcr.io/${{ github.repository_owner }}/${{ matrix.service.container_name }}
          flavor: |
            latest=true

      - name: Build and push Docker image
        uses: docker/build-push-action@v5
        with:
          context: ${{ matrix.service.context }}
          push: true
          tags: ${{ steps.meta.outputs.tags }}
          labels: ${{ steps.meta.outputs.labels }}
```
**.gitignore** (vendored, 79 lines)

@@ -1,4 +1,79 @@

```
.idea
.vscode
# Dependencies
node_modules/
.pnp/
.pnp.js

# Build outputs
dist/
build/
out/
.next/
.nuxt/
.cache/

# Environment variables
.env
.env.local
.env.development.local
.env.test.local
.env.production.local
.env.development
.env.production
.env.*
!.env.example
!.env.template

# Logs
logs
*.log
npm-debug.log*
yarn-debug.log*
yarn-error.log*
pnpm-debug.log*
lerna-debug.log*

# Editor directories and files
.vscode/*
!.vscode/extensions.json
.idea/
.DS_Store
*.suo
*.ntvs*
*.njsproj
*.sln
*.sw?
*.sublime-workspace

# Testing
coverage/
.nyc_output/

# Temp files
.temp/
.tmp/
temp/
tmp/

# Debug
.debug/

# Misc
.DS_Store
Thumbs.db
.thumbs.db
ehthumbs.db
Desktop.ini
$RECYCLE.BIN/

# Yarn
.yarn/*
!.yarn/patches
!.yarn/plugins
!.yarn/releases
!.yarn/sdks
!.yarn/versions

# TypeScript
*.tsbuildinfo
```
**CONTRIBUTING.md**

@@ -19,7 +19,7 @@ We have adopted a Code of Conduct to ensure a welcoming and inclusive environment

## Coding Standards

- Follow the [Angular Style Guide](https://angular.io/guide/styleguide) for the frontend code.
- Follow the [React Documentation](https://react.dev/learn) for best practices in React development.
- Use [TypeScript](https://www.typescriptlang.org/) for both frontend and backend code.
- Follow the [Conventional Commits](https://www.conventionalcommits.org/) specification for commit messages.
**README.md** (365 lines)

```diff
@@ -7,11 +7,11 @@
 </h1>

 <p align="center">
-  <a href="https://worklenz.com/features/task-management/">Task Management</a> |
-  <a href="https://worklenz.com/features/time-tracking/">Time Tracking</a> |
-  <a href="https://worklenz.com/features/analytics/">Analytics</a> |
-  <a href="https://worklenz.com/features/resource-management/">Resource Management</a> |
-  <a href="https://worklenz.com/features/templates/">Project Templates</a>
+  <a href="https://worklenz.com/task-management/">Task Management</a> |
+  <a href="https://worklenz.com/time-tracking/">Time Tracking</a> |
+  <a href="https://worklenz.com/analytics/">Analytics</a> |
+  <a href="https://worklenz.com/resource-management/">Resource Management</a> |
+  <a href="https://worklenz.com/templates/">Project Templates</a>
 </p>

 <p align="center">
```

@@ -39,20 +39,158 @@ comprehensive solution for managing projects, tasks, and collaboration within teams.

This repository contains the frontend and backend code for Worklenz.

- **Frontend**: Built using Angular, with [Ant Design of Angular](https://ng.ant.design/docs/introduce/en) as the UI library.
- **Backend**: Built using a custom TypeScript implementation of ExpressJS, with PostgreSQL as the database, providing a robust, scalable, and type-safe backend.
- **Frontend**: Built using React with Ant Design as the UI library.
- **Backend**: Built using TypeScript, Express.js, with PostgreSQL as the database.

## Requirements

- Node.js version v18 or newer
- Postgres version v15.6
- Redis version v4.6.7 (not used yet; setup only)
- PostgreSQL version v15 or newer
- Docker and Docker Compose (for containerized setup)

## Getting started with Worklenz

- Containerized installation - Use Docker to deploy Worklenz in production or development environments.
- Manual installation - To get started with Worklenz, please follow the [Worklenz setup guidelines](SETUP_THE_PROJECT.md).

## Getting Started

These instructions will help you set up and run the Worklenz project on your local machine for development and testing purposes.

### Prerequisites

- Node.js (version 18 or higher)
- PostgreSQL database
- An S3-compatible storage service (like MinIO) or Azure Blob Storage

### Option 1: Manual Installation

1. Clone the repository
   ```bash
   git clone https://github.com/Worklenz/worklenz.git
   cd worklenz
   ```

2. Set up environment variables
   - Copy the example environment files
     ```bash
     cp .env.example .env
     cp worklenz-backend/.env.example worklenz-backend/.env
     ```
   - Update the environment variables with your configuration

3. Install dependencies
   ```bash
   # Install backend dependencies
   cd worklenz-backend
   npm install

   # Install frontend dependencies
   cd ../worklenz-frontend
   npm install
   ```

4. Set up the database
   ```bash
   # Create a PostgreSQL database named worklenz_db
   cd worklenz-backend

   # Execute the SQL setup files in the correct order
   psql -U your_username -d worklenz_db -f database/sql/0_extensions.sql
   psql -U your_username -d worklenz_db -f database/sql/1_tables.sql
   psql -U your_username -d worklenz_db -f database/sql/indexes.sql
   psql -U your_username -d worklenz_db -f database/sql/4_functions.sql
   psql -U your_username -d worklenz_db -f database/sql/triggers.sql
   psql -U your_username -d worklenz_db -f database/sql/3_views.sql
   psql -U your_username -d worklenz_db -f database/sql/2_dml.sql
   psql -U your_username -d worklenz_db -f database/sql/5_database_user.sql
   ```

5. Start the development servers
   ```bash
   # In one terminal, start the backend
   cd worklenz-backend
   npm run dev

   # In another terminal, start the frontend
   cd worklenz-frontend
   npm run dev
   ```

6. Access the application at http://localhost:5000

### Option 2: Docker Setup

The project includes a fully configured Docker setup with:
- Frontend React application
- Backend server
- PostgreSQL database
- MinIO for S3-compatible storage

1. Clone the repository:
   ```bash
   git clone https://github.com/Worklenz/worklenz.git
   cd worklenz
   ```

2. Start the Docker containers:

   **Using Docker Compose directly**
   ```bash
   docker-compose up -d
   ```

3. The application will be available at:
   - Frontend: http://localhost:5000
   - Backend API: http://localhost:3000
   - MinIO Console: http://localhost:9001 (login with minioadmin/minioadmin)

4. To stop the services:
   ```bash
   docker-compose down
   ```

## Configuration

### Environment Variables

Worklenz requires several environment variables to be configured for proper operation. These include:

- Database credentials
- Session secrets
- Storage configuration (S3 or Azure)
- Authentication settings

Please refer to the `.env.example` files for a full list of required variables.
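As a rough orientation, a development `.env` might look like the sketch below. The variable names are taken from the docker-compose configuration later on this page; the values are placeholders, and the `.env.example` files remain the authoritative list.

```
# Hypothetical development values -- defer to .env.example for the real template.
DB_HOST=localhost
DB_PORT=5432
DB_NAME=worklenz_db
DB_USER=postgres
DB_PASSWORD=change-me

SESSION_NAME=worklenz
SESSION_SECRET=generate-a-long-random-string
COOKIE_SECRET=generate-another-long-random-string

# S3-compatible storage (MinIO defaults used elsewhere in this README)
REGION=us-east-1
BUCKET=worklenz-bucket
S3_URL=http://minio:9000/worklenz-bucket
S3_ACCESS_KEY_ID=minioadmin
S3_SECRET_ACCESS_KEY=minioadmin
```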
### MinIO Integration

The project uses MinIO as an S3-compatible object storage service, which provides an open-source alternative to AWS S3 for development and production.

- **MinIO Console**: http://localhost:9001
  - Username: minioadmin
  - Password: minioadmin

- **Default Bucket**: worklenz-bucket (created automatically when the containers start)

### Security Considerations

For production deployments:

1. Use strong, unique passwords and keys for all services
2. Do not commit `.env` files to version control
3. Use a production-grade PostgreSQL setup with proper backup procedures
4. Enable HTTPS for all public endpoints
5. Review and update dependencies regularly

## Contributing

We welcome contributions from the community! If you'd like to contribute, please follow our [contributing guidelines](CONTRIBUTING.md).

## Security

If you believe you have found a security vulnerability in Worklenz, we encourage you to responsibly disclose this and not open a public issue. We will investigate all legitimate reports.

Email [info@worklenz.com](mailto:info@worklenz.com) to disclose any security vulnerabilities.

## License

This project is licensed under the [MIT License](LICENSE).

## Screenshots

@@ -107,16 +245,201 @@ This repository contains the frontend and backend code for Worklenz.

We welcome contributions from the community! If you'd like to contribute, please follow our [contributing guidelines](CONTRIBUTING.md).

### Security

If you believe you have found a security vulnerability in Worklenz, we encourage you to responsibly disclose this and not open a public issue. We will investigate all legitimate reports.

Email [info@worklenz.com](mailto:info@worklenz.com) to disclose any security vulnerabilities.

### License

Worklenz is open source and released under the [GNU Affero General Public License Version 3 (AGPLv3)](LICENSE).

By contributing to Worklenz, you agree that your contributions will be licensed under its AGPL.

# Worklenz React

This repository contains the React version of Worklenz with a Docker setup for easy development and deployment.

## Getting Started with Docker

The project includes a fully configured Docker setup with:
- Frontend React application
- Backend server
- PostgreSQL database
- MinIO for S3-compatible storage

### Prerequisites

- Docker and Docker Compose installed on your system
- Git

### Quick Start

1. Clone the repository:
   ```bash
   git clone https://github.com/Worklenz/worklenz.git
   cd worklenz
   ```

2. Start the Docker containers (choose one option):

   **Option 1: Using the provided scripts (easiest)**
   - On Windows:
     ```
     start.bat
     ```
   - On Linux/macOS:
     ```bash
     ./start.sh
     ```

   **Option 2: Using Docker Compose directly**
   ```bash
   docker-compose up -d
   ```

3. The application will be available at:
   - Frontend: http://localhost:5000
   - Backend API: http://localhost:3000
   - MinIO Console: http://localhost:9001 (login with minioadmin/minioadmin)

4. To stop the services (choose one option):

   **Option 1: Using the provided scripts**
   - On Windows:
     ```
     stop.bat
     ```
   - On Linux/macOS:
     ```bash
     ./stop.sh
     ```

   **Option 2: Using Docker Compose directly**
   ```bash
   docker-compose down
   ```

## MinIO Integration

The project uses MinIO as an S3-compatible object storage service, which provides an open-source alternative to AWS S3 for development and production.

### Working with MinIO

MinIO provides an S3-compatible API, so any code that works with S3 will work with MinIO by simply changing the endpoint URL. The backend has been configured to use MinIO by default, with no additional configuration required.

- **MinIO Console**: http://localhost:9001
  - Username: minioadmin
  - Password: minioadmin

- **Default Bucket**: worklenz-bucket (created automatically when the containers start)

### Backend Storage Configuration

The backend is pre-configured to use MinIO with the following settings:

```javascript
// S3 credentials with MinIO defaults
export const REGION = process.env.AWS_REGION || "us-east-1";
export const BUCKET = process.env.AWS_BUCKET || "worklenz-bucket";
export const S3_URL = process.env.S3_URL || "http://minio:9000/worklenz-bucket";
export const S3_ACCESS_KEY_ID = process.env.AWS_ACCESS_KEY_ID || "minioadmin";
export const S3_SECRET_ACCESS_KEY = process.env.AWS_SECRET_ACCESS_KEY || "minioadmin";
```

The S3 client is initialized with special MinIO configuration:

```javascript
const s3Client = new S3Client({
  region: REGION,
  credentials: {
    accessKeyId: S3_ACCESS_KEY_ID || "",
    secretAccessKey: S3_SECRET_ACCESS_KEY || "",
  },
  endpoint: getEndpointFromUrl(), // Extracts endpoint from S3_URL
  forcePathStyle: true, // Required for MinIO
});
```

### Environment Configuration

The project uses the following environment file structure:

- **Frontend**:
  - `worklenz-frontend/.env.development` - Development environment variables
  - `worklenz-frontend/.env.production` - Production build variables

- **Backend**:
  - `worklenz-backend/.env` - Backend environment variables

### Setting Up Environment Files

The Docker environment script will create or overwrite all environment files:

```bash
# For HTTP/WS
./update-docker-env.sh your-hostname

# For HTTPS/WSS
./update-docker-env.sh your-hostname true
```

This script generates properly configured environment files for both development and production environments.

## Docker Deployment

### Local Development with Docker

1. Set up the environment files:
   ```bash
   # For HTTP/WS
   ./update-docker-env.sh

   # For HTTPS/WSS
   ./update-docker-env.sh localhost true
   ```

2. Run the application using Docker Compose:
   ```bash
   docker-compose up -d
   ```

3. Access the application:
   - Frontend: http://localhost:5000
   - Backend API: http://localhost:3000 (or https://localhost:3000 with SSL)

### Remote Server Deployment

When deploying to a remote server:

1. Set up the environment files with your server's hostname:
   ```bash
   # For HTTP/WS
   ./update-docker-env.sh your-server-hostname

   # For HTTPS/WSS
   ./update-docker-env.sh your-server-hostname true
   ```

   This ensures that the frontend correctly connects to the backend API.

2. Pull and run the latest Docker images:
   ```bash
   docker-compose pull
   docker-compose up -d
   ```

3. Access the application through your server's hostname:
   - Frontend: http://your-server-hostname:5000
   - Backend API: http://your-server-hostname:3000

### Environment Configuration

The Docker setup uses environment variables to configure the services:

- Frontend:
  - `VITE_API_URL`: URL of the backend API (default: http://backend:3000 for container networking)
  - `VITE_SOCKET_URL`: WebSocket URL for real-time communication (default: ws://backend:3000)

- Backend:
  - Database connection parameters
  - Storage configuration
  - Other backend settings

For custom configuration, edit the `.env` file or the `update-docker-env.sh` script.
**SETUP_THE_PROJECT.md**

@@ -4,21 +4,20 @@ Getting started with development is a breeze! Follow these steps and you'll be coding in no time.

## Requirements

- Node.js version v18 or newer - [Node.js](https://nodejs.org/en/download/current)
- Postgres version v15.6 - [PostgreSQL](https://www.postgresql.org/download/)
- Redis version v4.6.7 (not used yet; setup only)
- Node.js version v16 or newer - [Node.js](https://nodejs.org/en/download/)
- PostgreSQL version v15 or newer - [PostgreSQL](https://www.postgresql.org/download/)
- S3-compatible storage (like MinIO) for file storage

## Prerequisites

- `$ npm install -g ts-node`
- `$ npm install -g typescript`
- `$ npm install -g grunt grunt-cli`
- `$ npm install -g typescript` (optional, but recommended)

## Installation

**Clone the repository:**

```bash
git clone https://github.com/Worklenz/worklenz.git
cd worklenz
```

### Frontend installation

@@ -32,13 +31,14 @@

   ```bash
   npm install
   ```

3. **Run the frontend:**
   ```bash
   npm start
   ```

4. Navigate to [http://localhost:4200](http://localhost:4200)
4. Navigate to [http://localhost:5173](http://localhost:5173)

### Backend installation

@@ -54,13 +54,34 @@

3. **Configure Environment Variables:**

   - Create a copy of the `.env.template` file and name it `.env`.
   - Update the required fields in `.env` with the specific information.
   - Create a copy of the `.env.example` file and name it `.env`.
   - Update the required fields in `.env` with your specific configuration.

4. **Restore Database**
4. **Set up Database**
   - Create a new database named `worklenz_db` on your local PostgreSQL server.
   - Update the `DATABASE_NAME` and `PASSWORD` in `database/6_user_permission.sql` with your DB credentials.
   - Open a query console and execute the queries from the .sql files in the `database` directories, following the provided order.
   - Update the database connection details in your `.env` file.
   - Execute the SQL setup files in the correct order:

   ```bash
   # From your PostgreSQL client or command line
   psql -U your_username -d worklenz_db -f database/sql/0_extensions.sql
   psql -U your_username -d worklenz_db -f database/sql/1_tables.sql
   psql -U your_username -d worklenz_db -f database/sql/indexes.sql
   psql -U your_username -d worklenz_db -f database/sql/4_functions.sql
   psql -U your_username -d worklenz_db -f database/sql/triggers.sql
   psql -U your_username -d worklenz_db -f database/sql/3_views.sql
   psql -U your_username -d worklenz_db -f database/sql/2_dml.sql
   psql -U your_username -d worklenz_db -f database/sql/5_database_user.sql
   ```

   Alternatively, you can use the provided shell script:

   ```bash
   # Make sure the script is executable
   chmod +x database/00-init-db.sh
   # Run the script (may need modifications for local execution)
   ./database/00-init-db.sh
   ```

5. **Install Dependencies:**

@@ -68,48 +89,49 @@

   ```bash
   npm install
   ```

   This command installs all the necessary libraries required to run the project.

6. **Run the Development Server:**

   **a. Start the TypeScript compiler:**

   Open a new terminal window and run the following command:

   ```bash
   grunt dev
   ```

   This starts the `grunt` task runner, which compiles TypeScript code into JavaScript.

   **b. Start the development server:**

   Open another separate terminal window and run the following command:

   ```bash
   npm start
   ```
   ```bash
   npm run dev
   ```

   This starts the development server, allowing you to work on the project.

7. **Run the Production Server:**

   **a. Compile TypeScript to JavaScript:**
   **a. Build the project:**

   Open a new terminal window and run the following command:
   ```bash
   npm run build
   ```

   ```bash
   grunt build
   ```

   This starts the `grunt` task runner, which compiles TypeScript code into JavaScript for production use.
   This will compile the TypeScript code into JavaScript for production use.

   **b. Start the production server:**

   Once the compilation is complete, run the following command in the same terminal window:
   ```bash
   npm start
   ```

   ```bash
   npm start
   ```

   This starts the production server for your application.

## Docker Setup (Alternative)

For an easier setup, you can use Docker and Docker Compose:

1. Make sure you have Docker and Docker Compose installed on your system.

2. From the root directory, run:

   ```bash
   docker-compose up -d
   ```

3. Access the application:
   - Frontend: http://localhost:5000
   - Backend API: http://localhost:3000
   - MinIO Console: http://localhost:9001 (login with minioadmin/minioadmin)

4. To stop the services:

   ```bash
   docker-compose down
   ```
**docker-compose.yml**

@@ -5,10 +5,14 @@ services:

```yaml
      dockerfile: Dockerfile
    container_name: worklenz_frontend
    ports:
      - "4200:4200"
      - "5000:5000"
    depends_on:
      backend:
        condition: service_started
    env_file:
      - ./worklenz-frontend/.env.production
    networks:
      - worklenz

  backend:
    build:
```

@@ -20,53 +24,88 @@ services:

```yaml
    depends_on:
      db:
        condition: service_healthy
    environment:
      - ANGULAR_DIST_DIR
      - ANGULAR_SRC_DIR
      - AWS_REGION
      - BACKEND_PUBLIC_DIR
      - BACKEND_VIEWS_DIR
      - COMMIT_BUILD_IMMEDIATELY
      - COOKIE_SECRET
      - DB_HOST
      - DB_MAX_CLIENTS
      - DB_NAME
      - DB_PASSWORD
      - DB_PORT
      - DB_USER
      - GOOGLE_CALLBACK_URL
      - GOOGLE_CLIENT_ID
      - GOOGLE_CLIENT_SECRET
      - HOSTNAME
      - LOGIN_FAILURE_REDIRECT
      - NODE_ENV
      - PORT
      - SESSION_NAME
      - SESSION_SECRET
      - SLACK_WEBHOOK
      - SOCKET_IO_CORS
      - SOURCE_EMAIL
      - USE_PG_NATIVE
      - BUCKET
      - REGION
      - S3_URL
      - S3_ACCESS_KEY_ID
      - S3_SECRET_ACCESS_KEY
      minio:
        condition: service_started
    env_file:
      - ./worklenz-backend/.env
    networks:
      - worklenz

  minio:
    image: minio/minio:latest
    container_name: worklenz_minio
    ports:
      - "9000:9000"
      - "9001:9001"
    environment:
      MINIO_ROOT_USER: ${S3_ACCESS_KEY_ID:-minioadmin}
      MINIO_ROOT_PASSWORD: ${S3_SECRET_ACCESS_KEY:-minioadmin}
    volumes:
      - worklenz_minio_data:/data
    command: server /data --console-address ":9001"
    networks:
      - worklenz

  # MinIO setup helper - creates default bucket on startup
  createbuckets:
    image: minio/mc
    container_name: worklenz_createbuckets
    depends_on:
      - minio
    entrypoint: >
      /bin/sh -c '
      echo "Waiting for MinIO to start...";
      sleep 15;
      for i in 1 2 3 4 5; do
        echo "Attempt $i to connect to MinIO...";
        if /usr/bin/mc config host add myminio http://minio:9000 minioadmin minioadmin; then
          echo "Successfully connected to MinIO!";
          /usr/bin/mc mb --ignore-existing myminio/worklenz-bucket;
          /usr/bin/mc policy set public myminio/worklenz-bucket;
          exit 0;
        fi
        echo "Connection failed, retrying in 5 seconds...";
        sleep 5;
      done;
      echo "Failed to connect to MinIO after 5 attempts";
      exit 1;
      '
    networks:
      - worklenz

  db:
    image: postgres:15
    container_name: worklenz_db
    environment:
      POSTGRES_DB: "${DB_NAME}"
      POSTGRES_PASSWORD: "${DB_PASSWORD}"
      POSTGRES_USER: ${DB_USER:-postgres}
      POSTGRES_DB: ${DB_NAME:-worklenz_db}
      POSTGRES_PASSWORD: ${DB_PASSWORD:-password}
    healthcheck:
      test: ["CMD-SHELL", "pg_isready -d ${DB_NAME} -U ${DB_USER}"]
      test: [ "CMD-SHELL", "pg_isready -d ${DB_NAME:-worklenz_db} -U ${DB_USER:-postgres}" ]
      interval: 10s
      timeout: 5s
      retries: 5
    networks:
      - worklenz
    volumes:
      - worklenz_postgres_data:/var/lib/postgresql/data
      - ./worklenz-backend/database/:/docker-entrypoint-initdb.d
      - type: bind
        source: ./worklenz-backend/database
        target: /docker-entrypoint-initdb.d
        consistency: cached
    command: >
      bash -c ' if command -v apt-get >/dev/null 2>&1; then
        apt-get update && apt-get install -y dos2unix
      elif command -v apk >/dev/null 2>&1; then
        apk add --no-cache dos2unix
      fi && find /docker-entrypoint-initdb.d -type f -name "*.sh" -exec sh -c '\''
        dos2unix "{}" 2>/dev/null || true
        chmod +x "{}"
      '\'' \; && exec docker-entrypoint.sh postgres '

volumes:
  worklenz_postgres_data:
  worklenz_minio_data:

networks:
  worklenz:
```
**docs/recurring-tasks-user-guide.md** (new file, +39)

# Recurring Tasks: User Guide

## What Are Recurring Tasks?
Recurring tasks are tasks that repeat automatically on a schedule you choose. This helps you save time and ensures important work is never forgotten. For example, you can set up a recurring task for weekly team meetings, monthly reports, or daily check-ins.

## Why Use Recurring Tasks?
- **Save time:** No need to create the same task over and over.
- **Stay organized:** Tasks appear automatically when needed.
- **Never miss a deadline:** Tasks are created on time, every time.

## How to Set Up a Recurring Task
1. Go to the tasks section in your workspace.
2. Choose to create a new task and look for the option to make it recurring.
3. Fill in the task details (name, description, assignees, etc.).
4. Select your preferred schedule (see options below).
5. Save the task. It will now be created automatically based on your chosen schedule.

## Schedule Options
You can choose how often your task repeats. Here are the most common options:

- **Daily:** The task is created every day.
- **Weekly:** The task is created once a week. You can pick the day (e.g., every Monday).
- **Monthly:** The task is created once a month. You can pick the date (e.g., the 1st of every month).
- **Weekdays:** The task is created every Monday to Friday.
- **Custom:** Set your own schedule, such as every 2 days, every 3 weeks, or only on certain days.

### Examples
- "Send team update" every Friday (weekly)
- "Submit expense report" on the 1st of each month (monthly)
- "Check backups" every day (daily)
- "Review project status" every Monday and Thursday (custom)

## Tips
- You can edit or stop a recurring task at any time.
- Assign team members and labels to recurring tasks for better organization.
- Check your task list regularly to see newly created recurring tasks.

## Need Help?
If you have questions or need help setting up recurring tasks, contact your workspace admin or support team.
**docs/recurring-tasks.md** (new file, +56)

# Recurring Tasks Cron Job Documentation

## Overview
The recurring tasks cron job automates the creation of tasks based on predefined templates and schedules. It ensures that tasks are generated at the correct intervals without manual intervention, supporting efficient project management and timely task assignment.

## Purpose
- Automatically create tasks according to recurring schedules defined in the database.
- Prevent duplicate task creation for the same schedule and date.
- Assign team members and labels to newly created tasks as specified in the template.

## Scheduling Logic
- The cron job is scheduled using the [cron](https://www.npmjs.com/package/cron) package (see the sketch after this list).
- The schedule is defined by a cron expression (e.g., `*/2 * * * *` for every 2 minutes, or `0 11 */1 * 1-5` for 11:00 UTC on weekdays).
- On each tick, the job:
  1. Fetches all recurring task templates and their schedules.
  2. Determines the next occurrence for each template using `calculateNextEndDate`.
  3. Checks if a task for the next occurrence already exists.
  4. Creates a new task if it does not exist and the next occurrence is within the allowed future window.
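A minimal sketch of how such a job is wired up with the `cron` package is shown below. The cron expression and the `TIME` constant come from this document; the handler name and its body are placeholders for the real logic in `src/cron_jobs/recurring-tasks.ts`.

```typescript
import { CronJob } from "cron";

// Cron expression from this document: 11:00 UTC on weekdays.
const TIME = "0 11 */1 * 1-5";

// Placeholder tick handler: fetch templates + schedules, compute the next
// occurrence, skip duplicates, create the task, update the bookkeeping columns.
async function onRecurringTasksTick(): Promise<void> {
  // ...
}

export const recurringTasksJob = new CronJob(
  TIME,                 // cronTime
  onRecurringTasksTick, // onTick
  null,                 // onComplete
  true,                 // start immediately
  "UTC"                 // time zone
);
```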
## Database Interactions
- **Templates and Schedules:**
  - Templates are stored in `task_recurring_templates`.
  - Schedules are stored in `task_recurring_schedules`.
  - The job joins these tables to get all necessary data for task creation.
- **Task Creation:**
  - Uses a stored procedure `create_quick_task` to insert new tasks.
  - Assigns team members and labels by calling appropriate functions/controllers.
- **State Tracking:**
  - Updates `last_checked_at` and `last_created_task_end_date` in the schedule after processing (see the sketch after this list).
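For illustration, the state-tracking update might look like the hedged sketch below. The table and column names come from this document; the `db` wrapper, function name, and parameter layout are assumptions.

```typescript
import db from "../config/db"; // the pg wrapper referenced under "References" below

// Hedged sketch: record when a schedule was last processed and the end date
// of the task that was just created for it.
async function markScheduleProcessed(scheduleId: string, lastEndDate: Date) {
  await db.query(
    `UPDATE task_recurring_schedules
        SET last_checked_at            = NOW(),
            last_created_task_end_date = $2
      WHERE id = $1`,
    [scheduleId, lastEndDate]
  );
}
```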
## Task Creation Process
1. **Fetch Templates:** Retrieve all templates and their associated schedules.
2. **Determine Next Occurrence:** Use the last task's end date or the schedule's creation date to calculate the next due date.
3. **Check for Existing Task:** Ensure no duplicate task is created for the same schedule and date.
4. **Create Task:**
   - Insert the new task using the template's data.
   - Assign team members and labels as specified.
5. **Update Schedule:** Record the last checked and created dates for accurate future runs.

## Configuration & Extension Points
- **Cron Expression:** Modify the `TIME` constant in the code to change the schedule.
- **Task Template Structure:** Extend the template or schedule interfaces to support additional fields.
- **Task Creation Logic:** Customize the task creation process or add new assignment/labeling logic as needed.

## Error Handling
- Errors are logged using the `log_error` utility.
- The job continues processing other templates even if one fails.

## References
- Source: `src/cron_jobs/recurring-tasks.ts`
- Utilities: `src/shared/utils.ts`
- Database: `src/config/db.ts`
- Controllers: `src/controllers/tasks-controller.ts`

---
For further customization or troubleshooting, refer to the source code and update the documentation as needed.
**docs/task-progress-guide-for-users.md** (new file, +223)

# WorkLenz Task Progress Guide for Users

## Introduction
WorkLenz offers three different ways to track and calculate task progress, each designed for different project management needs. This guide explains how each method works and when to use them.

## Default Progress Method

WorkLenz uses a simple completion-based approach as the default progress calculation method. This method is applied when no special progress methods are enabled.

### Example

If you have a parent task with four subtasks and two of the subtasks are marked complete:
- Parent task: Not done
- 2 subtasks: Done
- 2 subtasks: Not done

The parent task will show as 40% complete (2 completed out of 5 total tasks, counting the parent itself).
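A minimal sketch of this completion-based formula, using a `defaultProgress` helper invented for illustration:

```typescript
// Default method: the parent and each subtask count once; done = complete.
function defaultProgress(parentDone: boolean, subtasksDone: boolean[]): number {
  const total = subtasksDone.length + 1; // +1 for the parent task itself
  const completed = subtasksDone.filter(Boolean).length + (parentDone ? 1 : 0);
  return (completed / total) * 100;
}

// Reproduces the example above: parent not done, 2 of 4 subtasks done.
console.log(defaultProgress(false, [true, true, false, false])); // 40
```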
## Available Progress Tracking Methods

WorkLenz provides these progress tracking methods:

1. **Manual Progress** - Directly input progress percentages for tasks
2. **Weighted Progress** - Assign importance levels (weights) to tasks
3. **Time-based Progress** - Calculate progress based on estimated time

Only one method can be enabled at a time for a project. If none are enabled, progress will be calculated based on task completion status.

## How to Select a Progress Method

1. Open the project drawer by clicking on the project settings icon or creating a new project
2. In the project settings, find the "Progress Calculation Method" section
3. Select your preferred method
4. Save your changes

## Manual Progress Method

### How It Works

- You directly enter progress percentages (0-100%) for tasks without subtasks
- Parent task progress is calculated as the average of all subtask progress values
- Progress is updated in real-time as you adjust values

### When to Use Manual Progress

- For creative or subjective work where completion can't be measured objectively
- When task progress doesn't follow a linear path
- For projects where team members need flexibility in reporting progress

### Example

If you have a parent task with three subtasks:
- Subtask A: 30% complete
- Subtask B: 60% complete
- Subtask C: 90% complete

The parent task will show as 60% complete (average of 30%, 60%, and 90%).

## Weighted Progress Method

### How It Works

- You assign "weight" values to tasks to indicate their importance
- More important tasks have higher weights and influence the overall progress more
- You still enter manual progress percentages for tasks without subtasks
- Parent task progress is calculated using a weighted average

### When to Use Weighted Progress

- When some tasks are more important or time-consuming than others
- For projects where all tasks aren't equal
- When you want key deliverables to have more impact on overall progress

### Example

If you have a parent task with three subtasks:
- Subtask A: 50% complete, Weight 60% (important task)
- Subtask B: 75% complete, Weight 20% (less important task)
- Subtask C: 25% complete, Weight 100% (critical task)

The parent task will be approximately 39% complete, with Subtask C having the greatest impact due to its higher weight.

### Important Notes About Weights

- Default weight is 100% if not specified
- Weights range from 0% to 100%
- Setting a weight to 0% removes that task from progress calculations
- Only explicitly set weights for tasks that should have different importance
- Weights are only relevant for subtasks, not for independent tasks

### Detailed Weighted Progress Calculation Example

To understand how weighted progress works with different weight values, consider this example:

For a parent task with two subtasks:
- Subtask A: 80% complete, Weight 50%
- Subtask B: 40% complete, Weight 100%

The calculation works as follows:

1. Each subtask's contribution is: (weight × progress) ÷ (sum of all weights)
2. For Subtask A: (50 × 80%) ÷ (50 + 100) = 26.7%
3. For Subtask B: (100 × 40%) ÷ (50 + 100) = 26.7%
4. Total parent progress: 26.7% + 26.7% = 53.3%

The parent task would be approximately 53% complete.

This shows how the subtask with twice the weight (Subtask B) has twice the influence on the overall progress calculation, even though it has a lower completion percentage.
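A minimal sketch of this weighted-average rule, using a `weightedProgress` helper invented for illustration:

```typescript
// Weighted method: each subtask contributes weight * progress, normalized
// by the total weight.
interface Subtask {
  progress: number; // percent, 0-100
  weight: number;   // percent, 0-100 (defaults to 100 in WorkLenz)
}

function weightedProgress(subtasks: Subtask[]): number {
  const totalWeight = subtasks.reduce((sum, s) => sum + s.weight, 0);
  if (totalWeight === 0) return 0; // all-zero-weight edge case
  const weighted = subtasks.reduce((sum, s) => sum + s.weight * s.progress, 0);
  return weighted / totalWeight;
}

// Reproduces the detailed example: A = 80% at weight 50, B = 40% at weight 100.
console.log(weightedProgress([
  { progress: 80, weight: 50 },
  { progress: 40, weight: 100 },
])); // ≈ 53.33
```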
## Time-based Progress Method

### How It Works

- Uses the task's time estimate as its "weight" in the progress calculation
- You still enter manual progress percentages for tasks without subtasks
- Tasks with longer time estimates have more influence on overall progress
- Parent task progress is calculated based on time-weighted averages

### When to Use Time-based Progress

- For projects with well-defined time estimates
- When task importance correlates with its duration
- For billing or time-tracking focused projects
- When you already maintain accurate time estimates

### Example

If you have a parent task with three subtasks:
- Subtask A: 40% complete, Estimated Time 2.5 hours
- Subtask B: 80% complete, Estimated Time 1 hour
- Subtask C: 10% complete, Estimated Time 4 hours

The parent task will be approximately 29% complete, with the lengthy Subtask C pulling down the overall progress despite Subtask B being mostly complete.

### Important Notes About Time Estimates

- Tasks without time estimates don't influence progress calculations
- Time is converted to minutes internally (a 2-hour task = 120 minutes)
- Setting a time estimate to 0 removes that task from progress calculations
- Time estimates serve dual purposes: scheduling/resource planning and progress weighting

### Detailed Time-based Progress Calculation Example

To understand how time-based progress works with different time estimates, consider this example:

For a parent task with three subtasks:
- Subtask A: 40% complete, Estimated Time 2.5 hours
- Subtask B: 80% complete, Estimated Time 1 hour
- Subtask C: 10% complete, Estimated Time 4 hours

The calculation works as follows:

1. Convert hours to minutes: A = 150 min, B = 60 min, C = 240 min
2. Total estimated time: 150 + 60 + 240 = 450 minutes
3. Each subtask's contribution is: (time estimate × progress) ÷ (total time)
4. For Subtask A: (150 × 40%) ÷ 450 = 13.3%
5. For Subtask B: (60 × 80%) ÷ 450 = 10.7%
6. For Subtask C: (240 × 10%) ÷ 450 = 5.3%
7. Total parent progress: 13.3% + 10.7% + 5.3% = 29.3%

The parent task would be approximately 29% complete.

This demonstrates how tasks with longer time estimates (like Subtask C) have more influence on the overall progress calculation. Even though Subtask B is 80% complete, its shorter time estimate means it contributes less to the overall progress than the partially-completed but longer Subtask A.
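The same calculation in code, as a minimal sketch with an illustrative `timeBasedProgress` helper (estimates entered in hours and converted to minutes, as described above):

```typescript
// Time-based method: the estimate in minutes acts as the weight.
interface TimedSubtask {
  progress: number;       // percent, 0-100
  estimatedHours: number; // converted to minutes internally
}

function timeBasedProgress(subtasks: TimedSubtask[]): number {
  const minutes = subtasks.map(s => s.estimatedHours * 60);
  const total = minutes.reduce((a, b) => a + b, 0);
  if (total === 0) return 0; // tasks without estimates don't contribute
  const weighted = subtasks.reduce((sum, s, i) => sum + minutes[i] * s.progress, 0);
  return weighted / total;
}

// Reproduces the detailed example: 2.5 h at 40%, 1 h at 80%, 4 h at 10%.
console.log(timeBasedProgress([
  { progress: 40, estimatedHours: 2.5 },
  { progress: 80, estimatedHours: 1 },
  { progress: 10, estimatedHours: 4 },
])); // ≈ 29.33
```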
## Default Progress Method (When No Special Method Is Enabled)

### How It Works

- Tasks are either 0% (not done) or 100% (done)
- Parent task progress = (completed tasks / total tasks) × 100%
- Both the parent task and all subtasks count in this calculation

### When to Use Default Progress

- For simple projects with clear task completion criteria
- When binary task status (done/not done) is sufficient
- For teams new to project management who want simplicity

### Example

If you have a parent task with four subtasks and two of the subtasks are marked complete:
- Parent task: Not done
- 2 subtasks: Done
- 2 subtasks: Not done

The parent task will show as 40% complete (2 completed out of 5 total tasks).

## Best Practices

1. **Choose the Right Method for Your Project**
   - Consider your team's workflow and reporting needs
   - Match the method to your project's complexity

2. **Be Consistent**
   - Stick with one method throughout the project
   - Changing methods mid-project can cause confusion

3. **For Manual Progress**
   - Update progress regularly
   - Establish guidelines for progress reporting

4. **For Weighted Progress**
   - Assign weights based on objective criteria
   - Don't overuse extreme weights

5. **For Time-based Progress**
   - Keep time estimates accurate and up to date
   - Consider using time tracking to validate estimates

## Frequently Asked Questions

**Q: Can I change the progress method mid-project?**
A: Yes, but it may cause progress values to change significantly. It's best to select a method at the project start.

**Q: What happens to task progress when I mark a task complete?**
A: When a task is marked complete, its progress automatically becomes 100%, regardless of the progress method.

**Q: How do I enter progress for a task?**
A: Open the task drawer, go to the Info tab, and use the progress slider for tasks without subtasks.

**Q: Can different projects use different progress methods?**
A: Yes, each project can have its own progress method.

**Q: What if I don't see progress fields in my task drawer?**
A: Progress input is only visible for tasks without subtasks. Parent tasks' progress is automatically calculated.
**docs/task-progress-methods.md** (new file, +550)

# Task Progress Tracking Methods in WorkLenz

## Overview
WorkLenz supports three different methods for tracking task progress, each suitable for different project management approaches:

1. **Manual Progress** - Direct input of progress percentages
2. **Weighted Progress** - Tasks have weights that affect overall progress calculation
3. **Time-based Progress** - Progress calculated based on estimated time vs. time spent

These modes can be selected when creating or editing a project in the project drawer. Only one progress method can be enabled at a time. If none of these methods are enabled, progress will be calculated based on task completion status as described in the "Default Progress Tracking" section below.

## 1. Manual Progress Mode

This mode allows direct input of progress percentages for individual tasks without subtasks.

**Implementation:**
- Enabled by setting `use_manual_progress` to true in the project settings
- Progress is updated through the `on-update-task-progress.ts` socket event handler
- The UI shows a manual progress input slider in the task drawer for tasks without subtasks
- Updates the database with `progress_value` and sets the `manual_progress` flag to true

**Calculation Logic:**
- For tasks without subtasks: Uses the manually set progress value
- For parent tasks: Calculates the average of all subtask progress values
- Subtask progress comes from either manual values or completion status (0% or 100%)

**Code Example:**
```typescript
// Manual progress update via socket.io
socket?.emit(SocketEvents.UPDATE_TASK_PROGRESS.toString(), JSON.stringify({
  task_id: task.id,
  progress_value: value,
  parent_task_id: task.parent_task_id
}));
```

## 2. Weighted Progress Mode

This mode allows assigning different weights to subtasks to reflect their relative importance in the overall task or project progress.

**Implementation:**
- Enabled by setting `use_weighted_progress` to true in the project settings
- Weights are updated through the `on-update-task-weight.ts` socket event handler
- The UI shows a weight input for subtasks in the task drawer
- Manual progress input is still required for tasks without subtasks
- Default weight is 100 if not specified
- Weight values range from 0 to 100%

**Calculation Logic:**
- For tasks without subtasks: Uses the manually entered progress value
- Progress is calculated using a weighted average: `SUM(progress_value * weight) / SUM(weight)`
- This gives more influence to tasks with higher weights
- A parent task's progress is the weighted average of its subtasks' progress values

**Code Example:**
```typescript
// Weight update via socket.io
socket?.emit(SocketEvents.UPDATE_TASK_WEIGHT.toString(), JSON.stringify({
  task_id: task.id,
  weight: value,
  parent_task_id: task.parent_task_id
}));
```

## 3. Time-based Progress Mode

This mode calculates progress based on estimated time vs. actual time spent.

**Implementation:**
- Enabled by setting `use_time_progress` to true in the project settings
- Uses task time estimates (hours and minutes) for calculation
- Manual progress input is still required for tasks without subtasks
- No separate socket handler is needed, as it is calculated automatically

**Calculation Logic:**
- For tasks without subtasks: Uses the manually entered progress value
- Progress is calculated using time as the weight: `SUM(progress_value * estimated_minutes) / SUM(estimated_minutes)`
- For tasks with time tracking, estimated vs. actual time can be factored in
- Parent task progress is weighted by the estimated time of each subtask

**SQL Example:**
```sql
WITH subtask_progress AS (
    SELECT
        CASE
            WHEN manual_progress IS TRUE AND progress_value IS NOT NULL THEN
                progress_value
            ELSE
                CASE
                    WHEN EXISTS(
                        SELECT 1
                        FROM tasks_with_status_view
                        WHERE tasks_with_status_view.task_id = t.id
                          AND is_done IS TRUE
                    ) THEN 100
                    ELSE 0
                END
        END AS progress_value,
        COALESCE(total_hours * 60 + total_minutes, 0) AS estimated_minutes
    FROM tasks t
    WHERE t.parent_task_id = _task_id
      AND t.archived IS FALSE
)
SELECT COALESCE(
           SUM(progress_value * estimated_minutes) / NULLIF(SUM(estimated_minutes), 0),
           0
       )
FROM subtask_progress
INTO _ratio;
```

## Default Progress Tracking (when no special mode is selected)

If no specific progress mode is enabled, the system falls back to a traditional completion-based calculation:

**Implementation:**
- Default mode when all three special modes are disabled
- Based on task completion status only

**Calculation Logic:**
- For tasks without subtasks: 0% if not done, 100% if done
- For parent tasks: `(completed_tasks / total_tasks) * 100`
- Counts both the parent and all subtasks in the calculation

**SQL Example:**
```sql
-- Traditional calculation based on completion status
SELECT (CASE
            WHEN EXISTS(SELECT 1
                        FROM tasks_with_status_view
                        WHERE tasks_with_status_view.task_id = _task_id
                          AND is_done IS TRUE) THEN 1
            ELSE 0 END)
INTO _parent_task_done;

SELECT COUNT(*)
FROM tasks_with_status_view
WHERE parent_task_id = _task_id
  AND is_done IS TRUE
INTO _sub_tasks_done;

_total_completed = _parent_task_done + _sub_tasks_done;
_total_tasks = _sub_tasks_count + 1; -- +1 for the parent task

IF _total_tasks = 0 THEN
    _ratio = 0;
ELSE
    _ratio = (_total_completed / _total_tasks) * 100;
END IF;
```

## Technical Implementation Details

The progress calculation logic is implemented in PostgreSQL functions, primarily in the `get_task_complete_ratio` function. Progress updates flow through the system as follows (a server-side sketch follows this list):

1. **User Action**: User updates task progress or weight in the UI
2. **Socket Event**: Client emits a socket event (UPDATE_TASK_PROGRESS or UPDATE_TASK_WEIGHT)
3. **Server Handler**: Server processes the event in the respective handler function
4. **Database Update**: Progress/weight value is updated in the database
5. **Recalculation**: If needed, parent task progress is recalculated
6. **Broadcast**: Changes are broadcast to all clients in the project room
7. **UI Update**: Client UI updates to reflect the new progress values

This architecture allows for real-time updates and consistent progress calculation across all clients.
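A hedged sketch of steps 3-6 on the server, loosely modeled on the `on-update-task-progress.ts` handler named above; the `db.query` wrapper, the event-name string, and the broadcast scoping are assumptions:

```typescript
import { Server, Socket } from "socket.io";
import db from "../config/db"; // assumed pg pool wrapper (src/config/db.ts)

export async function onUpdateTaskProgress(io: Server, socket: Socket, raw: string) {
  const { task_id, progress_value, parent_task_id } = JSON.parse(raw);

  // Step 4: persist the manual value and flag the task as manually tracked.
  await db.query(
    "UPDATE tasks SET progress_value = $1, manual_progress = TRUE WHERE id = $2",
    [progress_value, task_id]
  );

  // Step 5: if the task has a parent, recalculate its ratio in the database.
  if (parent_task_id) {
    await db.query("SELECT get_task_complete_ratio($1)", [parent_task_id]);
  }

  // Step 6: broadcast the change; the real handler scopes this to the
  // project room rather than all connected clients.
  io.emit("UPDATE_TASK_PROGRESS", { task_id, progress_value, parent_task_id });
}
```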
|
||||
## Manual Progress Input Implementation
|
||||
|
||||
Regardless of which progress tracking method is selected for a project, tasks without subtasks (leaf tasks) require manual progress input. This section details how manual progress input is implemented and used across all progress tracking methods.
|
||||
|
||||
### UI Component
|
||||
|
||||
The manual progress input component is implemented in `worklenz-frontend/src/components/task-drawer/shared/info-tab/details/task-drawer-progress/task-drawer-progress.tsx` and includes:
|
||||
|
||||
1. **Progress Slider**: A slider UI control that allows users to set progress values from 0% to 100%
|
||||
2. **Progress Input Field**: A numeric input field that accepts direct entry of progress percentage
|
||||
3. **Progress Display**: Visual representation of the current progress value
|
||||
|
||||
The component is conditionally rendered in the task drawer for tasks that don't have subtasks.
|
||||
|
||||
**Usage Across Progress Methods:**
|
||||
- In **Manual Progress Mode**: Only the progress slider/input is shown
|
||||
- In **Weighted Progress Mode**: Both the progress slider/input and weight input are shown
|
||||
- In **Time-based Progress Mode**: The progress slider/input is shown alongside time estimate fields
|
||||
|
||||
### Progress Update Flow
|
||||
|
||||
When a user updates a task's progress manually, the following process occurs:
|
||||
|
||||
1. **User Input**: User adjusts the progress slider or enters a value in the input field
|
||||
2. **UI Event Handler**: The UI component captures the change event and validates the input
|
||||
3. **Socket Event Emission**: The component emits a `UPDATE_TASK_PROGRESS` socket event with:
|
||||
```typescript
|
||||
{
|
||||
task_id: task.id,
|
||||
progress_value: value, // The new progress value (0-100)
|
||||
parent_task_id: task.parent_task_id // For recalculation
|
||||
}
|
||||
```
|
||||
4. **Server Processing**: The socket event handler on the server:
|
||||
- Updates the task's `progress_value` in the database
|
||||
- Sets the `manual_progress` flag to true
|
||||
- Triggers recalculation of parent task progress
|
||||
|
||||
### Progress Calculation Across Methods
|
||||
|
||||
The calculation of progress differs based on the active progress method:
|
||||
|
||||
1. **For Leaf Tasks (no subtasks)** in all methods:
|
||||
- Progress is always the manually entered value (`progress_value`)
|
||||
- If the task is marked as completed, progress is automatically set to 100%
|
||||
|
||||
2. **For Parent Tasks**:
|
||||
- **Manual Progress Mode**: Simple average of all subtask progress values
|
||||
- **Weighted Progress Mode**: Weighted average where each subtask's progress is multiplied by its weight
|
||||
- **Time-based Progress Mode**: Weighted average where each subtask's progress is multiplied by its estimated time
|
||||
- **Default Mode**: Percentage of completed tasks (including parent) vs. total tasks
|
||||
|
||||

### Detailed Calculation for Weighted Progress Method

In Weighted Progress mode, both the manual progress input and weight assignment are critical components:

1. **Manual Progress Input**:
   - For leaf tasks (without subtasks), users must manually input progress percentages (0-100%)
   - If a leaf task is marked as complete, its progress is automatically set to 100%
   - If a leaf task's progress is not manually set, it defaults to 0% (or 100% if completed)

2. **Weight Assignment**:
   - Each task can be assigned a weight value between 0-100% (default 100% if not specified)
   - Higher weight values give tasks more influence in parent task progress calculations
   - A weight of 0% means the task doesn't contribute to the parent's progress calculation

3. **Parent Task Calculation**:
   The weighted progress formula is:

   ```
   ParentProgress = ∑(SubtaskProgress * SubtaskWeight) / ∑(SubtaskWeight)
   ```

   **Example Calculation**:
   Consider a parent task with three subtasks:
   - Subtask A: Progress 50%, Weight 60%
   - Subtask B: Progress 75%, Weight 20%
   - Subtask C: Progress 25%, Weight 100%

   Calculation:
   ```
   ParentProgress = ((50 * 60) + (75 * 20) + (25 * 100)) / (60 + 20 + 100)
   ParentProgress = (3000 + 1500 + 2500) / 180
   ParentProgress = 7000 / 180
   ParentProgress = 38.89%
   ```

   Notice that Subtask C, despite having the lowest progress, has a significant impact on the parent task's progress due to its higher weight.

4. **Zero Weight Handling**:
   Tasks with zero weight are excluded from the calculation:
   - Subtask A: Progress 40%, Weight 50%
   - Subtask B: Progress 80%, Weight 0%

   Calculation:
   ```
   ParentProgress = ((40 * 50) + (80 * 0)) / (50 + 0)
   ParentProgress = 2000 / 50
   ParentProgress = 40%
   ```

   In this case, only Subtask A influences the parent task's progress because Subtask B has a weight of 0%.

5. **Default Weight Behavior**:
   When weights aren't explicitly assigned to some tasks:
   - Subtask A: Progress 30%, Weight 60% (explicitly set)
   - Subtask B: Progress 70%, Weight not set (defaults to 100%)
   - Subtask C: Progress 90%, Weight not set (defaults to 100%)

   Calculation:
   ```
   ParentProgress = ((30 * 60) + (70 * 100) + (90 * 100)) / (60 + 100 + 100)
   ParentProgress = (1800 + 7000 + 9000) / 260
   ParentProgress = 17800 / 260
   ParentProgress = 68.46%
   ```

   Note that Subtasks B and C have more influence than Subtask A because they carry higher default weights.

6. **All Zero Weights Edge Case**:
   If all subtasks have zero weight, the naive formula is undefined:
   ```
   ParentProgress = SUM(progress_value * 0) / SUM(0) = 0 / 0 = undefined
   ```

   The SQL implementation handles this with `NULLIF` and `COALESCE` to return 0% in this case.

7. **Actual SQL Implementation**:
   The database function implements the weighted calculation as follows:

   ```sql
   WITH subtask_progress AS (
       SELECT
           CASE
               -- If the subtask has manual progress, use that value
               WHEN manual_progress IS TRUE AND progress_value IS NOT NULL THEN
                   progress_value
               -- Otherwise use completion status (0 or 100)
               ELSE
                   CASE
                       WHEN EXISTS(
                           SELECT 1
                           FROM tasks_with_status_view
                           WHERE tasks_with_status_view.task_id = t.id
                             AND is_done IS TRUE
                       ) THEN 100
                       ELSE 0
                   END
           END AS progress_value,
           COALESCE(weight, 100) AS weight
       FROM tasks t
       WHERE t.parent_task_id = _task_id
         AND t.archived IS FALSE
   )
   SELECT COALESCE(
       SUM(progress_value * weight) / NULLIF(SUM(weight), 0),
       0
   )
   FROM subtask_progress
   INTO _ratio;
   ```

   This SQL implementation:
   - Gets all non-archived subtasks of the parent task
   - For each subtask, determines its progress value:
     - If manual progress is set, uses that value
     - Otherwise, uses 100% if the task is done or 0% if not
   - Uses `COALESCE` to default the weight to 100 if not specified
   - Calculates the weighted average, handling the case where the sum of weights might be zero
   - Returns 0 if there are no subtasks with weights
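For readers more comfortable with application code, here is an equivalent sketch of the same rule in TypeScript. It is illustrative only; the interface and function names are not part of the codebase.

```typescript
// Illustrative re-statement of the SQL above; names are hypothetical.
interface Subtask {
  progressValue: number | null; // manually entered 0-100, or null
  manualProgress: boolean;
  isDone: boolean;
  weight: number | null;        // null means "not set"
}

function weightedProgress(subtasks: Subtask[]): number {
  let weightedSum = 0;
  let weightTotal = 0;
  for (const st of subtasks) {
    // The manual value wins; otherwise completion status maps to 0 or 100.
    const progress = st.manualProgress && st.progressValue !== null
      ? st.progressValue
      : (st.isDone ? 100 : 0);
    const weight = st.weight ?? 100; // COALESCE(weight, 100)
    weightedSum += progress * weight;
    weightTotal += weight;
  }
  // Equivalent of NULLIF(SUM(weight), 0) wrapped in COALESCE(..., 0).
  return weightTotal === 0 ? 0 : weightedSum / weightTotal;
}
```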

### Detailed Calculation for Time-based Progress Method

In Time-based Progress mode, the task's estimated time serves as its weight in progress calculations:

1. **Manual Progress Input**:
   - As with weighted progress, leaf tasks require manual progress input
   - Progress is entered as a percentage (0-100%)
   - Completed tasks are automatically set to 100% progress

2. **Time Estimation**:
   - Each task has an estimated time in hours and minutes
   - These values are stored in the `total_hours` and `total_minutes` fields
   - Time estimates effectively function as weights in progress calculations
   - Tasks with longer estimated durations have more influence on parent task progress
   - Tasks with zero or no time estimate don't contribute to the parent's progress calculation

3. **Parent Task Calculation**:
   The time-based progress formula is:

   ```
   ParentProgress = ∑(SubtaskProgress * SubtaskEstimatedMinutes) / ∑(SubtaskEstimatedMinutes)
   ```

   where `SubtaskEstimatedMinutes = (SubtaskHours * 60) + SubtaskMinutes`

   **Example Calculation**:
   Consider a parent task with three subtasks:
   - Subtask A: Progress 40%, Estimated Time 2h 30m (150 minutes)
   - Subtask B: Progress 80%, Estimated Time 1h (60 minutes)
   - Subtask C: Progress 10%, Estimated Time 4h (240 minutes)

   Calculation:
   ```
   ParentProgress = ((40 * 150) + (80 * 60) + (10 * 240)) / (150 + 60 + 240)
   ParentProgress = (6000 + 4800 + 2400) / 450
   ParentProgress = 13200 / 450
   ParentProgress = 29.33%
   ```

   Note how Subtask C, with its large time estimate, significantly pulls down the overall progress despite Subtask B being mostly complete.

4. **Zero Time Estimate Handling**:
   Tasks with a zero time estimate are excluded from the calculation:
   - Subtask A: Progress 40%, Estimated Time 3h (180 minutes)
   - Subtask B: Progress 80%, Estimated Time 0h (0 minutes)

   Calculation:
   ```
   ParentProgress = ((40 * 180) + (80 * 0)) / (180 + 0)
   ParentProgress = 7200 / 180
   ParentProgress = 40%
   ```

   In this case, only Subtask A influences the parent task's progress because Subtask B has no time estimate.

5. **All Zero Time Estimates Edge Case**:
   If all subtasks have zero time estimates, the naive formula is undefined:
   ```
   ParentProgress = SUM(progress_value * 0) / SUM(0) = 0 / 0 = undefined
   ```

   The SQL implementation handles this with `NULLIF` and `COALESCE` to return 0% in this case.

6. **Actual SQL Implementation**:
   The SQL function for this calculation first converts hours to minutes for a consistent unit of measurement:

   ```sql
   WITH subtask_progress AS (
       SELECT
           CASE
               -- If the subtask has manual progress, use that value
               WHEN manual_progress IS TRUE AND progress_value IS NOT NULL THEN
                   progress_value
               -- Otherwise use completion status (0 or 100)
               ELSE
                   CASE
                       WHEN EXISTS(
                           SELECT 1
                           FROM tasks_with_status_view
                           WHERE tasks_with_status_view.task_id = t.id
                             AND is_done IS TRUE
                       ) THEN 100
                       ELSE 0
                   END
           END AS progress_value,
           COALESCE(total_hours * 60 + total_minutes, 0) AS estimated_minutes
       FROM tasks t
       WHERE t.parent_task_id = _task_id
         AND t.archived IS FALSE
   )
   SELECT COALESCE(
       SUM(progress_value * estimated_minutes) / NULLIF(SUM(estimated_minutes), 0),
       0
   )
   FROM subtask_progress
   INTO _ratio;
   ```

   This implementation:
   - Gets all non-archived subtasks of the parent task
   - Determines each subtask's progress value (manual or completion-based)
   - Calculates total minutes by converting hours to minutes and adding the two together
   - Uses `COALESCE` to treat NULL time estimates as 0 minutes
   - Uses `NULLIF` to handle cases where all time estimates are zero
   - Returns 0% progress if there are no subtasks with time estimates
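In application code, this mode can reuse the weighted sketch above with estimated minutes as the weight; again, the names are illustrative:

```typescript
// Sketch: derive the weight from the time estimate, then reuse weightedProgress().
const estimatedMinutes = (t: { totalHours: number | null; totalMinutes: number | null }): number =>
  (t.totalHours ?? 0) * 60 + (t.totalMinutes ?? 0);

// e.g. subtasks.map(t => ({ ...t, weight: estimatedMinutes(t) })) before averaging
```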

### Common Implementation Considerations

For both weighted and time-based progress calculation:

1. **Null Handling**:
   - Tasks with NULL progress values are treated as 0% progress (unless completed)
   - Tasks with NULL weights default to 100 in weighted mode
   - Tasks with NULL time estimates are treated as 0 minutes in time-based mode

2. **Progress Propagation**:
   - When a leaf task's progress changes, all ancestor tasks are recalculated
   - Progress updates are propagated through socket events to all connected clients
   - The recalculation happens server-side to ensure consistency

3. **Edge Cases**:
   - If all subtasks have zero weight/time, the calculation returns 0%, via the `NULLIF`/`COALESCE` guard shown above
   - If a parent task has no subtasks, its own manual progress value is used
   - If a task is archived, it's excluded from parent task calculations

### Database Implementation

The manual progress value is stored in the `tasks` table with these relevant fields:

```sql
tasks (
    -- other fields
    progress_value FLOAT,       -- The manually entered progress value (0-100)
    manual_progress BOOLEAN,    -- Flag indicating whether progress was manually set
    weight INTEGER DEFAULT 100, -- For weighted progress calculation
    total_hours INTEGER,        -- For time-based progress calculation
    total_minutes INTEGER       -- For time-based progress calculation
)
```
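For illustration, a manual update ultimately reduces to a write of this shape against those fields (the value and task id are placeholders):

```sql
-- Illustrative only: persist a manually entered value and flag it as manual.
UPDATE tasks
SET progress_value  = 65,
    manual_progress = TRUE
WHERE id = '<task-id>';
```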

### Integration with Parent Task Calculation

When a subtask's progress is updated manually, the parent task's progress is automatically recalculated based on the active progress method:

```typescript
// Pseudocode for parent task recalculation
function recalculateParentTaskProgress(taskId, parentTaskId) {
  if (!parentTaskId) return;

  // Get project settings to determine the active progress method
  const project = getProjectByTaskId(taskId);

  if (project.use_manual_progress) {
    // Calculate the average of all subtask progress values
    updateParentProgress(parentTaskId, calculateAverageProgress(parentTaskId));
  } else if (project.use_weighted_progress) {
    // Calculate a weighted average using subtask weights
    updateParentProgress(parentTaskId, calculateWeightedProgress(parentTaskId));
  } else if (project.use_time_progress) {
    // Calculate a weighted average using time estimates
    updateParentProgress(parentTaskId, calculateTimeBasedProgress(parentTaskId));
  } else {
    // Default: calculate based on task completion
    updateParentProgress(parentTaskId, calculateCompletionBasedProgress(parentTaskId));
  }

  // If this parent has a parent, continue recalculation up the tree
  const grandparentId = getParentTaskId(parentTaskId);
  if (grandparentId) {
    recalculateParentTaskProgress(parentTaskId, grandparentId);
  }
}
```

This recursive approach ensures that changes to any task's progress are properly propagated up the task hierarchy.

## Associated Files and Components

### Backend Files

1. **Socket Event Handlers**:
   - `worklenz-backend/src/socket.io/commands/on-update-task-progress.ts` - Handles manual progress updates
   - `worklenz-backend/src/socket.io/commands/on-update-task-weight.ts` - Handles task weight updates

2. **Database Functions**:
   - `worklenz-backend/database/migrations/20250423000000-subtask-manual-progress.sql` - Contains the `get_task_complete_ratio` function that calculates progress based on the selected method
   - Functions that support project creation/updates with progress mode settings:
     - `create_project`
     - `update_project`

3. **Controllers**:
   - `worklenz-backend/src/controllers/project-workload/workload-gannt-base.ts` - Contains the `calculateTaskCompleteRatio` method
   - `worklenz-backend/src/controllers/projects-controller.ts` - Handles project-level progress calculations

### Frontend Files

1. **Project Configuration**:
   - `worklenz-frontend/src/components/projects/project-drawer/project-drawer.tsx` - Contains the UI for selecting a progress method when creating/editing projects

2. **Progress Visualization Components**:
   - `worklenz-frontend/src/components/project-list/project-list-table/project-list-progress/progress-list-progress.tsx` - Displays project progress
   - `worklenz-frontend/src/pages/projects/project-view-1/taskList/taskListTable/taskListTableCells/TaskProgress.tsx` - Displays task progress
   - `worklenz-frontend/src/pages/projects/projectView/taskList/task-list-table/task-list-table-cells/task-list-progress-cell/task-list-progress-cell.tsx` - Alternative task progress cell

3. **Progress Input Components**:
   - `worklenz-frontend/src/components/task-drawer/shared/info-tab/details/task-drawer-progress/task-drawer-progress.tsx` - Component for inputting task progress/weight

## Choosing the Right Progress Method

Each progress method is suitable for different types of projects:

- **Manual Progress**: Best for creative work where progress is subjective
- **Weighted Progress**: Ideal for projects where some tasks are more significant than others
- **Time-based Progress**: Best suited to projects where time estimates are reliable and important

Project managers can choose the appropriate method when creating or editing a project in the project drawer, based on their team's workflow and project requirements.
185
start.bat
Normal file
@@ -0,0 +1,185 @@
@echo off
echo Starting Worklenz setup... > worklenz_startup.log
echo %DATE% %TIME% >> worklenz_startup.log
echo.
echo " __ __ _ _"
echo " \ \ / / | | | |"
echo " \ \ /\ / /__ _ __| | _| | ___ _ __ ____"
echo " \ \/ \/ / _ \| '__| |/ / |/ _ \ '_ \|_ /"
echo " \ /\ / (_) | | | <| | __/ | | |/ /"
echo " \/ \/ \___/|_| |_|\_\_|\___|_| |_/___|"
echo.
echo W O R K L E N Z
echo.
echo Starting Worklenz Docker Environment...
echo.

REM Check for Docker installation
echo Checking for Docker installation...
where docker >nul 2>>worklenz_startup.log
IF %ERRORLEVEL% NEQ 0 (
    echo [91mWarning: Docker is not installed or not in PATH[0m
    echo Warning: Docker is not installed or not in PATH >> worklenz_startup.log
    echo Please install Docker first: https://docs.docker.com/get-docker/
    echo [93mContinuing for debugging purposes...[0m
) ELSE (
    echo [92m^✓[0m Docker is installed
    echo Docker is installed >> worklenz_startup.log
)

REM Check for docker-compose installation
echo Checking for docker-compose...
where docker-compose >nul 2>>worklenz_startup.log
IF %ERRORLEVEL% NEQ 0 (
    echo [91mWarning: docker-compose is not installed or not in PATH[0m
    echo Warning: docker-compose is not installed or not in PATH >> worklenz_startup.log
    echo [93mContinuing for debugging purposes...[0m
) ELSE (
    echo [92m^✓[0m docker-compose is installed
    echo docker-compose is installed >> worklenz_startup.log
)

REM Check for update-docker-env.sh
IF EXIST update-docker-env.sh (
    echo [94mFound update-docker-env.sh script. You can use it to update environment variables.[0m
    echo Found update-docker-env.sh script >> worklenz_startup.log
)

REM Run preflight checks
echo Running Docker daemon check...
docker info >nul 2>>worklenz_startup.log
IF %ERRORLEVEL% NEQ 0 (
    echo [91mWarning: Docker daemon is not running[0m
    echo Warning: Docker daemon is not running >> worklenz_startup.log
    echo Please start Docker and try again
    echo [93mContinuing for debugging purposes...[0m
) ELSE (
    echo [92m^✓[0m Docker daemon is running
    echo Docker daemon is running >> worklenz_startup.log
)

REM Stop any running containers
echo Stopping any running containers...
docker-compose down > nul 2>>worklenz_startup.log
IF %ERRORLEVEL% NEQ 0 (
    echo [91mWarning: Error stopping containers[0m
    echo Warning: Error stopping containers >> worklenz_startup.log
    echo [93mContinuing anyway...[0m
)

REM Start the containers
echo Starting containers...
echo Attempting to start containers... >> worklenz_startup.log

REM Start with docker-compose
docker-compose up -d > docker_up_output.txt 2>&1
type docker_up_output.txt >> worklenz_startup.log

REM Check for errors in the output
findstr /C:"Error" docker_up_output.txt > nul
IF %ERRORLEVEL% EQU 0 (
    echo [91mErrors detected during startup[0m
    echo Errors detected during startup >> worklenz_startup.log
    type docker_up_output.txt
)

del docker_up_output.txt > nul 2>&1

REM Wait for services to be ready
echo Waiting for services to start...
timeout /t 10 /nobreak > nul
echo After timeout, checking services >> worklenz_startup.log

REM Check service status using docker-compose
echo Checking service status...
echo Checking service status... >> worklenz_startup.log
docker-compose ps --services --filter "status=running" > running_services.txt 2>>worklenz_startup.log

REM Log the services output
type running_services.txt >> worklenz_startup.log

echo.
echo Checking individual services:
echo Checking individual services: >> worklenz_startup.log

REM Check frontend
findstr /C:"frontend" running_services.txt > nul
IF %ERRORLEVEL% EQU 0 (
    echo [92m^✓[0m Frontend is running
    echo Frontend URL: http://localhost:5000 ^(or https://localhost:5000 if SSL is enabled^)
    echo Frontend is running >> worklenz_startup.log
) ELSE (
    echo [91m^✗[0m Frontend service failed to start
    echo Frontend service failed to start >> worklenz_startup.log
)

REM Check backend
findstr /C:"backend" running_services.txt > nul
IF %ERRORLEVEL% EQU 0 (
    echo [92m^✓[0m Backend is running
    echo Backend URL: http://localhost:3000 ^(or https://localhost:3000 if SSL is enabled^)
    echo Backend is running >> worklenz_startup.log
) ELSE (
    echo [91m^✗[0m Backend service failed to start
    echo Backend service failed to start >> worklenz_startup.log
)

REM Check MinIO
findstr /C:"minio" running_services.txt > nul
IF %ERRORLEVEL% EQU 0 (
    echo [92m^✓[0m MinIO is running
    echo MinIO Console URL: http://localhost:9001 ^(login: minioadmin/minioadmin^)
    echo MinIO is running >> worklenz_startup.log
) ELSE (
    echo [91m^✗[0m MinIO service failed to start
    echo MinIO service failed to start >> worklenz_startup.log

    REM Check MinIO logs
    echo Checking MinIO logs for errors:
    docker-compose logs minio --tail=20 > minio_logs.txt
    type minio_logs.txt
    type minio_logs.txt >> worklenz_startup.log
    del minio_logs.txt > nul 2>&1
)

REM Check Database
findstr /C:"db" running_services.txt > nul
IF %ERRORLEVEL% EQU 0 (
    echo [92m^✓[0m Database is running
    echo Database is running >> worklenz_startup.log
) ELSE (
    echo [91m^✗[0m Database service failed to start
    echo Database service failed to start >> worklenz_startup.log
)

REM Check if all services are running
set allRunning=1
docker-compose ps --services | findstr /V /C:"frontend" /C:"backend" /C:"minio" /C:"db" > remaining_services.txt
FOR /F "tokens=*" %%s IN (remaining_services.txt) DO (
    findstr /C:"%%s" running_services.txt > nul || set allRunning=0
)

REM Clean up temp files only after the loop above, which still reads running_services.txt
del remaining_services.txt > nul 2>&1
del running_services.txt > nul 2>&1

IF %allRunning% EQU 1 (
    echo.
    echo [92mWorklenz setup completed![0m
    echo Setup completed successfully >> worklenz_startup.log
) ELSE (
    echo.
    echo [93mWarning: Some services may not be running correctly.[0m
    echo Warning: Some services may not be running correctly >> worklenz_startup.log
    echo Run 'docker-compose logs' to check for errors.
)

echo You can access the application at: http://localhost:5000
echo To stop the services, run: stop.bat
echo To update environment variables, run: update-docker-env.sh
echo.
echo Note: To enable SSL, set ENABLE_SSL=true in your .env file and run update-docker-env.sh
echo.
echo For any errors, check the worklenz_startup.log file
echo.
echo Press any key to exit...
pause > nul
151
start.sh
Executable file
@@ -0,0 +1,151 @@
#!/bin/bash

# Colors for terminal output
GREEN='\033[0;32m'
YELLOW='\033[1;33m'
RED='\033[0;31m'
BLUE='\033[0;34m'
NC='\033[0m' # No Color

# Print banner
echo -e "${GREEN}"
echo " __ __ _ _"
echo " \ \ / / | | | |"
echo " \ \ /\ / /__ _ __| | _| | ___ _ __ ____"
echo " \ \/ \/ / _ \| '__| |/ / |/ _ \ '_ \|_ /"
echo " \ /\ / (_) | | | <| | __/ | | |/ /"
echo " \/ \/ \___/|_| |_|\_\_|\___|_| |_/___|"
echo ""
echo " W O R K L E N Z "
echo -e "${NC}"
echo "Starting Worklenz Docker Environment..."

# Check if update-docker-env.sh exists and is executable
if [ -f update-docker-env.sh ] && [ -x update-docker-env.sh ]; then
    echo -e "${BLUE}Found update-docker-env.sh script. You can use it to update environment variables.${NC}"
fi

# Function to check whether a service is running
check_service() {
    local service_name=$1
    local container_name=$2
    local url=$3
    local max_attempts=30
    local attempt=1

    echo -e "${BLUE}Checking ${service_name} service...${NC}"

    # First check if the container is running
    while [ $attempt -le $max_attempts ]; do
        if docker ps | grep -q "${container_name}"; then
            # Container is running
            if [ -z "$url" ]; then
                # No URL to check, assume the service is up
                echo -e "${GREEN}✓${NC} ${service_name} is running"
                return 0
            else
                # Check if the service endpoint is responding
                if curl -s -f -o /dev/null "$url"; then
                    echo -e "${GREEN}✓${NC} ${service_name} is running and responding at ${url}"
                    return 0
                else
                    if [ $attempt -eq $max_attempts ]; then
                        echo -e "${YELLOW}⚠${NC} ${service_name} container is running but not responding at ${url}"
                        return 1
                    fi
                fi
            fi
        else
            if [ $attempt -eq $max_attempts ]; then
                echo -e "${RED}✗${NC} ${service_name} failed to start"
                return 1
            fi
        fi

        echo -n "."
        attempt=$((attempt+1))
        sleep 1
    done

    return 1
}

# Check if Docker is installed
if ! command -v docker &> /dev/null; then
    echo -e "${RED}Error: Docker is not installed or not in PATH${NC}"
    echo "Please install Docker first: https://docs.docker.com/get-docker/"
    exit 1
fi

# Check if the Docker daemon is running
echo -e "${BLUE}Running preflight checks...${NC}"
if ! docker info &> /dev/null; then
    echo -e "${RED}Error: Docker daemon is not running${NC}"
    echo "Please start Docker and try again"
    exit 1
fi
echo -e "${GREEN}✓${NC} Docker is running"

# Determine which Docker Compose command to use
# (note: `docker compose version` is used here, since `command -v` cannot test a subcommand)
DOCKER_COMPOSE_CMD=""
if docker compose version &> /dev/null; then
    DOCKER_COMPOSE_CMD="docker compose"
    echo -e "${GREEN}✓${NC} Using Docker Compose V2"
elif command -v docker-compose &> /dev/null; then
    DOCKER_COMPOSE_CMD="docker-compose"
    echo -e "${YELLOW}⚠${NC} Using legacy Docker Compose"
else
    echo -e "${RED}Error: Docker Compose is not installed or not in PATH${NC}"
    echo "Please install Docker Compose: https://docs.docker.com/compose/install/"
    exit 1
fi

# Check if any of the ports are already in use
ports=(3000 5000 9000 9001 5432)
for port in "${ports[@]}"; do
    if lsof -i:"$port" > /dev/null 2>&1; then
        echo -e "${YELLOW}⚠ Warning: Port $port is already in use. This may cause conflicts.${NC}"
    fi
done

# Start the containers
echo -e "${BLUE}Starting Worklenz services...${NC}"
$DOCKER_COMPOSE_CMD down
$DOCKER_COMPOSE_CMD up -d

# Wait for services to fully initialize
echo -e "${BLUE}Waiting for services to initialize...${NC}"
echo "This may take a minute or two depending on your system..."

# Check each service
check_service "Database" "worklenz_db" ""
DB_STATUS=$?

check_service "MinIO" "worklenz_minio" "http://localhost:9000/minio/health/live"
MINIO_STATUS=$?

check_service "Backend" "worklenz_backend" "http://localhost:3000/public/health"
BACKEND_STATUS=$?

check_service "Frontend" "worklenz_frontend" "http://localhost:5000"
FRONTEND_STATUS=$?

# Display service URLs
echo -e "\n${BLUE}Service URLs:${NC}"
[ $FRONTEND_STATUS -eq 0 ] && echo " • Frontend: http://localhost:5000 (or https://localhost:5000 if SSL is enabled)"
[ $BACKEND_STATUS -eq 0 ] && echo " • Backend API: http://localhost:3000 (or https://localhost:3000 if SSL is enabled)"
[ $MINIO_STATUS -eq 0 ] && echo " • MinIO Console: http://localhost:9001 (login: minioadmin/minioadmin)"

# Check if all services are up
if [ $DB_STATUS -eq 0 ] && [ $MINIO_STATUS -eq 0 ] && [ $BACKEND_STATUS -eq 0 ] && [ $FRONTEND_STATUS -eq 0 ]; then
    echo -e "\n${GREEN}✅ All Worklenz services are running successfully!${NC}"
else
    echo -e "\n${YELLOW}⚠ Some services may not be running properly. Check the logs for more details:${NC}"
    echo " $DOCKER_COMPOSE_CMD logs"
fi

echo -e "\n${BLUE}Useful commands:${NC}"
echo " • View logs: $DOCKER_COMPOSE_CMD logs -f"
echo " • Stop services: ./stop.sh"
echo " • Update environment variables: ./update-docker-env.sh"
echo -e "\n${YELLOW}Note:${NC} To enable SSL, set ENABLE_SSL=true in your .env file and run ./update-docker-env.sh"
7
stop.bat
Normal file
@@ -0,0 +1,7 @@
@echo off
echo [91mStopping Worklenz Docker Environment...[0m

REM Stop the containers
docker-compose down

echo [92mWorklenz services have been stopped.[0m
50
stop.sh
Executable file
@@ -0,0 +1,50 @@
#!/bin/bash

# Colors for terminal output
GREEN='\033[0;32m'
YELLOW='\033[1;33m'
RED='\033[0;31m'
BLUE='\033[0;34m'
NC='\033[0m' # No Color

# Print banner
echo -e "${RED}"
echo " __ __ _ _"
echo " \ \ / / | | | |"
echo " \ \ /\ / /__ _ __| | _| | ___ _ __ ____"
echo " \ \/ \/ / _ \| '__| |/ / |/ _ \ '_ \|_ /"
echo " \ /\ / (_) | | | <| | __/ | | |/ /"
echo " \/ \/ \___/|_| |_|\_\_|\___|_| |_/___|"
echo ""
echo " W O R K L E N Z "
echo -e "${NC}"
echo -e "${BLUE}Stopping Worklenz Docker Environment...${NC}"

# Determine which Docker Compose command to use
# (note: `docker compose version` is used here, since `command -v` cannot test a subcommand)
DOCKER_COMPOSE_CMD=""
if docker compose version &> /dev/null; then
    DOCKER_COMPOSE_CMD="docker compose"
    echo -e "${GREEN}✓${NC} Using Docker Compose V2"
elif command -v docker-compose &> /dev/null; then
    DOCKER_COMPOSE_CMD="docker-compose"
    echo -e "${YELLOW}⚠${NC} Using legacy Docker Compose"
else
    echo -e "${RED}Error: Docker Compose is not installed or not in PATH${NC}"
    echo "Please install Docker Compose: https://docs.docker.com/compose/install/"
    exit 1
fi

# Stop the containers
echo -e "${BLUE}Stopping all services...${NC}"
$DOCKER_COMPOSE_CMD down

# Check whether any containers are still running
if docker ps | grep -q "worklenz_"; then
    echo -e "${YELLOW}⚠ Some Worklenz containers are still running. Forcing stop...${NC}"
    docker stop $(docker ps -q --filter "name=worklenz_")
    echo -e "${GREEN}✓${NC} Forced stop completed."
else
    echo -e "${GREEN}✓${NC} All Worklenz services have been stopped successfully."
fi

echo -e "\n${BLUE}To start Worklenz again, run:${NC} ./start.sh"
244
task-progress-methods.md
Normal file
@@ -0,0 +1,244 @@
# Task Progress Tracking Methods in WorkLenz

## Overview
WorkLenz supports three different methods for tracking task progress, each suited to a different project management approach:

1. **Manual Progress** - Direct input of progress percentages
2. **Weighted Progress** - Tasks have weights that affect the overall progress calculation
3. **Time-based Progress** - Progress weighted by each task's estimated time

These modes can be selected when creating or editing a project in the project drawer.

## 1. Manual Progress Mode

This mode allows direct input of progress percentages for individual tasks without subtasks.

**Implementation:**
- Enabled by setting `use_manual_progress` to true in the project settings
- Progress is updated through the `on-update-task-progress.ts` socket event handler
- The UI shows a manual progress input slider in the task drawer for tasks without subtasks
- Updates the database with `progress_value` and sets the `manual_progress` flag to true

**Calculation Logic:**
- For tasks without subtasks: Uses the manually set progress value
- For parent tasks: Calculates the average of all subtask progress values
- Subtask progress comes from either manual values or completion status (0% or 100%)

**Code Example:**
```typescript
// Manual progress update via socket.io
socket?.emit(SocketEvents.UPDATE_TASK_PROGRESS.toString(), JSON.stringify({
  task_id: task.id,
  progress_value: value,
  parent_task_id: task.parent_task_id
}));
```

### Showing Progress in Subtask Rows

When manual progress is enabled in a project, progress is shown in the following ways:

1. **In Task List Views**:
   - Subtasks display their individual progress values in the progress column
   - Parent tasks display the calculated average progress of all subtasks

2. **Implementation Details**:
   - The progress values are stored in the `progress_value` column in the database
   - For subtasks with manual progress set, the value is shown directly
   - For subtasks without manual progress, the completion status determines the value (0% or 100%)
   - The task view model includes both `progress` and `complete_ratio` properties

**Relevant Components:**
```typescript
// From task-list-progress-cell.tsx
const TaskListProgressCell = ({ task }: TaskListProgressCellProps) => {
  return task.is_sub_task ? null : (
    <Tooltip title={`${task.completed_count || 0} / ${task.total_tasks_count || 0}`}>
      <Progress
        percent={task.complete_ratio || 0}
        type="circle"
        size={24}
        style={{ cursor: 'default' }}
      />
    </Tooltip>
  );
};
```

**Task Progress Calculation in Backend:**
```typescript
// From tasks-controller-base.ts
// For tasks without subtasks, respect manual progress if set
if (task.manual_progress === true && task.progress_value !== null) {
  // For manually set progress, use that value directly
  task.progress = parseInt(task.progress_value);
  task.complete_ratio = parseInt(task.progress_value);
}
```

## 2. Weighted Progress Mode

This mode allows assigning different weights to subtasks to reflect their relative importance in the overall task or project progress.

**Implementation:**
- Enabled by setting `use_weighted_progress` to true in the project settings
- Weights are updated through the `on-update-task-weight.ts` socket event handler
- The UI shows a weight input for subtasks in the task drawer
- The default weight is 100 if not specified

**Calculation Logic:**
- Progress is calculated using a weighted average: `SUM(progress_value * weight) / SUM(weight)`
- This gives more influence to tasks with higher weights
- A parent task's progress is the weighted average of its subtasks' progress (see the worked example below)
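For example, two subtasks at 50% (weight 60) and 100% (weight 20) give:

```
Progress = ((50 * 60) + (100 * 20)) / (60 + 20) = 5000 / 80 = 62.5%
```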

**Code Example:**
```typescript
// Weight update via socket.io
socket?.emit(SocketEvents.UPDATE_TASK_WEIGHT.toString(), JSON.stringify({
  task_id: task.id,
  weight: value,
  parent_task_id: task.parent_task_id
}));
```

## 3. Time-based Progress Mode

This mode calculates progress using each task's estimated time as its weight.

**Implementation:**
- Enabled by setting `use_time_progress` to true in the project settings
- Uses task time estimates (hours and minutes) for the calculation
- No separate socket handler is needed, as it's calculated automatically

**Calculation Logic:**
- Progress is calculated using time as the weight: `SUM(progress_value * estimated_minutes) / SUM(estimated_minutes)`
- For tasks with time tracking, estimated vs. actual time can be factored in
- Parent task progress is weighted by the estimated time of each subtask (see the worked example below)
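For example, a 1h subtask at 100% and a 3h subtask at 0% give:

```
Progress = ((100 * 60) + (0 * 180)) / (60 + 180) = 6000 / 240 = 25%
```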

**SQL Example:**
```sql
WITH subtask_progress AS (
    SELECT
        CASE
            WHEN manual_progress IS TRUE AND progress_value IS NOT NULL THEN
                progress_value
            ELSE
                CASE
                    WHEN EXISTS(
                        SELECT 1
                        FROM tasks_with_status_view
                        WHERE tasks_with_status_view.task_id = t.id
                          AND is_done IS TRUE
                    ) THEN 100
                    ELSE 0
                END
        END AS progress_value,
        COALESCE(total_hours * 60 + total_minutes, 0) AS estimated_minutes
    FROM tasks t
    WHERE t.parent_task_id = _task_id
      AND t.archived IS FALSE
)
SELECT COALESCE(
    SUM(progress_value * estimated_minutes) / NULLIF(SUM(estimated_minutes), 0),
    0
)
FROM subtask_progress
INTO _ratio;
```

## Default Progress Tracking (when no special mode is selected)

If no specific progress mode is enabled, the system falls back to a traditional completion-based calculation:

**Implementation:**
- The default mode when all three special modes are disabled
- Based on task completion status only

**Calculation Logic:**
- For tasks without subtasks: 0% if not done, 100% if done
- For parent tasks: `(completed_tasks / total_tasks) * 100`
- Counts both the parent and all subtasks in the calculation (a short worked example follows)
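For instance, a parent task that is itself not done, with three subtasks of which two are done, gives:

```
_ratio = (_parent_task_done + _sub_tasks_done) / (_sub_tasks_count + 1) * 100
       = (0 + 2) / (3 + 1) * 100
       = 50%
```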

**SQL Example:**
```sql
-- Traditional calculation based on completion status
SELECT (CASE
            WHEN EXISTS(SELECT 1
                        FROM tasks_with_status_view
                        WHERE tasks_with_status_view.task_id = _task_id
                          AND is_done IS TRUE) THEN 1
            ELSE 0 END)
INTO _parent_task_done;

SELECT COUNT(*)
FROM tasks_with_status_view
WHERE parent_task_id = _task_id
  AND is_done IS TRUE
INTO _sub_tasks_done;

_total_completed = _parent_task_done + _sub_tasks_done;
_total_tasks = _sub_tasks_count + 1; -- +1 for the parent task

IF _total_tasks = 0 THEN
    _ratio = 0;
ELSE
    _ratio = (_total_completed / _total_tasks) * 100;
END IF;
```

## Technical Implementation Details

The progress calculation logic is implemented in PostgreSQL functions, primarily in the `get_task_complete_ratio` function. Progress updates flow through the system as follows:

1. **User Action**: The user updates task progress or weight in the UI
2. **Socket Event**: The client emits a socket event (UPDATE_TASK_PROGRESS or UPDATE_TASK_WEIGHT)
3. **Server Handler**: The server processes the event in the respective handler function
4. **Database Update**: The progress/weight value is updated in the database
5. **Recalculation**: If needed, parent task progress is recalculated
6. **Broadcast**: Changes are broadcast to all clients in the project room
7. **UI Update**: The client UI updates to reflect the new progress values

This architecture allows for real-time updates and consistent progress calculation across all clients.

## Associated Files and Components

### Backend Files

1. **Socket Event Handlers**:
   - `worklenz-backend/src/socket.io/commands/on-update-task-progress.ts` - Handles manual progress updates
   - `worklenz-backend/src/socket.io/commands/on-update-task-weight.ts` - Handles task weight updates

2. **Database Functions**:
   - `worklenz-backend/database/migrations/20250423000000-subtask-manual-progress.sql` - Contains the `get_task_complete_ratio` function that calculates progress based on the selected method
   - Functions that support project creation/updates with progress mode settings:
     - `create_project`
     - `update_project`

3. **Controllers**:
   - `worklenz-backend/src/controllers/project-workload/workload-gannt-base.ts` - Contains the `calculateTaskCompleteRatio` method
   - `worklenz-backend/src/controllers/projects-controller.ts` - Handles project-level progress calculations
   - `worklenz-backend/src/controllers/tasks-controller-base.ts` - Handles task progress calculation and updates task view models

### Frontend Files

1. **Project Configuration**:
   - `worklenz-frontend/src/components/projects/project-drawer/project-drawer.tsx` - Contains the UI for selecting a progress method when creating/editing projects

2. **Progress Visualization Components**:
   - `worklenz-frontend/src/components/project-list/project-list-table/project-list-progress/progress-list-progress.tsx` - Displays project progress
   - `worklenz-frontend/src/pages/projects/project-view-1/taskList/taskListTable/taskListTableCells/TaskProgress.tsx` - Displays task progress
   - `worklenz-frontend/src/pages/projects/projectView/taskList/task-list-table/task-list-table-cells/task-list-progress-cell/task-list-progress-cell.tsx` - Alternative task progress cell
   - `worklenz-frontend/src/components/task-list-common/task-row/task-row-progress/task-row-progress.tsx` - Displays progress in task rows

3. **Progress Input Components**:
   - `worklenz-frontend/src/components/task-drawer/shared/info-tab/details/task-drawer-progress/task-drawer-progress.tsx` - Component for inputting task progress/weight

## Choosing the Right Progress Method

Each progress method is suitable for different types of projects:

- **Manual Progress**: Best for creative work where progress is subjective
- **Weighted Progress**: Ideal for projects where some tasks are more significant than others
- **Time-based Progress**: Best suited to projects where time estimates are reliable and important

Project managers can choose the appropriate method when creating or editing a project in the project drawer, based on their team's workflow and project requirements.
141
update-docker-env.sh
Executable file
@@ -0,0 +1,141 @@
#!/bin/bash

# Script to set environment variables for Docker deployment
# Usage: ./update-docker-env.sh [hostname] [use_ssl]

# Default hostname if not provided
DEFAULT_HOSTNAME="localhost"
HOSTNAME=${1:-$DEFAULT_HOSTNAME}

# Check if SSL should be used
USE_SSL=${2:-false}

# Set protocol prefixes based on the SSL flag
if [ "$USE_SSL" = "true" ]; then
    HTTP_PREFIX="https://"
    WS_PREFIX="wss://"
else
    HTTP_PREFIX="http://"
    WS_PREFIX="ws://"
fi

# Frontend URLs
FRONTEND_URL="${HTTP_PREFIX}${HOSTNAME}:5000"
MINIO_DASHBOARD_URL="${HTTP_PREFIX}${HOSTNAME}:9001"

# Create or overwrite the frontend .env.development file
mkdir -p worklenz-frontend
cat > worklenz-frontend/.env.development << EOL
# API Connection
VITE_API_URL=http://localhost:3000
VITE_SOCKET_URL=ws://localhost:3000

# Application Environment
VITE_APP_TITLE=Worklenz
VITE_APP_ENV=development

# Mixpanel
VITE_MIXPANEL_TOKEN=

# Recaptcha
VITE_ENABLE_RECAPTCHA=false
VITE_RECAPTCHA_SITE_KEY=

# Session ID
VITE_WORKLENZ_SESSION_ID=worklenz-session-id
EOL

# Create the frontend .env.production file
cat > worklenz-frontend/.env.production << EOL
# API Connection
VITE_API_URL=${HTTP_PREFIX}${HOSTNAME}:3000
VITE_SOCKET_URL=${WS_PREFIX}${HOSTNAME}:3000

# Application Environment
VITE_APP_TITLE=Worklenz
VITE_APP_ENV=production

# Mixpanel
VITE_MIXPANEL_TOKEN=

# Recaptcha
VITE_ENABLE_RECAPTCHA=false
VITE_RECAPTCHA_SITE_KEY=

# Session ID
VITE_WORKLENZ_SESSION_ID=worklenz-session-id
EOL

# Create the backend environment file
mkdir -p worklenz-backend
cat > worklenz-backend/.env << EOL
# Server
NODE_ENV=production
PORT=3000
SESSION_NAME=worklenz.sid
SESSION_SECRET=change_me_in_production
COOKIE_SECRET=change_me_in_production

# CORS
SOCKET_IO_CORS=${FRONTEND_URL}
SERVER_CORS=${FRONTEND_URL}

# Google Login
GOOGLE_CLIENT_ID="your_google_client_id"
GOOGLE_CLIENT_SECRET="your_google_client_secret"
GOOGLE_CALLBACK_URL="${FRONTEND_URL}/secure/google/verify"
LOGIN_FAILURE_REDIRECT="${FRONTEND_URL}/auth/authenticating"
LOGIN_SUCCESS_REDIRECT="${FRONTEND_URL}/auth/authenticating"

# Database
DB_HOST=db
DB_PORT=5432
DB_USER=postgres
DB_PASSWORD=password
DB_NAME=worklenz_db
DB_MAX_CLIENTS=50
USE_PG_NATIVE=true

# Storage Configuration
STORAGE_PROVIDER=s3
AWS_REGION=us-east-1
AWS_BUCKET=worklenz-bucket
AWS_ACCESS_KEY_ID=minioadmin
AWS_SECRET_ACCESS_KEY=minioadmin
S3_URL=http://minio:9000

# Backend Directories
BACKEND_PUBLIC_DIR=./public
BACKEND_VIEWS_DIR=./views

# Host
HOSTNAME=${HOSTNAME}
FRONTEND_URL=${FRONTEND_URL}

# Email
SOURCE_EMAIL=no-reply@example.com

# Notifications
SLACK_WEBHOOK=

# Other Settings
COMMIT_BUILD_IMMEDIATELY=true

# JWT Secret
JWT_SECRET=change_me_in_production
EOL

echo "Environment configuration updated for ${HOSTNAME} with" $([ "$USE_SSL" = "true" ] && echo "HTTPS/WSS" || echo "HTTP/WS")
echo "Created/updated environment files:"
echo "- worklenz-frontend/.env.development (development)"
echo "- worklenz-frontend/.env.production (production build)"
echo "- worklenz-backend/.env"
echo
echo "To run with Docker Compose, use: docker-compose up -d"
echo
echo "Frontend URL: ${FRONTEND_URL}"
echo "API URL: ${HTTP_PREFIX}${HOSTNAME}:3000"
echo "Socket URL: ${WS_PREFIX}${HOSTNAME}:3000"
echo "MinIO Dashboard URL: ${MINIO_DASHBOARD_URL}"
echo "CORS is configured to allow requests from: ${FRONTEND_URL}"
@@ -2,56 +2,80 @@
 NODE_ENV=development
 PORT=3000
 SESSION_NAME=worklenz.sid
-SESSION_SECRET="YOUR_SESSION_SECRET_HERE"
-COOKIE_SECRET="YOUR_COOKIE_SECRET_HERE"
+SESSION_SECRET="your_session_secret"
+COOKIE_SECRET="your_cookie_secret"

 # CORS
-SOCKET_IO_CORS=http://localhost:4200
+SOCKET_IO_CORS=http://localhost:5000
 SERVER_CORS=*

 # Database
-DB_USER=DATABASE_USER_HERE # default : worklenz_backend (update "user-permission.sql" if needed)
-DB_PASSWORD=DATABASE_PASSWORD_HERE
-DB_NAME=DATABASE_NAME_HERE # default : worklenz_db
-DB_HOST=DATABASE_HOST_HERE # default : localhost
-DB_PORT=DATABASE_PORT_HERE # default : 5432
+DB_USER=postgres
+DB_PASSWORD=your_db_password
+DB_NAME=worklenz_db
+DB_HOST=localhost
+DB_PORT=5432
 DB_MAX_CLIENTS=50

 # Google Login
-GOOGLE_CLIENT_ID="GOOGLE_CLIENT_ID_HERE"
-GOOGLE_CLIENT_SECRET="GOOGLE_CLIENT_SECRET_HERE"
-GOOGLE_CALLBACK_URL="http://localhost:3000/secure/google/verify"
-LOGIN_FAILURE_REDIRECT="/"
-LOGIN_SUCCESS_REDIRECT="http://localhost:4200/auth/authenticate"
+GOOGLE_CLIENT_ID="your_google_client_id"
+GOOGLE_CLIENT_SECRET="your_google_client_secret"
+GOOGLE_CALLBACK_URL="http://localhost:5000/secure/google/verify"
+LOGIN_FAILURE_REDIRECT="http://localhost:5000/auth/authenticating"
+LOGIN_SUCCESS_REDIRECT="http://localhost:5000/auth/authenticating"

 # CLI
-ANGULAR_DIST_DIR="/path/worklenz_frontend/dist/worklenz"
-ANGULAR_SRC_DIR="/path/worklenz_frontend"
-BACKEND_PUBLIC_DIR="/path/worklenz_backend/src/public"
-BACKEND_VIEWS_DIR="/path/worklenz_backend/src/views/admin"
-COMMIT_BUILD_IMMEDIATELY=true
+ANGULAR_DIST_DIR="path/to/frontend/dist"
+ANGULAR_SRC_DIR="path/to/frontend"
+BACKEND_PUBLIC_DIR="path/to/backend/public"
+BACKEND_VIEWS_DIR="path/to/backend/views"
+COMMIT_BUILD_IMMEDIATELY=false

 # HOST
-HOSTNAME=localhost:4200
+HOSTNAME=localhost:5000

 # SLACK
-SLACK_WEBHOOK=SLACK_WEBHOOK_HERE
-USE_PG_NATIVE=true
+SLACK_WEBHOOK=your_slack_webhook_url
+USE_PG_NATIVE=false

 # JWT SECRET
-JWT_SECRET=JWT_SECRET_CODE_HERE
+JWT_SECRET=your_jwt_secret

-# AWS
-AWS_REGION="us-west-2"
-AWS_ACCESS_KEY_ID="AWS_ACCESS_KEY_ID_HERE"
-AWS_SECRET_ACCESS_KEY="AWS_SECRET_ACCESS_KEY_HERE"
+# FRONTEND_URL
+FRONTEND_URL=http://localhost:5000

-# S3 Credentials
-REGION="us-west-2"
-BUCKET="BUCKET_NAME_HERE"
-S3_URL="S3_URL_HERE"
-S3_ACCESS_KEY_ID="S3_ACCESS_KEY_ID_HERE"
-S3_SECRET_ACCESS_KEY="S3_SECRET_ACCESS_KEY_HERE"
+# STORAGE
+STORAGE_PROVIDER=s3 # values s3 or azure

-# SES email
-SOURCE_EMAIL="SOURCE_EMAIL_HERE" #Worklenz <noreply@worklenz.com>
+# AWS - SES
+AWS_REGION="your_aws_region"
+AWS_ACCESS_KEY_ID="your_aws_access_key_id"
+AWS_SECRET_ACCESS_KEY="your_aws_secret_access_key"

+# S3
+S3_REGION="S3_REGION"
+S3_BUCKET="your_s3_bucket"
+S3_URL="your_s3_url"
+S3_ACCESS_KEY_ID="S3_ACCESS_KEY_ID"
+S3_SECRET_ACCESS_KEY="S3_SECRET_ACCESS_KEY"

+# Azure Storage
+AZURE_STORAGE_ACCOUNT_NAME="your_storage_account_name"
+AZURE_STORAGE_CONTAINER="your_storage_container"
+AZURE_STORAGE_ACCOUNT_KEY="your_storage_account_key"
+AZURE_STORAGE_URL="your_storage_url"

+# DIRECTPAY
+DP_STAGE=DEV
+DP_URL=your_url
+DP_MERCHANT_ID=your_merchant_id
+DP_SECRET_KEY=your_secret_key
+DP_API_KEY=your_api_key

+CONTACT_US_EMAIL=support@example.com

+GOOGLE_CAPTCHA_SECRET_KEY=your_captcha_secret_key
+GOOGLE_CAPTCHA_PASS_SCORE=0.8

+# Email Cronjobs
+ENABLE_EMAIL_CRONJOBS=true
@@ -1,5 +1,5 @@
-# Use the official Node.js 18 image as a base
-FROM node:18
+# Use the official Node.js 20 image as a base
+FROM node:20

 # Create and set the working directory
 WORKDIR /usr/src/app
@@ -23,4 +23,4 @@ RUN npm run build
 EXPOSE 3000

 # Start the application
-CMD ["npm", "start"]
+CMD ["npm", "start"]
@@ -1,81 +1,96 @@
 # Worklenz Backend

-1. **Open your IDE:**
+This is the Express.js backend for the Worklenz project management application.

-   Open the project directory in your preferred code editor or IDE like Visual Studio Code.
+## Getting Started

-2. **Configure Environment Variables:**
+Follow these steps to set up the backend for development:

-   - Create a copy of the `.env.template` file and name it `.env`.
-   - Update the required fields in `.env` with the specific information.
+1. **Configure Environment Variables:**

-3. **Restore Database**
-   - Create a new database named `worklenz_db` on your local PostgreSQL server.
-   - Update the `DATABASE_NAME` and `PASSWORD` in the `database/6_user_permission.sql` with your DB credentials.
-   - Open a query console and execute the queries from the .sql files in the `database` directories, following the provided order.
+   - Create a copy of the `.env.example` file and name it `.env`.
+   - Update the required fields in `.env` with your specific configuration.

-4. **Install Dependencies:**
+2. **Set up Database:**
+   - Create a new database named `worklenz_db` on your local PostgreSQL server.
+   - Update the database connection details in your `.env` file.
+   - Execute the SQL setup files in the correct order:

+   ```bash
+   # From your PostgreSQL client or command line
+   psql -U your_username -d worklenz_db -f database/sql/0_extensions.sql
+   psql -U your_username -d worklenz_db -f database/sql/1_tables.sql
+   psql -U your_username -d worklenz_db -f database/sql/indexes.sql
+   psql -U your_username -d worklenz_db -f database/sql/4_functions.sql
+   psql -U your_username -d worklenz_db -f database/sql/triggers.sql
+   psql -U your_username -d worklenz_db -f database/sql/3_views.sql
+   psql -U your_username -d worklenz_db -f database/sql/2_dml.sql
+   psql -U your_username -d worklenz_db -f database/sql/5_database_user.sql
+   ```

+   Alternatively, you can use the provided shell script:

+   ```bash
+   # Make sure the script is executable
+   chmod +x database/00-init-db.sh
+   # Run the script (may need modifications for local execution)
+   ./database/00-init-db.sh
+   ```

+3. **Install Dependencies:**

    ```bash
    npm install
    ```

-   This command installs all the necessary libraries required to run the project.
+4. **Run the Development Server:**

-5. **Run the Development Server:**
+   ```bash
+   npm run dev
+   ```

-   **a. Start the TypeScript compiler:**
+   This starts the development server with hot reloading enabled.

-   Open a new terminal window and run the following command:
+5. **Build for Production:**

-   ```bash
-   grunt dev
-   ```
+   ```bash
+   npm run build
+   ```

-   This starts the `grunt` task runner, which compiles TypeScript code into JavaScript.
+   This will compile the TypeScript code into JavaScript for production use.

-   **b. Start the development server:**
+6. **Start Production Server:**

-   Open another separate terminal window and run the following command:
+   ```bash
+   npm start
+   ```

-   ```bash
-   npm start
-   ```
+## API Documentation

-   This starts the development server allowing you to work on the project.
+The API endpoints are organized into logical controllers and follow RESTful design principles. The main API routes are prefixed with `/api/v1`.

-6. **Run the Production Server:**
+### Authentication

-   **a. Compile TypeScript to JavaScript:**
+Authentication is handled via JWT tokens. Protected routes require a valid token in the Authorization header.

-   Open a new terminal window and run the following command:
+### File Storage

-   ```bash
-   grunt build
-   ```
+The application supports both S3-compatible storage and Azure Blob Storage for file uploads. Configure your preferred storage option in the `.env` file.

-   This starts the `grunt` task runner, which compiles TypeScript code into JavaScript for production use.
+## Development Guidelines

-   **b. Start the production server:**
+- Code should be written in TypeScript
+- Follow the established patterns for controllers, services, and middlewares
+- Add proper error handling for all API endpoints
+- Write unit tests for critical functionality
+- Document API endpoints with clear descriptions and examples

-   Once the compilation is complete, run the following command in the same terminal window:
+## Running Tests

-   ```bash
-   npm start
-   ```
+```bash
+npm test
+```

-   This starts the production server for your application.
+## Docker Support

-### CLI

-- Create controller: `$ node new controller Test`
-- Create angular release: `$ node new release`

-### Developement Rules

-- Controllers should only generate/create using the CLI (`node new controller Projects`)
-- Validations should only be done using a middleware placed under src/validators/ and used inside the routers (E.g., api-router.ts)
-- Validators should only generate/create using the CLI (`node new vaidator projects-params`)

-## Pull submodules
-- git submodule update --init --recursive
+The backend can be run in a Docker container. See the main project README for Docker setup instructions.
55
worklenz-backend/database/00-init-db.sh
Normal file
@@ -0,0 +1,55 @@
#!/bin/bash
set -e

# This script controls the order of SQL file execution during database initialization
echo "Starting database initialization..."

# Check if we have SQL files in the expected locations
if [ -f "/docker-entrypoint-initdb.d/sql/0_extensions.sql" ]; then
    SQL_DIR="/docker-entrypoint-initdb.d/sql"
    echo "Using SQL files from sql/ subdirectory"
elif [ -f "/docker-entrypoint-initdb.d/0_extensions.sql" ]; then
    # First-time setup - stage the files in a subdirectory
    echo "Moving SQL files to sql/ subdirectory..."
    mkdir -p /docker-entrypoint-initdb.d/sql

    # Copy all SQL files (except this script) into the subdirectory
    for f in /docker-entrypoint-initdb.d/*.sql; do
        if [ -f "$f" ]; then
            cp "$f" /docker-entrypoint-initdb.d/sql/
            echo "Copied $f to sql/ subdirectory"
        fi
    done

    SQL_DIR="/docker-entrypoint-initdb.d/sql"
else
    echo "SQL files not found in expected locations!"
    exit 1
fi

# Execute SQL files in the correct order
echo "Executing 0_extensions.sql..."
psql -v ON_ERROR_STOP=1 --username "$POSTGRES_USER" --dbname "$POSTGRES_DB" -f "$SQL_DIR/0_extensions.sql"

echo "Executing 1_tables.sql..."
psql -v ON_ERROR_STOP=1 --username "$POSTGRES_USER" --dbname "$POSTGRES_DB" -f "$SQL_DIR/1_tables.sql"

echo "Executing indexes.sql..."
psql -v ON_ERROR_STOP=1 --username "$POSTGRES_USER" --dbname "$POSTGRES_DB" -f "$SQL_DIR/indexes.sql"

echo "Executing 4_functions.sql..."
psql -v ON_ERROR_STOP=1 --username "$POSTGRES_USER" --dbname "$POSTGRES_DB" -f "$SQL_DIR/4_functions.sql"

echo "Executing triggers.sql..."
psql -v ON_ERROR_STOP=1 --username "$POSTGRES_USER" --dbname "$POSTGRES_DB" -f "$SQL_DIR/triggers.sql"

echo "Executing 3_views.sql..."
psql -v ON_ERROR_STOP=1 --username "$POSTGRES_USER" --dbname "$POSTGRES_DB" -f "$SQL_DIR/3_views.sql"

echo "Executing 2_dml.sql..."
psql -v ON_ERROR_STOP=1 --username "$POSTGRES_USER" --dbname "$POSTGRES_DB" -f "$SQL_DIR/2_dml.sql"

echo "Executing 5_database_user.sql..."
psql -v ON_ERROR_STOP=1 --username "$POSTGRES_USER" --dbname "$POSTGRES_DB" -f "$SQL_DIR/5_database_user.sql"

echo "Database initialization completed successfully"
File diff suppressed because it is too large
@@ -1,77 +0,0 @@
CREATE OR REPLACE FUNCTION sys_insert_task_priorities() RETURNS VOID AS
$$
BEGIN
    INSERT INTO task_priorities (name, value, color_code) VALUES ('Low', 0, '#75c997');
    INSERT INTO task_priorities (name, value, color_code) VALUES ('Medium', 1, '#fbc84c');
    INSERT INTO task_priorities (name, value, color_code) VALUES ('High', 2, '#f37070');
END;
$$ LANGUAGE plpgsql;

CREATE OR REPLACE FUNCTION sys_insert_project_access_levels() RETURNS VOID AS
$$
BEGIN
    INSERT INTO project_access_levels (name, key) VALUES ('Admin', 'ADMIN');
    INSERT INTO project_access_levels (name, key) VALUES ('Member', 'MEMBER');
    INSERT INTO project_access_levels (name, key) VALUES ('Project Manager', 'PROJECT_MANAGER');
END;
$$ LANGUAGE plpgsql;

CREATE OR REPLACE FUNCTION sys_insert_task_status_categories() RETURNS VOID AS
$$
BEGIN
    INSERT INTO sys_task_status_categories (name, color_code, index, is_todo)
    VALUES ('To do', '#a9a9a9', 0, TRUE);
    INSERT INTO sys_task_status_categories (name, color_code, index, is_doing)
    VALUES ('Doing', '#70a6f3', 1, TRUE);
    INSERT INTO sys_task_status_categories (name, color_code, index, is_done)
    VALUES ('Done', '#75c997', 2, TRUE);
END;
$$ LANGUAGE plpgsql;

CREATE OR REPLACE FUNCTION sys_insert_project_statuses() RETURNS VOID AS
$$
BEGIN
    INSERT INTO sys_project_statuses (name, color_code, icon, sort_order, is_default)
    VALUES ('Cancelled', '#f37070', 'close-circle', 0, FALSE),
           ('Blocked', '#cbc8a1', 'stop', 1, FALSE),
           ('On Hold', '#cbc8a1', 'stop', 2, FALSE),
           ('Proposed', '#cbc8a1', 'clock-circle', 3, TRUE),
           ('In Planning', '#cbc8a1', 'clock-circle', 4, FALSE),
           ('In Progress', '#80ca79', 'clock-circle', 5, FALSE),
           ('Completed', '#80ca79', 'check-circle', 6, FALSE);
END;
$$ LANGUAGE plpgsql;

CREATE OR REPLACE FUNCTION sys_insert_project_healths() RETURNS VOID AS
$$
BEGIN
    INSERT INTO sys_project_healths (name, color_code, sort_order, is_default) VALUES ('Not Set', '#a9a9a9', 0, TRUE);
    INSERT INTO sys_project_healths (name, color_code, sort_order, is_default) VALUES ('Needs Attention', '#fbc84c', 1, FALSE);
    INSERT INTO sys_project_healths (name, color_code, sort_order, is_default) VALUES ('At Risk', '#f37070', 2, FALSE);
    INSERT INTO sys_project_healths (name, color_code, sort_order, is_default) VALUES ('Good', '#75c997', 3, FALSE);
END;
$$ LANGUAGE plpgsql;

SELECT sys_insert_task_priorities();
SELECT sys_insert_project_access_levels();
SELECT sys_insert_task_status_categories();
SELECT sys_insert_project_statuses();
SELECT sys_insert_project_healths();

DROP FUNCTION sys_insert_task_priorities();
DROP FUNCTION sys_insert_project_access_levels();
DROP FUNCTION sys_insert_task_status_categories();
DROP FUNCTION sys_insert_project_statuses();
DROP FUNCTION sys_insert_project_healths();

INSERT INTO timezones (name, abbrev, utc_offset)
SELECT name, abbrev, utc_offset
FROM pg_timezone_names;
@@ -1,34 +0,0 @@
CREATE VIEW task_labels_view(name, task_id, label_id) AS
SELECT (SELECT team_labels.name
        FROM team_labels
        WHERE team_labels.id = task_labels.label_id) AS name,
       task_labels.task_id,
       task_labels.label_id
FROM task_labels;

CREATE VIEW tasks_with_status_view(task_id, parent_task_id, is_todo, is_doing, is_done) AS
SELECT tasks.id AS task_id,
       tasks.parent_task_id,
       stsc.is_todo,
       stsc.is_doing,
       stsc.is_done
FROM tasks
         JOIN task_statuses ts ON tasks.status_id = ts.id
         JOIN sys_task_status_categories stsc ON ts.category_id = stsc.id
WHERE tasks.archived IS FALSE;

CREATE VIEW team_member_info_view(avatar_url, email, name, user_id, team_member_id, team_id) AS
SELECT u.avatar_url,
       COALESCE(u.email, (SELECT email_invitations.email
                          FROM email_invitations
                          WHERE email_invitations.team_member_id = team_members.id)) AS email,
       COALESCE(u.name, (SELECT email_invitations.name
                         FROM email_invitations
                         WHERE email_invitations.team_member_id = team_members.id)) AS name,
       u.id AS user_id,
       team_members.id AS team_member_id,
       team_members.team_id
FROM team_members
         LEFT JOIN users u ON team_members.user_id = u.id;
File diff suppressed because it is too large
@@ -1,35 +0,0 @@
-- Default ROLE : worklenz_client
-- Default USER : worklenz_backend
-- Change DATABASE_NAME, ROLE, PASSWORD and USER as needed.

REVOKE CREATE ON SCHEMA public FROM PUBLIC;
CREATE ROLE worklenz_client;

GRANT CONNECT ON DATABASE "DATABASE_NAME" TO worklenz_client;
GRANT INSERT, SELECT, UPDATE, DELETE ON ALL TABLES IN SCHEMA public TO worklenz_client;

GRANT USAGE, SELECT ON ALL SEQUENCES IN SCHEMA public TO worklenz_client;

REVOKE ALL PRIVILEGES ON task_priorities FROM worklenz_client;
GRANT SELECT ON task_priorities TO worklenz_client;

REVOKE ALL PRIVILEGES ON project_access_levels FROM worklenz_client;
GRANT SELECT ON project_access_levels TO worklenz_client;

REVOKE ALL PRIVILEGES ON timezones FROM worklenz_client;
GRANT SELECT ON timezones TO worklenz_client;

REVOKE ALL PRIVILEGES ON worklenz_alerts FROM worklenz_client;
GRANT SELECT ON worklenz_alerts TO worklenz_client;

REVOKE ALL PRIVILEGES ON sys_task_status_categories FROM worklenz_client;
GRANT SELECT ON sys_task_status_categories TO worklenz_client;

REVOKE ALL PRIVILEGES ON sys_project_statuses FROM worklenz_client;
GRANT SELECT ON sys_project_statuses TO worklenz_client;

REVOKE ALL PRIVILEGES ON sys_project_healths FROM worklenz_client;
GRANT SELECT ON sys_project_healths TO worklenz_client;

CREATE USER worklenz_backend WITH PASSWORD 'PASSWORD';
GRANT worklenz_client TO worklenz_backend;
@@ -1 +1,36 @@
All database DDLs, DMLs, and migrations related to the application should be stored here.

# Worklenz Database

## Directory Structure

- `sql/` - Contains all SQL files needed for database initialization
- `migrations/` - Contains database migration scripts
- `00-init-db.sh` - Initialization script that executes SQL files in the correct order

## SQL File Execution Order

The database initialization files should be executed in the following order:

1. `sql/0_extensions.sql` - PostgreSQL extensions
2. `sql/1_tables.sql` - Table definitions and constraints
3. `sql/indexes.sql` - All database indexes
4. `sql/4_functions.sql` - Database functions
5. `sql/triggers.sql` - Database triggers
6. `sql/3_views.sql` - Database views
7. `sql/2_dml.sql` - Data Manipulation Language statements (inserts, updates)
8. `sql/5_database_user.sql` - Database user setup

## Docker-based Setup

In the Docker environment, a shell script called `00-init-db.sh` controls the SQL file execution order:

1. The shell script creates a `sql/` subdirectory if it doesn't exist
2. It copies all `.sql` files into this subdirectory
3. It executes the SQL files from the subdirectory in the correct order

This approach prevents the SQL files from being executed twice by Docker's automatic initialization mechanism, which would cause errors for objects that already exist.

## Manual Setup

If you're setting up the database manually, follow the execution order listed above, as sketched in the example below. Ensure your SQL files are in the `sql/` subdirectory before executing the script.
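A minimal sketch of that manual run, assuming a reachable PostgreSQL instance and the same `POSTGRES_USER`/`POSTGRES_DB` environment variables the init script uses:

```bash
# Apply the initialization files in the documented order.
for f in 0_extensions.sql 1_tables.sql indexes.sql 4_functions.sql \
         triggers.sql 3_views.sql 2_dml.sql 5_database_user.sql; do
  psql -v ON_ERROR_STOP=1 --username "$POSTGRES_USER" --dbname "$POSTGRES_DB" -f "sql/$f"
done
```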
@@ -0,0 +1,78 @@
-- Migration: Add manual task progress
-- Date: 2025-04-22
-- Version: 1.0.0

BEGIN;

-- Add manual progress fields to tasks table
ALTER TABLE tasks
    ADD COLUMN IF NOT EXISTS manual_progress BOOLEAN DEFAULT FALSE,
    ADD COLUMN IF NOT EXISTS progress_value INTEGER DEFAULT NULL,
    ADD COLUMN IF NOT EXISTS weight INTEGER DEFAULT NULL;

-- Update function to consider manual progress
CREATE OR REPLACE FUNCTION get_task_complete_ratio(_task_id uuid) RETURNS json
    LANGUAGE plpgsql
AS
$$
DECLARE
    _parent_task_done FLOAT   = 0;
    _sub_tasks_done   FLOAT   = 0;
    _sub_tasks_count  FLOAT   = 0;
    _total_completed  FLOAT   = 0;
    _total_tasks      FLOAT   = 0;
    _ratio            FLOAT   = 0;
    _is_manual        BOOLEAN = FALSE;
    _manual_value     INTEGER = NULL;
BEGIN
    -- Check if manual progress is set
    SELECT manual_progress, progress_value
    FROM tasks
    WHERE id = _task_id
    INTO _is_manual, _manual_value;

    -- If manual progress is enabled and has a value, use it directly
    IF _is_manual IS TRUE AND _manual_value IS NOT NULL THEN
        RETURN JSON_BUILD_OBJECT(
                'ratio', _manual_value,
                'total_completed', 0,
                'total_tasks', 0,
                'is_manual', TRUE
            );
    END IF;

    -- Otherwise calculate automatically as before
    SELECT (CASE
                WHEN EXISTS(SELECT 1
                            FROM tasks_with_status_view
                            WHERE tasks_with_status_view.task_id = _task_id
                              AND is_done IS TRUE) THEN 1
                ELSE 0 END)
    INTO _parent_task_done;
    SELECT COUNT(*) FROM tasks WHERE parent_task_id = _task_id AND archived IS FALSE INTO _sub_tasks_count;

    SELECT COUNT(*)
    FROM tasks_with_status_view
    WHERE parent_task_id = _task_id
      AND is_done IS TRUE
    INTO _sub_tasks_done;

    _total_completed = _parent_task_done + _sub_tasks_done;
    _total_tasks = _sub_tasks_count; -- subtasks only; the parent is not counted in this version

    IF _total_tasks > 0 THEN
        _ratio = (_total_completed / _total_tasks) * 100;
    ELSE
        _ratio = _parent_task_done * 100;
    END IF;

    RETURN JSON_BUILD_OBJECT(
            'ratio', _ratio,
            'total_completed', _total_completed,
            'total_tasks', _total_tasks,
            'is_manual', FALSE
        );
END
$$;
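
-- Illustrative usage only (not part of the migration); the UUID is a made-up
-- placeholder. With the logic above, a task with one of its two subtasks done
-- would report a ratio of 50 unless a manual value overrides it.
-- SELECT get_task_complete_ratio('00000000-0000-0000-0000-000000000000');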

COMMIT;
@@ -0,0 +1,687 @@
-- Migration: Enhance manual task progress with subtask support
-- Date: 2025-04-23
-- Version: 1.0.0

BEGIN;

-- Update function to consider subtask manual progress when calculating parent task progress
CREATE OR REPLACE FUNCTION get_task_complete_ratio(_task_id uuid) RETURNS json
    LANGUAGE plpgsql
AS
$$
DECLARE
    _parent_task_done      FLOAT   = 0;
    _sub_tasks_done        FLOAT   = 0;
    _sub_tasks_count       FLOAT   = 0;
    _total_completed       FLOAT   = 0;
    _total_tasks           FLOAT   = 0;
    _ratio                 FLOAT   = 0;
    _is_manual             BOOLEAN = FALSE;
    _manual_value          INTEGER = NULL;
    _project_id            UUID;
    _use_manual_progress   BOOLEAN = FALSE;
    _use_weighted_progress BOOLEAN = FALSE;
    _use_time_progress     BOOLEAN = FALSE;
BEGIN
    -- Check if manual progress is set for this task
    SELECT manual_progress, progress_value, project_id
    FROM tasks
    WHERE id = _task_id
    INTO _is_manual, _manual_value, _project_id;

    -- Check if the project uses manual progress
    IF _project_id IS NOT NULL THEN
        SELECT COALESCE(use_manual_progress, FALSE),
               COALESCE(use_weighted_progress, FALSE),
               COALESCE(use_time_progress, FALSE)
        FROM projects
        WHERE id = _project_id
        INTO _use_manual_progress, _use_weighted_progress, _use_time_progress;
    END IF;

    -- Get all subtasks
    SELECT COUNT(*)
    FROM tasks
    WHERE parent_task_id = _task_id AND archived IS FALSE
    INTO _sub_tasks_count;

    -- If manual progress is enabled and has a value AND there are no subtasks, use it directly
    IF _is_manual IS TRUE AND _manual_value IS NOT NULL AND _sub_tasks_count = 0 THEN
        RETURN JSON_BUILD_OBJECT(
                'ratio', _manual_value,
                'total_completed', 0,
                'total_tasks', 0,
                'is_manual', TRUE
            );
    END IF;

    -- If there are no subtasks, just use the parent task's status
    IF _sub_tasks_count = 0 THEN
        SELECT (CASE
                    WHEN EXISTS(SELECT 1
                                FROM tasks_with_status_view
                                WHERE tasks_with_status_view.task_id = _task_id
                                  AND is_done IS TRUE) THEN 1
                    ELSE 0 END)
        INTO _parent_task_done;

        _ratio = _parent_task_done * 100;
    ELSE
        -- If project uses manual progress, calculate based on subtask manual progress values
        IF _use_manual_progress IS TRUE THEN
            WITH subtask_progress AS (
                SELECT CASE
                           -- If subtask has manual progress, use that value
                           WHEN manual_progress IS TRUE AND progress_value IS NOT NULL THEN
                               progress_value
                           -- Otherwise use completion status (0 or 100)
                           ELSE
                               CASE
                                   WHEN EXISTS(SELECT 1
                                               FROM tasks_with_status_view
                                               WHERE tasks_with_status_view.task_id = t.id
                                                 AND is_done IS TRUE) THEN 100
                                   ELSE 0
                               END
                       END AS progress_value
                FROM tasks t
                WHERE t.parent_task_id = _task_id
                  AND t.archived IS FALSE
            )
            SELECT COALESCE(AVG(progress_value), 0)
            FROM subtask_progress
            INTO _ratio;
        -- If project uses weighted progress, calculate based on subtask weights
        ELSIF _use_weighted_progress IS TRUE THEN
            WITH subtask_progress AS (
                SELECT CASE
                           -- If subtask has manual progress, use that value
                           WHEN manual_progress IS TRUE AND progress_value IS NOT NULL THEN
                               progress_value
                           -- Otherwise use completion status (0 or 100)
                           ELSE
                               CASE
                                   WHEN EXISTS(SELECT 1
                                               FROM tasks_with_status_view
                                               WHERE tasks_with_status_view.task_id = t.id
                                                 AND is_done IS TRUE) THEN 100
                                   ELSE 0
                               END
                       END AS progress_value,
                       COALESCE(weight, 100) AS weight
                FROM tasks t
                WHERE t.parent_task_id = _task_id
                  AND t.archived IS FALSE
            )
            SELECT COALESCE(
                           SUM(progress_value * weight) / NULLIF(SUM(weight), 0),
                           0
                       )
            FROM subtask_progress
            INTO _ratio;
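            -- Worked example of the weighted average above (illustration only):
            -- two subtasks, one done (progress 100, weight 200) and one untouched
            -- (progress 0, weight 100), give (100*200 + 0*100) / (200+100) = 66.7%.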
        -- If project uses time-based progress, calculate based on estimated time
        ELSIF _use_time_progress IS TRUE THEN
            WITH subtask_progress AS (
                SELECT CASE
                           -- If subtask has manual progress, use that value
                           WHEN manual_progress IS TRUE AND progress_value IS NOT NULL THEN
                               progress_value
                           -- Otherwise use completion status (0 or 100)
                           ELSE
                               CASE
                                   WHEN EXISTS(SELECT 1
                                               FROM tasks_with_status_view
                                               WHERE tasks_with_status_view.task_id = t.id
                                                 AND is_done IS TRUE) THEN 100
                                   ELSE 0
                               END
                       END AS progress_value,
                       COALESCE(total_minutes, 0) AS estimated_minutes
                FROM tasks t
                WHERE t.parent_task_id = _task_id
                  AND t.archived IS FALSE
            )
            SELECT COALESCE(
                           SUM(progress_value * estimated_minutes) / NULLIF(SUM(estimated_minutes), 0),
                           0
                       )
            FROM subtask_progress
            INTO _ratio;
        ELSE
            -- Traditional calculation based on completion status
            SELECT (CASE
                        WHEN EXISTS(SELECT 1
                                    FROM tasks_with_status_view
                                    WHERE tasks_with_status_view.task_id = _task_id
                                      AND is_done IS TRUE) THEN 1
                        ELSE 0 END)
            INTO _parent_task_done;

            SELECT COUNT(*)
            FROM tasks_with_status_view
            WHERE parent_task_id = _task_id
              AND is_done IS TRUE
            INTO _sub_tasks_done;

            _total_completed = _parent_task_done + _sub_tasks_done;
            _total_tasks = _sub_tasks_count + 1; -- +1 for the parent task

            IF _total_tasks = 0 THEN
                _ratio = 0;
            ELSE
                _ratio = (_total_completed / _total_tasks) * 100;
            END IF;
        END IF;
    END IF;

    -- Ensure ratio is between 0 and 100
    IF _ratio < 0 THEN
        _ratio = 0;
    ELSIF _ratio > 100 THEN
        _ratio = 100;
    END IF;

    RETURN JSON_BUILD_OBJECT(
            'ratio', _ratio,
            'total_completed', _total_completed,
            'total_tasks', _total_tasks,
            'is_manual', _is_manual
        );
END
$$;

CREATE OR REPLACE FUNCTION update_project(_body json) RETURNS json
    LANGUAGE plpgsql
AS
$$
DECLARE
    _user_id                        UUID;
    _team_id                        UUID;
    _client_id                      UUID;
    _project_id                     UUID;
    _project_manager_team_member_id UUID;
    _client_name                    TEXT;
    _project_name                   TEXT;
BEGIN
    -- needs a test; these casts can throw errors
    _client_name = TRIM((_body ->> 'client_name')::TEXT);
    _project_name = TRIM((_body ->> 'name')::TEXT);

    -- add inside the controller
    _user_id = (_body ->> 'user_id')::UUID;
    _team_id = (_body ->> 'team_id')::UUID;
    _project_manager_team_member_id = (_body ->> 'team_member_id')::UUID;

    -- cache the existing client id if one exists
    SELECT id FROM clients WHERE LOWER(name) = LOWER(_client_name) AND team_id = _team_id INTO _client_id;

    -- insert the client if it does not exist
    IF is_null_or_empty(_client_id) IS TRUE AND is_null_or_empty(_client_name) IS FALSE
    THEN
        INSERT INTO clients (name, team_id) VALUES (_client_name, _team_id) RETURNING id INTO _client_id;
    END IF;

    -- check whether the project name is already in use
    IF EXISTS(
        SELECT name FROM projects WHERE LOWER(name) = LOWER(_project_name)
          AND team_id = _team_id AND id != (_body ->> 'id')::UUID
    )
    THEN
        RAISE 'PROJECT_EXISTS_ERROR:%', _project_name;
    END IF;

    -- update the project
    UPDATE projects
    SET name                   = _project_name,
        notes                  = (_body ->> 'notes')::TEXT,
        color_code             = (_body ->> 'color_code')::TEXT,
        status_id              = (_body ->> 'status_id')::UUID,
        health_id              = (_body ->> 'health_id')::UUID,
        key                    = (_body ->> 'key')::TEXT,
        start_date             = (_body ->> 'start_date')::TIMESTAMPTZ,
        end_date               = (_body ->> 'end_date')::TIMESTAMPTZ,
        client_id              = _client_id,
        folder_id              = (_body ->> 'folder_id')::UUID,
        category_id            = (_body ->> 'category_id')::UUID,
        updated_at             = CURRENT_TIMESTAMP,
        estimated_working_days = (_body ->> 'working_days')::INTEGER,
        estimated_man_days     = (_body ->> 'man_days')::INTEGER,
        hours_per_day          = (_body ->> 'hours_per_day')::INTEGER,
        use_manual_progress    = COALESCE((_body ->> 'use_manual_progress')::BOOLEAN, FALSE),
        use_weighted_progress  = COALESCE((_body ->> 'use_weighted_progress')::BOOLEAN, FALSE),
        use_time_progress      = COALESCE((_body ->> 'use_time_progress')::BOOLEAN, FALSE)
    WHERE id = (_body ->> 'id')::UUID
      AND team_id = _team_id
    RETURNING id INTO _project_id;

    UPDATE project_members SET project_access_level_id = (SELECT id FROM project_access_levels WHERE key = 'MEMBER') WHERE project_id = _project_id;

    IF NOT (_project_manager_team_member_id IS NULL)
    THEN
        PERFORM update_project_manager(_project_manager_team_member_id, _project_id::UUID);
    END IF;

    RETURN JSON_BUILD_OBJECT(
            'id', _project_id,
            'name', (_body ->> 'name')::TEXT,
            'project_manager_id', _project_manager_team_member_id::UUID
        );
END;
$$;

-- 3. Also modify the create_project function to handle the new fields during project creation
CREATE OR REPLACE FUNCTION create_project(_body json) RETURNS json
    LANGUAGE plpgsql
AS
$$
DECLARE
    _project_id                     UUID;
    _user_id                        UUID;
    _team_id                        UUID;
    _team_member_id                 UUID;
    _client_id                      UUID;
    _client_name                    TEXT;
    _project_name                   TEXT;
    _project_created_log            TEXT;
    _project_member_added_log       TEXT;
    _project_created_log_id         UUID;
    _project_manager_team_member_id UUID;
    _project_key                    TEXT;
BEGIN
    _client_name = TRIM((_body ->> 'client_name')::TEXT);
    _project_name = TRIM((_body ->> 'name')::TEXT);
    _project_key = TRIM((_body ->> 'key')::TEXT);
    _project_created_log = (_body ->> 'project_created_log')::TEXT;
    _project_member_added_log = (_body ->> 'project_member_added_log')::TEXT;
    _user_id = (_body ->> 'user_id')::UUID;
    _team_id = (_body ->> 'team_id')::UUID;
    _project_manager_team_member_id = (_body ->> 'project_manager_id')::UUID;

    SELECT id FROM team_members WHERE user_id = _user_id AND team_id = _team_id INTO _team_member_id;

    -- cache the existing client id if one exists
    SELECT id FROM clients WHERE LOWER(name) = LOWER(_client_name) AND team_id = _team_id INTO _client_id;

    -- insert the client if it does not exist
    IF is_null_or_empty(_client_id) IS TRUE AND is_null_or_empty(_client_name) IS FALSE
    THEN
        INSERT INTO clients (name, team_id) VALUES (_client_name, _team_id) RETURNING id INTO _client_id;
    END IF;

    -- check whether the project name is already in use
    IF EXISTS(SELECT name FROM projects WHERE LOWER(name) = LOWER(_project_name) AND team_id = _team_id)
    THEN
        RAISE 'PROJECT_EXISTS_ERROR:%', _project_name;
    END IF;

    -- create the project
    INSERT
    INTO projects (name, key, color_code, start_date, end_date, team_id, notes, owner_id, status_id, health_id, folder_id,
                   category_id, estimated_working_days, estimated_man_days, hours_per_day,
                   use_manual_progress, use_weighted_progress, use_time_progress, client_id)
    VALUES (_project_name,
            UPPER(_project_key),
            (_body ->> 'color_code')::TEXT,
            (_body ->> 'start_date')::TIMESTAMPTZ,
            (_body ->> 'end_date')::TIMESTAMPTZ,
            _team_id,
            (_body ->> 'notes')::TEXT,
            _user_id,
            (_body ->> 'status_id')::UUID,
            (_body ->> 'health_id')::UUID,
            (_body ->> 'folder_id')::UUID,
            (_body ->> 'category_id')::UUID,
            (_body ->> 'working_days')::INTEGER,
            (_body ->> 'man_days')::INTEGER,
            (_body ->> 'hours_per_day')::INTEGER,
            COALESCE((_body ->> 'use_manual_progress')::BOOLEAN, FALSE),
            COALESCE((_body ->> 'use_weighted_progress')::BOOLEAN, FALSE),
            COALESCE((_body ->> 'use_time_progress')::BOOLEAN, FALSE),
            _client_id)
    RETURNING id INTO _project_id;

    -- register the project log
    INSERT INTO project_logs (project_id, team_id, description)
    VALUES (_project_id, _team_id, _project_created_log)
    RETURNING id INTO _project_created_log_id;

    -- insert the project creator as a project member
    INSERT INTO project_members (team_member_id, project_access_level_id, project_id, role_id)
    VALUES (_team_member_id, (SELECT id FROM project_access_levels WHERE key = 'ADMIN'),
            _project_id,
            (SELECT id FROM roles WHERE team_id = _team_id AND default_role IS TRUE));

    -- insert statuses
    INSERT INTO task_statuses (name, project_id, team_id, category_id, sort_order)
    VALUES ('To Do', _project_id, _team_id, (SELECT id FROM sys_task_status_categories WHERE is_todo IS TRUE), 0);
    INSERT INTO task_statuses (name, project_id, team_id, category_id, sort_order)
    VALUES ('Doing', _project_id, _team_id, (SELECT id FROM sys_task_status_categories WHERE is_doing IS TRUE), 1);
    INSERT INTO task_statuses (name, project_id, team_id, category_id, sort_order)
    VALUES ('Done', _project_id, _team_id, (SELECT id FROM sys_task_status_categories WHERE is_done IS TRUE), 2);

    -- insert default project columns
    PERFORM insert_task_list_columns(_project_id);

    -- add project manager role if one was given
    IF NOT is_null_or_empty(_project_manager_team_member_id) THEN
        PERFORM update_project_manager(_project_manager_team_member_id, _project_id);
    END IF;

    RETURN JSON_BUILD_OBJECT(
            'id', _project_id,
            'name', _project_name,
            'project_created_log_id', _project_created_log_id
        );
END;
$$;

-- 4. Update the getById function to include the new fields in the response
CREATE OR REPLACE FUNCTION getProjectById(_project_id UUID, _team_id UUID) RETURNS JSON
    LANGUAGE plpgsql
AS
$$
DECLARE
    _result JSON;
BEGIN
    SELECT ROW_TO_JSON(rec)
    INTO _result
    FROM (SELECT p.id,
                 p.name,
                 p.key,
                 p.color_code,
                 p.start_date,
                 p.end_date,
                 c.name AS client_name,
                 c.id AS client_id,
                 p.notes,
                 p.created_at,
                 p.updated_at,
                 ts.name AS status,
                 ts.color_code AS status_color,
                 ts.icon AS status_icon,
                 ts.id AS status_id,
                 h.name AS health,
                 h.color_code AS health_color,
                 h.icon AS health_icon,
                 h.id AS health_id,
                 pc.name AS category_name,
                 pc.color_code AS category_color,
                 pc.id AS category_id,
                 p.phase_label,
                 p.estimated_man_days AS man_days,
                 p.estimated_working_days AS working_days,
                 p.hours_per_day,
                 p.use_manual_progress,
                 p.use_weighted_progress,
                 -- Additional fields
                 COALESCE((SELECT ARRAY_TO_JSON(ARRAY_AGG(ROW_TO_JSON(t)))
                           FROM (SELECT pm.id,
                                        pm.project_id,
                                        tm.id AS team_member_id,
                                        tm.user_id,
                                        u.name,
                                        u.email,
                                        u.avatar_url,
                                        u.phone_number,
                                        pal.name AS access_level,
                                        pal.key AS access_level_key,
                                        pal.id AS access_level_id,
                                        EXISTS(SELECT 1
                                               FROM project_members
                                                        INNER JOIN project_access_levels ON
                                                   project_members.project_access_level_id = project_access_levels.id
                                               WHERE project_id = p.id
                                                 AND project_access_levels.key = 'PROJECT_MANAGER'
                                                 AND team_member_id = tm.id) AS is_project_manager
                                 FROM project_members pm
                                          INNER JOIN team_members tm ON pm.team_member_id = tm.id
                                          INNER JOIN users u ON tm.user_id = u.id
                                          INNER JOIN project_access_levels pal ON pm.project_access_level_id = pal.id
                                 WHERE pm.project_id = p.id) t), '[]'::JSON) AS members,
                 (SELECT COUNT(DISTINCT (id))
                  FROM tasks
                  WHERE archived IS FALSE
                    AND project_id = p.id) AS task_count,
                 (SELECT ARRAY_TO_JSON(ARRAY_AGG(ROW_TO_JSON(t)))
                  FROM (SELECT project_members.id,
                               project_members.project_id,
                               team_members.id AS team_member_id,
                               team_members.user_id,
                               users.name,
                               users.email,
                               users.avatar_url,
                               project_access_levels.name AS access_level,
                               project_access_levels.key AS access_level_key,
                               project_access_levels.id AS access_level_id
                        FROM project_members
                                 INNER JOIN team_members ON project_members.team_member_id = team_members.id
                                 INNER JOIN users ON team_members.user_id = users.id
                                 INNER JOIN project_access_levels
                                            ON project_members.project_access_level_id = project_access_levels.id
                        WHERE project_id = p.id
                          AND project_access_levels.key = 'PROJECT_MANAGER'
                        LIMIT 1) t) AS project_manager,

                 (SELECT EXISTS(SELECT 1
                                FROM project_subscribers
                                WHERE project_id = p.id
                                  AND user_id = (SELECT user_id
                                                 FROM project_members
                                                 WHERE team_member_id = (SELECT id
                                                                         FROM team_members
                                                                         WHERE user_id IN
                                                                               (SELECT user_id FROM is_member_of_project_cte))
                                                   AND project_id = p.id))) AS subscribed,
                 (SELECT name
                  FROM users
                  WHERE id =
                        (SELECT owner_id FROM projects WHERE id = p.id)) AS project_owner,
                 (SELECT default_view
                  FROM project_members
                  WHERE project_id = p.id
                    AND team_member_id IN (SELECT id FROM is_member_of_project_cte)) AS team_member_default_view,
                 (SELECT EXISTS(SELECT user_id
                                FROM archived_projects
                                WHERE user_id IN (SELECT user_id FROM is_member_of_project_cte)
                                  AND project_id = p.id)) AS archived,

                 (SELECT EXISTS(SELECT user_id
                                FROM favorite_projects
                                WHERE user_id IN (SELECT user_id FROM is_member_of_project_cte)
                                  AND project_id = p.id)) AS favorite

          FROM projects p
                   LEFT JOIN sys_project_statuses ts ON p.status_id = ts.id
                   LEFT JOIN sys_project_healths h ON p.health_id = h.id
                   LEFT JOIN project_categories pc ON p.category_id = pc.id
                   LEFT JOIN clients c ON p.client_id = c.id,
               LATERAL (SELECT id, user_id
                        FROM team_members
                        WHERE id = (SELECT team_member_id
                                    FROM project_members
                                    WHERE project_id = p.id
                                      AND team_member_id IN (SELECT id
                                                             FROM team_members
                                                             WHERE team_id = _team_id)
                                    LIMIT 1)) is_member_of_project_cte

          WHERE p.id = _project_id
            AND p.team_id = _team_id) rec;

    RETURN _result;
END
$$;

CREATE OR REPLACE FUNCTION public.get_task_form_view_model(_user_id UUID, _team_id UUID, _task_id UUID, _project_id UUID) RETURNS JSON
    LANGUAGE plpgsql
AS
$$
DECLARE
    _task         JSON;
    _priorities   JSON;
    _projects     JSON;
    _statuses     JSON;
    _team_members JSON;
    _assignees    JSON;
    _phases       JSON;
BEGIN

    -- Select task info
    SELECT COALESCE(ROW_TO_JSON(rec), '{}'::JSON)
    INTO _task
    FROM (WITH RECURSIVE task_hierarchy AS (
        -- Base case: Start with the given task
        SELECT id,
               parent_task_id,
               0 AS level
        FROM tasks
        WHERE id = _task_id

        UNION ALL

        -- Recursive case: Traverse up to parent tasks
        SELECT t.id,
               t.parent_task_id,
               th.level + 1 AS level
        FROM tasks t
                 INNER JOIN task_hierarchy th ON t.id = th.parent_task_id
        WHERE th.parent_task_id IS NOT NULL)
          SELECT id,
                 name,
                 description,
                 start_date,
                 end_date,
                 done,
                 total_minutes,
                 priority_id,
                 project_id,
                 created_at,
                 updated_at,
                 status_id,
                 parent_task_id,
                 sort_order,
                 (SELECT phase_id FROM task_phase WHERE task_id = tasks.id) AS phase_id,
                 CONCAT((SELECT key FROM projects WHERE id = tasks.project_id), '-', task_no) AS task_key,
                 (SELECT start_time
                  FROM task_timers
                  WHERE task_id = tasks.id
                    AND user_id = _user_id) AS timer_start_time,
                 parent_task_id IS NOT NULL AS is_sub_task,
                 (SELECT COUNT(*)
                  FROM tasks
                  WHERE parent_task_id = tasks.id
                    AND archived IS FALSE) AS sub_tasks_count,
                 (SELECT COUNT(*)
                  FROM tasks_with_status_view tt
                  WHERE (tt.parent_task_id = tasks.id OR tt.task_id = tasks.id)
                    AND tt.is_done IS TRUE) AS completed_count,
                 (SELECT COUNT(*) FROM task_attachments WHERE task_id = tasks.id) AS attachments_count,
                 (SELECT COALESCE(ARRAY_TO_JSON(ARRAY_AGG(ROW_TO_JSON(r))), '[]'::JSON)
                  FROM (SELECT task_labels.label_id AS id,
                               (SELECT name FROM team_labels WHERE id = task_labels.label_id),
                               (SELECT color_code FROM team_labels WHERE id = task_labels.label_id)
                        FROM task_labels
                        WHERE task_id = tasks.id
                        ORDER BY name) r) AS labels,
                 (SELECT color_code
                  FROM sys_task_status_categories
                  WHERE id = (SELECT category_id FROM task_statuses WHERE id = tasks.status_id)) AS status_color,
                 (SELECT COUNT(*) FROM tasks WHERE parent_task_id = _task_id) AS sub_tasks_count,
                 (SELECT name FROM users WHERE id = tasks.reporter_id) AS reporter,
                 (SELECT get_task_assignees(tasks.id)) AS assignees,
                 (SELECT id FROM team_members WHERE user_id = _user_id AND team_id = _team_id) AS team_member_id,
                 billable,
                 schedule_id,
                 progress_value,
                 weight,
                 (SELECT MAX(level) FROM task_hierarchy) AS task_level
          FROM tasks
          WHERE id = _task_id) rec;

    SELECT COALESCE(ARRAY_TO_JSON(ARRAY_AGG(ROW_TO_JSON(rec))), '[]'::JSON)
    INTO _priorities
    FROM (SELECT id, name FROM task_priorities ORDER BY value) rec;

    SELECT COALESCE(ARRAY_TO_JSON(ARRAY_AGG(ROW_TO_JSON(rec))), '[]'::JSON)
    INTO _phases
    FROM (SELECT id, name FROM project_phases WHERE project_id = _project_id ORDER BY name) rec;

    SELECT COALESCE(ARRAY_TO_JSON(ARRAY_AGG(ROW_TO_JSON(rec))), '[]'::JSON)
    INTO _projects
    FROM (SELECT id, name
          FROM projects
          WHERE team_id = _team_id
            AND (CASE
                     WHEN (is_owner(_user_id, _team_id) OR is_admin(_user_id, _team_id) IS TRUE) THEN TRUE
                     ELSE is_member_of_project(projects.id, _user_id, _team_id) END)
          ORDER BY name) rec;

    SELECT COALESCE(ARRAY_TO_JSON(ARRAY_AGG(ROW_TO_JSON(rec))), '[]'::JSON)
    INTO _statuses
    FROM (SELECT id, name FROM task_statuses WHERE project_id = _project_id) rec;

    SELECT COALESCE(ARRAY_TO_JSON(ARRAY_AGG(ROW_TO_JSON(rec))), '[]'::JSON)
    INTO _team_members
    FROM (SELECT team_members.id,
                 (SELECT name FROM team_member_info_view WHERE team_member_info_view.team_member_id = team_members.id),
                 (SELECT email FROM team_member_info_view WHERE team_member_info_view.team_member_id = team_members.id),
                 (SELECT avatar_url
                  FROM team_member_info_view
                  WHERE team_member_info_view.team_member_id = team_members.id)
          FROM team_members
                   LEFT JOIN users u ON team_members.user_id = u.id
          WHERE team_id = _team_id
            AND team_members.active IS TRUE) rec;

    SELECT get_task_assignees(_task_id) INTO _assignees;

    RETURN JSON_BUILD_OBJECT(
            'task', _task,
            'priorities', _priorities,
            'projects', _projects,
            'statuses', _statuses,
            'team_members', _team_members,
            'assignees', _assignees,
            'phases', _phases
        );
END;
$$;

-- Add use_manual_progress, use_weighted_progress, and use_time_progress to projects table if they don't exist
ALTER TABLE projects
    ADD COLUMN IF NOT EXISTS use_manual_progress BOOLEAN DEFAULT FALSE,
    ADD COLUMN IF NOT EXISTS use_weighted_progress BOOLEAN DEFAULT FALSE,
    ADD COLUMN IF NOT EXISTS use_time_progress BOOLEAN DEFAULT FALSE;

-- Add a trigger to reset manual progress when a task gets a new subtask
CREATE OR REPLACE FUNCTION reset_parent_task_manual_progress() RETURNS TRIGGER AS
$$
BEGIN
    -- When a task gets a new subtask (parent_task_id is set), reset the parent's manual_progress flag
    IF NEW.parent_task_id IS NOT NULL THEN
        UPDATE tasks
        SET manual_progress = FALSE
        WHERE id = NEW.parent_task_id
          AND manual_progress = TRUE;
    END IF;
    RETURN NEW;
END;
$$ LANGUAGE plpgsql;

-- Create the trigger on the tasks table
DROP TRIGGER IF EXISTS reset_parent_manual_progress_trigger ON tasks;
CREATE TRIGGER reset_parent_manual_progress_trigger
    AFTER INSERT OR UPDATE OF parent_task_id ON tasks
    FOR EACH ROW
EXECUTE FUNCTION reset_parent_task_manual_progress();
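
-- Illustrative effect (comment only, not part of the migration): after any
-- insert or re-parenting that sets parent_task_id to some task P, P's
-- manual_progress flag is cleared, so its ratio is derived from its subtasks.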

COMMIT;
@@ -0,0 +1,157 @@
-- Migration: Add progress and weight activity types support
-- Date: 2025-04-24
-- Version: 1.0.0

BEGIN;

-- Update the get_activity_logs_by_task function to handle progress and weight attribute types
CREATE OR REPLACE FUNCTION get_activity_logs_by_task(_task_id uuid) RETURNS json
    LANGUAGE plpgsql
AS
$$
DECLARE
    _result JSON;
BEGIN
    SELECT ROW_TO_JSON(rec)
    INTO _result
    FROM (SELECT (SELECT tasks.created_at FROM tasks WHERE tasks.id = _task_id),
                 (SELECT name
                  FROM users
                  WHERE id = (SELECT reporter_id FROM tasks WHERE id = _task_id)),
                 (SELECT avatar_url
                  FROM users
                  WHERE id = (SELECT reporter_id FROM tasks WHERE id = _task_id)),
                 (SELECT COALESCE(ARRAY_TO_JSON(ARRAY_AGG(ROW_TO_JSON(rec2))), '[]'::JSON)
                  FROM (SELECT task_id,
                               created_at,
                               attribute_type,
                               log_type,

                               -- Case for previous value
                               (CASE
                                    WHEN (attribute_type = 'status')
                                        THEN (SELECT name FROM task_statuses WHERE id = old_value::UUID)
                                    WHEN (attribute_type = 'priority')
                                        THEN (SELECT name FROM task_priorities WHERE id = old_value::UUID)
                                    WHEN (attribute_type = 'phase' AND old_value <> 'Unmapped')
                                        THEN (SELECT name FROM project_phases WHERE id = old_value::UUID)
                                    WHEN (attribute_type = 'progress' OR attribute_type = 'weight')
                                        THEN old_value
                                    ELSE (old_value) END) AS previous,

                               -- Case for current value
                               (CASE
                                    WHEN (attribute_type = 'assignee')
                                        THEN (SELECT name FROM users WHERE id = new_value::UUID)
                                    WHEN (attribute_type = 'label')
                                        THEN (SELECT name FROM team_labels WHERE id = new_value::UUID)
                                    WHEN (attribute_type = 'status')
                                        THEN (SELECT name FROM task_statuses WHERE id = new_value::UUID)
                                    WHEN (attribute_type = 'priority')
                                        THEN (SELECT name FROM task_priorities WHERE id = new_value::UUID)
                                    WHEN (attribute_type = 'phase' AND new_value <> 'Unmapped')
                                        THEN (SELECT name FROM project_phases WHERE id = new_value::UUID)
                                    WHEN (attribute_type = 'progress' OR attribute_type = 'weight')
                                        THEN new_value
                                    ELSE (new_value) END) AS current,

                               -- Case for assigned user
                               (CASE
                                    WHEN (attribute_type = 'assignee')
                                        THEN (SELECT ROW_TO_JSON(rec)
                                              FROM (SELECT (CASE
                                                                WHEN (new_value IS NOT NULL)
                                                                    THEN (SELECT name FROM users WHERE users.id = new_value::UUID)
                                                                ELSE (next_string) END) AS name,
                                                           (SELECT avatar_url FROM users WHERE users.id = new_value::UUID)) rec)
                                    ELSE (NULL) END) AS assigned_user,

                               -- Case for label data
                               (CASE
                                    WHEN (attribute_type = 'label')
                                        THEN (SELECT ROW_TO_JSON(rec)
                                              FROM (SELECT (SELECT name FROM team_labels WHERE id = new_value::UUID),
                                                           (SELECT color_code FROM team_labels WHERE id = new_value::UUID)) rec)
                                    ELSE (NULL) END) AS label_data,

                               -- Case for previous status
                               (CASE
                                    WHEN (attribute_type = 'status')
                                        THEN (SELECT ROW_TO_JSON(rec)
                                              FROM (SELECT (SELECT name FROM task_statuses WHERE id = old_value::UUID),
                                                           (SELECT color_code
                                                            FROM sys_task_status_categories
                                                            WHERE id = (SELECT category_id FROM task_statuses WHERE id = old_value::UUID)),
                                                           (SELECT color_code_dark
                                                            FROM sys_task_status_categories
                                                            WHERE id = (SELECT category_id FROM task_statuses WHERE id = old_value::UUID))) rec)
                                    ELSE (NULL) END) AS previous_status,

                               -- Case for next status
                               (CASE
                                    WHEN (attribute_type = 'status')
                                        THEN (SELECT ROW_TO_JSON(rec)
                                              FROM (SELECT (SELECT name FROM task_statuses WHERE id = new_value::UUID),
                                                           (SELECT color_code
                                                            FROM sys_task_status_categories
                                                            WHERE id = (SELECT category_id FROM task_statuses WHERE id = new_value::UUID)),
                                                           (SELECT color_code_dark
                                                            FROM sys_task_status_categories
                                                            WHERE id = (SELECT category_id FROM task_statuses WHERE id = new_value::UUID))) rec)
                                    ELSE (NULL) END) AS next_status,

                               -- Case for previous priority
                               (CASE
                                    WHEN (attribute_type = 'priority')
                                        THEN (SELECT ROW_TO_JSON(rec)
                                              FROM (SELECT (SELECT name FROM task_priorities WHERE id = old_value::UUID),
                                                           (SELECT color_code FROM task_priorities WHERE id = old_value::UUID)) rec)
                                    ELSE (NULL) END) AS previous_priority,

                               -- Case for next priority
                               (CASE
                                    WHEN (attribute_type = 'priority')
                                        THEN (SELECT ROW_TO_JSON(rec)
                                              FROM (SELECT (SELECT name FROM task_priorities WHERE id = new_value::UUID),
                                                           (SELECT color_code FROM task_priorities WHERE id = new_value::UUID)) rec)
                                    ELSE (NULL) END) AS next_priority,

                               -- Case for previous phase
                               (CASE
                                    WHEN (attribute_type = 'phase' AND old_value <> 'Unmapped')
                                        THEN (SELECT ROW_TO_JSON(rec)
                                              FROM (SELECT (SELECT name FROM project_phases WHERE id = old_value::UUID),
                                                           (SELECT color_code FROM project_phases WHERE id = old_value::UUID)) rec)
                                    ELSE (NULL) END) AS previous_phase,

                               -- Case for next phase
                               (CASE
                                    WHEN (attribute_type = 'phase' AND new_value <> 'Unmapped')
                                        THEN (SELECT ROW_TO_JSON(rec)
                                              FROM (SELECT (SELECT name FROM project_phases WHERE id = new_value::UUID),
                                                           (SELECT color_code FROM project_phases WHERE id = new_value::UUID)) rec)
                                    ELSE (NULL) END) AS next_phase,

                               -- Case for done by
                               (SELECT ROW_TO_JSON(rec)
                                FROM (SELECT (SELECT name FROM users WHERE users.id = tal.user_id),
                                             (SELECT avatar_url FROM users WHERE users.id = tal.user_id)) rec) AS done_by,

                               -- Add log text for progress and weight
                               (CASE
                                    WHEN (attribute_type = 'progress')
                                        THEN 'updated the progress of'
                                    WHEN (attribute_type = 'weight')
                                        THEN 'updated the weight of'
                                    ELSE ''
                                   END) AS log_text

                        FROM task_activity_logs tal
                        WHERE task_id = _task_id
                        ORDER BY created_at DESC) rec2) AS logs) rec;
    RETURN _result;
END;
$$;

COMMIT;
@@ -0,0 +1,243 @@
-- Migration: Update time-based progress mode to work for all tasks
-- Date: 2025-04-25
-- Version: 1.0.0

BEGIN;

-- Update function to use time-based progress for all tasks
CREATE OR REPLACE FUNCTION get_task_complete_ratio(_task_id uuid) RETURNS json
    LANGUAGE plpgsql
AS
$$
DECLARE
    _parent_task_done      FLOAT   = 0;
    _sub_tasks_done        FLOAT   = 0;
    _sub_tasks_count       FLOAT   = 0;
    _total_completed       FLOAT   = 0;
    _total_tasks           FLOAT   = 0;
    _ratio                 FLOAT   = 0;
    _is_manual             BOOLEAN = FALSE;
    _manual_value          INTEGER = NULL;
    _project_id            UUID;
    _use_manual_progress   BOOLEAN = FALSE;
    _use_weighted_progress BOOLEAN = FALSE;
    _use_time_progress     BOOLEAN = FALSE;
    _task_complete         BOOLEAN = FALSE;
BEGIN
    -- Check if manual progress is set for this task
    SELECT manual_progress, progress_value, project_id,
           EXISTS(SELECT 1
                  FROM tasks_with_status_view
                  WHERE tasks_with_status_view.task_id = tasks.id
                    AND is_done IS TRUE) AS is_complete
    FROM tasks
    WHERE id = _task_id
    INTO _is_manual, _manual_value, _project_id, _task_complete;

    -- Check if the project uses manual progress
    IF _project_id IS NOT NULL THEN
        SELECT COALESCE(use_manual_progress, FALSE),
               COALESCE(use_weighted_progress, FALSE),
               COALESCE(use_time_progress, FALSE)
        FROM projects
        WHERE id = _project_id
        INTO _use_manual_progress, _use_weighted_progress, _use_time_progress;
    END IF;

    -- Get all subtasks
    SELECT COUNT(*)
    FROM tasks
    WHERE parent_task_id = _task_id AND archived IS FALSE
    INTO _sub_tasks_count;

    -- If task is complete, always return 100%
    IF _task_complete IS TRUE THEN
        RETURN JSON_BUILD_OBJECT(
                'ratio', 100,
                'total_completed', 1,
                'total_tasks', 1,
                'is_manual', FALSE
            );
    END IF;

    -- Use manual progress value in two cases:
    -- 1. When task has manual_progress = TRUE and progress_value is set
    -- 2. When project has use_manual_progress = TRUE and progress_value is set
    IF (_is_manual IS TRUE AND _manual_value IS NOT NULL) OR
       (_use_manual_progress IS TRUE AND _manual_value IS NOT NULL) THEN
        RETURN JSON_BUILD_OBJECT(
                'ratio', _manual_value,
                'total_completed', 0,
                'total_tasks', 0,
                'is_manual', TRUE
            );
    END IF;

    -- If there are no subtasks, just use the parent task's status (unless in time-based mode)
    IF _sub_tasks_count = 0 THEN
        -- Use time-based estimation for tasks without subtasks if enabled
        IF _use_time_progress IS TRUE THEN
            -- For time-based tasks without subtasks, we still need some progress calculation:
            -- if the task is completed, return 100%; otherwise use the manually set
            -- progress value, or 0
            SELECT CASE
                       WHEN _task_complete IS TRUE THEN 100
                       ELSE COALESCE(_manual_value, 0)
                   END
            INTO _ratio;
        ELSE
            -- Traditional calculation for non-time-based tasks
            SELECT (CASE WHEN _task_complete IS TRUE THEN 1 ELSE 0 END)
            INTO _parent_task_done;

            _ratio = _parent_task_done * 100;
        END IF;
    ELSE
        -- If project uses manual progress, calculate based on subtask manual progress values
        IF _use_manual_progress IS TRUE THEN
            WITH subtask_progress AS (
                SELECT t.id,
                       t.manual_progress,
                       t.progress_value,
                       EXISTS(SELECT 1
                              FROM tasks_with_status_view
                              WHERE tasks_with_status_view.task_id = t.id
                                AND is_done IS TRUE) AS is_complete
                FROM tasks t
                WHERE t.parent_task_id = _task_id
                  AND t.archived IS FALSE
            ),
                 subtask_with_values AS (
                     SELECT CASE
                                -- For completed tasks, always use 100%
                                WHEN is_complete IS TRUE THEN 100
                                -- For tasks with progress value set, use it regardless of manual_progress flag
                                WHEN progress_value IS NOT NULL THEN progress_value
                                -- Default to 0 for incomplete tasks with no progress value
                                ELSE 0
                            END AS progress_value
                     FROM subtask_progress
                 )
            SELECT COALESCE(AVG(progress_value), 0)
            FROM subtask_with_values
            INTO _ratio;
        -- If project uses weighted progress, calculate based on subtask weights
        ELSIF _use_weighted_progress IS TRUE THEN
            WITH subtask_progress AS (
                SELECT t.id,
                       t.manual_progress,
                       t.progress_value,
                       EXISTS(SELECT 1
                              FROM tasks_with_status_view
                              WHERE tasks_with_status_view.task_id = t.id
                                AND is_done IS TRUE) AS is_complete,
                       COALESCE(t.weight, 100) AS weight
                FROM tasks t
                WHERE t.parent_task_id = _task_id
                  AND t.archived IS FALSE
            ),
                 subtask_with_values AS (
                     SELECT CASE
                                -- For completed tasks, always use 100%
                                WHEN is_complete IS TRUE THEN 100
                                -- For tasks with progress value set, use it regardless of manual_progress flag
                                WHEN progress_value IS NOT NULL THEN progress_value
                                -- Default to 0 for incomplete tasks with no progress value
                                ELSE 0
                            END AS progress_value,
                            weight
                     FROM subtask_progress
                 )
            SELECT COALESCE(
                           SUM(progress_value * weight) / NULLIF(SUM(weight), 0),
                           0
                       )
            FROM subtask_with_values
            INTO _ratio;
        -- If project uses time-based progress, calculate based on estimated time
        ELSIF _use_time_progress IS TRUE THEN
            WITH subtask_progress AS (
                SELECT t.id,
                       t.manual_progress,
                       t.progress_value,
                       EXISTS(SELECT 1
                              FROM tasks_with_status_view
                              WHERE tasks_with_status_view.task_id = t.id
                                AND is_done IS TRUE) AS is_complete,
                       COALESCE(t.total_minutes, 0) AS estimated_minutes
                FROM tasks t
                WHERE t.parent_task_id = _task_id
                  AND t.archived IS FALSE
            ),
                 subtask_with_values AS (
                     SELECT CASE
                                -- For completed tasks, always use 100%
                                WHEN is_complete IS TRUE THEN 100
                                -- For tasks with progress value set, use it regardless of manual_progress flag
                                WHEN progress_value IS NOT NULL THEN progress_value
                                -- Default to 0 for incomplete tasks with no progress value
                                ELSE 0
                            END AS progress_value,
                            estimated_minutes
                     FROM subtask_progress
                 )
            SELECT COALESCE(
                           SUM(progress_value * estimated_minutes) / NULLIF(SUM(estimated_minutes), 0),
                           0
                       )
            FROM subtask_with_values
            INTO _ratio;
        ELSE
            -- Traditional calculation based on completion status
            SELECT (CASE WHEN _task_complete IS TRUE THEN 1 ELSE 0 END)
            INTO _parent_task_done;

            SELECT COUNT(*)
            FROM tasks_with_status_view
            WHERE parent_task_id = _task_id
              AND is_done IS TRUE
            INTO _sub_tasks_done;

            _total_completed = _parent_task_done + _sub_tasks_done;
            _total_tasks = _sub_tasks_count + 1; -- +1 for the parent task

            IF _total_tasks = 0 THEN
                _ratio = 0;
            ELSE
                _ratio = (_total_completed / _total_tasks) * 100;
            END IF;
        END IF;
    END IF;

    -- Ensure ratio is between 0 and 100
    IF _ratio < 0 THEN
        _ratio = 0;
    ELSIF _ratio > 100 THEN
        _ratio = 100;
    END IF;

    RETURN JSON_BUILD_OBJECT(
            'ratio', _ratio,
            'total_completed', _total_completed,
            'total_tasks', _total_tasks,
            'is_manual', _is_manual
        );
END
$$;

COMMIT;
@@ -0,0 +1,289 @@
-- Migration: Improve parent task progress calculation using weights and time estimation
-- Date: 2025-04-26
-- Version: 1.0.0

BEGIN;

-- Update function to better calculate parent task progress based on subtask weights or time estimations
CREATE OR REPLACE FUNCTION get_task_complete_ratio(_task_id uuid) RETURNS json
    LANGUAGE plpgsql
AS
$$
DECLARE
    _parent_task_done      FLOAT   = 0;
    _sub_tasks_done        FLOAT   = 0;
    _sub_tasks_count       FLOAT   = 0;
    _total_completed       FLOAT   = 0;
    _total_tasks           FLOAT   = 0;
    _ratio                 FLOAT   = 0;
    _is_manual             BOOLEAN = FALSE;
    _manual_value          INTEGER = NULL;
    _project_id            UUID;
    _use_manual_progress   BOOLEAN = FALSE;
    _use_weighted_progress BOOLEAN = FALSE;
    _use_time_progress     BOOLEAN = FALSE;
BEGIN
    -- Check if manual progress is set for this task
    SELECT manual_progress, progress_value, project_id
    FROM tasks
    WHERE id = _task_id
    INTO _is_manual, _manual_value, _project_id;

    -- Check if the project uses manual progress
    IF _project_id IS NOT NULL THEN
        SELECT COALESCE(use_manual_progress, FALSE),
               COALESCE(use_weighted_progress, FALSE),
               COALESCE(use_time_progress, FALSE)
        FROM projects
        WHERE id = _project_id
        INTO _use_manual_progress, _use_weighted_progress, _use_time_progress;
    END IF;

    -- Get all subtasks
    SELECT COUNT(*)
    FROM tasks
    WHERE parent_task_id = _task_id AND archived IS FALSE
    INTO _sub_tasks_count;

    -- Only respect manual progress for tasks without subtasks
    IF _is_manual IS TRUE AND _manual_value IS NOT NULL AND _sub_tasks_count = 0 THEN
        RETURN JSON_BUILD_OBJECT(
                'ratio', _manual_value,
                'total_completed', 0,
                'total_tasks', 0,
                'is_manual', TRUE
            );
    END IF;

    -- If there are no subtasks, just use the parent task's status
    IF _sub_tasks_count = 0 THEN
        -- For tasks without subtasks in time-based mode
        IF _use_time_progress IS TRUE THEN
            SELECT CASE
                       WHEN EXISTS(SELECT 1
                                   FROM tasks_with_status_view
                                   WHERE tasks_with_status_view.task_id = _task_id
                                     AND is_done IS TRUE) THEN 100
                       ELSE COALESCE(_manual_value, 0)
                   END
            INTO _ratio;
        ELSE
            -- Traditional calculation for non-time-based tasks
            SELECT (CASE
                        WHEN EXISTS(SELECT 1
                                    FROM tasks_with_status_view
                                    WHERE tasks_with_status_view.task_id = _task_id
                                      AND is_done IS TRUE) THEN 1
                        ELSE 0 END)
            INTO _parent_task_done;

            _ratio = _parent_task_done * 100;
        END IF;
    ELSE
        -- For parent tasks with subtasks, always use the appropriate calculation based on project mode
        -- If project uses manual progress, calculate based on subtask manual progress values
        IF _use_manual_progress IS TRUE THEN
            WITH subtask_progress AS (
                SELECT CASE
                           -- If subtask has manual progress, use that value
                           WHEN manual_progress IS TRUE AND progress_value IS NOT NULL THEN
                               progress_value
                           -- Otherwise use completion status (0 or 100)
                           ELSE
                               CASE
                                   WHEN EXISTS(SELECT 1
                                               FROM tasks_with_status_view
                                               WHERE tasks_with_status_view.task_id = t.id
                                                 AND is_done IS TRUE) THEN 100
                                   ELSE 0
                               END
                       END AS progress_value
                FROM tasks t
                WHERE t.parent_task_id = _task_id
                  AND t.archived IS FALSE
            )
            SELECT COALESCE(AVG(progress_value), 0)
            FROM subtask_progress
            INTO _ratio;
        -- If project uses weighted progress, calculate based on subtask weights
        ELSIF _use_weighted_progress IS TRUE THEN
            WITH subtask_progress AS (
                SELECT CASE
                           -- If subtask has manual progress, use that value
                           WHEN manual_progress IS TRUE AND progress_value IS NOT NULL THEN
                               progress_value
                           -- Otherwise use completion status (0 or 100)
                           ELSE
                               CASE
                                   WHEN EXISTS(SELECT 1
                                               FROM tasks_with_status_view
                                               WHERE tasks_with_status_view.task_id = t.id
                                                 AND is_done IS TRUE) THEN 100
                                   ELSE 0
                               END
                       END AS progress_value,
                       COALESCE(weight, 100) AS weight -- Default weight is 100 if not specified
                FROM tasks t
                WHERE t.parent_task_id = _task_id
                  AND t.archived IS FALSE
            )
            SELECT COALESCE(
                           SUM(progress_value * weight) / NULLIF(SUM(weight), 0),
                           0
                       )
            FROM subtask_progress
            INTO _ratio;
        -- If project uses time-based progress, calculate based on estimated time (total_minutes)
        ELSIF _use_time_progress IS TRUE THEN
            WITH subtask_progress AS (
                SELECT CASE
                           -- If subtask has manual progress, use that value
                           WHEN manual_progress IS TRUE AND progress_value IS NOT NULL THEN
                               progress_value
                           -- Otherwise use completion status (0 or 100)
                           ELSE
                               CASE
                                   WHEN EXISTS(SELECT 1
                                               FROM tasks_with_status_view
                                               WHERE tasks_with_status_view.task_id = t.id
                                                 AND is_done IS TRUE) THEN 100
                                   ELSE 0
                               END
                       END AS progress_value,
                       COALESCE(total_minutes, 0) AS estimated_minutes -- Use time estimation for weighting
                FROM tasks t
                WHERE t.parent_task_id = _task_id
                  AND t.archived IS FALSE
            )
            SELECT COALESCE(
                           SUM(progress_value * estimated_minutes) / NULLIF(SUM(estimated_minutes), 0),
                           0
                       )
            FROM subtask_progress
            INTO _ratio;
        ELSE
            -- Traditional calculation based on completion status when no special mode is enabled
            SELECT (CASE
                        WHEN EXISTS(SELECT 1
                                    FROM tasks_with_status_view
                                    WHERE tasks_with_status_view.task_id = _task_id
                                      AND is_done IS TRUE) THEN 1
                        ELSE 0 END)
            INTO _parent_task_done;

            SELECT COUNT(*)
            FROM tasks_with_status_view
            WHERE parent_task_id = _task_id
              AND is_done IS TRUE
            INTO _sub_tasks_done;

            _total_completed = _parent_task_done + _sub_tasks_done;
            _total_tasks = _sub_tasks_count + 1; -- +1 for the parent task

            IF _total_tasks = 0 THEN
                _ratio = 0;
            ELSE
                _ratio = (_total_completed / _total_tasks) * 100;
            END IF;
        END IF;
    END IF;

    -- Ensure ratio is between 0 and 100
    IF _ratio < 0 THEN
        _ratio = 0;
    ELSIF _ratio > 100 THEN
        _ratio = 100;
    END IF;

    RETURN JSON_BUILD_OBJECT(
            'ratio', _ratio,
            'total_completed', _total_completed,
            'total_tasks', _total_tasks,
            'is_manual', _is_manual
        );
END
$$;

-- Make sure we recalculate parent task progress when subtask progress changes
CREATE OR REPLACE FUNCTION update_parent_task_progress() RETURNS TRIGGER AS
$$
DECLARE
    _parent_task_id UUID;
    _project_id     UUID;
    _ratio          FLOAT;
BEGIN
    -- Check if this is a subtask
    IF NEW.parent_task_id IS NOT NULL THEN
        _parent_task_id := NEW.parent_task_id;

        -- Force any parent task with subtasks to NOT use manual progress
        UPDATE tasks
        SET manual_progress = FALSE
        WHERE id = _parent_task_id;
    END IF;

    -- If this task has a progress value of 100 and doesn't have subtasks, we might want to
    -- prompt the user to mark it as done. We'll annotate this in a way that the socket
    -- handler can detect.
    IF NEW.progress_value = 100 OR NEW.weight = 100 OR NEW.total_minutes > 0 THEN
        -- Check if task has status in "done" category
        SELECT project_id FROM tasks WHERE id = NEW.id INTO _project_id;

        -- Get the progress ratio for this task
        SELECT get_task_complete_ratio(NEW.id) ->> 'ratio' INTO _ratio;

        IF _ratio::FLOAT >= 100 THEN
            -- Log that this task is at 100% progress
            RAISE NOTICE 'Task % progress is at 100%%, may need status update', NEW.id;
        END IF;
    END IF;

    RETURN NEW;
END;
$$ LANGUAGE plpgsql;

-- Create trigger for updates to task progress
DROP TRIGGER IF EXISTS update_parent_task_progress_trigger ON tasks;
CREATE TRIGGER update_parent_task_progress_trigger
    AFTER UPDATE OF progress_value, weight, total_minutes ON tasks
|
||||
FOR EACH ROW
|
||||
EXECUTE FUNCTION update_parent_task_progress();
|
||||
|
||||
-- Create a function to ensure parent tasks never have manual progress when they have subtasks
|
||||
CREATE OR REPLACE FUNCTION ensure_parent_task_without_manual_progress() RETURNS TRIGGER AS
|
||||
$$
|
||||
BEGIN
|
||||
-- If this is a new subtask being created or a task is being converted to a subtask
|
||||
IF NEW.parent_task_id IS NOT NULL THEN
|
||||
-- Force the parent task to NOT use manual progress
|
||||
UPDATE tasks
|
||||
SET manual_progress = FALSE
|
||||
WHERE id = NEW.parent_task_id;
|
||||
|
||||
-- Log that we've reset manual progress for a parent task
|
||||
RAISE NOTICE 'Reset manual progress for parent task % because it has subtasks', NEW.parent_task_id;
|
||||
END IF;
|
||||
|
||||
RETURN NEW;
|
||||
END;
|
||||
$$ LANGUAGE plpgsql;
|
||||
|
||||
-- Create trigger for when tasks are created or updated with a parent_task_id
|
||||
DROP TRIGGER IF EXISTS ensure_parent_task_without_manual_progress_trigger ON tasks;
|
||||
CREATE TRIGGER ensure_parent_task_without_manual_progress_trigger
|
||||
AFTER INSERT OR UPDATE OF parent_task_id ON tasks
|
||||
FOR EACH ROW
|
||||
EXECUTE FUNCTION ensure_parent_task_without_manual_progress();
|
||||
|
||||
COMMIT;
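-- Illustration only (not part of the migration): the weighted branch above reduces to
-- SUM(progress * weight) / SUM(weight). Two subtasks at 50% (weight 100) and 100%
-- (weight 300) give a parent ratio of 87.5:
SELECT SUM(progress_value * weight)::FLOAT / NULLIF(SUM(weight), 0) AS ratio
FROM (VALUES (50, 100), (100, 300)) AS subtask_progress(progress_value, weight);
-- => 87.5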
@@ -0,0 +1,150 @@
-- Migration: Update socket event handlers to set progress-mode handlers
-- Date: 2025-04-26
-- Version: 1.0.0

BEGIN;

-- Create ENUM type for progress modes
CREATE TYPE progress_mode_type AS ENUM ('manual', 'weighted', 'time', 'default');

-- Alter tasks table to use ENUM type
ALTER TABLE tasks
    ALTER COLUMN progress_mode TYPE progress_mode_type
        USING progress_mode::text::progress_mode_type;

-- Update the on_update_task_progress function to set progress_mode
CREATE OR REPLACE FUNCTION on_update_task_progress(_body json) RETURNS json
    LANGUAGE plpgsql
AS
$$
DECLARE
    _task_id        UUID;
    _progress_value INTEGER;
    _parent_task_id UUID;
    _project_id     UUID;
    _current_mode   progress_mode_type;
BEGIN
    _task_id = (_body ->> 'task_id')::UUID;
    _progress_value = (_body ->> 'progress_value')::INTEGER;
    _parent_task_id = (_body ->> 'parent_task_id')::UUID;

    -- Get the project ID and determine the current progress mode
    SELECT project_id INTO _project_id FROM tasks WHERE id = _task_id;

    IF _project_id IS NOT NULL THEN
        SELECT
            CASE
                WHEN use_manual_progress IS TRUE THEN 'manual'
                WHEN use_weighted_progress IS TRUE THEN 'weighted'
                WHEN use_time_progress IS TRUE THEN 'time'
                ELSE 'default'
            END
        INTO _current_mode
        FROM projects
        WHERE id = _project_id;
    ELSE
        _current_mode := 'default';
    END IF;

    -- Update the task with progress value and set the progress mode
    UPDATE tasks
    SET progress_value  = _progress_value,
        manual_progress = TRUE,
        progress_mode   = _current_mode,
        updated_at      = CURRENT_TIMESTAMP
    WHERE id = _task_id;

    -- Return the updated task info
    RETURN JSON_BUILD_OBJECT(
        'task_id', _task_id,
        'progress_value', _progress_value,
        'progress_mode', _current_mode
    );
END;
$$;

-- Update the on_update_task_weight function to set progress_mode when weight is updated
CREATE OR REPLACE FUNCTION on_update_task_weight(_body json) RETURNS json
    LANGUAGE plpgsql
AS
$$
DECLARE
    _task_id        UUID;
    _weight         INTEGER;
    _parent_task_id UUID;
    _project_id     UUID;
BEGIN
    _task_id = (_body ->> 'task_id')::UUID;
    _weight = (_body ->> 'weight')::INTEGER;
    _parent_task_id = (_body ->> 'parent_task_id')::UUID;

    -- Get the project ID
    SELECT project_id INTO _project_id FROM tasks WHERE id = _task_id;

    -- Update the task with weight value and set progress_mode to 'weighted'
    UPDATE tasks
    SET weight        = _weight,
        progress_mode = 'weighted',
        updated_at    = CURRENT_TIMESTAMP
    WHERE id = _task_id;

    -- Return the updated task info
    RETURN JSON_BUILD_OBJECT(
        'task_id', _task_id,
        'weight', _weight
    );
END;
$$;

-- Create a function to reset progress values when switching project progress modes
CREATE OR REPLACE FUNCTION reset_project_progress_values() RETURNS TRIGGER
    LANGUAGE plpgsql
AS
$$
DECLARE
    _old_mode   progress_mode_type;
    _new_mode   progress_mode_type;
    _project_id UUID;
BEGIN
    _project_id := NEW.id;

    -- Determine old and new modes
    _old_mode :=
        CASE
            WHEN OLD.use_manual_progress IS TRUE THEN 'manual'
            WHEN OLD.use_weighted_progress IS TRUE THEN 'weighted'
            WHEN OLD.use_time_progress IS TRUE THEN 'time'
            ELSE 'default'
        END;

    _new_mode :=
        CASE
            WHEN NEW.use_manual_progress IS TRUE THEN 'manual'
            WHEN NEW.use_weighted_progress IS TRUE THEN 'weighted'
            WHEN NEW.use_time_progress IS TRUE THEN 'time'
            ELSE 'default'
        END;

    -- If mode has changed, reset progress values for tasks with the old mode
    IF _old_mode <> _new_mode THEN
        -- Reset progress values for tasks that were set in the old mode
        UPDATE tasks
        SET progress_value = NULL,
            progress_mode  = NULL
        WHERE project_id = _project_id
          AND progress_mode = _old_mode;
    END IF;

    RETURN NEW;
END;
$$;

-- Create trigger to reset progress values when project progress mode changes
DROP TRIGGER IF EXISTS reset_progress_on_mode_change ON projects;
CREATE TRIGGER reset_progress_on_mode_change
    AFTER UPDATE OF use_manual_progress, use_weighted_progress, use_time_progress
    ON projects
    FOR EACH ROW
EXECUTE FUNCTION reset_project_progress_values();

COMMIT;
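-- Illustration only (not part of the migration): the socket handler passes a JSON body,
-- so the function can be exercised directly; the UUID below is a placeholder:
SELECT on_update_task_progress(
    '{"task_id": "00000000-0000-0000-0000-000000000000", "progress_value": 60}'::json
);
-- returns a JSON object with task_id, progress_value and the resolved progress_mode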
@@ -0,0 +1,160 @@
-- Migration: Fix progress_mode_type ENUM and casting issues
-- Date: 2025-04-27
-- Version: 1.0.0

BEGIN;

-- First, let's ensure the ENUM type exists with the correct values
DO $$
BEGIN
    -- Check if the type exists
    IF NOT EXISTS (SELECT 1 FROM pg_type WHERE typname = 'progress_mode_type') THEN
        CREATE TYPE progress_mode_type AS ENUM ('manual', 'weighted', 'time', 'default');
    ELSE
        -- Add any missing values to the existing ENUM
        BEGIN
            ALTER TYPE progress_mode_type ADD VALUE IF NOT EXISTS 'manual';
            ALTER TYPE progress_mode_type ADD VALUE IF NOT EXISTS 'weighted';
            ALTER TYPE progress_mode_type ADD VALUE IF NOT EXISTS 'time';
            ALTER TYPE progress_mode_type ADD VALUE IF NOT EXISTS 'default';
        EXCEPTION
            WHEN duplicate_object THEN
                -- Ignore if values already exist
                NULL;
        END;
    END IF;
END $$;

-- Update functions to use proper type casting
CREATE OR REPLACE FUNCTION on_update_task_progress(_body json) RETURNS json
    LANGUAGE plpgsql
AS
$$
DECLARE
    _task_id        UUID;
    _progress_value INTEGER;
    _parent_task_id UUID;
    _project_id     UUID;
    _current_mode   progress_mode_type;
BEGIN
    _task_id = (_body ->> 'task_id')::UUID;
    _progress_value = (_body ->> 'progress_value')::INTEGER;
    _parent_task_id = (_body ->> 'parent_task_id')::UUID;

    -- Get the project ID and determine the current progress mode
    SELECT project_id INTO _project_id FROM tasks WHERE id = _task_id;

    IF _project_id IS NOT NULL THEN
        SELECT
            CASE
                WHEN use_manual_progress IS TRUE THEN 'manual'::progress_mode_type
                WHEN use_weighted_progress IS TRUE THEN 'weighted'::progress_mode_type
                WHEN use_time_progress IS TRUE THEN 'time'::progress_mode_type
                ELSE 'default'::progress_mode_type
            END
        INTO _current_mode
        FROM projects
        WHERE id = _project_id;
    ELSE
        _current_mode := 'default'::progress_mode_type;
    END IF;

    -- Update the task with progress value and set the progress mode
    UPDATE tasks
    SET progress_value  = _progress_value,
        manual_progress = TRUE,
        progress_mode   = _current_mode,
        updated_at      = CURRENT_TIMESTAMP
    WHERE id = _task_id;

    -- Return the updated task info
    RETURN JSON_BUILD_OBJECT(
        'task_id', _task_id,
        'progress_value', _progress_value,
        'progress_mode', _current_mode
    );
END;
$$;

-- Update the on_update_task_weight function to use proper type casting
CREATE OR REPLACE FUNCTION on_update_task_weight(_body json) RETURNS json
    LANGUAGE plpgsql
AS
$$
DECLARE
    _task_id        UUID;
    _weight         INTEGER;
    _parent_task_id UUID;
    _project_id     UUID;
BEGIN
    _task_id = (_body ->> 'task_id')::UUID;
    _weight = (_body ->> 'weight')::INTEGER;
    _parent_task_id = (_body ->> 'parent_task_id')::UUID;

    -- Get the project ID
    SELECT project_id INTO _project_id FROM tasks WHERE id = _task_id;

    -- Update the task with weight value and set progress_mode to 'weighted'
    UPDATE tasks
    SET weight        = _weight,
        progress_mode = 'weighted'::progress_mode_type,
        updated_at    = CURRENT_TIMESTAMP
    WHERE id = _task_id;

    -- Return the updated task info
    RETURN JSON_BUILD_OBJECT(
        'task_id', _task_id,
        'weight', _weight
    );
END;
$$;

-- Update the reset_project_progress_values function to use proper type casting
CREATE OR REPLACE FUNCTION reset_project_progress_values() RETURNS TRIGGER
    LANGUAGE plpgsql
AS
$$
DECLARE
    _old_mode   progress_mode_type;
    _new_mode   progress_mode_type;
    _project_id UUID;
BEGIN
    _project_id := NEW.id;

    -- Determine old and new modes with proper type casting
    _old_mode :=
        CASE
            WHEN OLD.use_manual_progress IS TRUE THEN 'manual'::progress_mode_type
            WHEN OLD.use_weighted_progress IS TRUE THEN 'weighted'::progress_mode_type
            WHEN OLD.use_time_progress IS TRUE THEN 'time'::progress_mode_type
            ELSE 'default'::progress_mode_type
        END;

    _new_mode :=
        CASE
            WHEN NEW.use_manual_progress IS TRUE THEN 'manual'::progress_mode_type
            WHEN NEW.use_weighted_progress IS TRUE THEN 'weighted'::progress_mode_type
            WHEN NEW.use_time_progress IS TRUE THEN 'time'::progress_mode_type
            ELSE 'default'::progress_mode_type
        END;

    -- If mode has changed, reset progress values for tasks with the old mode
    IF _old_mode <> _new_mode THEN
        -- Reset progress values for tasks that were set in the old mode
        UPDATE tasks
        SET progress_value = NULL,
            progress_mode  = NULL
        WHERE project_id = _project_id
          AND progress_mode = _old_mode;
    END IF;

    RETURN NEW;
END;
$$;

-- Update the tasks table to ensure proper type casting for existing values
UPDATE tasks
SET progress_mode = progress_mode::text::progress_mode_type
WHERE progress_mode IS NOT NULL;

COMMIT;
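-- Illustration only (not part of the migration): the values present on the ENUM can be
-- inspected after the migration runs:
SELECT enum_range(NULL::progress_mode_type);
-- => {manual,weighted,time,default}
-- Caveat worth noting: ALTER TYPE ... ADD VALUE cannot run inside a transaction block
-- before PostgreSQL 12, so on older servers the DO block above would fail under BEGIN/COMMIT.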
@@ -0,0 +1,166 @@
-- Migration: Fix multilevel subtask progress calculation for weighted and manual progress
-- Date: 2025-05-06
-- Version: 1.0.0

BEGIN;

-- Update the trigger function to recursively recalculate parent task progress up the entire hierarchy
CREATE OR REPLACE FUNCTION update_parent_task_progress() RETURNS TRIGGER AS
$$
DECLARE
    _parent_task_id UUID;
    _project_id     UUID;
    _ratio          FLOAT;
BEGIN
    -- Check if this is a subtask
    IF NEW.parent_task_id IS NOT NULL THEN
        _parent_task_id := NEW.parent_task_id;

        -- Force any parent task with subtasks to NOT use manual progress
        UPDATE tasks
        SET manual_progress = FALSE
        WHERE id = _parent_task_id;

        -- Calculate and update the parent's progress value
        SELECT (get_task_complete_ratio(_parent_task_id)->>'ratio')::FLOAT INTO _ratio;

        -- Update the parent's progress value
        UPDATE tasks
        SET progress_value = _ratio
        WHERE id = _parent_task_id;

        -- Recursively propagate changes up the hierarchy by using a recursive CTE
        WITH RECURSIVE task_hierarchy AS (
            -- Base case: Start with the parent task
            SELECT
                id,
                parent_task_id
            FROM tasks
            WHERE id = _parent_task_id

            UNION ALL

            -- Recursive case: Go up to each ancestor
            SELECT
                t.id,
                t.parent_task_id
            FROM tasks t
            JOIN task_hierarchy th ON t.id = th.parent_task_id
            WHERE t.id IS NOT NULL
        )
        -- For each ancestor, recalculate its progress
        UPDATE tasks
        SET manual_progress = FALSE,
            progress_value  = (SELECT (get_task_complete_ratio(task_hierarchy.id)->>'ratio')::FLOAT)
        FROM task_hierarchy
        WHERE tasks.id = task_hierarchy.id
          AND task_hierarchy.parent_task_id IS NOT NULL;

        -- Log the recalculation for debugging
        RAISE NOTICE 'Updated progress for task % to %', _parent_task_id, _ratio;
    END IF;

    -- If this task has progress value of 100 and doesn't have subtasks, we might want to prompt the user
    -- to mark it as done. We'll annotate this in a way that the socket handler can detect.
    IF NEW.progress_value = 100 OR NEW.weight = 100 OR NEW.total_minutes > 0 THEN
        -- Check if task has status in "done" category
        SELECT project_id FROM tasks WHERE id = NEW.id INTO _project_id;

        -- Get the progress ratio for this task
        SELECT (get_task_complete_ratio(NEW.id)->>'ratio')::FLOAT INTO _ratio;

        IF _ratio >= 100 THEN
            -- Log that this task is at 100% progress
            RAISE NOTICE 'Task % progress is at 100%%, may need status update', NEW.id;
        END IF;
    END IF;

    RETURN NEW;
END;
$$ LANGUAGE plpgsql;

-- Update existing trigger or create a new one to handle more changes
DROP TRIGGER IF EXISTS update_parent_task_progress_trigger ON tasks;
CREATE TRIGGER update_parent_task_progress_trigger
    AFTER UPDATE OF progress_value, weight, total_minutes, parent_task_id, manual_progress ON tasks
    FOR EACH ROW
EXECUTE FUNCTION update_parent_task_progress();

-- Also add a trigger for when a new task is inserted
DROP TRIGGER IF EXISTS update_parent_task_progress_on_insert_trigger ON tasks;
CREATE TRIGGER update_parent_task_progress_on_insert_trigger
    AFTER INSERT ON tasks
    FOR EACH ROW
    WHEN (NEW.parent_task_id IS NOT NULL)
EXECUTE FUNCTION update_parent_task_progress();

-- Add a comment to explain the fix
COMMENT ON FUNCTION update_parent_task_progress() IS
    'This function recursively updates progress values for all ancestors when a task''s progress changes.
The previous version only updated the immediate parent, which led to incorrect progress values for
higher-level parent tasks when using weighted or manual progress calculations with multi-level subtasks.';

-- Add a function to immediately recalculate all task progress values in the correct order
-- This will fix existing data where parent tasks don't have proper progress values
CREATE OR REPLACE FUNCTION recalculate_all_task_progress() RETURNS void AS
$$
BEGIN
    -- First, reset manual_progress flag for all tasks that have subtasks
    UPDATE tasks AS t
    SET manual_progress = FALSE
    WHERE EXISTS (
        SELECT 1
        FROM tasks
        WHERE parent_task_id = t.id
          AND archived IS FALSE
    );

    -- Start recalculation from leaf tasks (no subtasks) and propagate upward
    -- This ensures calculations are done in the right order
    WITH RECURSIVE task_hierarchy AS (
        -- Base case: Start with all leaf tasks (no subtasks)
        SELECT
            id,
            parent_task_id,
            0 AS level
        FROM tasks
        WHERE NOT EXISTS (
            SELECT 1 FROM tasks AS sub
            WHERE sub.parent_task_id = tasks.id
              AND sub.archived IS FALSE
        )
          AND archived IS FALSE

        UNION ALL

        -- Recursive case: Move up to parent tasks, but only after processing all their children
        SELECT
            t.id,
            t.parent_task_id,
            th.level + 1
        FROM tasks t
        JOIN task_hierarchy th ON t.id = th.parent_task_id
        WHERE t.archived IS FALSE
    )
    -- Sort by level to ensure we calculate in the right order (leaves first, then parents)
    -- This ensures we're using already updated progress values
    UPDATE tasks
    SET progress_value = (SELECT (get_task_complete_ratio(tasks.id)->>'ratio')::FLOAT)
    FROM (
        SELECT id, level
        FROM task_hierarchy
        ORDER BY level
    ) AS ordered_tasks
    WHERE tasks.id = ordered_tasks.id
      AND (manual_progress IS FALSE OR manual_progress IS NULL);

    -- Log the completion of the recalculation
    RAISE NOTICE 'Finished recalculating all task progress values';
END;
$$ LANGUAGE plpgsql;

-- Execute the function to fix existing data
SELECT recalculate_all_task_progress();

COMMIT;
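-- Illustration only (not part of the migration): the same recursive-CTE shape can be used
-- to list the ancestor chain the trigger walks; the UUID below is a placeholder:
WITH RECURSIVE ancestors AS (
    SELECT id, parent_task_id, 0 AS depth
    FROM tasks
    WHERE id = '00000000-0000-0000-0000-000000000000'::uuid
    UNION ALL
    SELECT t.id, t.parent_task_id, a.depth + 1
    FROM tasks t
    JOIN ancestors a ON t.id = a.parent_task_id
)
SELECT id, depth FROM ancestors ORDER BY depth;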
worklenz-backend/database/migrations/README.md (new file, 2 lines)
@@ -0,0 +1,2 @@
Migrations should be executed in the sequence specified by the filename. They should be removed once the migrations
have been released to all databases.
worklenz-backend/database/sql/0_extensions.sql (new file, 3 lines)
@@ -0,0 +1,3 @@
-- Extensions
CREATE EXTENSION IF NOT EXISTS "uuid-ossp";
CREATE EXTENSION IF NOT EXISTS "unaccent";
worklenz-backend/database/sql/1_tables.sql (new file, 2281 lines; diff suppressed: too large)
worklenz-backend/database/sql/2_dml.sql (new file, 144 lines)
@@ -0,0 +1,144 @@
CREATE OR REPLACE FUNCTION sys_insert_task_priorities() RETURNS VOID AS
$$
BEGIN
    INSERT INTO task_priorities (name, value, color_code, color_code_dark) VALUES ('Medium', 1, '#fbc84c', '#FFC227');
    INSERT INTO task_priorities (name, value, color_code, color_code_dark) VALUES ('Low', 0, '#75c997', '#46D980');
    INSERT INTO task_priorities (name, value, color_code, color_code_dark) VALUES ('High', 2, '#f37070', '#FF4141');
END;
$$ LANGUAGE plpgsql;

CREATE OR REPLACE FUNCTION sys_insert_project_access_levels() RETURNS VOID AS
$$
BEGIN
    INSERT INTO project_access_levels (name, key)
    VALUES ('Admin', 'ADMIN');
    INSERT INTO project_access_levels (name, key)
    VALUES ('Member', 'MEMBER');
    INSERT INTO project_access_levels (name, key)
    VALUES ('Project Manager', 'PROJECT_MANAGER');
END;
$$ LANGUAGE plpgsql;

CREATE OR REPLACE FUNCTION sys_insert_task_status_categories() RETURNS VOID AS
$$
BEGIN
    INSERT INTO public.sys_task_status_categories (name, color_code, index, is_todo, is_doing, is_done, description,
                                                   color_code_dark)
    VALUES ('To do', '#a9a9a9', 1, TRUE, FALSE, FALSE,
            'For tasks that have not been started.', '#989898');
    INSERT INTO public.sys_task_status_categories (name, color_code, index, is_todo, is_doing, is_done, description,
                                                   color_code_dark)
    VALUES ('Doing', '#70a6f3', 2, FALSE, TRUE, FALSE,
            'For tasks that have been started.', '#4190FF');
    INSERT INTO public.sys_task_status_categories (name, color_code, index, is_todo, is_doing, is_done, description,
                                                   color_code_dark)
    VALUES ('Done', '#75c997', 3, FALSE, FALSE, TRUE,
            'For tasks that have been completed.', '#46D980');
END;
$$ LANGUAGE plpgsql;

CREATE OR REPLACE FUNCTION sys_insert_project_statuses() RETURNS VOID AS
$$
BEGIN
    INSERT INTO public.sys_project_statuses (name, color_code, icon, sort_order, is_default)
    VALUES ('Cancelled', '#f37070', 'close-circle', 0, FALSE),
           ('Blocked', '#cbc8a1', 'stop', 1, FALSE),
           ('On Hold', '#cbc8a1', 'stop', 2, FALSE),
           ('Proposed', '#cbc8a1', 'clock-circle', 3, TRUE),
           ('In Planning', '#cbc8a1', 'clock-circle', 4, FALSE),
           ('In Progress', '#80ca79', 'clock-circle', 5, FALSE),
           ('Completed', '#80ca79', 'check-circle', 6, FALSE),
           ('Continuous', '#80ca79', 'clock-circle', 7, FALSE);
END;
$$ LANGUAGE plpgsql;

CREATE OR REPLACE FUNCTION sys_insert_project_healths() RETURNS VOID AS
$$
BEGIN
    INSERT INTO sys_project_healths (name, color_code, sort_order, is_default)
    VALUES ('Not Set', '#a9a9a9', 0, TRUE);
    INSERT INTO sys_project_healths (name, color_code, sort_order, is_default)
    VALUES ('Needs Attention', '#fbc84c', 1, FALSE);
    INSERT INTO sys_project_healths (name, color_code, sort_order, is_default)
    VALUES ('At Risk', '#f37070', 2, FALSE);
    INSERT INTO sys_project_healths (name, color_code, sort_order, is_default)
    VALUES ('Good', '#75c997', 3, FALSE);
END;
$$ LANGUAGE plpgsql;

CREATE OR REPLACE FUNCTION sys_insert_license_types() RETURNS VOID AS
$$
BEGIN
    INSERT INTO public.sys_license_types (name, key)
    VALUES ('Custom Subscription', 'CUSTOM'),
           ('Free Trial', 'TRIAL'),
           ('Paddle Subscription', 'PADDLE'),
           ('Credit Subscription', 'CREDIT'),
           ('Free Plan', 'FREE'),
           ('Life Time Deal', 'LIFE_TIME_DEAL'),
           ('Self Hosted', 'SELF_HOSTED');
END;
$$ LANGUAGE plpgsql;

CREATE OR REPLACE FUNCTION sys_insert_project_templates() RETURNS VOID AS
$$
DECLARE
    medium_priority_id UUID;
    todo_category_id   UUID;
    doing_category_id  UUID;
    done_category_id   UUID;
BEGIN
    -- Fetch IDs to avoid repeated subqueries
    SELECT id INTO medium_priority_id FROM task_priorities WHERE name = 'Medium' LIMIT 1;
    SELECT id INTO todo_category_id FROM public.sys_task_status_categories WHERE name = 'To do' LIMIT 1;
    SELECT id INTO doing_category_id FROM public.sys_task_status_categories WHERE name = 'Doing' LIMIT 1;
    SELECT id INTO done_category_id FROM public.sys_task_status_categories WHERE name = 'Done' LIMIT 1;

    INSERT INTO public.pt_project_templates (id, name, key, description, phase_label, image_url, color_code)
    VALUES ('39db59be-1dba-448b-87f4-3b955ea699d2', 'Bug Tracking', 'BT', 'The "Bug Tracking" project template is a versatile solution meticulously designed to streamline and enhance the bug management processes of businesses across diverse industries. This template is especially valuable for organizations that rely on software development, IT services, or digital product management. It provides a structured and efficient approach to tracking, resolving, and improving software issues.', 'Phase', 'https://worklenz.s3.amazonaws.com/project-template-gifs/bug-tracking.gif', '#3b7ad4');

    INSERT INTO public.pt_statuses (id, name, template_id, category_id)
    VALUES ('c3242606-5a24-48aa-8320-cc90a05c2589', 'To Do', '39db59be-1dba-448b-87f4-3b955ea699d2', todo_category_id),
           ('05ed8d04-92b1-4c44-bd06-abee29641f31', 'Doing', '39db59be-1dba-448b-87f4-3b955ea699d2', doing_category_id),
           ('66e80bc8-6b29-4e72-a484-1593eb1fb44b', 'Done', '39db59be-1dba-448b-87f4-3b955ea699d2', done_category_id);

    INSERT INTO public.pt_tasks (id, name, description, total_minutes, sort_order, priority_id, template_id, parent_task_id, status_id)
    VALUES ('a75993d9-3fb3-4d0b-a5d4-cab53b60462c', 'Testing and Verification', NULL, 0, 0, medium_priority_id, '39db59be-1dba-448b-87f4-3b955ea699d2', NULL, 'c3242606-5a24-48aa-8320-cc90a05c2589'),
           ('3fdb6801-bc09-4d71-8273-987cd3d1e0f6', 'Bug Prioritization', NULL, 0, 6, medium_priority_id, '39db59be-1dba-448b-87f4-3b955ea699d2', NULL, '05ed8d04-92b1-4c44-bd06-abee29641f31'),
           ('ca64f247-a186-4edb-affd-738f1c2a4d60', 'Bug reporting', NULL, 0, 2, medium_priority_id, '39db59be-1dba-448b-87f4-3b955ea699d2', NULL, 'c3242606-5a24-48aa-8320-cc90a05c2589'),
           ('1e493de8-38cf-4e6e-8f0b-5e1f6f3b07f4', 'Bug Assignment', NULL, 0, 5, medium_priority_id, '39db59be-1dba-448b-87f4-3b955ea699d2', NULL, '05ed8d04-92b1-4c44-bd06-abee29641f31'),
           ('67b2ab3c-53e5-428c-bbad-8bdc19dc88de', 'Bug Closure', NULL, 0, 4, medium_priority_id, '39db59be-1dba-448b-87f4-3b955ea699d2', NULL, '66e80bc8-6b29-4e72-a484-1593eb1fb44b'),
           ('9311ff84-1052-4989-8192-0fea20204fbe', 'Documentation', NULL, 0, 3, medium_priority_id, '39db59be-1dba-448b-87f4-3b955ea699d2', NULL, '66e80bc8-6b29-4e72-a484-1593eb1fb44b'),
           ('7d0697cd-868c-4b41-9f4f-f9a8c1131b24', 'Reporting', NULL, 0, 1, medium_priority_id, '39db59be-1dba-448b-87f4-3b955ea699d2', NULL, '66e80bc8-6b29-4e72-a484-1593eb1fb44b');

    INSERT INTO public.pt_task_phases (task_id, phase_id)
    VALUES ('a75993d9-3fb3-4d0b-a5d4-cab53b60462c', '4b4a8fe0-4f35-464a-a337-848e5b432ab5'),
           ('3fdb6801-bc09-4d71-8273-987cd3d1e0f6', '557b58ca-3335-4b41-9880-fdd0f990deb9'),
           ('ca64f247-a186-4edb-affd-738f1c2a4d60', '62097027-979f-4b00-afb8-f70fba533f80'),
           ('1e493de8-38cf-4e6e-8f0b-5e1f6f3b07f4', 'e3128891-4873-4795-ad8a-880474280045'),
           ('67b2ab3c-53e5-428c-bbad-8bdc19dc88de', '77204bf3-fcb3-4e39-a843-14458b2f659d'),
           ('9311ff84-1052-4989-8192-0fea20204fbe', '62097027-979f-4b00-afb8-f70fba533f80'),
           ('7d0697cd-868c-4b41-9f4f-f9a8c1131b24', '62097027-979f-4b00-afb8-f70fba533f80');
END;
$$ LANGUAGE plpgsql;


SELECT sys_insert_task_priorities();
SELECT sys_insert_project_access_levels();
SELECT sys_insert_task_status_categories();
SELECT sys_insert_project_statuses();
SELECT sys_insert_project_healths();
SELECT sys_insert_license_types();
-- SELECT sys_insert_project_templates();

DROP FUNCTION sys_insert_task_priorities();
DROP FUNCTION sys_insert_project_access_levels();
DROP FUNCTION sys_insert_task_status_categories();
DROP FUNCTION sys_insert_project_statuses();
DROP FUNCTION sys_insert_project_healths();
DROP FUNCTION sys_insert_license_types();
-- DROP FUNCTION sys_insert_project_templates();

INSERT INTO timezones (name, abbrev, utc_offset)
SELECT name, abbrev, utc_offset
FROM pg_timezone_names;
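-- Illustration only: a quick sanity check that the seed functions ran before being dropped:
SELECT name, value FROM task_priorities ORDER BY value;
-- => Low (0), Medium (1), High (2)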
worklenz-backend/database/sql/3_views.sql (new file, 34 lines)
@@ -0,0 +1,34 @@
CREATE OR REPLACE VIEW task_labels_view(name, task_id, label_id) AS
SELECT (SELECT team_labels.name
        FROM team_labels
        WHERE team_labels.id = task_labels.label_id) AS name,
       task_labels.task_id,
       task_labels.label_id
FROM task_labels;

CREATE OR REPLACE VIEW tasks_with_status_view(task_id, parent_task_id, is_todo, is_doing, is_done) AS
SELECT tasks.id AS task_id,
       tasks.parent_task_id,
       stsc.is_todo,
       stsc.is_doing,
       stsc.is_done
FROM tasks
         JOIN task_statuses ts ON tasks.status_id = ts.id
         JOIN sys_task_status_categories stsc ON ts.category_id = stsc.id
WHERE tasks.archived IS FALSE;

CREATE OR REPLACE VIEW team_member_info_view(avatar_url, email, name, user_id, team_member_id, team_id, active) AS
SELECT u.avatar_url,
       COALESCE(u.email, (SELECT email_invitations.email
                          FROM email_invitations
                          WHERE email_invitations.team_member_id = team_members.id)) AS email,
       COALESCE(u.name, (SELECT email_invitations.name
                         FROM email_invitations
                         WHERE email_invitations.team_member_id = team_members.id)) AS name,
       u.id            AS user_id,
       team_members.id AS team_member_id,
       team_members.team_id,
       team_members.active
FROM team_members
         LEFT JOIN users u ON team_members.user_id = u.id;
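-- Illustration only: tasks_with_status_view is what the progress functions above consult;
-- e.g. counting completed subtasks per parent:
SELECT parent_task_id, COUNT(*) AS done_subtasks
FROM tasks_with_status_view
WHERE is_done IS TRUE
  AND parent_task_id IS NOT NULL
GROUP BY parent_task_id;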
worklenz-backend/database/sql/4_functions.sql (new file, 6150 lines; diff suppressed: too large)
worklenz-backend/database/sql/5_database_user.sql (new file, 31 lines)
@@ -0,0 +1,31 @@
REVOKE CREATE ON SCHEMA public FROM PUBLIC;
CREATE ROLE worklenz_client;

GRANT CONNECT ON DATABASE worklenz_db TO worklenz_client;
GRANT INSERT, SELECT, UPDATE, DELETE ON ALL TABLES IN SCHEMA public TO worklenz_client;

GRANT USAGE, SELECT ON ALL SEQUENCES IN SCHEMA public TO worklenz_client;

REVOKE ALL PRIVILEGES ON task_priorities FROM worklenz_client;
GRANT SELECT ON task_priorities TO worklenz_client;

REVOKE ALL PRIVILEGES ON project_access_levels FROM worklenz_client;
GRANT SELECT ON project_access_levels TO worklenz_client;

REVOKE ALL PRIVILEGES ON timezones FROM worklenz_client;
GRANT SELECT ON timezones TO worklenz_client;

REVOKE ALL PRIVILEGES ON worklenz_alerts FROM worklenz_client;
GRANT SELECT ON worklenz_alerts TO worklenz_client;

REVOKE ALL PRIVILEGES ON sys_task_status_categories FROM worklenz_client;
GRANT SELECT ON sys_task_status_categories TO worklenz_client;

REVOKE ALL PRIVILEGES ON sys_project_statuses FROM worklenz_client;
GRANT SELECT ON sys_project_statuses TO worklenz_client;

REVOKE ALL PRIVILEGES ON sys_project_healths FROM worklenz_client;
GRANT SELECT ON sys_project_healths TO worklenz_client;

CREATE USER worklenz_backend WITH PASSWORD 'n?&bb24=aWmnw+G@';
GRANT worklenz_client TO worklenz_backend;
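-- Illustration only: the lockdown can be verified by assuming the role; reads on the
-- read-only lookup tables still work while writes are rejected:
SET ROLE worklenz_client;
SELECT COUNT(*) FROM timezones;                 -- allowed
INSERT INTO timezones (name) VALUES ('UTC+X');  -- ERROR: permission denied for table timezones
RESET ROLE;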
worklenz-backend/database/sql/indexes.sql (new file, 134 lines)
@@ -0,0 +1,134 @@
-- Indexes
CREATE UNIQUE INDEX IF NOT EXISTS permissions_name_uindex
    ON permissions (name);

CREATE UNIQUE INDEX IF NOT EXISTS bounced_emails_email_uindex
    ON bounced_emails (email);

CREATE INDEX IF NOT EXISTS clients_id_team_id_index
    ON clients (id, team_id);

CREATE UNIQUE INDEX IF NOT EXISTS clients_name_team_id_uindex
    ON clients (name, team_id);

CREATE UNIQUE INDEX IF NOT EXISTS cpt_phases_name_project_uindex
    ON cpt_phases (name, template_id);

CREATE UNIQUE INDEX IF NOT EXISTS cpt_task_phase_cpt_task_phase_uindex
    ON cpt_task_phases (task_id, phase_id);

CREATE UNIQUE INDEX IF NOT EXISTS cpt_task_phase_task_id_uindex
    ON cpt_task_phases (task_id);

CREATE UNIQUE INDEX IF NOT EXISTS cpt_task_statuses_template_id_name_uindex
    ON cpt_task_statuses (template_id, name);

CREATE UNIQUE INDEX IF NOT EXISTS custom_project_templates_name_team_id_uindex
    ON custom_project_templates (name, team_id);

-- Create index on expire field
CREATE INDEX IF NOT EXISTS idx_pg_sessions_expire
    ON pg_sessions (expire);

CREATE UNIQUE INDEX IF NOT EXISTS job_titles_name_team_id_uindex
    ON job_titles (name, team_id);

CREATE INDEX IF NOT EXISTS job_titles_team_id_index
    ON job_titles (team_id);

CREATE UNIQUE INDEX IF NOT EXISTS licensing_admin_users_name_uindex
    ON licensing_admin_users (name);

CREATE UNIQUE INDEX IF NOT EXISTS licensing_admin_users_phone_no_uindex
    ON licensing_admin_users (phone_no);

CREATE UNIQUE INDEX IF NOT EXISTS licensing_admin_users_username_uindex
    ON licensing_admin_users (username);

CREATE UNIQUE INDEX IF NOT EXISTS licensing_coupon_codes_coupon_code_uindex
    ON licensing_coupon_codes (coupon_code);

CREATE INDEX IF NOT EXISTS licensing_coupon_codes_redeemed_by_index
    ON licensing_coupon_codes (redeemed_by);

CREATE UNIQUE INDEX IF NOT EXISTS licensing_pricing_plans_uindex
    ON licensing_pricing_plans (id);

CREATE UNIQUE INDEX IF NOT EXISTS licensing_user_plans_uindex
    ON licensing_user_subscriptions (id);

CREATE INDEX IF NOT EXISTS licensing_user_subscriptions_user_id_index
    ON licensing_user_subscriptions (user_id);

CREATE INDEX IF NOT EXISTS notification_settings_team_user_id_index
    ON notification_settings (team_id, user_id);

CREATE UNIQUE INDEX IF NOT EXISTS personal_todo_list_index_uindex
    ON personal_todo_list (user_id, index);

CREATE UNIQUE INDEX IF NOT EXISTS project_access_levels_key_uindex
    ON project_access_levels (key);

CREATE UNIQUE INDEX IF NOT EXISTS project_access_levels_name_uindex
    ON project_access_levels (name);

CREATE UNIQUE INDEX IF NOT EXISTS project_categories_name_team_id_uindex
    ON project_categories (name, team_id);

CREATE INDEX IF NOT EXISTS project_comments_project_id_index
    ON project_comments (project_id);

CREATE UNIQUE INDEX IF NOT EXISTS project_folders_team_id_key_uindex
    ON project_folders (team_id, key);

CREATE UNIQUE INDEX IF NOT EXISTS project_folders_team_id_name_uindex
    ON project_folders (team_id, name);

CREATE INDEX IF NOT EXISTS project_members_project_id_index
    ON project_members (project_id);

CREATE INDEX IF NOT EXISTS project_members_project_id_member_id_index
    ON project_members (project_id, team_member_id);

CREATE INDEX IF NOT EXISTS project_members_team_member_id_index
    ON project_members (team_member_id);

CREATE UNIQUE INDEX IF NOT EXISTS project_members_team_member_project_uindex
    ON project_members (team_member_id, project_id);

CREATE UNIQUE INDEX IF NOT EXISTS project_phases_name_project_uindex
    ON project_phases (name, project_id);

CREATE UNIQUE INDEX IF NOT EXISTS project_subscribers_user_task_team_member_uindex
    ON project_subscribers (user_id, project_id, team_member_id);

CREATE INDEX IF NOT EXISTS project_task_list_cols_index
    ON project_task_list_cols (project_id, index);

CREATE UNIQUE INDEX IF NOT EXISTS project_task_list_cols_key_project_uindex
    ON project_task_list_cols (key, project_id);

CREATE INDEX IF NOT EXISTS projects_folder_id_index
    ON projects (folder_id);

CREATE INDEX IF NOT EXISTS projects_id_team_id_index
    ON projects (id, team_id);

CREATE UNIQUE INDEX IF NOT EXISTS projects_key_team_id_uindex
    ON projects (key, team_id);

CREATE INDEX IF NOT EXISTS projects_name_index
    ON projects (name);

CREATE UNIQUE INDEX IF NOT EXISTS projects_name_team_id_uindex
    ON projects (name, team_id);

CREATE INDEX IF NOT EXISTS projects_team_id_folder_id_index
    ON projects (team_id, folder_id);

CREATE INDEX IF NOT EXISTS projects_team_id_index
    ON projects (team_id);

CREATE INDEX IF NOT EXISTS projects_team_id_name_index
    ON projects (team_id, name);
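-- Illustration only: with projects_key_team_id_uindex in place, lookups by (key, team_id)
-- can be answered from the index (the exact plan depends on table size and statistics;
-- the UUID is a placeholder):
EXPLAIN SELECT id
FROM projects
WHERE key = 'BT'
  AND team_id = '00000000-0000-0000-0000-000000000000'::uuid;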
worklenz-backend/database/sql/text_length_checks.sql (new file, 47 lines)
@@ -0,0 +1,47 @@
ALTER TABLE teams
    ADD CONSTRAINT teams_name_check CHECK (CHAR_LENGTH(name) <= 55);

ALTER TABLE clients
    ADD CONSTRAINT clients_name_check CHECK (CHAR_LENGTH(name) <= 60);

ALTER TABLE job_titles
    ADD CONSTRAINT job_titles_name_check CHECK (CHAR_LENGTH(name) <= 55);

ALTER TABLE users
    ADD CONSTRAINT users_name_check CHECK (CHAR_LENGTH(name) <= 55);

ALTER TABLE users
    ADD CONSTRAINT users_email_check CHECK (CHAR_LENGTH(email) <= 255);

ALTER TABLE projects
    ADD CONSTRAINT projects_name_check CHECK (CHAR_LENGTH(name) <= 100);

ALTER TABLE projects
    ADD CONSTRAINT projects_notes_check CHECK (CHAR_LENGTH(notes) <= 500);

ALTER TABLE task_statuses
    ADD CONSTRAINT task_statuses_name_check CHECK (CHAR_LENGTH(name) <= 50);

ALTER TABLE tasks
    ADD CONSTRAINT tasks_name_check CHECK (CHAR_LENGTH(name) <= 500);

ALTER TABLE tasks
    ADD CONSTRAINT tasks_description_check CHECK (CHAR_LENGTH(description) <= 500000);

ALTER TABLE team_labels
    ADD CONSTRAINT team_labels_name_check CHECK (CHAR_LENGTH(name) <= 40);

ALTER TABLE personal_todo_list
    ADD CONSTRAINT personal_todo_list_name_check CHECK (CHAR_LENGTH(name) <= 100);

ALTER TABLE personal_todo_list
    ADD CONSTRAINT personal_todo_list_description_check CHECK (CHAR_LENGTH(description) <= 200);

ALTER TABLE task_work_log
    ADD CONSTRAINT task_work_log_description_check CHECK (CHAR_LENGTH(description) <= 500);

ALTER TABLE task_comment_contents
    ADD CONSTRAINT task_comment_contents_name_check CHECK (CHAR_LENGTH(text_content) <= 500);

ALTER TABLE task_attachments
    ADD CONSTRAINT task_attachments_name_check CHECK (CHAR_LENGTH(name) <= 110);
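-- Illustration only: each check predicate can be evaluated standalone; a 56-character
-- team name fails teams_name_check, so the corresponding insert would be rejected:
SELECT CHAR_LENGTH(REPEAT('x', 56)) <= 55 AS passes_teams_name_check;
-- => false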
worklenz-backend/database/sql/truncate.sql (new file, 75 lines)
@@ -0,0 +1,75 @@
TRUNCATE TABLE public.pg_sessions RESTART IDENTITY CASCADE;

TRUNCATE TABLE public.email_invitations RESTART IDENTITY CASCADE;

TRUNCATE TABLE public.task_labels RESTART IDENTITY CASCADE;

TRUNCATE TABLE public.team_labels RESTART IDENTITY CASCADE;

TRUNCATE TABLE public.tasks_assignees RESTART IDENTITY CASCADE;

TRUNCATE TABLE public.project_members RESTART IDENTITY CASCADE;

TRUNCATE TABLE public.project_access_levels RESTART IDENTITY CASCADE;

TRUNCATE TABLE public.role_permissions RESTART IDENTITY CASCADE;

TRUNCATE TABLE public.permissions RESTART IDENTITY CASCADE;

TRUNCATE TABLE public.project_logs RESTART IDENTITY CASCADE;

TRUNCATE TABLE public.personal_todo_list RESTART IDENTITY CASCADE;

TRUNCATE TABLE public.user_notifications RESTART IDENTITY CASCADE;

TRUNCATE TABLE public.task_work_log RESTART IDENTITY CASCADE;

TRUNCATE TABLE public.task_comment_contents RESTART IDENTITY CASCADE;

TRUNCATE TABLE public.task_comments RESTART IDENTITY CASCADE;

TRUNCATE TABLE public.team_members RESTART IDENTITY CASCADE;

TRUNCATE TABLE public.job_titles RESTART IDENTITY CASCADE;

TRUNCATE TABLE public.roles RESTART IDENTITY CASCADE;

TRUNCATE TABLE public.task_attachments RESTART IDENTITY CASCADE;

TRUNCATE TABLE public.worklenz_alerts RESTART IDENTITY CASCADE;

TRUNCATE TABLE public.favorite_projects RESTART IDENTITY CASCADE;

TRUNCATE TABLE public.archived_projects RESTART IDENTITY CASCADE;

TRUNCATE TABLE public.shared_projects RESTART IDENTITY CASCADE;

TRUNCATE TABLE public.task_templates_tasks RESTART IDENTITY CASCADE;

TRUNCATE TABLE public.task_templates RESTART IDENTITY CASCADE;

TRUNCATE TABLE public.notification_settings RESTART IDENTITY CASCADE;

TRUNCATE TABLE public.task_updates RESTART IDENTITY CASCADE;

TRUNCATE TABLE public.task_timers RESTART IDENTITY CASCADE;

TRUNCATE TABLE public.tasks RESTART IDENTITY CASCADE;

TRUNCATE TABLE public.task_priorities RESTART IDENTITY CASCADE;

TRUNCATE TABLE public.task_statuses RESTART IDENTITY CASCADE;

TRUNCATE TABLE public.sys_task_status_categories RESTART IDENTITY CASCADE;

TRUNCATE TABLE public.project_task_list_cols RESTART IDENTITY CASCADE;

TRUNCATE TABLE public.projects RESTART IDENTITY CASCADE;

TRUNCATE TABLE public.clients RESTART IDENTITY CASCADE;

TRUNCATE TABLE public.teams, public.users RESTART IDENTITY CASCADE;

TRUNCATE TABLE public.timezones RESTART IDENTITY CASCADE;

TRUNCATE TABLE public.sys_project_statuses RESTART IDENTITY CASCADE;
worklenz-backend/database/worklenz_db_revision_1.svg (new file, 265 KiB; diff suppressed: too large)
worklenz-backend/database/worklenz_db_revision_2.svg (new file, 737 KiB; diff suppressed: too large)
worklenz-backend/package-lock.json (generated, 1140 lines; diff suppressed: too large)
@@ -15,6 +15,7 @@
    "tcs": "grunt build:tsc",
    "build": "grunt build",
    "watch": "grunt watch",
    "dev": "grunt dev",
    "es": "esbuild `find src -type f -name '*.ts'` --platform=node --minify=true --watch=true --target=esnext --format=cjs --tsconfig=tsconfig.prod.json --outdir=dist",
    "copy": "grunt copy",
    "sonar": "sonar-scanner -Dproject.settings=sonar-project-dev.properties",
@@ -32,15 +33,18 @@
    "@aws-sdk/client-ses": "^3.378.0",
    "@aws-sdk/s3-request-presigner": "^3.378.0",
    "@aws-sdk/util-format-url": "^3.357.0",
    "@azure/storage-blob": "^12.27.0",
    "axios": "^1.6.0",
    "bcrypt": "^5.1.0",
    "bluebird": "^3.7.2",
    "chartjs-to-image": "^1.2.1",
    "compression": "^1.7.4",
    "connect-flash": "^0.1.1",
    "connect-pg-simple": "^7.0.0",
    "cookie-parser": "~1.4.4",
    "cors": "^2.8.5",
    "cron": "^2.4.0",
    "crypto-js": "^4.1.1",
    "csurf": "^1.11.0",
    "debug": "^4.3.4",
    "dotenv": "^16.3.1",
@@ -60,13 +64,13 @@
    "moment-timezone": "^0.5.43",
    "morgan": "^1.10.0",
    "nanoid": "^3.3.6",
-   "passport": "^0.5.3",
+   "passport": "^0.7.0",
    "passport-google-oauth2": "^0.2.0",
    "passport-google-oauth20": "^2.0.0",
    "passport-local": "^1.0.0",
    "path": "^0.12.7",
-   "pg": "^8.11.1",
-   "pg-native": "^3.0.1",
+   "pg": "^8.14.1",
+   "pg-native": "^3.3.0",
    "pug": "^3.0.2",
    "redis": "^4.6.7",
    "sanitize-html": "^2.11.0",
@@ -87,13 +91,12 @@
    "@types/connect-flash": "^0.0.37",
    "@types/cookie-parser": "^1.4.3",
    "@types/cron": "^2.0.1",
    "@types/crypto-js": "^4.2.2",
    "@types/csurf": "^1.11.2",
    "@types/express": "^4.17.17",
    "@types/express-brute": "^1.0.2",
    "@types/express-brute-redis": "^0.0.4",
    "@types/express-rate-limit": "^6.0.0",
    "@types/express-session": "^1.17.7",
    "@types/express-validator": "^3.0.0",
    "@types/fs-extra": "^9.0.13",
    "@types/hpp": "^0.2.2",
    "@types/http-errors": "^1.8.2",
@@ -103,15 +106,13 @@
    "@types/mime-types": "^2.1.1",
    "@types/morgan": "^1.9.4",
    "@types/node": "^18.17.1",
-   "@types/passport": "^1.0.12",
-   "@types/passport-google-oauth20": "^2.0.11",
-   "@types/passport-local": "^1.0.35",
-   "@types/pg": "^8.10.2",
+   "@types/passport": "^1.0.17",
+   "@types/passport-google-oauth20": "^2.0.16",
+   "@types/passport-local": "^1.0.38",
+   "@types/pg": "^8.11.11",
    "@types/pug": "^2.0.6",
    "@types/redis": "^4.0.11",
    "@types/sanitize-html": "^2.9.0",
    "@types/sharp": "^0.31.1",
    "@types/socket.io": "^3.0.2",
    "@types/swagger-jsdoc": "^6.0.1",
    "@types/toobusy-js": "^0.5.2",
    "@types/uglify-js": "^3.17.1",
@@ -126,7 +127,6 @@
    "eslint-plugin-security": "^1.7.1",
    "fs-extra": "^10.1.0",
    "grunt": "^1.6.1",
    "grunt-cli": "^1.4.3",
    "grunt-contrib-clean": "^2.0.1",
    "grunt-contrib-compress": "^2.0.0",
    "grunt-contrib-copy": "^1.0.0",
@@ -134,6 +134,7 @@
    "grunt-contrib-watch": "^1.1.0",
    "grunt-shell": "^4.0.0",
    "grunt-sync": "^0.8.2",
    "highcharts": "^11.1.0",
    "jest": "^28.1.3",
    "jest-sonar-reporter": "^2.0.0",
    "ncp": "^2.0.0",
@@ -1 +1 @@
-901
+954
@@ -1,5 +1,5 @@
|
||||
import createError from "http-errors";
|
||||
import express, {NextFunction, Request, Response} from "express";
|
||||
import express, { NextFunction, Request, Response } from "express";
|
||||
import path from "path";
|
||||
import cookieParser from "cookie-parser";
|
||||
import logger from "morgan";
|
||||
@@ -9,101 +9,181 @@ import passport from "passport";
|
||||
import csurf from "csurf";
|
||||
import rateLimit from "express-rate-limit";
|
||||
import cors from "cors";
|
||||
import uglify from "uglify-js";
|
||||
import flash from "connect-flash";
|
||||
import hpp from "hpp";
|
||||
|
||||
import passportConfig from "./passport";
|
||||
import indexRouter from "./routes/index";
|
||||
import apiRouter from "./routes/apis";
|
||||
import authRouter from "./routes/auth";
|
||||
import emailTemplatesRouter from "./routes/email-templates";
|
||||
|
||||
import public_router from "./routes/public";
|
||||
import {isInternalServer, isProduction} from "./shared/utils";
|
||||
import { isInternalServer, isProduction } from "./shared/utils";
|
||||
import sessionMiddleware from "./middlewares/session-middleware";
|
||||
import {send_to_slack} from "./shared/slack";
|
||||
import {CSP_POLICIES} from "./shared/csp";
|
||||
import safeControllerFunction from "./shared/safe-controller-function";
|
||||
import AwsSesController from "./controllers/aws-ses-controller";
|
||||
import { CSP_POLICIES } from "./shared/csp";
|
||||
|
||||
const app = express();
|
||||
|
||||
app.use(compression());
|
||||
app.use(helmet({crossOriginResourcePolicy: false, crossOriginEmbedderPolicy: false}));
|
||||
// Trust first proxy if behind reverse proxy
|
||||
app.set("trust proxy", 1);
|
||||
|
||||
// Basic middleware setup
|
||||
app.use(compression());
|
||||
app.use(logger("dev"));
|
||||
app.use(express.json({ limit: "50mb" }));
|
||||
app.use(express.urlencoded({ extended: false, limit: "50mb" }));
|
||||
app.use(cookieParser(process.env.COOKIE_SECRET));
|
||||
app.use(hpp());
|
||||
|
||||
// Helmet security headers
|
||||
app.use(helmet({
|
||||
crossOriginEmbedderPolicy: false,
|
||||
crossOriginResourcePolicy: false,
|
||||
}));
|
||||
|
||||
// Custom security headers
|
||||
app.use((_req: Request, res: Response, next: NextFunction) => {
|
||||
res.setHeader("X-XSS-Protection", "1; mode=block");
|
||||
res.removeHeader("server");
|
||||
res.setHeader("Content-Security-Policy", CSP_POLICIES);
|
||||
next();
|
||||
});
|
||||
|
||||
// CORS configuration
|
||||
const allowedOrigins = [
|
||||
isProduction()
|
||||
? [
|
||||
`http://localhost:5000`,
|
||||
`http://127.0.0.1:5000`,
|
||||
process.env.SERVER_CORS || "", // Add hostname from env
|
||||
process.env.FRONTEND_URL || "" // Support FRONTEND_URL as well
|
||||
].filter(Boolean) // Remove empty strings
|
||||
: [
|
||||
"http://localhost:3000",
|
||||
"http://localhost:5173",
|
||||
"http://127.0.0.1:5173",
|
||||
"http://127.0.0.1:3000",
|
||||
"http://127.0.0.1:5000",
|
||||
`http://localhost:5000`,
|
||||
process.env.SERVER_CORS || "", // Add hostname from env
|
||||
process.env.FRONTEND_URL || "" // Support FRONTEND_URL as well
|
||||
].filter(Boolean) // Remove empty strings
|
||||
].flat();
|
||||
|
||||
app.use(cors({
|
||||
origin: (origin, callback) => {
|
||||
if (!isProduction() || !origin || allowedOrigins.includes(origin)) {
|
||||
callback(null, true);
|
||||
} else {
|
||||
console.log("Blocked origin:", origin, process.env.NODE_ENV);
|
||||
callback(new Error("Not allowed by CORS"));
|
||||
}
|
||||
},
|
||||
credentials: true,
|
||||
methods: ["GET", "POST", "PUT", "DELETE", "OPTIONS", "PATCH"],
|
||||
allowedHeaders: [
|
||||
"Origin",
|
||||
"X-Requested-With",
|
||||
"Content-Type",
|
||||
"Accept",
|
||||
"Authorization",
|
||||
"X-CSRF-Token"
|
||||
],
|
||||
exposedHeaders: ["Set-Cookie", "X-CSRF-Token"]
|
||||
}));
|
||||
|
||||
// Handle preflight requests
|
||||
app.options("*", cors());
|
||||
|
||||
// Session setup - must be before passport and CSRF
|
||||
app.use(sessionMiddleware);
|
||||
|
||||
// Passport initialization
|
||||
passportConfig(passport);
|
||||
app.use(passport.initialize());
|
||||
app.use(passport.session());
|
||||
|
||||
// Flash messages
|
||||
app.use(flash());
|
||||
|
||||
// Auth check middleware
|
||||
function isLoggedIn(req: Request, _res: Response, next: NextFunction) {
|
||||
return req.user ? next() : next(createError(401));
|
||||
}
|
||||
|
||||
passportConfig(passport);
|
||||
|
||||
// eslint-disable-next-line @typescript-eslint/no-var-requires
|
||||
require("pug").filters = {
|
||||
/**
|
||||
* ```pug
|
||||
* script
|
||||
* :minify_js
|
||||
* // JavaScript Syntax
|
||||
* ```
|
||||
* @param {String} text
|
||||
* @param {Object} options
|
||||
*/
|
||||
minify_js(text: string) {
|
||||
if (!text) return;
|
||||
// return text;
|
||||
return uglify.minify({"script.js": text}).code;
|
||||
}
|
||||
};
|
||||
|
||||
// view engine setup
|
||||
app.set("views", path.join(__dirname, "views"));
|
||||
app.set("view engine", "pug");
|
||||
|
||||
app.use(logger("dev"));
|
||||
app.use(express.json({limit: "50mb"}));
|
||||
app.use(express.urlencoded({extended: false, limit: "50mb"}));
|
||||
// Prevent HTTP Parameter Pollution
|
||||
app.use(hpp());
|
||||
app.use(cookieParser(process.env.COOKIE_SECRET));
|
||||
|
||||
app.use(cors({
|
||||
origin: [`https://${process.env.HOSTNAME}`],
|
||||
methods: "GET,PUT,POST,DELETE",
|
||||
preflightContinue: false,
|
||||
credentials: true
|
||||
}));
|
||||
|
||||
app.post("/-/csp", (req: express.Request, res: express.Response) => {
|
||||
send_to_slack({
|
||||
type: "⚠️ CSP Report",
|
||||
body: req.body
|
||||
});
|
||||
res.sendStatus(200);
|
||||
// CSRF configuration
|
||||
const csrfProtection = csurf({
|
||||
cookie: {
|
||||
key: "XSRF-TOKEN",
|
||||
path: "/",
|
||||
httpOnly: false,
|
||||
secure: isProduction(), // Only secure in production
|
||||
sameSite: isProduction() ? "none" : "lax", // Different settings for dev vs prod
|
||||
domain: isProduction() ? ".worklenz.com" : undefined // Only set domain in production
|
||||
},
|
||||
ignoreMethods: ["HEAD", "OPTIONS"]
|
||||
});

// Apply CSRF selectively (exclude webhooks and public routes)
app.use((req, res, next) => {
  if (
    req.path.startsWith("/webhook/") ||
    req.path.startsWith("/secure/") ||
    req.path.startsWith("/api/") ||
    req.path.startsWith("/public/")
  ) {
    next();
  } else {
    csrfProtection(req, res, next);
  }
});

// Set CSRF token cookie
app.use((req: Request, res: Response, next: NextFunction) => {
  if (req.csrfToken) {
    const token = req.csrfToken();
    res.cookie("XSRF-TOKEN", token, {
      httpOnly: false,
      secure: isProduction(),
      sameSite: isProduction() ? "none" : "lax",
      domain: isProduction() ? ".worklenz.com" : undefined,
      path: "/"
    });
  }
  next();
});

// CSRF token refresh endpoint
app.get("/csrf-token", (req: Request, res: Response) => {
  if (req.csrfToken) {
    const token = req.csrfToken();
    res.cookie("XSRF-TOKEN", token, {
      httpOnly: false,
      secure: isProduction(),
      sameSite: isProduction() ? "none" : "lax",
      domain: isProduction() ? ".worklenz.com" : undefined,
      path: "/"
    });
    res.status(200).json({ done: true, message: "CSRF token refreshed" });
  } else {
    res.status(500).json({ done: false, message: "Failed to generate CSRF token" });
  }
});
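
Since the XSRF-TOKEN cookie is deliberately readable (httpOnly: false), a browser client is expected to echo it back on mutating requests. A minimal sketch of that client contract, assuming the conventional X-XSRF-TOKEN request header (the header name is not shown in this diff, so treat it as an assumption):

// Hypothetical client-side helper; the X-XSRF-TOKEN header name is an assumption.
async function postWithCsrf(url: string, payload: unknown): Promise<Response> {
  // Read the token the server dropped in the readable XSRF-TOKEN cookie.
  const token = document.cookie
    .split("; ")
    .find((c) => c.startsWith("XSRF-TOKEN="))
    ?.split("=")[1];

  return fetch(url, {
    method: "POST",
    credentials: "include", // send the session and CSRF cookies along
    headers: {
      "Content-Type": "application/json",
      "X-XSRF-TOKEN": token ?? "" // echo the cookie value back as a header
    },
    body: JSON.stringify(payload)
  });
}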

// Webhook endpoints (no CSRF required)
app.post("/webhook/emails/bounce", safeControllerFunction(AwsSesController.handleBounceResponse));
app.post("/webhook/emails/complaints", safeControllerFunction(AwsSesController.handleComplaintResponse));
app.post("/webhook/emails/reply", safeControllerFunction(AwsSesController.handleReplies));

// Content Security Policy header
app.use((req: Request, res: Response, next: NextFunction) => {
  res.setHeader("Content-Security-Policy", CSP_POLICIES);
  next();
});

// Static file serving
if (isProduction()) {
  app.use(express.static(path.join(__dirname, "build"), {
    maxAge: "1y",
    etag: false,
  }));

  // Handle compressed files
  app.get("*.js", (req, res, next) => {
    if (req.header("Accept-Encoding")?.includes("br")) {
      req.url = `${req.url}.br`;
@@ -116,61 +196,62 @@ if (isProduction()) {
    }
    next();
  });
} else {
  app.use(express.static(path.join(__dirname, "public")));
}

app.use(express.static(path.join(__dirname, "public")));
app.set("trust proxy", 1);
app.use(sessionMiddleware);

app.use(passport.initialize());
app.use(passport.session());

// API rate limiting
const apiLimiter = rateLimit({
  windowMs: 15 * 60 * 1000, // 15 minutes
  max: 1500, // Limit each IP to 1500 requests per `window` (here, per 15 minutes)
  standardHeaders: false, // Do not send rate limit info in the `RateLimit-*` headers
  legacyHeaders: false, // Disable the `X-RateLimit-*` headers
});

// Prefix JSON responses to guard against JSON hijacking
app.use((req, res, next) => {
  const {send} = res;
  res.send = function (obj) {
    if (req.headers.accept?.includes("application/json"))
      return send.call(this, `)]}',\n${JSON.stringify(obj)}`);
    return send.call(this, obj);
  };
  next();
});
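
The `)]}',\n` prefix makes the body invalid JSON so it cannot be exfiltrated via a cross-site `<script>` include; a consuming client has to strip it before parsing. A minimal sketch of that parsing step (the helper name is illustrative, not from this codebase):

// Hypothetical response parser; assumes the server applied the `)]}',\n` prefix above.
const XSSI_PREFIX = ")]}',\n";

async function parseGuardedJson<T>(res: Response): Promise<T> {
  const raw = await res.text();
  // Strip the anti-JSON-hijacking prefix if present, then parse normally.
  const body = raw.startsWith(XSSI_PREFIX) ? raw.slice(XSSI_PREFIX.length) : raw;
  return JSON.parse(body) as T;
}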

// Routes
app.use("/api/v1", apiLimiter, isLoggedIn, apiRouter);
app.use("/secure", authRouter);
app.use("/public", public_router);
app.use("/", indexRouter);

if (isInternalServer()) {
  app.use("/email-templates", emailTemplatesRouter);
}
// catch 404 and forward to error handler
app.use((req: Request, res: Response) => {
  res.locals.error_title = "404 Not Found.";
  res.locals.error_message = `The requested URL ${req.url} was not found on this server.`;
  res.locals.error_image = "/assets/images/404.webp";
  res.status(404);
  res.render("error");
});

// CSRF error handler
app.use((err: any, req: Request, res: Response, next: NextFunction) => {
  if (err.code === "EBADCSRFTOKEN") {
    return res.status(403).json({
      done: false,
      message: "Invalid CSRF token",
      body: null
    });
  }
  next(err);
});

// Error handler for rendered pages
app.use((err: { message: string; status: number; }, _req: Request, res: Response) => {
  // set locals, only providing error in development
  res.locals.error_title = "500 Internal Server Error.";
  res.locals.error_message = "Oops, something went wrong.";
  res.locals.error_message2 = "Try to refresh this page or feel free to contact us if the problem persists.";
  res.locals.error_image = "/assets/images/500.png";

  // render the error page
  res.status(err.status || 500);
  res.render("error");
});

// React app handling - serve index.html for all non-API routes
app.get("*", (req: Request, res: Response, next: NextFunction) => {
  if (req.path.startsWith("/api/")) return next();
  res.sendFile(path.join(__dirname, isProduction() ? "build" : "public", "index.html"));
});

// Global error handler
app.use((err: any, _req: Request, res: Response, _next: NextFunction) => {
  const status = err.status || 500;

  if (res.headersSent) {
    return;
  }

  res.status(status);

  // Send structured error response
  res.json({
    done: false,
    message: isProduction() ? "Internal Server Error" : err.message,
    body: null,
    ...(process.env.NODE_ENV === "development" ? { stack: err.stack } : {})
  });
});

export default app;

@@ -95,8 +95,7 @@ function onListening() {
    ? `pipe ${addr}`
    : `port ${addr.port}`;

  process.env.ENABLE_EMAIL_CRONJOBS === "true" && startCronJobs();
  // void initRedis();
  FileConstants.init();
  void DbTaskStatusChangeListener.connect();

@@ -5,8 +5,19 @@ import db from "../config/db";
import {ServerResponse} from "../models/server-response";
import WorklenzControllerBase from "./worklenz-controller-base";
import HandleExceptions from "../decorators/handle-exceptions";
import {calculateMonthDays, getColor, log_error, megabytesToBytes} from "../shared/utils";
import moment from "moment";
import {calculateStorage} from "../shared/s3";
import {checkTeamSubscriptionStatus, getActiveTeamMemberCount, getCurrentProjectsCount, getFreePlanSettings, getOwnerIdByTeam, getTeamMemberCount, getUsedStorage} from "../shared/paddle-utils";
import {
  addModifier,
  cancelSubscription,
  changePlan,
  generatePayLinkRequest,
  pauseOrResumeSubscription,
  updateUsers
} from "../shared/paddle-requests";
import {statusExclude} from "../shared/constants";
import {NotificationsService} from "../services/notifications/notifications.service";
import {SocketEvents} from "../socket.io/events";
import {IO} from "../shared/io";
@@ -221,7 +232,11 @@ export default class AdminCenterController extends WorklenzControllerBase {
                      FROM team_member_info_view
                      WHERE team_member_info_view.team_member_id = tm.id),
                    role_id,
                    r.name AS role_name,
                    EXISTS(SELECT email
                           FROM email_invitations
                           WHERE team_member_id = tm.id
                             AND email_invitations.team_id = tm.team_id) AS pending_invitation
             FROM team_members tm
                    LEFT JOIN users u on tm.user_id = u.id
                    LEFT JOIN roles r on tm.role_id = r.id
@@ -244,22 +259,411 @@ export default class AdminCenterController extends WorklenzControllerBase {
    const {id} = req.params;
    const {name, teamMembers} = req.body;

    try {
      // Update team name
      const updateNameQuery = `UPDATE teams SET name = $1 WHERE id = $2 RETURNING id;`;
      const nameResult = await db.query(updateNameQuery, [name, id]);

      if (!nameResult.rows.length) {
        return res.status(404).send(new ServerResponse(false, null, "Team not found"));
      }

      // Update team member roles if provided
      if (teamMembers?.length) {
        // Use Promise.all to handle all role updates concurrently
        await Promise.all(teamMembers.map(async (member: { role_name: string; user_id: string; }) => {
          const roleQuery = `
            UPDATE team_members
            SET role_id = (SELECT id FROM roles WHERE roles.team_id = $1 AND name = $2)
            WHERE user_id = $3 AND team_id = $1
            RETURNING id;`;
          await db.query(roleQuery, [id, member.role_name, member.user_id]);
        }));
      }

      return res.status(200).send(new ServerResponse(true, null, "Team updated successfully"));
    } catch (error) {
      log_error("Error updating team:", error);
      return res.status(500).send(new ServerResponse(false, null, "Failed to update team"));
    }
  }
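
The earlier version of this method looped with `teamMembers.forEach(async ...)`, which fires the queries and returns before any of them settle, so failures went unobserved and the success response could race the updates. A minimal sketch of the difference, with a hypothetical `updateRole` helper standing in for the role-update query:

// Hypothetical helper standing in for the role-update query.
declare function updateRole(userId: string): Promise<void>;

async function updateAllRoles(userIds: string[]): Promise<void> {
  // Anti-pattern: forEach discards the returned promises, so errors are
  // unhandled and the caller resumes before any update has finished.
  // userIds.forEach(async (id) => updateRole(id));

  // Correct: collect the promises and wait for all of them to settle.
  await Promise.all(userIds.map((id) => updateRole(id)));
}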

  @HandleExceptions()
  public static async getBillingInfo(req: IWorkLenzRequest, res: IWorkLenzResponse): Promise<IWorkLenzResponse> {
    const q = `SELECT get_billing_info($1) AS billing_info;`;
    const result = await db.query(q, [req.user?.owner_id]);
    const [data] = result.rows;

    const validTillDate = moment(data.billing_info.trial_expire_date);

    const daysDifference = validTillDate.diff(moment(), "days");
    const dateString = calculateMonthDays(moment().format("YYYY-MM-DD"), data.billing_info.trial_expire_date);

    data.billing_info.expire_date_string = dateString;

    if (daysDifference < 0) {
      data.billing_info.expire_date_string = `Your trial plan expired ${dateString} ago`;
    } else if (daysDifference === 0) {
      data.billing_info.expire_date_string = `Your trial plan expires today`;
    } else {
      data.billing_info.expire_date_string = `Your trial plan expires in ${dateString}.`;
    }

    if (data.billing_info.billing_type === "year") data.billing_info.unit_price_per_month = data.billing_info.unit_price / 12;

    const teamMemberData = await getTeamMemberCount(req.user?.owner_id ?? "");
    const subscriptionData = await checkTeamSubscriptionStatus(req.user?.team_id ?? "");

    data.billing_info.total_used = teamMemberData.user_count;
    data.billing_info.total_seats = subscriptionData.quantity;

    return res.status(200).send(new ServerResponse(true, data.billing_info));
  }

  @HandleExceptions()
  public static async getBillingTransactions(req: IWorkLenzRequest, res: IWorkLenzResponse): Promise<IWorkLenzResponse> {
    const q = `SELECT subscription_payment_id,
                      event_time::date,
                      (next_bill_date::DATE - INTERVAL '1 day')::DATE AS next_bill_date,
                      currency,
                      receipt_url,
                      payment_method,
                      status,
                      payment_status
               FROM licensing_payment_details
               WHERE user_id = $1
               ORDER BY created_at DESC;`;
    const result = await db.query(q, [req.user?.owner_id]);

    return res.status(200).send(new ServerResponse(true, result.rows));
  }

  @HandleExceptions()
  public static async getBillingCharges(req: IWorkLenzRequest, res: IWorkLenzResponse): Promise<IWorkLenzResponse> {
    const q = `SELECT (SELECT name FROM licensing_pricing_plans lpp WHERE id = lus.plan_id),
                      unit_price::numeric,
                      currency,
                      status,
                      quantity,
                      unit_price::numeric * quantity AS amount,
                      (SELECT event_time
                       FROM licensing_payment_details lpd
                       WHERE lpd.user_id = lus.user_id
                       ORDER BY created_at DESC
                       LIMIT 1)::DATE AS start_date,
                      (next_bill_date::DATE - INTERVAL '1 day')::DATE AS end_date
               FROM licensing_user_subscriptions lus
               WHERE user_id = $1;`;
    const result = await db.query(q, [req.user?.owner_id]);

    const countQ = `SELECT subscription_id
                    FROM licensing_user_subscription_modifiers
                    WHERE subscription_id = (SELECT subscription_id
                                             FROM licensing_user_subscriptions
                                             WHERE user_id = $1
                                               AND status != 'deleted'
                                             LIMIT 1)::INT;`;
    const countResult = await db.query(countQ, [req.user?.owner_id]);

    return res.status(200).send(new ServerResponse(true, {plan_charges: result.rows, modifiers: countResult.rows}));
  }

  @HandleExceptions()
  public static async getBillingModifiers(req: IWorkLenzRequest, res: IWorkLenzResponse): Promise<IWorkLenzResponse> {
    const q = `SELECT created_at
               FROM licensing_user_subscription_modifiers
               WHERE subscription_id = (SELECT subscription_id
                                        FROM licensing_user_subscriptions
                                        WHERE user_id = $1
                                          AND status != 'deleted'
                                        LIMIT 1)::INT;`;
    const result = await db.query(q, [req.user?.owner_id]);

    return res.status(200).send(new ServerResponse(true, result.rows));
  }

  @HandleExceptions()
  public static async getBillingConfiguration(req: IWorkLenzRequest, res: IWorkLenzResponse): Promise<IWorkLenzResponse> {
    const q = `SELECT name,
                      email,
                      organization_name AS company_name,
                      contact_number AS phone,
                      address_line_1,
                      address_line_2,
                      city,
                      state,
                      postal_code,
                      country
               FROM organizations
               LEFT JOIN users u ON organizations.user_id = u.id
               WHERE u.id = $1;`;
    const result = await db.query(q, [req.user?.owner_id]);
    const [data] = result.rows;

    return res.status(200).send(new ServerResponse(true, data));
  }

  @HandleExceptions()
  public static async updateBillingConfiguration(req: IWorkLenzRequest, res: IWorkLenzResponse): Promise<IWorkLenzResponse> {
    const {company_name, phone, address_line_1, address_line_2, city, state, postal_code, country} = req.body;
    const q = `UPDATE organizations
               SET organization_name = $1,
                   contact_number = $2,
                   address_line_1 = $3,
                   address_line_2 = $4,
                   city = $5,
                   state = $6,
                   postal_code = $7,
                   country = $8
               WHERE user_id = $9;`;
    const result = await db.query(q, [company_name, phone, address_line_1, address_line_2, city, state, postal_code, country, req.user?.owner_id]);
    const [data] = result.rows;

    return res.status(200).send(new ServerResponse(true, data, "Configuration Updated"));
  }

  @HandleExceptions()
  public static async upgradePlan(req: IWorkLenzRequest, res: IWorkLenzResponse): Promise<IWorkLenzResponse> {
    const {plan} = req.query;

    const obj = await getTeamMemberCount(req.user?.owner_id ?? "");
    const axiosResponse = await generatePayLinkRequest(obj, plan as string, req.user?.owner_id, req.user?.id);

    return res.status(200).send(new ServerResponse(true, axiosResponse.body));
  }

  @HandleExceptions()
  public static async getPlans(req: IWorkLenzRequest, res: IWorkLenzResponse): Promise<IWorkLenzResponse> {
    const q = `SELECT ls.default_monthly_plan  AS monthly_plan_id,
                      lp_monthly.name          AS monthly_plan_name,
                      ls.default_annual_plan   AS annual_plan_id,
                      lp_monthly.recurring_price AS monthly_price,
                      lp_annual.name           AS annual_plan_name,
                      lp_annual.recurring_price AS annual_price,
                      ls.team_member_limit,
                      ls.projects_limit,
                      ls.free_tier_storage
               FROM licensing_settings ls
                      JOIN licensing_pricing_plans lp_monthly ON ls.default_monthly_plan = lp_monthly.id
                      JOIN licensing_pricing_plans lp_annual ON ls.default_annual_plan = lp_annual.id;`;
    const result = await db.query(q, []);
    const [data] = result.rows;

    const obj = await getTeamMemberCount(req.user?.owner_id ?? "");

    data.team_member_limit = data.team_member_limit === 0 ? "Unlimited" : data.team_member_limit;
    data.projects_limit = data.projects_limit === 0 ? "Unlimited" : data.projects_limit;
    data.free_tier_storage = `${data.free_tier_storage}MB`;
    data.current_user_count = obj.user_count;
    data.annual_price = (data.annual_price / 12).toFixed(2);

    return res.status(200).send(new ServerResponse(true, data));
  }

  @HandleExceptions()
  public static async purchaseStorage(req: IWorkLenzRequest, res: IWorkLenzResponse): Promise<IWorkLenzResponse> {
    const q = `SELECT subscription_id
               FROM licensing_user_subscriptions lus
               WHERE user_id = $1;`;
    const result = await db.query(q, [req.user?.owner_id]);
    const [data] = result.rows;

    await addModifier(data.subscription_id);

    return res.status(200).send(new ServerResponse(true, data));
  }

  @HandleExceptions()
  public static async changePlan(req: IWorkLenzRequest, res: IWorkLenzResponse): Promise<IWorkLenzResponse> {
    const {plan} = req.query;

    const q = `SELECT subscription_id
               FROM licensing_user_subscriptions lus
               WHERE user_id = $1;`;
    const result = await db.query(q, [req.user?.owner_id]);
    const [data] = result.rows;

    const axiosResponse = await changePlan(plan as string, data.subscription_id);

    return res.status(200).send(new ServerResponse(true, axiosResponse.body));
  }

  @HandleExceptions()
  public static async cancelPlan(req: IWorkLenzRequest, res: IWorkLenzResponse): Promise<IWorkLenzResponse> {
    if (!req.user?.owner_id) return res.status(200).send(new ServerResponse(false, "Invalid Request."));

    const q = `SELECT subscription_id
               FROM licensing_user_subscriptions lus
               WHERE user_id = $1;`;
    const result = await db.query(q, [req.user?.owner_id]);
    const [data] = result.rows;

    const axiosResponse = await cancelSubscription(data.subscription_id, req.user?.owner_id);

    return res.status(200).send(new ServerResponse(true, axiosResponse.body));
  }

  @HandleExceptions()
  public static async pauseSubscription(req: IWorkLenzRequest, res: IWorkLenzResponse): Promise<IWorkLenzResponse> {
    if (!req.user?.owner_id) return res.status(200).send(new ServerResponse(false, "Invalid Request."));

    const q = `SELECT subscription_id
               FROM licensing_user_subscriptions lus
               WHERE user_id = $1;`;
    const result = await db.query(q, [req.user?.owner_id]);
    const [data] = result.rows;

    const axiosResponse = await pauseOrResumeSubscription(data.subscription_id, req.user?.owner_id, true);

    return res.status(200).send(new ServerResponse(true, axiosResponse.body));
  }

  @HandleExceptions()
  public static async resumeSubscription(req: IWorkLenzRequest, res: IWorkLenzResponse): Promise<IWorkLenzResponse> {
    if (!req.user?.owner_id) return res.status(200).send(new ServerResponse(false, "Invalid Request."));

    const q = `SELECT subscription_id
               FROM licensing_user_subscriptions lus
               WHERE user_id = $1;`;
    const result = await db.query(q, [req.user?.owner_id]);
    const [data] = result.rows;

    const axiosResponse = await pauseOrResumeSubscription(data.subscription_id, req.user?.owner_id, false);

    return res.status(200).send(new ServerResponse(true, axiosResponse.body));
  }

  @HandleExceptions()
  public static async getBillingStorageInfo(req: IWorkLenzRequest, res: IWorkLenzResponse): Promise<IWorkLenzResponse> {
    const q = `SELECT trial_in_progress,
                      trial_expire_date,
                      ud.storage,
                      (SELECT name AS plan_name FROM licensing_pricing_plans WHERE id = lus.plan_id),
                      (SELECT default_trial_storage FROM licensing_settings),
                      (SELECT storage_addon_size FROM licensing_settings),
                      (SELECT storage_addon_price FROM licensing_settings)
               FROM organizations ud
                      LEFT JOIN users u ON ud.user_id = u.id
                      LEFT JOIN licensing_user_subscriptions lus ON u.id = lus.user_id
               WHERE ud.user_id = $1;`;
    const result = await db.query(q, [req.user?.owner_id]);
    const [data] = result.rows;

    return res.status(200).send(new ServerResponse(true, data));
  }

  @HandleExceptions()
  public static async getAccountStorage(req: IWorkLenzRequest, res: IWorkLenzResponse): Promise<IWorkLenzResponse> {
    const teamsQ = `SELECT id
                    FROM teams
                    WHERE user_id = $1;`;
    const teamsResponse = await db.query(teamsQ, [req.user?.owner_id]);

    const storageQ = `SELECT storage
                      FROM organizations
                      WHERE user_id = $1;`;
    const result = await db.query(storageQ, [req.user?.owner_id]);
    const [data] = result.rows;

    const storage: any = {};
    storage.used = 0;
    storage.total = data.storage;

    for (const team of teamsResponse.rows) {
      storage.used += await calculateStorage(team.id);
    }

    storage.remaining = (storage.total * 1024 * 1024 * 1024) - storage.used;
    storage.used_percent = Math.ceil((storage.used / (storage.total * 1024 * 1024 * 1024)) * 10000) / 100;

    return res.status(200).send(new ServerResponse(true, storage));
  }
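
The `Math.ceil(x * 10000) / 100` step rounds the usage ratio up to two decimal places, so a team that has used anything at all never displays as 0.00%. A quick worked check with illustrative numbers:

// Worked example of the used_percent rounding (values are illustrative).
const totalBytes = 5 * 1024 * 1024 * 1024; // 5 GB quota
const usedBytes = 1_234_567;               // ~1.2 MB used

const ratio = usedBytes / totalBytes;               // ≈ 0.00023
const usedPercent = Math.ceil(ratio * 10000) / 100; // rounds 2.3 up to 3, then /100
console.log(usedPercent);                           // -> 0.03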

  @HandleExceptions()
  public static async getCountries(req: IWorkLenzRequest, res: IWorkLenzResponse): Promise<IWorkLenzResponse> {
    const q = `SELECT id, name, code
               FROM countries
               ORDER BY name;`;
    const result = await db.query(q, []);

    return res.status(200).send(new ServerResponse(true, result.rows || []));
  }

  @HandleExceptions()
  public static async switchToFreePlan(req: IWorkLenzRequest, res: IWorkLenzResponse): Promise<IWorkLenzResponse> {
    const { id: teamId } = req.params;

    const limits = await getFreePlanSettings();
    const ownerId = await getOwnerIdByTeam(teamId);

    if (limits && ownerId) {
      if (parseInt(limits.team_member_limit) !== 0) {
        const teamMemberCount = await getTeamMemberCount(ownerId);
        if (parseInt(teamMemberCount) > parseInt(limits.team_member_limit)) {
          return res.status(200).send(new ServerResponse(false, [], `Sorry, the free plan cannot have more than ${limits.team_member_limit} members.`));
        }
      }

      const projectsCount = await getCurrentProjectsCount(ownerId);
      if (parseInt(projectsCount) > parseInt(limits.projects_limit)) {
        return res.status(200).send(new ServerResponse(false, [], `Sorry, the free plan cannot have more than ${limits.projects_limit} projects.`));
      }

      const usedStorage = await getUsedStorage(ownerId);
      if (parseInt(usedStorage) > megabytesToBytes(parseInt(limits.free_tier_storage))) {
        return res.status(200).send(new ServerResponse(false, [], `Sorry, the free plan cannot exceed ${limits.free_tier_storage}MB of storage.`));
      }

      const update_q = `UPDATE organizations
                        SET license_type_id = (SELECT id FROM sys_license_types WHERE key = 'FREE'),
                            trial_in_progress = FALSE,
                            subscription_status = 'free',
                            storage = (SELECT free_tier_storage FROM licensing_settings)
                        WHERE user_id = $1;`;
      await db.query(update_q, [ownerId]);

      return res.status(200).send(new ServerResponse(true, [], "Your plan has been successfully switched to the Free Plan."));
    }
    return res.status(200).send(new ServerResponse(false, [], "Failed to switch to the Free Plan. Please try again later."));
  }

  @HandleExceptions()
  public static async redeem(req: IWorkLenzRequest, res: IWorkLenzResponse): Promise<IWorkLenzResponse> {
    const { code } = req.body;

    const q = `SELECT * FROM licensing_coupon_codes WHERE coupon_code = $1 AND is_redeemed IS FALSE AND is_refunded IS FALSE;`;
    const result = await db.query(q, [code]);
    const [data] = result.rows;

    if (!result.rows.length)
      return res.status(200).send(new ServerResponse(false, [], "Redeem code verification failed! Please try again."));

    const checkQ = `SELECT sum(team_members_limit) AS team_member_total FROM licensing_coupon_codes WHERE redeemed_by = $1 AND is_redeemed IS TRUE;`;
    const checkResult = await db.query(checkQ, [req.user?.owner_id]);
    const [total] = checkResult.rows;

    if (parseInt(total.team_member_total) > 50)
      return res.status(200).send(new ServerResponse(false, [], "Maximum number of codes redeemed!"));

    const updateQ = `UPDATE licensing_coupon_codes
                     SET is_redeemed = TRUE, redeemed_at = CURRENT_TIMESTAMP,
                         redeemed_by = $1
                     WHERE id = $2;`;
    await db.query(updateQ, [req.user?.owner_id, data.id]);

    const updateQ2 = `UPDATE organizations
                      SET subscription_status = 'life_time_deal',
                          trial_in_progress = FALSE,
                          storage = (SELECT sum(storage_limit) FROM licensing_coupon_codes WHERE redeemed_by = $1),
                          license_type_id = (SELECT id FROM sys_license_types WHERE key = 'LIFE_TIME_DEAL')
                      WHERE user_id = $1;`;
    await db.query(updateQ2, [req.user?.owner_id]);

    return res.status(200).send(new ServerResponse(true, [], "Code redeemed successfully!"));
  }

  @HandleExceptions()
@@ -284,6 +688,11 @@ export default class AdminCenterController extends WorklenzControllerBase {

    if (!id || !teamId) return res.status(200).send(new ServerResponse(false, "Required fields are missing."));

    // check subscription status
    const subscriptionData = await checkTeamSubscriptionStatus(teamId);
    if (statusExclude.includes(subscriptionData.subscription_status)) {
      return res.status(200).send(new ServerResponse(false, "Please check your subscription status."));
    }

    const q = `SELECT remove_team_member($1, $2, $3) AS member;`;
    const result = await db.query(q, [id, req.user?.id, teamId]);
@@ -291,6 +700,22 @@ export default class AdminCenterController extends WorklenzControllerBase {

    const message = `You have been removed from <b>${req.user?.team_name}</b> by <b>${req.user?.name}</b>`;

    // if (subscriptionData.status === "trialing") break;
    if (!subscriptionData.is_credit && !subscriptionData.is_custom) {
      if (subscriptionData.subscription_status === "active" && subscriptionData.quantity > 0) {

        const obj = await getActiveTeamMemberCount(req.user?.owner_id ?? "");

        const userActiveInOtherTeams = await this.checkIfUserActiveInOtherTeams(req.user?.owner_id as string, req.query?.email as string);

        if (!userActiveInOtherTeams) {
          const response = await updateUsers(subscriptionData.subscription_id, obj.user_count);
          if (!response.body.subscription_id) return res.status(200).send(new ServerResponse(false, response.message || "Please check your subscription."));
        }

      }
    }

    NotificationsService.sendNotification({
      receiver_socket_id: data.socket_id,
      message,
@@ -305,5 +730,49 @@ export default class AdminCenterController extends WorklenzControllerBase {
    return res.status(200).send(new ServerResponse(true, result.rows));
  }

  @HandleExceptions()
  public static async getFreePlanLimits(req: IWorkLenzRequest, res: IWorkLenzResponse): Promise<IWorkLenzResponse> {
    const limits = await getFreePlanSettings();

    return res.status(200).send(new ServerResponse(true, limits || {}));
  }

  @HandleExceptions()
  public static async getOrganizationProjects(req: IWorkLenzRequest, res: IWorkLenzResponse): Promise<IWorkLenzResponse> {
    const { searchQuery, size, offset } = this.toPaginationOptions(req.query, ["p.name"]);

    const countQ = `SELECT COUNT(*) AS total
                    FROM projects p
                           JOIN teams t ON p.team_id = t.id
                           JOIN organizations o ON t.organization_id = o.id
                    WHERE o.user_id = $1;`;
    const countResult = await db.query(countQ, [req.user?.owner_id]);

    // Query to get the project data
    const dataQ = `SELECT p.id,
                          p.name,
                          t.name AS team_name,
                          p.created_at,
                          pm.member_count
                   FROM projects p
                          JOIN teams t ON p.team_id = t.id
                          JOIN organizations o ON t.organization_id = o.id
                          LEFT JOIN (
                            SELECT project_id, COUNT(*) AS member_count
                            FROM project_members
                            GROUP BY project_id
                          ) pm ON p.id = pm.project_id
                   WHERE o.user_id = $1 ${searchQuery}
                   ORDER BY p.name
                   OFFSET $2 LIMIT $3;`;

    const result = await db.query(dataQ, [req.user?.owner_id, offset, size]);

    const response = {
      total: countResult.rows[0]?.total ?? 0,
      data: result.rows ?? []
    };

    return res.status(200).send(new ServerResponse(true, response));
  }
}
@@ -2,7 +2,8 @@ import { IWorkLenzRequest } from "../interfaces/worklenz-request";
import { IWorkLenzResponse } from "../interfaces/worklenz-response";

import db from "../config/db";
import { humanFileSize, smallId } from "../shared/utils";
import { getStorageUrl } from "../shared/constants";
import { ServerResponse } from "../models/server-response";
import {
  createPresignedUrlWithClient,
@@ -12,16 +13,10 @@ import {
  getRootDir,
  uploadBase64,
  uploadBuffer
} from "../shared/storage";
import WorklenzControllerBase from "./worklenz-controller-base";
import HandleExceptions from "../decorators/handle-exceptions";

export default class AttachmentController extends WorklenzControllerBase {

  @HandleExceptions()
@@ -42,7 +37,7 @@ export default class AttachmentController extends WorklenzControllerBase {
      req.user?.id,
      size,
      type,
      `${getStorageUrl()}/${getRootDir()}`
    ]);
    const [data] = result.rows;

@@ -86,7 +81,7 @@ export default class AttachmentController extends WorklenzControllerBase {
               FROM task_attachments
               WHERE task_id = $1;
    `;
    const result = await db.query(q, [req.params.id, `${getStorageUrl()}/${getRootDir()}`]);

    for (const item of result.rows)
      item.size = humanFileSize(item.size);
@@ -121,7 +116,7 @@ export default class AttachmentController extends WorklenzControllerBase {
               LEFT JOIN tasks t ON task_attachments.task_id = t.id
               WHERE task_attachments.project_id = $1) rec;
    `;
    const result = await db.query(q, [req.params.id, `${getStorageUrl()}/${getRootDir()}`, size, offset]);
    const [data] = result.rows;

    for (const item of data?.attachments.data || [])
@@ -135,26 +130,29 @@ export default class AttachmentController extends WorklenzControllerBase {
    const q = `DELETE
               FROM task_attachments
               WHERE id = $1
               RETURNING team_id, project_id, id, type;`;
    const result = await db.query(q, [req.params.id]);
    const [data] = result.rows;

    if (data) {
      const key = getKey(data.team_id, data.project_id, data.id, data.type);
      void deleteObject(key);
    }

    return res.status(200).send(new ServerResponse(true, result.rows));
  }

  @HandleExceptions()
  public static async download(req: IWorkLenzRequest, res: IWorkLenzResponse): Promise<IWorkLenzResponse> {
    const q = `SELECT team_id, project_id, id, type
               FROM task_attachments
               WHERE id = $1;`;
    const result = await db.query(q, [req.query.id]);
    const [data] = result.rows;

    if (data) {
      const key = getKey(data.team_id, data.project_id, data.id, data.type);
      const url = await createPresignedUrlWithClient(key, req.query.file as string);
      return res.status(200).send(new ServerResponse(true, url));
    }
@@ -12,6 +12,9 @@ import WorklenzControllerBase from "./worklenz-controller-base";
import HandleExceptions from "../decorators/handle-exceptions";
import {PasswordStrengthChecker} from "../shared/password-strength-check";
import FileConstants from "../shared/file-constants";
import axios from "axios";
import {log_error} from "../shared/utils";
import {DEFAULT_ERROR_MESSAGE} from "../shared/constants";

export default class AuthController extends WorklenzControllerBase {
  /** This just sends an ok response to the client when the request comes here through the sign-up-validator */
@@ -42,11 +45,20 @@ export default class AuthController extends WorklenzControllerBase {
  }

  public static logout(req: IWorkLenzRequest, res: IWorkLenzResponse) {
    req.logout((err) => {
      if (err) {
        console.error("Logout error:", err);
        return res.status(500).send(new AuthResponse(null, true, {}, "Logout failed", null));
      }

      req.session.destroy((destroyErr) => {
        if (destroyErr) {
          console.error("Session destroy error:", destroyErr);
        }
        res.status(200).send(new AuthResponse(null, req.isAuthenticated(), {}, null, null));
      });
    });
  }

  private static async destroyOtherSessions(userId: string, sessionId: string) {
    try {
@@ -138,4 +150,25 @@ export default class AuthController extends WorklenzControllerBase {
    }
    return res.status(200).send(new ServerResponse(false, null, "Invalid Request. Please try again."));
  }

  @HandleExceptions({logWithError: "body"})
  public static async verifyCaptcha(req: IWorkLenzRequest, res: IWorkLenzResponse) {
    const {token} = req.body;
    const secretKey = process.env.GOOGLE_CAPTCHA_SECRET_KEY;
    try {
      const response = await axios.post(
        `https://www.google.com/recaptcha/api/siteverify?secret=${secretKey}&response=${token}`
      );

      const {success, score} = response.data;

      if (success && score > 0.5) {
        return res.status(200).send(new ServerResponse(true, null, null));
      }
      return res.status(400).send(new ServerResponse(false, null, "Please try again later.").withTitle("Error"));
    } catch (error) {
      log_error(error);
      res.status(500).send(new ServerResponse(false, null, DEFAULT_ERROR_MESSAGE));
    }
  }
}
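
verifyCaptcha expects a reCAPTCHA v3 token in the request body. A minimal client-side sketch of obtaining and posting that token, assuming the standard grecaptcha v3 API; the site key and the endpoint path are placeholders, not taken from this diff:

// Hypothetical client-side flow; RECAPTCHA_SITE_KEY and the endpoint path are assumptions.
declare const grecaptcha: {
  ready(cb: () => void): void;
  execute(siteKey: string, opts: { action: string }): Promise<string>;
};

const RECAPTCHA_SITE_KEY = "your-site-key"; // placeholder

function verifyHuman(): Promise<boolean> {
  return new Promise((resolve) => {
    grecaptcha.ready(async () => {
      // Ask Google for a v3 token scoped to this action.
      const token = await grecaptcha.execute(RECAPTCHA_SITE_KEY, { action: "signup" });
      // Post it to the server-side verifier shown above (path assumed).
      const res = await fetch("/secure/verify-captcha", {
        method: "POST",
        headers: { "Content-Type": "application/json" },
        body: JSON.stringify({ token })
      });
      resolve(res.ok);
    });
  });
}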

288 worklenz-backend/src/controllers/billing-controller.ts (Normal file)
@@ -0,0 +1,288 @@
import { IWorkLenzRequest } from "../interfaces/worklenz-request";
import { IWorkLenzResponse } from "../interfaces/worklenz-response";

import db from "../config/db";
import { ServerResponse } from "../models/server-response";
import WorklenzControllerBase from "./worklenz-controller-base";
import HandleExceptions from "../decorators/handle-exceptions";
import { getTeamMemberCount } from "../shared/paddle-utils";
import { generatePayLinkRequest, updateUsers } from "../shared/paddle-requests";

import CryptoJS from "crypto-js";
import moment from "moment";
import axios from "axios";

import crypto from "crypto";
import fs from "fs";
import path from "path";
import { log_error } from "../shared/utils";
import { sendEmail } from "../shared/email";

export default class BillingController extends WorklenzControllerBase {
  public static async getInitialCharge(count: number) {
    if (!count) throw new Error("No selected plan detected.");

    const baseRate = 4990;
    const firstTier = 15;
    const secondTierEnd = 200;

    if (count <= firstTier) {
      return baseRate;
    } else if (count <= secondTierEnd) {
      return baseRate + (count - firstTier) * 300;
    }
    return baseRate + (secondTierEnd - firstTier) * 300 + (count - secondTierEnd) * 200;
  }
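
The tiers read: a flat base of 4,990 covers the first 15 seats, seats 16–200 add 300 each, and seats beyond 200 add 200 each. A quick worked check:

// Worked examples for the tiered charge (amounts in the controller's base units).
// 10 seats  -> within the first tier:              4990
// 20 seats  -> 4990 + (20 - 15) * 300            = 6490
// 250 seats -> 4990 + (200 - 15) * 300
//                   + (250 - 200) * 200          = 70490
console.log(await BillingController.getInitialCharge(10));  // 4990
console.log(await BillingController.getInitialCharge(20));  // 6490
console.log(await BillingController.getInitialCharge(250)); // 70490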

  public static async getBillingMonth() {
    const startDate = moment().format("YYYYMMDD");
    const endDate = moment().add(1, "month").subtract(1, "day").format("YYYYMMDD");

    return `${startDate} - ${endDate}`;
  }

  public static async chargeInitialPayment(signature: string, data: any) {
    const config = {
      method: "post",
      maxBodyLength: Infinity,
      url: process.env.DP_URL,
      headers: {
        "Content-Type": "application/json",
        "Signature": signature,
        "x-api-key": process.env.DP_API_KEY
      },
      data
    };

    axios.request(config)
      .then((response) => {
        console.log(JSON.stringify(response.data));
      })
      .catch((error) => {
        console.log(error);
      });
  }

  public static async saveLocalTransaction(signature: string, data: any) {
    try {
      // NOTE: the values array is still empty; the eight parameters need to be wired in before this insert can succeed.
      const q = `INSERT INTO transactions (status, transaction_id, transaction_status, description, date_time, reference, amount, card_number)
                 VALUES ($1, $2, $3, $4, $5, $6, $7, $8);`;
      await db.query(q, []);
    } catch (error) {
      log_error(error);
    }
  }

  @HandleExceptions()
  public static async upgradeToPaidPlan(req: IWorkLenzRequest, res: IWorkLenzResponse): Promise<IWorkLenzResponse> {
    const { plan, seatCount } = req.query;

    const teamMemberData = await getTeamMemberCount(req.user?.owner_id ?? "");
    teamMemberData.user_count = seatCount as string;
    const axiosResponse = await generatePayLinkRequest(teamMemberData, plan as string, req.user?.owner_id, req.user?.id);

    return res.status(200).send(new ServerResponse(true, axiosResponse.body));
  }

  @HandleExceptions()
  public static async addMoreSeats(req: IWorkLenzRequest, res: IWorkLenzResponse): Promise<IWorkLenzResponse> {
    const { seatCount } = req.body;

    const q = `SELECT subscription_id
               FROM licensing_user_subscriptions lus
               WHERE user_id = $1;`;
    const result = await db.query(q, [req.user?.owner_id]);
    const [data] = result.rows;

    const response = await updateUsers(data.subscription_id, seatCount);

    if (!response.body.subscription_id) {
      return res.status(200).send(new ServerResponse(false, null, response.message || "Please check your subscription."));
    }
    return res.status(200).send(new ServerResponse(true, null, "Your purchase has been successfully completed!").withTitle("Done"));
  }

  @HandleExceptions()
  public static async getDirectPayObject(req: IWorkLenzRequest, res: IWorkLenzResponse): Promise<IWorkLenzResponse> {
    const { seatCount } = req.query;
    if (!seatCount) return res.status(200).send(new ServerResponse(false, null));
    const email = req.user?.email;
    const name = req.user?.name;
    const amount = await this.getInitialCharge(parseInt(seatCount as string));
    const uniqueTimestamp = moment().format("YYYYMMDDHHmmss");
    const billingMonth = await this.getBillingMonth();

    const { DP_MERCHANT_ID, DP_SECRET_KEY, DP_STAGE } = process.env;

    const payload = {
      merchant_id: DP_MERCHANT_ID,
      amount: 10,
      type: "RECURRING",
      order_id: `WORKLENZ_${email}_${uniqueTimestamp}`,
      currency: "LKR",
      return_url: null,
      response_url: null,
      first_name: name,
      last_name: null,
      phone: null,
      email,
      description: `${name} (${email})`,
      page_type: "IN_APP",
      logo: "https://app.worklenz.com/assets/icons/icon-96x96.png",
      start_date: moment().format("YYYY-MM-DD"),
      do_initial_payment: 1,
      interval: 1,
    };

    const encodePayload = CryptoJS.enc.Base64.stringify(CryptoJS.enc.Utf8.parse(JSON.stringify(payload)));
    const signature = CryptoJS.HmacSHA256(encodePayload, DP_SECRET_KEY as string);

    return res.status(200).send(new ServerResponse(true, { signature: signature.toString(CryptoJS.enc.Hex), dataString: encodePayload, stage: DP_STAGE }));
  }

  @HandleExceptions()
  public static async saveTransactionData(req: IWorkLenzRequest, res: IWorkLenzResponse): Promise<IWorkLenzResponse> {
    const { status, card, transaction, seatCount } = req.body;
    const { DP_MERCHANT_ID, DP_STAGE } = process.env;

    const email = req.user?.email;

    const amount = await this.getInitialCharge(parseInt(seatCount as string));
    const uniqueTimestamp = moment().format("YYYYMMDDHHmmss");
    const billingMonth = await this.getBillingMonth();

    const values = [
      status,
      card?.id,
      card?.number,
      card?.brand,
      card?.type,
      card?.issuer,
      card?.expiry?.year,
      card?.expiry?.month,
      card?.walletId,
      transaction?.id,
      transaction?.status,
      transaction?.amount || 0,
      transaction?.currency || null,
      transaction?.channel || null,
      transaction?.dateTime || null,
      transaction?.message || null,
      transaction?.description || null,
      req.user?.id,
      req.user?.owner_id,
    ];

    const q = `INSERT INTO licensing_lkr_payments (
                 status, card_id, card_number, card_brand, card_type, card_issuer,
                 card_expiry_year, card_expiry_month, wallet_id,
                 transaction_id, transaction_status, transaction_amount,
                 transaction_currency, transaction_channel, transaction_datetime,
                 transaction_message, transaction_description, user_id, owner_id
               )
               VALUES (
                 $1, $2, $3, $4, $5, $6, $7, $8, $9, $10, $11, $12, $13, $14, $15, $16, $17, $18, $19
               );`;
    await db.query(q, values);

    if (transaction.status === "SUCCESS") {
      const payload = {
        "merchantId": DP_MERCHANT_ID,
        "reference": `WORKLENZ_${email}_${uniqueTimestamp}`,
        "type": "CARD_PAY",
        "cardId": card.id,
        "refCode": req.user?.id,
        amount,
        "currency": "LKR"
      };
      const dataString = Object.values(payload).join("");

      const pemFile = DP_STAGE === "PROD" ? "src/keys/PRIVATE_KEY_PROD.pem" : "src/keys/PRIVATE_KEY_DEV.pem";

      const privateKeyTest = fs.readFileSync(path.resolve(pemFile), "utf8");
      const sign = crypto.createSign("SHA256");
      sign.update(dataString);
      sign.end();

      const signature = sign.sign(privateKeyTest);
      const byteArray = new Uint8Array(signature);
      let byteString = "";
      for (let i = 0; i < byteArray.byteLength; i++) {
        byteString += String.fromCharCode(byteArray[i]);
      }
      const base64Signature = btoa(byteString);

      this.chargeInitialPayment(base64Signature, payload);
    }

    return res.status(200).send(new ServerResponse(true, null, "Your purchase has been successfully completed!").withTitle("Done"));
  }

  @HandleExceptions()
  public static async getCardList(req: IWorkLenzRequest, res: IWorkLenzResponse): Promise<IWorkLenzResponse> {
    const payload = {
      "merchantId": "RT02300",
      "reference": "1234",
      "type": "LIST_CARD"
    };

    const { DP_STAGE } = process.env;

    const dataString = `RT023001234LIST_CARD`;
    const pemFile = DP_STAGE === "PROD" ? "src/keys/PRIVATE_KEY_PROD.pem" : "src/keys/PRIVATE_KEY_DEV.pem";

    const privateKeyTest = fs.readFileSync(path.resolve(pemFile), "utf8");
    const sign = crypto.createSign("SHA256");
    sign.update(dataString);
    sign.end();

    const signature = sign.sign(privateKeyTest);
    const byteArray = new Uint8Array(signature);
    let byteString = "";
    for (let i = 0; i < byteArray.byteLength; i++) {
      byteString += String.fromCharCode(byteArray[i]);
    }
    const base64Signature = btoa(byteString);
    // const signature = CryptoJS.HmacSHA256(dataString, DP_SECRET_KEY as string).toString(CryptoJS.enc.Hex);

    return res.status(200).send(new ServerResponse(true, { signature: base64Signature, dataString }));
  }
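
The `Uint8Array` → `String.fromCharCode` → `btoa` sequence re-implements base64 encoding by hand (and `btoa` is a browser API that is only available globally in newer Node versions). In Node, the same result is available directly from the `Buffer` the signer returns; a sketch of the simpler equivalent:

import crypto from "crypto";
import fs from "fs";
import path from "path";

// Equivalent signing helper using Buffer's built-in base64 encoding.
function signBase64(dataString: string, pemFile: string): string {
  const privateKey = fs.readFileSync(path.resolve(pemFile), "utf8");
  const sign = crypto.createSign("SHA256");
  sign.update(dataString);
  sign.end();
  // sign() returns a Buffer, which serializes itself to base64 directly.
  return sign.sign(privateKey).toString("base64");
}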

  @HandleExceptions()
  public static async contactUs(req: IWorkLenzRequest, res: IWorkLenzResponse): Promise<IWorkLenzResponse> {
    const { contactNo } = req.query;

    if (!contactNo) {
      return res.status(200).send(new ServerResponse(false, null, "Contact number is required!"));
    }

    const html = `<!DOCTYPE html>
      <html lang="en">
      <head>
        <meta charset="UTF-8">
        <meta name="viewport" content="width=device-width, initial-scale=1.0">
        <title>Worklenz Local Billing - Contact Information</title>
      </head>
      <body>
      <div>
        <h1 style="text-align: center; margin-bottom: 20px;">Worklenz Local Billing - Contact Information</h1>
        <p><strong>Name:</strong> ${req.user?.name}</p>
        <p><strong>Contact No:</strong> ${contactNo as string}</p>
        <p><strong>Email:</strong> ${req.user?.email}</p>
      </div>
      </body>
      </html>`;
    const to = [process.env.CONTACT_US_EMAIL || "chamika@ceydigital.com"];

    sendEmail({
      to,
      subject: "Worklenz - Local billing contact.",
      html
    });
    return res.status(200).send(new ServerResponse(true, null, "Your contact information has been sent successfully."));
  }

}
@@ -12,7 +12,7 @@ export default class ClientsController extends WorklenzControllerBase {

  @HandleExceptions()
  public static async create(req: IWorkLenzRequest, res: IWorkLenzResponse): Promise<IWorkLenzResponse> {
    const q = `INSERT INTO clients (name, team_id) VALUES ($1, $2) RETURNING id, name;`;
    const result = await db.query(q, [req.body.name, req.user?.team_id || null]);
    const [data] = result.rows;
    return res.status(200).send(new ServerResponse(true, data));

531 worklenz-backend/src/controllers/custom-columns-controller.ts (Normal file)
@@ -0,0 +1,531 @@
import { IWorkLenzRequest } from "../interfaces/worklenz-request";
import { IWorkLenzResponse } from "../interfaces/worklenz-response";

import db from "../config/db";
import { ServerResponse } from "../models/server-response";
import WorklenzControllerBase from "./worklenz-controller-base";
import HandleExceptions from "../decorators/handle-exceptions";

export default class CustomcolumnsController extends WorklenzControllerBase {
  @HandleExceptions()
  public static async create(
    req: IWorkLenzRequest,
    res: IWorkLenzResponse
  ): Promise<IWorkLenzResponse> {
    const {
      project_id,
      name,
      key,
      field_type,
      width = 150,
      is_visible = true,
      configuration,
    } = req.body;

    // Start a transaction since we're inserting into multiple tables
    const client = await db.pool.connect();

    try {
      await client.query("BEGIN");

      // 1. Insert the main custom column
      const columnQuery = `
        INSERT INTO cc_custom_columns (
          project_id, name, key, field_type, width, is_visible, is_custom_column
        ) VALUES ($1, $2, $3, $4, $5, $6, true)
        RETURNING id;
      `;
      const columnResult = await client.query(columnQuery, [
        project_id,
        name,
        key,
        field_type,
        width,
        is_visible,
      ]);
      const columnId = columnResult.rows[0].id;

      // 2. Insert the column configuration
      const configQuery = `
        INSERT INTO cc_column_configurations (
          column_id, field_title, field_type, number_type,
          decimals, label, label_position, preview_value,
          expression, first_numeric_column_key, second_numeric_column_key
        ) VALUES ($1, $2, $3, $4, $5, $6, $7, $8, $9, $10, $11)
        RETURNING id;
      `;
      await client.query(configQuery, [
        columnId,
        configuration.field_title,
        configuration.field_type,
        configuration.number_type || null,
        configuration.decimals || null,
        configuration.label || null,
        configuration.label_position || null,
        configuration.preview_value || null,
        configuration.expression || null,
        configuration.first_numeric_column_key || null,
        configuration.second_numeric_column_key || null,
      ]);

      // 3. Insert selection options if present
      if (
        configuration.selections_list &&
        configuration.selections_list.length > 0
      ) {
        const selectionQuery = `
          INSERT INTO cc_selection_options (
            column_id, selection_id, selection_name, selection_color, selection_order
          ) VALUES ($1, $2, $3, $4, $5);
        `;
        for (const [
          index,
          selection,
        ] of configuration.selections_list.entries()) {
          await client.query(selectionQuery, [
            columnId,
            selection.selection_id,
            selection.selection_name,
            selection.selection_color,
            index,
          ]);
        }
      }

      // 4. Insert label options if present
      if (configuration.labels_list && configuration.labels_list.length > 0) {
        const labelQuery = `
          INSERT INTO cc_label_options (
            column_id, label_id, label_name, label_color, label_order
          ) VALUES ($1, $2, $3, $4, $5);
        `;
        for (const [index, label] of configuration.labels_list.entries()) {
          await client.query(labelQuery, [
            columnId,
            label.label_id,
            label.label_name,
            label.label_color,
            index,
          ]);
        }
      }

      await client.query("COMMIT");

      // Fetch the complete column data
      const getColumnQuery = `
        SELECT
          cc.*,
          cf.field_title,
          cf.number_type,
          cf.decimals,
          cf.label,
          cf.label_position,
          cf.preview_value,
          cf.expression,
          cf.first_numeric_column_key,
          cf.second_numeric_column_key,
          (
            SELECT json_agg(
              json_build_object(
                'selection_id', so.selection_id,
                'selection_name', so.selection_name,
                'selection_color', so.selection_color
              )
            )
            FROM cc_selection_options so
            WHERE so.column_id = cc.id
          ) as selections_list,
          (
            SELECT json_agg(
              json_build_object(
                'label_id', lo.label_id,
                'label_name', lo.label_name,
                'label_color', lo.label_color
              )
            )
            FROM cc_label_options lo
            WHERE lo.column_id = cc.id
          ) as labels_list
        FROM cc_custom_columns cc
        LEFT JOIN cc_column_configurations cf ON cf.column_id = cc.id
        WHERE cc.id = $1;
      `;
      const result = await client.query(getColumnQuery, [columnId]);
      const [data] = result.rows;

      return res.status(200).send(new ServerResponse(true, data));
    } catch (error) {
      await client.query("ROLLBACK");
      throw error;
    } finally {
      client.release();
    }
  }
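
The BEGIN/COMMIT/ROLLBACK/release choreography here repeats in `update` below; it can be factored into a small helper so each controller only supplies the transactional body. A sketch under the assumption that `db.pool` is a standard `pg` Pool (the helper name is illustrative, not part of this codebase):

import type { Pool, PoolClient } from "pg";

// Hypothetical helper wrapping the BEGIN/COMMIT/ROLLBACK/release pattern.
async function withTransaction<T>(
  pool: Pool,
  body: (client: PoolClient) => Promise<T>
): Promise<T> {
  const client = await pool.connect();
  try {
    await client.query("BEGIN");
    const result = await body(client); // run the caller's queries on one connection
    await client.query("COMMIT");
    return result;
  } catch (error) {
    await client.query("ROLLBACK"); // undo partial writes on any failure
    throw error;
  } finally {
    client.release(); // always return the connection to the pool
  }
}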
|
||||
|
||||
@HandleExceptions()
|
||||
public static async get(
|
||||
req: IWorkLenzRequest,
|
||||
res: IWorkLenzResponse
|
||||
): Promise<IWorkLenzResponse> {
|
||||
const { project_id } = req.query;
|
||||
|
||||
const q = `
|
||||
SELECT
|
||||
cc.*,
|
||||
cf.field_title,
|
||||
cf.number_type,
|
||||
cf.decimals,
|
||||
cf.label,
|
||||
cf.label_position,
|
||||
cf.preview_value,
|
||||
cf.expression,
|
||||
cf.first_numeric_column_key,
|
||||
cf.second_numeric_column_key,
|
||||
(
|
||||
SELECT json_agg(
|
||||
json_build_object(
|
||||
'selection_id', so.selection_id,
|
||||
'selection_name', so.selection_name,
|
||||
'selection_color', so.selection_color
|
||||
)
|
||||
)
|
||||
FROM cc_selection_options so
|
||||
WHERE so.column_id = cc.id
|
||||
) as selections_list,
|
||||
(
|
||||
SELECT json_agg(
|
||||
json_build_object(
|
||||
'label_id', lo.label_id,
|
||||
'label_name', lo.label_name,
|
||||
'label_color', lo.label_color
|
||||
)
|
||||
)
|
||||
FROM cc_label_options lo
|
||||
WHERE lo.column_id = cc.id
|
||||
) as labels_list
|
||||
FROM cc_custom_columns cc
|
||||
LEFT JOIN cc_column_configurations cf ON cf.column_id = cc.id
|
||||
WHERE cc.project_id = $1
|
||||
ORDER BY cc.created_at DESC;
|
||||
`;
|
||||
const result = await db.query(q, [project_id]);
|
||||
return res.status(200).send(new ServerResponse(true, result.rows));
|
||||
}
|
  @HandleExceptions()
  public static async getById(
    req: IWorkLenzRequest,
    res: IWorkLenzResponse
  ): Promise<IWorkLenzResponse> {
    const { id } = req.params;

    const q = `
      SELECT
        cc.*,
        cf.field_title,
        cf.number_type,
        cf.decimals,
        cf.label,
        cf.label_position,
        cf.preview_value,
        cf.expression,
        cf.first_numeric_column_key,
        cf.second_numeric_column_key,
        (
          SELECT json_agg(
            json_build_object(
              'selection_id', so.selection_id,
              'selection_name', so.selection_name,
              'selection_color', so.selection_color
            )
          )
          FROM cc_selection_options so
          WHERE so.column_id = cc.id
        ) as selections_list,
        (
          SELECT json_agg(
            json_build_object(
              'label_id', lo.label_id,
              'label_name', lo.label_name,
              'label_color', lo.label_color
            )
          )
          FROM cc_label_options lo
          WHERE lo.column_id = cc.id
        ) as labels_list
      FROM cc_custom_columns cc
      LEFT JOIN cc_column_configurations cf ON cf.column_id = cc.id
      WHERE cc.id = $1;
    `;
    const result = await db.query(q, [id]);
    const [data] = result.rows;
    return res.status(200).send(new ServerResponse(true, data));
  }

  @HandleExceptions()
  public static async update(
    req: IWorkLenzRequest,
    res: IWorkLenzResponse
  ): Promise<IWorkLenzResponse> {
    const { id } = req.params;
    const { name, field_type, width, is_visible, configuration } = req.body;

    const client = await db.pool.connect();

    try {
      await client.query("BEGIN");

      // 1. Update the main custom column
      const columnQuery = `
        UPDATE cc_custom_columns
        SET name = $1, field_type = $2, width = $3, is_visible = $4, updated_at = CURRENT_TIMESTAMP
        WHERE id = $5
        RETURNING id;
      `;
      await client.query(columnQuery, [
        name,
        field_type,
        width,
        is_visible,
        id,
      ]);

      // 2. Update the configuration
      const configQuery = `
        UPDATE cc_column_configurations
        SET
          field_title = $1,
          field_type = $2,
          number_type = $3,
          decimals = $4,
          label = $5,
          label_position = $6,
          preview_value = $7,
          expression = $8,
          first_numeric_column_key = $9,
          second_numeric_column_key = $10,
          updated_at = CURRENT_TIMESTAMP
        WHERE column_id = $11;
      `;
      await client.query(configQuery, [
        configuration.field_title,
        configuration.field_type,
        configuration.number_type || null,
        configuration.decimals || null,
        configuration.label || null,
        configuration.label_position || null,
        configuration.preview_value || null,
        configuration.expression || null,
        configuration.first_numeric_column_key || null,
        configuration.second_numeric_column_key || null,
        id,
      ]);

      // 3. Update selections if present
      if (configuration.selections_list) {
        // Delete existing selections
        await client.query(
          "DELETE FROM cc_selection_options WHERE column_id = $1",
          [id]
        );

        // Insert new selections
        if (configuration.selections_list.length > 0) {
          const selectionQuery = `
            INSERT INTO cc_selection_options (
              column_id, selection_id, selection_name, selection_color, selection_order
            ) VALUES ($1, $2, $3, $4, $5);
          `;
          for (const [
            index,
            selection,
          ] of configuration.selections_list.entries()) {
            await client.query(selectionQuery, [
              id,
              selection.selection_id,
              selection.selection_name,
              selection.selection_color,
              index,
            ]);
          }
        }
      }

      // 4. Update labels if present
      if (configuration.labels_list) {
        // Delete existing labels
        await client.query("DELETE FROM cc_label_options WHERE column_id = $1", [
          id,
        ]);

        // Insert new labels
        if (configuration.labels_list.length > 0) {
          const labelQuery = `
            INSERT INTO cc_label_options (
              column_id, label_id, label_name, label_color, label_order
            ) VALUES ($1, $2, $3, $4, $5);
          `;
          for (const [index, label] of configuration.labels_list.entries()) {
            await client.query(labelQuery, [
              id,
              label.label_id,
              label.label_name,
              label.label_color,
              index,
            ]);
          }
        }
      }

      await client.query("COMMIT");

      // Fetch the updated column data
      const getColumnQuery = `
        SELECT
          cc.*,
          cf.field_title,
          cf.number_type,
          cf.decimals,
          cf.label,
          cf.label_position,
          cf.preview_value,
          cf.expression,
          cf.first_numeric_column_key,
          cf.second_numeric_column_key,
          (
            SELECT json_agg(
              json_build_object(
                'selection_id', so.selection_id,
                'selection_name', so.selection_name,
                'selection_color', so.selection_color
              )
            )
            FROM cc_selection_options so
            WHERE so.column_id = cc.id
          ) as selections_list,
          (
            SELECT json_agg(
              json_build_object(
                'label_id', lo.label_id,
                'label_name', lo.label_name,
                'label_color', lo.label_color
              )
            )
            FROM cc_label_options lo
            WHERE lo.column_id = cc.id
          ) as labels_list
        FROM cc_custom_columns cc
        LEFT JOIN cc_column_configurations cf ON cf.column_id = cc.id
        WHERE cc.id = $1;
      `;
      const result = await client.query(getColumnQuery, [id]);
      const [data] = result.rows;

      return res.status(200).send(new ServerResponse(true, data));
    } catch (error) {
      await client.query("ROLLBACK");
      throw error;
    } finally {
      client.release();
    }
  }
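Note: update() uses the standard node-postgres transaction pattern: check out a dedicated client, BEGIN, run the statements, COMMIT, ROLLBACK in the catch, and always release the client. A minimal sketch of that pattern as a reusable helper (withTransaction is a hypothetical name, not an existing Worklenz utility):

    import { Pool, PoolClient } from "pg";

    // Hypothetical helper wrapping the BEGIN/COMMIT/ROLLBACK/release sequence above.
    async function withTransaction<T>(pool: Pool, fn: (client: PoolClient) => Promise<T>): Promise<T> {
      const client = await pool.connect();
      try {
        await client.query("BEGIN");
        const result = await fn(client);
        await client.query("COMMIT");
        return result;
      } catch (error) {
        await client.query("ROLLBACK"); // discard everything issued through this client
        throw error;
      } finally {
        client.release(); // always hand the client back to the pool
      }
    }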
  @HandleExceptions()
  public static async deleteById(
    req: IWorkLenzRequest,
    res: IWorkLenzResponse
  ): Promise<IWorkLenzResponse> {
    const { id } = req.params;

    const q = `
      DELETE FROM cc_custom_columns
      WHERE id = $1
      RETURNING id;
    `;
    const result = await db.query(q, [id]);
    return res.status(200).send(new ServerResponse(true, result.rows));
  }

  @HandleExceptions()
  public static async getProjectColumns(req: IWorkLenzRequest, res: IWorkLenzResponse): Promise<IWorkLenzResponse> {
    const { project_id } = req.params;

    const q = `
      WITH column_data AS (
        SELECT
          cc.id,
          cc.key,
          cc.name,
          cc.field_type,
          cc.width,
          cc.is_visible,
          cf.field_title,
          cf.number_type,
          cf.decimals,
          cf.label,
          cf.label_position,
          cf.preview_value,
          cf.expression,
          cf.first_numeric_column_key,
          cf.second_numeric_column_key,
          (
            SELECT json_agg(
              json_build_object(
                'selection_id', so.selection_id,
                'selection_name', so.selection_name,
                'selection_color', so.selection_color
              )
            )
            FROM cc_selection_options so
            WHERE so.column_id = cc.id
          ) as selections_list,
          (
            SELECT json_agg(
              json_build_object(
                'label_id', lo.label_id,
                'label_name', lo.label_name,
                'label_color', lo.label_color
              )
            )
            FROM cc_label_options lo
            WHERE lo.column_id = cc.id
          ) as labels_list
        FROM cc_custom_columns cc
        LEFT JOIN cc_column_configurations cf ON cf.column_id = cc.id
        WHERE cc.project_id = $1
      )
      SELECT
        json_agg(
          json_build_object(
            'key', cd.key,
            'id', cd.id,
            'name', cd.name,
            'width', cd.width,
            'pinned', cd.is_visible,
            'custom_column', true,
            'custom_column_obj', json_build_object(
              'fieldType', cd.field_type,
              'fieldTitle', cd.field_title,
              'numberType', cd.number_type,
              'decimals', cd.decimals,
              'label', cd.label,
              'labelPosition', cd.label_position,
              'previewValue', cd.preview_value,
              'expression', cd.expression,
              'firstNumericColumnKey', cd.first_numeric_column_key,
              'secondNumericColumnKey', cd.second_numeric_column_key,
              'selectionsList', COALESCE(cd.selections_list, '[]'::json),
              'labelsList', COALESCE(cd.labels_list, '[]'::json)
            )
          )
        ) as columns
      FROM column_data cd;
    `;

    const result = await db.query(q, [project_id]);
    const columns = result.rows[0]?.columns || [];

    return res.status(200).send(new ServerResponse(true, columns));
  }
}
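Note: getProjectColumns maps the snake_case configuration rows into a camelCase custom_column_obj directly in SQL. An illustrative TypeScript shape for one element of the returned columns array, derived from the json_build_object keys above (the type name is an assumption):

    interface ProjectColumn {
      key: string;
      id: string;
      name: string;
      width: number;
      pinned: boolean; // aliased from cc.is_visible
      custom_column: true;
      custom_column_obj: {
        fieldType: string;
        fieldTitle: string | null;
        numberType: string | null;
        decimals: number | null;
        label: string | null;
        labelPosition: string | null;
        previewValue: string | null;
        expression: string | null;
        firstNumericColumnKey: string | null;
        secondNumericColumnKey: string | null;
        selectionsList: unknown[]; // COALESCE guarantees [] instead of null here
        labelsList: unknown[];
      };
    }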
@@ -114,7 +114,7 @@ export default class HomePageController extends WorklenzControllerBase {
             p.team_id,
             p.name AS project_name,
             p.color_code AS project_color,
-            (SELECT id FROM task_statuses WHERE id = t.status_id) AS status,
+            (SELECT name FROM task_statuses WHERE id = t.status_id) AS status,
             (SELECT color_code
              FROM sys_task_status_categories
              WHERE id = (SELECT category_id FROM task_statuses WHERE id = t.status_id)) AS status_color,

@@ -59,7 +59,7 @@ export default class IndexController extends WorklenzControllerBase {
     if (req.user && !req.user.is_member)
       return res.redirect("/teams");

-    return res.redirect("/auth");
+    return res.redirect(301, "/auth");
   }
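Note: Express's res.redirect(path) defaults to a 302 temporary redirect; passing a status code first changes that, which is what this commit does:

    res.redirect("/auth");      // 302 Found - temporary, re-requested every time
    res.redirect(301, "/auth"); // 301 Moved Permanently - browsers may cache it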
   public static redirectToLogin(req: IWorkLenzRequest, res: IWorkLenzResponse) {

@@ -195,7 +195,7 @@ export default class ProjectCommentsController extends WorklenzControllerBase {
              pc.created_at,
              pc.updated_at
       FROM project_comments pc
-      WHERE pc.project_id = $1 ORDER BY pc.updated_at DESC
+      WHERE pc.project_id = $1 ORDER BY pc.updated_at
     `;
     const result = await db.query(q, [req.params.id]);

@@ -322,7 +322,7 @@ export default class ProjectInsightsController extends WorklenzControllerBase {
            (SELECT get_task_assignees(tasks.id)) AS assignees
     FROM tasks
     JOIN work_log ON work_log.task_id = tasks.id
-    WHERE project_id = $1
+    WHERE project_id = $1 AND total_minutes <> 0 AND (total_minutes * 60) <> work_log.total_time_spent
       AND CASE
             WHEN ($2 IS TRUE) THEN project_id IS NOT NULL
             ELSE archived IS FALSE END

@@ -7,6 +7,9 @@ import WorklenzControllerBase from "./worklenz-controller-base";
 import HandleExceptions from "../decorators/handle-exceptions";
 import {getColor} from "../shared/utils";
+import TeamMembersController from "./team-members-controller";
+import {checkTeamSubscriptionStatus} from "../shared/paddle-utils";
+import {updateUsers} from "../shared/paddle-requests";
 import {statusExclude} from "../shared/constants";
 import {NotificationsService} from "../services/notifications/notifications.service";

 export default class ProjectMembersController extends WorklenzControllerBase {

@@ -69,6 +72,70 @@ export default class ProjectMembersController extends WorklenzControllerBase {

     if (!req.user?.team_id) return res.status(200).send(new ServerResponse(false, "Required fields are missing."));

+    // check the subscription status
+    const subscriptionData = await checkTeamSubscriptionStatus(req.user?.team_id);
+
+    const userExists = await this.checkIfUserAlreadyExists(req.user?.owner_id as string, req.body.email);
+
+    // Return error if user already exists
+    if (userExists) {
+      return res.status(200).send(new ServerResponse(false, null, "User already exists in the team."));
+    }
+
+    // Handle self-hosted subscriptions differently
+    if (subscriptionData.subscription_type === 'SELF_HOSTED') {
+      // Adding as a team member
+      const teamMemberReq: { team_id?: string; emails: string[], project_id?: string; } = {
+        team_id: req.user?.team_id,
+        emails: [req.body.email]
+      };
+
+      if (req.body.project_id)
+        teamMemberReq.project_id = req.body.project_id;
+
+      const [member] = await TeamMembersController.createOrInviteMembers(teamMemberReq, req.user);
+
+      if (!member)
+        return res.status(200).send(new ServerResponse(true, null, "Failed to add the member to the project. Please try again."));
+
+      // Adding to the project
+      const projectMemberReq = {
+        team_member_id: member.team_member_id,
+        team_id: req.user?.team_id,
+        project_id: req.body.project_id,
+        user_id: req.user?.id,
+        access_level: req.body.access_level ? req.body.access_level : "MEMBER"
+      };
+      const data = await this.createOrInviteMembers(projectMemberReq);
+      return res.status(200).send(new ServerResponse(true, data.member));
+    }
+
+    if (statusExclude.includes(subscriptionData.subscription_status)) {
+      return res.status(200).send(new ServerResponse(false, null, "Unable to add user! Please check your subscription status."));
+    }
+
+    if (!userExists && subscriptionData.is_ltd && subscriptionData.current_count && (parseInt(subscriptionData.current_count) + 1 > parseInt(subscriptionData.ltd_users))) {
+      return res.status(200).send(new ServerResponse(false, null, "Maximum number of life time users reached."));
+    }
+
+    // if (subscriptionData.status === "trialing") break;
+    if (!userExists && !subscriptionData.is_credit && !subscriptionData.is_custom && subscriptionData.subscription_status !== "trialing") {
+      // if (subscriptionData.subscription_status === "active") {
+      //   const response = await updateUsers(subscriptionData.subscription_id, (subscriptionData.quantity + 1));
+      //   if (!response.body.subscription_id) return res.status(200).send(new ServerResponse(false, null, response.message || "Unable to add user! Please check your subscription."));
+      // }
+      const updatedCount = parseInt(subscriptionData.current_count) + 1;
+      const requiredSeats = updatedCount - subscriptionData.quantity;
+      if (updatedCount > subscriptionData.quantity) {
+        const obj = {
+          seats_enough: false,
+          required_count: requiredSeats,
+          current_seat_amount: subscriptionData.quantity
+        };
+        return res.status(200).send(new ServerResponse(false, obj, null));
+      }
+    }
+
     // Adding as a team member
     const teamMemberReq: { team_id?: string; emails: string[], project_id?: string; } = {
       team_id: req.user?.team_id,
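Note: the seat check above compares the member count after the invite against the quantity purchased on the Paddle subscription. A worked sketch of the arithmetic, with illustrative numbers:

    const quantity = 5;                            // seats on the subscription
    const currentCount = 5;                        // members already in the team
    const updatedCount = currentCount + 1;         // 6 after adding the invitee
    const requiredSeats = updatedCount - quantity; // 1 extra seat needed
    // updatedCount > quantity, so the endpoint replies with
    // { seats_enough: false, required_count: 1, current_seat_amount: 5 }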
@@ -8,6 +8,7 @@ import { templateData } from "./project-templates";
 import ProjectTemplatesControllerBase from "./project-templates-base";
 import { LOG_DESCRIPTIONS, TASK_PRIORITY_COLOR_ALPHA, TASK_STATUS_COLOR_ALPHA } from "../../shared/constants";
 import { IO } from "../../shared/io";
+import { getCurrentProjectsCount, getFreePlanSettings } from "../../shared/paddle-utils";

 export default class ProjectTemplatesController extends ProjectTemplatesControllerBase {

@@ -46,10 +47,10 @@ export default class ProjectTemplatesController extends ProjectTemplatesControllerBase {

   @HandleExceptions()
   public static async getDefaultProjectHealth() {
-    const q = `SELECT id FROM sys_project_healths WHERE is_default IS TRUE`;
-    const result = await db.query(q, []);
-    const [data] = result.rows;
-    return data.id;
+    const q = `SELECT id FROM sys_project_healths WHERE is_default IS TRUE`;
+    const result = await db.query(q, []);
+    const [data] = result.rows;
+    return data.id;
   }

   @HandleExceptions()

@@ -92,6 +93,16 @@ export default class ProjectTemplatesController extends ProjectTemplatesControllerBase {

   @HandleExceptions()
   public static async importTemplates(req: IWorkLenzRequest, res: IWorkLenzResponse): Promise<IWorkLenzResponse> {
+    if (req.user?.subscription_status === "free" && req.user?.owner_id) {
+      const limits = await getFreePlanSettings();
+      const projectsCount = await getCurrentProjectsCount(req.user.owner_id);
+      const projectsLimit = parseInt(limits.projects_limit);
+
+      if (parseInt(projectsCount) >= projectsLimit) {
+        return res.status(200).send(new ServerResponse(false, [], `Sorry, the free plan cannot have more than ${projectsLimit} projects.`));
+      }
+    }
+
     const { template_id } = req.body;
     let project_id: string | null = null;
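Note: the same free-plan guard now appears in importTemplates, importCustomTemplate (below), and ProjectsController.create. A sketch of how it could be factored out; hasReachedFreePlanProjectLimit is a hypothetical helper, not code from this changeset:

    // Hypothetical extraction of the repeated guard; returns the limit when it is hit.
    async function hasReachedFreePlanProjectLimit(ownerId: string): Promise<number | null> {
      const limits = await getFreePlanSettings();
      const projectsCount = await getCurrentProjectsCount(ownerId);
      const projectsLimit = parseInt(limits.projects_limit);
      return parseInt(projectsCount) >= projectsLimit ? projectsLimit : null;
    }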
@@ -202,6 +213,16 @@ export default class ProjectTemplatesController extends ProjectTemplatesControllerBase {

   @HandleExceptions()
   public static async importCustomTemplate(req: IWorkLenzRequest, res: IWorkLenzResponse): Promise<IWorkLenzResponse> {
+    if (req.user?.subscription_status === "free" && req.user?.owner_id) {
+      const limits = await getFreePlanSettings();
+      const projectsCount = await getCurrentProjectsCount(req.user.owner_id);
+      const projectsLimit = parseInt(limits.projects_limit);
+
+      if (parseInt(projectsCount) >= projectsLimit) {
+        return res.status(200).send(new ServerResponse(false, [], `Sorry, the free plan cannot have more than ${projectsLimit} projects.`));
+      }
+    }
+
     const { template_id } = req.body;
     let project_id: string | null = null;

@@ -223,8 +244,8 @@ export default class ProjectTemplatesController extends ProjectTemplatesControllerBase {
     await this.deleteDefaultStatusForProject(project_id as string);
     await this.insertTeamLabels(data.labels, req.user?.team_id);
     await this.insertProjectPhases(data.phases, project_id as string);
-    await this.insertProjectStatuses(data.status, project_id as string, data.team_id );
-    await this.insertProjectTasksFromCustom(data.tasks, data.team_id, project_id as string, data.user_id, IO.getSocketById(req.user?.socket_id as string));
+    await this.insertProjectStatuses(data.status, project_id as string, data.team_id);
+    await this.insertProjectTasksFromCustom(data.tasks, data.team_id, project_id as string, data.user_id, IO.getSocketById(req.user?.socket_id as string));

     return res.status(200).send(new ServerResponse(true, { project_id }));
   }

@@ -12,6 +12,7 @@ import { NotificationsService } from "../services/notifications/notifications.service";
 import { IPassportSession } from "../interfaces/passport-session";
 import { SocketEvents } from "../socket.io/events";
 import { IO } from "../shared/io";
+import { getCurrentProjectsCount, getFreePlanSettings } from "../shared/paddle-utils";

 export default class ProjectsController extends WorklenzControllerBase {

@@ -61,6 +62,16 @@ export default class ProjectsController extends WorklenzControllerBase {
     }
   })
   public static async create(req: IWorkLenzRequest, res: IWorkLenzResponse): Promise<IWorkLenzResponse> {
+    if (req.user?.subscription_status === "free" && req.user?.owner_id) {
+      const limits = await getFreePlanSettings();
+      const projectsCount = await getCurrentProjectsCount(req.user.owner_id);
+      const projectsLimit = parseInt(limits.projects_limit);
+
+      if (parseInt(projectsCount) >= projectsLimit) {
+        return res.status(200).send(new ServerResponse(false, [], `Sorry, the free plan cannot have more than ${projectsLimit} projects.`));
+      }
+    }
+
     const q = `SELECT create_project($1) AS project`;

     req.body.team_id = req.user?.team_id || null;

@@ -397,6 +408,9 @@ export default class ProjectsController extends WorklenzControllerBase {
            sps.color_code AS status_color,
            sps.icon AS status_icon,
            (SELECT name FROM clients WHERE id = projects.client_id) AS client_name,
+           projects.use_manual_progress,
+           projects.use_weighted_progress,
+           projects.use_time_progress,

            (SELECT COALESCE(ROW_TO_JSON(pm), '{}'::JSON)
             FROM (SELECT team_member_id AS id,

@@ -689,7 +703,8 @@ export default class ProjectsController extends WorklenzControllerBase {
   public static async toggleArchiveAll(req: IWorkLenzRequest, res: IWorkLenzResponse): Promise<IWorkLenzResponse> {
     const q = `SELECT toggle_archive_all_projects($1);`;
     const result = await db.query(q, [req.params.id]);
-    return res.status(200).send(new ServerResponse(true, result.rows || []));
+    const [data] = result.rows;
+    return res.status(200).send(new ServerResponse(true, data.toggle_archive_all_projects || []));
   }
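Note: selecting a Postgres function yields one row whose single column is named after the function, which is why the revised toggleArchiveAll unwraps the value instead of sending the raw rows. Assuming that convention:

    // SELECT toggle_archive_all_projects($1) returns rows shaped like
    // [{ toggle_archive_all_projects: <function result> }], so:
    const [data] = result.rows;
    const payload = data.toggle_archive_all_projects || [];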
   public static async getProjectManager(projectId: string) {

@@ -698,4 +713,47 @@ export default class ProjectsController extends WorklenzControllerBase {
     return result.rows || [];
   }

+  public static async updateExistPhaseColors() {
+    const q = `SELECT id, name FROM project_phases`;
+    const phases = await db.query(q);
+
+    phases.rows.forEach((phase) => {
+      phase.color_code = getColor(phase.name);
+    });
+
+    const body = {
+      phases: phases.rows
+    };
+
+    const q2 = `SELECT update_existing_phase_colors($1)`;
+    await db.query(q2, [JSON.stringify(body)]);
+  }
+
+  public static async updateExistSortOrder() {
+    const q = `SELECT id, project_id FROM project_phases ORDER BY name`;
+    const phases = await db.query(q);
+
+    const sortNumbers: any = {};
+
+    phases.rows.forEach(phase => {
+      const projectId = phase.project_id;
+
+      if (!sortNumbers[projectId]) {
+        sortNumbers[projectId] = 0;
+      }
+
+      phase.sort_number = sortNumbers[projectId]++;
+    });
+
+    const body = {
+      phases: phases.rows
+    };
+
+    const q2 = `SELECT update_existing_phase_sort_order($1)`;
+    await db.query(q2, [JSON.stringify(body)]);
+    // return phases;
+  }

 }

@@ -415,15 +415,20 @@ export default class ReportingController extends WorklenzControllerBase {

   @HandleExceptions()
   public static async getMyTeams(req: IWorkLenzRequest, res: IWorkLenzResponse): Promise<IWorkLenzResponse> {
+    const selectedTeamId = req.user?.team_id;
+    if (!selectedTeamId) {
+      return res.status(400).send(new ServerResponse(false, "No selected team"));
+    }
     const q = `SELECT team_id AS id, name
                FROM team_members tm
                LEFT JOIN teams ON teams.id = tm.team_id
                WHERE tm.user_id = $1
+                 AND tm.team_id = $2
                  AND role_id IN (SELECT id
                                  FROM roles
                                  WHERE (admin_role IS TRUE OR owner IS TRUE))
                ORDER BY name;`;
-    const result = await db.query(q, [req.user?.id]);
+    const result = await db.query(q, [req.user?.id, selectedTeamId]);
     result.rows.forEach((team: any) => team.selected = true);
     return res.status(200).send(new ServerResponse(true, result.rows));
   }
@@ -1,4 +1,4 @@
-import { IChartObject } from "./overview/reporting-overview-base";
+import * as Highcharts from "highcharts";

 export interface IDuration {
   label: string;

@@ -34,7 +34,7 @@ export interface IOverviewStatistics {
 }

 export interface IChartData {
-  chart: IChartObject[];
+  chart: Highcharts.PointOptionsObject[];
 }

 export interface ITasksByStatus extends IChartData {

@@ -1,4 +1,5 @@
 import db from "../../../config/db";
+import * as Highcharts from "highcharts";
 import { ITasksByDue, ITasksByPriority, ITasksByStatus } from "../interfaces";
 import ReportingControllerBase from "../reporting-controller-base";
 import {

@@ -15,36 +16,33 @@ import {
   TASK_STATUS_TODO_COLOR
 } from "../../../shared/constants";
 import { formatDuration, int } from "../../../shared/utils";
+import PointOptionsObject from "../point-options-object";
 import moment from "moment";

-export interface IChartObject {
-  name: string,
-  color: string,
-  y: number
-}
-
 export default class ReportingOverviewBase extends ReportingControllerBase {

-  private static createChartObject(name: string, color: string, y: number) {
-    return {
-      name,
-      color,
-      y
-    };
-  }
-
   protected static async getTeamsCounts(teamId: string | null, archivedQuery = "") {

     const q = `
-      SELECT JSON_BUILD_OBJECT(
-        'teams', (SELECT COUNT(*) FROM teams WHERE in_organization(id, $1)),
-        'projects',
-        (SELECT COUNT(*) FROM projects WHERE in_organization(team_id, $1) ${archivedQuery}),
-        'team_members', (SELECT COUNT(DISTINCT email)
-                         FROM team_member_info_view
-                         WHERE in_organization(team_id, $1))
-      ) AS counts;
-    `;
+      WITH team_count AS (
+        SELECT COUNT(*) AS count
+        FROM teams
+        WHERE in_organization(id, $1)
+      ),
+      project_count AS (
+        SELECT COUNT(*) AS count
+        FROM projects
+        WHERE in_organization(team_id, $1) ${archivedQuery}
+      ),
+      team_member_count AS (
+        SELECT COUNT(DISTINCT email) AS count
+        FROM team_member_info_view
+        WHERE in_organization(team_id, $1)
+      )
+      SELECT JSON_BUILD_OBJECT(
+        'teams', (SELECT count FROM team_count),
+        'projects', (SELECT count FROM project_count),
+        'team_members', (SELECT count FROM team_member_count)
+      ) AS counts;`;

     const res = await db.query(q, [teamId]);
     const [data] = res.rows;

@@ -173,7 +171,7 @@ export default class ReportingOverviewBase extends ReportingControllerBase {
     const doing = int(data?.counts.doing);
     const done = int(data?.counts.done);

-    const chart: IChartObject[] = [];
+    const chart: Highcharts.PointOptionsObject[] = [];

     return {
       all,

@@ -209,7 +207,7 @@ export default class ReportingOverviewBase extends ReportingControllerBase {
     const medium = int(data?.counts.medium);
     const high = int(data?.counts.high);

-    const chart: IChartObject[] = [];
+    const chart: Highcharts.PointOptionsObject[] = [];

     return {
       all: 0,

@@ -237,7 +235,7 @@ export default class ReportingOverviewBase extends ReportingControllerBase {
     const res = await db.query(q, [projectId]);
     const [data] = res.rows;

-    const chart: IChartObject[] = [];
+    const chart: Highcharts.PointOptionsObject[] = [];

     return {
       all: 0,

@@ -251,26 +249,26 @@ export default class ReportingOverviewBase extends ReportingControllerBase {

   protected static createByStatusChartData(body: ITasksByStatus) {
     body.chart = [
-      this.createChartObject("Todo", TASK_STATUS_TODO_COLOR, body.todo),
-      this.createChartObject("Doing", TASK_STATUS_DOING_COLOR, body.doing),
-      this.createChartObject("Done", TASK_STATUS_DONE_COLOR, body.done),
+      new PointOptionsObject("Todo", TASK_STATUS_TODO_COLOR, body.todo),
+      new PointOptionsObject("Doing", TASK_STATUS_DOING_COLOR, body.doing),
+      new PointOptionsObject("Done", TASK_STATUS_DONE_COLOR, body.done),
     ];
   }

   protected static createByPriorityChartData(body: ITasksByPriority) {
     body.chart = [
-      this.createChartObject("Low", TASK_PRIORITY_LOW_COLOR, body.low),
-      this.createChartObject("Medium", TASK_PRIORITY_MEDIUM_COLOR, body.medium),
-      this.createChartObject("High", TASK_PRIORITY_HIGH_COLOR, body.high),
+      new PointOptionsObject("Low", TASK_PRIORITY_LOW_COLOR, body.low),
+      new PointOptionsObject("Medium", TASK_PRIORITY_MEDIUM_COLOR, body.medium),
+      new PointOptionsObject("High", TASK_PRIORITY_HIGH_COLOR, body.high),
     ];
   }

   protected static createByDueDateChartData(body: ITasksByDue) {
     body.chart = [
-      this.createChartObject("Completed", TASK_DUE_COMPLETED_COLOR, body.completed),
-      this.createChartObject("Upcoming", TASK_DUE_UPCOMING_COLOR, body.upcoming),
-      this.createChartObject("Overdue", TASK_DUE_OVERDUE_COLOR, body.overdue),
-      this.createChartObject("No due date", TASK_DUE_NO_DUE_COLOR, body.no_due),
+      new PointOptionsObject("Completed", TASK_DUE_COMPLETED_COLOR, body.completed),
+      new PointOptionsObject("Upcoming", TASK_DUE_UPCOMING_COLOR, body.upcoming),
+      new PointOptionsObject("Overdue", TASK_DUE_OVERDUE_COLOR, body.overdue),
+      new PointOptionsObject("No due date", TASK_DUE_NO_DUE_COLOR, body.no_due),
     ];
   }
@@ -581,7 +579,7 @@ export default class ReportingOverviewBase extends ReportingControllerBase {
     `;
     const result = await db.query(q, [teamMemberId]);

-    const chart: IChartObject[] = [];
+    const chart: Highcharts.PointOptionsObject[] = [];

     const total = result.rows.reduce((accumulator: number, current: {
       count: number

@@ -589,7 +587,7 @@ export default class ReportingOverviewBase extends ReportingControllerBase {

     for (const project of result.rows) {
       project.count = int(project.count);
-      chart.push(this.createChartObject(project.label, project.color, project.count));
+      chart.push(new PointOptionsObject(project.label, project.color, project.count));
     }

     return { chart, total, data: result.rows };

@@ -635,7 +633,7 @@ export default class ReportingOverviewBase extends ReportingControllerBase {
     `;
     const result = await db.query(q, [teamMemberId]);

-    const chart: IChartObject[] = [];
+    const chart: Highcharts.PointOptionsObject[] = [];

     const total = result.rows.reduce((accumulator: number, current: {
       count: number

@@ -643,7 +641,7 @@ export default class ReportingOverviewBase extends ReportingControllerBase {

     for (const project of result.rows) {
       project.count = int(project.count);
-      chart.push(this.createChartObject(project.label, project.color, project.count));
+      chart.push(new PointOptionsObject(project.label, project.color, project.count));
     }

     return { chart, total, data: result.rows };

@@ -673,10 +671,10 @@ export default class ReportingOverviewBase extends ReportingControllerBase {

     const total = int(d.low) + int(d.medium) + int(d.high);

-    const chart = [
-      this.createChartObject("Low", TASK_PRIORITY_LOW_COLOR, d.low),
-      this.createChartObject("Medium", TASK_PRIORITY_MEDIUM_COLOR, d.medium),
-      this.createChartObject("High", TASK_PRIORITY_HIGH_COLOR, d.high),
+    const chart: Highcharts.PointOptionsObject[] = [
+      new PointOptionsObject("Low", TASK_PRIORITY_LOW_COLOR, d.low),
+      new PointOptionsObject("Medium", TASK_PRIORITY_MEDIUM_COLOR, d.medium),
+      new PointOptionsObject("High", TASK_PRIORITY_HIGH_COLOR, d.high),
     ];

     const data = [

@@ -730,10 +728,10 @@ export default class ReportingOverviewBase extends ReportingControllerBase {

     const total = int(d.low) + int(d.medium) + int(d.high);

-    const chart = [
-      this.createChartObject("Low", TASK_PRIORITY_LOW_COLOR, d.low),
-      this.createChartObject("Medium", TASK_PRIORITY_MEDIUM_COLOR, d.medium),
-      this.createChartObject("High", TASK_PRIORITY_HIGH_COLOR, d.high),
+    const chart: Highcharts.PointOptionsObject[] = [
+      new PointOptionsObject("Low", TASK_PRIORITY_LOW_COLOR, d.low),
+      new PointOptionsObject("Medium", TASK_PRIORITY_MEDIUM_COLOR, d.medium),
+      new PointOptionsObject("High", TASK_PRIORITY_HIGH_COLOR, d.high),
     ];

     const data = [

@@ -784,10 +782,10 @@ export default class ReportingOverviewBase extends ReportingControllerBase {

     const total = int(d.total);

-    const chart = [
-      this.createChartObject("Todo", TASK_STATUS_TODO_COLOR, d.todo),
-      this.createChartObject("Doing", TASK_STATUS_DOING_COLOR, d.doing),
-      this.createChartObject("Done", TASK_STATUS_DONE_COLOR, d.done),
+    const chart: Highcharts.PointOptionsObject[] = [
+      new PointOptionsObject("Todo", TASK_STATUS_TODO_COLOR, d.todo),
+      new PointOptionsObject("Doing", TASK_STATUS_DOING_COLOR, d.doing),
+      new PointOptionsObject("Done", TASK_STATUS_DONE_COLOR, d.done),
     ];

     const data = [

@@ -826,10 +824,10 @@ export default class ReportingOverviewBase extends ReportingControllerBase {

     const total = int(d.todo) + int(d.doing) + int(d.done);

-    const chart = [
-      this.createChartObject("Todo", TASK_STATUS_TODO_COLOR, d.todo),
-      this.createChartObject("Doing", TASK_STATUS_DOING_COLOR, d.doing),
-      this.createChartObject("Done", TASK_STATUS_DONE_COLOR, d.done),
+    const chart: Highcharts.PointOptionsObject[] = [
+      new PointOptionsObject("Todo", TASK_STATUS_TODO_COLOR, d.todo),
+      new PointOptionsObject("Doing", TASK_STATUS_DOING_COLOR, d.doing),
+      new PointOptionsObject("Done", TASK_STATUS_DONE_COLOR, d.done),
     ];

     const data = [

@@ -878,7 +876,7 @@ export default class ReportingOverviewBase extends ReportingControllerBase {
     const in_progress = int(data?.counts.in_progress);
     const completed = int(data?.counts.completed);

-    const chart : IChartObject[] = [];
+    const chart: Highcharts.PointOptionsObject[] = [];

     return {
       all,

@@ -908,7 +906,7 @@ export default class ReportingOverviewBase extends ReportingControllerBase {
     `;
     const result = await db.query(q, [teamId]);

-    const chart: IChartObject[] = [];
+    const chart: Highcharts.PointOptionsObject[] = [];

     const total = result.rows.reduce((accumulator: number, current: {
       count: number

@@ -916,11 +914,7 @@ export default class ReportingOverviewBase extends ReportingControllerBase {

     for (const category of result.rows) {
       category.count = int(category.count);
-      chart.push({
-        name: category.label,
-        color: category.color,
-        y: category.count
-      });
+      chart.push(new PointOptionsObject(category.label, category.color, category.count));
     }

     return { chart, total, data: result.rows };

@@ -956,7 +950,7 @@ export default class ReportingOverviewBase extends ReportingControllerBase {
     const at_risk = int(data?.counts.at_risk);
     const good = int(data?.counts.good);

-    const chart: IChartObject[] = [];
+    const chart: Highcharts.PointOptionsObject[] = [];

     return {
       not_set,

@@ -971,22 +965,22 @@ export default class ReportingOverviewBase extends ReportingControllerBase {
   // Team Overview
   protected static createByProjectStatusChartData(body: any) {
     body.chart = [
-      this.createChartObject("Cancelled", "#f37070", body.cancelled),
-      this.createChartObject("Blocked", "#cbc8a1", body.blocked),
-      this.createChartObject("On Hold", "#cbc8a1", body.on_hold),
-      this.createChartObject("Proposed", "#cbc8a1", body.proposed),
-      this.createChartObject("In Planning", "#cbc8a1", body.in_planning),
-      this.createChartObject("In Progress", "#80ca79", body.in_progress),
-      this.createChartObject("Completed", "#80ca79", body.completed)
+      new PointOptionsObject("Cancelled", "#f37070", body.cancelled),
+      new PointOptionsObject("Blocked", "#cbc8a1", body.blocked),
+      new PointOptionsObject("On Hold", "#cbc8a1", body.on_hold),
+      new PointOptionsObject("Proposed", "#cbc8a1", body.proposed),
+      new PointOptionsObject("In Planning", "#cbc8a1", body.in_planning),
+      new PointOptionsObject("In Progress", "#80ca79", body.in_progress),
+      new PointOptionsObject("Completed", "#80ca79", body.completed),
     ];
   }

   protected static createByProjectHealthChartData(body: any) {
     body.chart = [
-      this.createChartObject("Not Set", "#a9a9a9", body.not_set),
-      this.createChartObject("Needs Attention", "#f37070", body.needs_attention),
-      this.createChartObject("At Risk", "#fbc84c", body.at_risk),
-      this.createChartObject("Good", "#75c997", body.good)
+      new PointOptionsObject("Not Set", "#a9a9a9", body.not_set),
+      new PointOptionsObject("Needs Attention", "#f37070", body.needs_attention),
+      new PointOptionsObject("At Risk", "#fbc84c", body.at_risk),
+      new PointOptionsObject("Good", "#75c997", body.good)
     ];
   }

@@ -0,0 +1,13 @@
+import * as Highcharts from "highcharts";
+
+export default class PointOptionsObject implements Highcharts.PointOptionsObject {
+  name!: string;
+  color!: string;
+  y!: number;
+
+  constructor(name: string, color: string, y: number) {
+    this.name = name;
+    this.color = color;
+    this.y = y;
+  }
+}
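Note: because the class implements Highcharts.PointOptionsObject, instances can be fed straight into a Highcharts series. A brief usage sketch (the import path and values are illustrative):

    import * as Highcharts from "highcharts";
    import PointOptionsObject from "./point-options-object";

    const data: Highcharts.PointOptionsObject[] = [
      new PointOptionsObject("Todo", "#a9a9a9", 4),
      new PointOptionsObject("Doing", "#70a6f3", 2),
      new PointOptionsObject("Done", "#75c997", 7),
    ];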
@@ -8,13 +8,14 @@ import { getColor, int, log_error } from "../../shared/utils";
 import ReportingControllerBase from "./reporting-controller-base";
 import { DATE_RANGES } from "../../shared/constants";
 import Excel from "exceljs";
+import ChartJsImage from "chartjs-to-image";

 enum IToggleOptions {
   'WORKING_DAYS' = 'WORKING_DAYS', 'MAN_DAYS' = 'MAN_DAYS'
 }

 export default class ReportingAllocationController extends ReportingControllerBase {
-  private static async getTimeLoggedByProjects(projects: string[], users: string[], key: string, dateRange: string[], archived = false, user_id = ""): Promise<any> {
+  private static async getTimeLoggedByProjects(projects: string[], users: string[], key: string, dateRange: string[], archived = false, user_id = "", billable: { billable: boolean; nonBillable: boolean }): Promise<any> {
     try {
       const projectIds = projects.map(p => `'${p}'`).join(",");
       const userIds = users.map(u => `'${u}'`).join(",");

@@ -24,8 +25,10 @@ export default class ReportingAllocationController extends ReportingControllerBase {
         ? ""
         : `AND projects.id NOT IN (SELECT project_id FROM archived_projects WHERE project_id = projects.id AND user_id = '${user_id}') `;

-      const projectTimeLogs = await this.getTotalTimeLogsByProject(archived, duration, projectIds, userIds, archivedClause);
-      const userTimeLogs = await this.getTotalTimeLogsByUser(archived, duration, projectIds, userIds);
+      const billableQuery = this.buildBillableQuery(billable);
+
+      const projectTimeLogs = await this.getTotalTimeLogsByProject(archived, duration, projectIds, userIds, archivedClause, billableQuery);
+      const userTimeLogs = await this.getTotalTimeLogsByUser(archived, duration, projectIds, userIds, billableQuery);

       const format = (seconds: number) => {
         if (seconds === 0) return "-";

@@ -65,7 +68,7 @@ export default class ReportingAllocationController extends ReportingControllerBase {
       return [];
     }
   }

-  private static async getTotalTimeLogsByProject(archived: boolean, duration: string, projectIds: string, userIds: string, archivedClause = "") {
+  private static async getTotalTimeLogsByProject(archived: boolean, duration: string, projectIds: string, userIds: string, archivedClause = "", billableQuery = '') {
     try {
       const q = `SELECT projects.name,
                         projects.color_code,

@@ -74,12 +77,12 @@ export default class ReportingAllocationController extends ReportingControllerBase {
                         sps.icon AS status_icon,
                         (SELECT COUNT(*)
                          FROM tasks
-                         WHERE CASE WHEN ($1 IS TRUE) THEN project_id IS NOT NULL ELSE archived = FALSE END
+                         WHERE CASE WHEN ($1 IS TRUE) THEN project_id IS NOT NULL ELSE archived = FALSE END ${billableQuery}
                            AND project_id = projects.id) AS all_tasks_count,
                         (SELECT COUNT(*)
                          FROM tasks
                          WHERE CASE WHEN ($1 IS TRUE) THEN project_id IS NOT NULL ELSE archived = FALSE END
-                           AND project_id = projects.id
+                           AND project_id = projects.id ${billableQuery}
                            AND status_id IN (SELECT id
                                              FROM task_statuses
                                              WHERE project_id = projects.id

@@ -91,10 +94,10 @@ export default class ReportingAllocationController extends ReportingControllerBase {
                 SELECT name,
                        (SELECT COALESCE(SUM(time_spent), 0)
                         FROM task_work_log
-                        LEFT JOIN tasks t ON task_work_log.task_id = t.id
-                        WHERE user_id = users.id
-                          AND CASE WHEN ($1 IS TRUE) THEN t.project_id IS NOT NULL ELSE t.archived = FALSE END
-                          AND t.project_id = projects.id
+                        LEFT JOIN tasks ON task_work_log.task_id = tasks.id
+                        WHERE user_id = users.id ${billableQuery}
+                          AND CASE WHEN ($1 IS TRUE) THEN tasks.project_id IS NOT NULL ELSE tasks.archived = FALSE END
+                          AND tasks.project_id = projects.id
                         ${duration}) AS time_logged
                 FROM users
                 WHERE id IN (${userIds})

@@ -113,15 +116,15 @@ export default class ReportingAllocationController extends ReportingControllerBase {
     }
   }

-  private static async getTotalTimeLogsByUser(archived: boolean, duration: string, projectIds: string, userIds: string) {
+  private static async getTotalTimeLogsByUser(archived: boolean, duration: string, projectIds: string, userIds: string, billableQuery = "") {
     try {
       const q = `(SELECT id,
                          (SELECT COALESCE(SUM(time_spent), 0)
                           FROM task_work_log
-                          LEFT JOIN tasks t ON task_work_log.task_id = t.id
+                          LEFT JOIN tasks ON task_work_log.task_id = tasks.id ${billableQuery}
                           WHERE user_id = users.id
-                            AND CASE WHEN ($1 IS TRUE) THEN t.project_id IS NOT NULL ELSE t.archived = FALSE END
-                            AND t.project_id IN (${projectIds})
+                            AND CASE WHEN ($1 IS TRUE) THEN tasks.project_id IS NOT NULL ELSE tasks.archived = FALSE END
+                            AND tasks.project_id IN (${projectIds})
                           ${duration}) AS time_logged
                  FROM users
                  WHERE id IN (${userIds})

@@ -154,6 +157,7 @@ export default class ReportingAllocationController extends ReportingControllerBase {
   @HandleExceptions()
   public static async getAllocation(req: IWorkLenzRequest, res: IWorkLenzResponse): Promise<IWorkLenzResponse> {
     const teams = (req.body.teams || []) as string[]; // ids
+    const billable = req.body.billable;

     const teamIds = teams.map(id => `'${id}'`).join(",");
     const projectIds = (req.body.projects || []) as string[];

@@ -164,7 +168,7 @@ export default class ReportingAllocationController extends ReportingControllerBase {
     const users = await this.getUserIds(teamIds);
     const userIds = users.map((u: any) => u.id);

-    const { projectTimeLogs, userTimeLogs } = await this.getTimeLoggedByProjects(projectIds, userIds, req.body.duration, req.body.date_range, (req.query.archived === "true"), req.user?.id);
+    const { projectTimeLogs, userTimeLogs } = await this.getTimeLoggedByProjects(projectIds, userIds, req.body.duration, req.body.date_range, (req.query.archived === "true"), req.user?.id, billable);

     for (const [i, user] of users.entries()) {
       user.total_time = userTimeLogs[i].time_logged;

@@ -184,6 +188,7 @@ export default class ReportingAllocationController extends ReportingControllerBase {
   public static async export(req: IWorkLenzRequest, res: IWorkLenzResponse) {
     const teams = (req.query.teams as string)?.split(",");
     const teamIds = teams.map(t => `'${t}'`).join(",");
+    const billable = req.body.billable ? req.body.billable : { billable: req.query.billable === "true", nonBillable: req.query.nonBillable === "true" };

     const projectIds = (req.query.projects as string)?.split(",");

@@ -218,7 +223,7 @@ export default class ReportingAllocationController extends ReportingControllerBase {
     const users = await this.getUserIds(teamIds);
     const userIds = users.map((u: any) => u.id);

-    const { projectTimeLogs, userTimeLogs } = await this.getTimeLoggedByProjects(projectIds, userIds, duration as string, dateRange, (req.query.include_archived === "true"), req.user?.id);
+    const { projectTimeLogs, userTimeLogs } = await this.getTimeLoggedByProjects(projectIds, userIds, duration as string, dateRange, (req.query.include_archived === "true"), req.user?.id, billable);

     for (const [i, user] of users.entries()) {
       user.total_time = userTimeLogs[i].time_logged;

@@ -341,6 +346,8 @@ export default class ReportingAllocationController extends ReportingControllerBase {
     const projects = (req.body.projects || []) as string[];
     const projectIds = projects.map(p => `'${p}'`).join(",");

+    const billable = req.body.billable;
+
     if (!teamIds || !projectIds.length)
       return res.status(200).send(new ServerResponse(true, { users: [], projects: [] }));

@@ -352,6 +359,8 @@ export default class ReportingAllocationController extends ReportingControllerBase {
       ? ""
       : `AND p.id NOT IN (SELECT project_id FROM archived_projects WHERE project_id = p.id AND user_id = '${req.user?.id}') `;

+    const billableQuery = this.buildBillableQuery(billable);
+
     const q = `
       SELECT p.id,
              p.name,

@@ -359,8 +368,8 @@ export default class ReportingAllocationController extends ReportingControllerBase {
              SUM(total_minutes) AS estimated,
              color_code
       FROM projects p
-      LEFT JOIN tasks t ON t.project_id = p.id
-      LEFT JOIN task_work_log ON task_work_log.task_id = t.id
+      LEFT JOIN tasks ON tasks.project_id = p.id ${billableQuery}
+      LEFT JOIN task_work_log ON task_work_log.task_id = tasks.id
       WHERE p.id IN (${projectIds}) ${durationClause} ${archivedClause}
       GROUP BY p.id, p.name
       ORDER BY logged_time DESC;`;

@@ -372,7 +381,7 @@ export default class ReportingAllocationController extends ReportingControllerBase {
       project.value = project.logged_time ? parseFloat(moment.duration(project.logged_time, "seconds").asHours().toFixed(2)) : 0;
       project.estimated_value = project.estimated ? parseFloat(moment.duration(project.estimated, "minutes").asHours().toFixed(2)) : 0;

-      if (project.value > 0 ) {
+      if (project.value > 0) {
        data.push(project);
      }
@@ -392,22 +401,110 @@ export default class ReportingAllocationController extends ReportingControllerBase {
     const projects = (req.body.projects || []) as string[];
     const projectIds = projects.map(p => `'${p}'`).join(",");

+    const billable = req.body.billable;
+
     if (!teamIds || !projectIds.length)
       return res.status(200).send(new ServerResponse(true, { users: [], projects: [] }));

     const { duration, date_range } = req.body;

+    // Calculate the date range (start and end)
+    let startDate: moment.Moment;
+    let endDate: moment.Moment;
+    if (date_range && date_range.length === 2) {
+      startDate = moment(date_range[0]);
+      endDate = moment(date_range[1]);
+    } else if (duration === DATE_RANGES.ALL_TIME) {
+      // Fetch the earliest start_date (or created_at if null) from selected projects
+      const minDateQuery = `SELECT MIN(COALESCE(start_date, created_at)) as min_date FROM projects WHERE id IN (${projectIds})`;
+      const minDateResult = await db.query(minDateQuery, []);
+      const minDate = minDateResult.rows[0]?.min_date;
+      startDate = minDate ? moment(minDate) : moment('2000-01-01');
+      endDate = moment();
+    } else {
+      switch (duration) {
+        case DATE_RANGES.YESTERDAY:
+          startDate = moment().subtract(1, "day");
+          endDate = moment().subtract(1, "day");
+          break;
+        case DATE_RANGES.LAST_WEEK:
+          startDate = moment().subtract(1, "week").startOf("isoWeek");
+          endDate = moment().subtract(1, "week").endOf("isoWeek");
+          break;
+        case DATE_RANGES.LAST_MONTH:
+          startDate = moment().subtract(1, "month").startOf("month");
+          endDate = moment().subtract(1, "month").endOf("month");
+          break;
+        case DATE_RANGES.LAST_QUARTER:
+          startDate = moment().subtract(3, "months").startOf("quarter");
+          endDate = moment().subtract(1, "quarter").endOf("quarter");
+          break;
+        default:
+          startDate = moment().startOf("day");
+          endDate = moment().endOf("day");
+      }
+    }
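    // Editorial note: worked example of the switch above, assuming "today" is
    // 2024-05-15 (dates are illustrative, not part of the changeset):
    //   LAST_MONTH -> 2024-04-01 00:00:00 .. 2024-04-30 23:59:59.999
    //   LAST_WEEK  -> the previous ISO week, Monday through Sunday
    //   YESTERDAY  -> 2024-05-14 (start and end fall on the same day)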
|
||||
// Get organization working days
|
||||
const orgWorkingDaysQuery = `
|
||||
SELECT monday, tuesday, wednesday, thursday, friday, saturday, sunday
|
||||
FROM organization_working_days
|
||||
WHERE organization_id IN (
|
||||
SELECT t.organization_id
|
||||
FROM teams t
|
||||
WHERE t.id IN (${teamIds})
|
||||
LIMIT 1
|
||||
);
|
||||
`;
|
||||
const orgWorkingDaysResult = await db.query(orgWorkingDaysQuery, []);
|
||||
const workingDaysConfig = orgWorkingDaysResult.rows[0] || {
|
||||
monday: true,
|
||||
tuesday: true,
|
||||
wednesday: true,
|
||||
thursday: true,
|
||||
friday: true,
|
||||
saturday: false,
|
||||
sunday: false
|
||||
};
|
||||
|
||||
// Count working days based on organization settings
|
||||
let workingDays = 0;
|
||||
let current = startDate.clone();
|
||||
while (current.isSameOrBefore(endDate, 'day')) {
|
||||
const day = current.isoWeekday();
|
||||
if (
|
||||
(day === 1 && workingDaysConfig.monday) ||
|
||||
(day === 2 && workingDaysConfig.tuesday) ||
|
||||
(day === 3 && workingDaysConfig.wednesday) ||
|
||||
(day === 4 && workingDaysConfig.thursday) ||
|
||||
(day === 5 && workingDaysConfig.friday) ||
|
||||
(day === 6 && workingDaysConfig.saturday) ||
|
||||
(day === 7 && workingDaysConfig.sunday)
|
||||
) {
|
||||
workingDays++;
|
||||
}
|
||||
current.add(1, 'day');
|
||||
}
|
||||
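    // Editorial note: with the default Mon-Fri configuration the loop counts
    // 10 working days across two full ISO weeks, e.g. (illustrative dates):
    //   let days = 0;
    //   const cur = moment("2024-01-01"), end = moment("2024-01-14");
    //   while (cur.isSameOrBefore(end, "day")) {
    //     if (cur.isoWeekday() <= 5) days++; // Mon(1)..Fri(5) only
    //     cur.add(1, "day");
    //   }
    //   // days === 10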
+    // Get organization working hours
+    const orgWorkingHoursQuery = `SELECT working_hours FROM organizations WHERE id = (SELECT t.organization_id FROM teams t WHERE t.id IN (${teamIds}) LIMIT 1)`;
+    const orgWorkingHoursResult = await db.query(orgWorkingHoursQuery, []);
+    const orgWorkingHours = orgWorkingHoursResult.rows[0]?.working_hours || 8;
+    let totalWorkingHours = workingDays * orgWorkingHours;
+
     const durationClause = this.getDateRangeClause(duration || DATE_RANGES.LAST_WEEK, date_range);
     const archivedClause = archived
       ? ""
       : `AND p.id NOT IN (SELECT project_id FROM archived_projects WHERE project_id = p.id AND user_id = '${req.user?.id}') `;

+    const billableQuery = this.buildBillableQuery(billable);
+
     const q = `
       SELECT tmiv.email, tmiv.name, SUM(time_spent) AS logged_time
       FROM team_member_info_view tmiv
       LEFT JOIN task_work_log ON task_work_log.user_id = tmiv.user_id
-      LEFT JOIN tasks t ON t.id = task_work_log.task_id
-      LEFT JOIN projects p ON p.id = t.project_id AND p.team_id = tmiv.team_id
+      LEFT JOIN tasks ON tasks.id = task_work_log.task_id ${billableQuery}
+      LEFT JOIN projects p ON p.id = tasks.project_id AND p.team_id = tmiv.team_id
       WHERE p.id IN (${projectIds})
       ${durationClause} ${archivedClause}
       GROUP BY tmiv.email, tmiv.name

@@ -417,12 +514,82 @@ export default class ReportingAllocationController extends ReportingControllerBase {
     for (const member of result.rows) {
       member.value = member.logged_time ? parseFloat(moment.duration(member.logged_time, "seconds").asHours().toFixed(2)) : 0;
       member.color_code = getColor(member.name);
+      member.total_working_hours = totalWorkingHours;
+      if (totalWorkingHours === 0) {
+        member.utilization_percent = member.logged_time && parseFloat(member.logged_time) > 0 ? 'N/A' : '0.00';
+        member.utilized_hours = member.logged_time ? (parseFloat(member.logged_time) / 3600).toFixed(2) : '0.00';
+        // Over/under utilized hours: all logged time is over-utilized
+        member.over_under_utilized_hours = member.utilized_hours;
+      } else {
+        member.utilization_percent = (member.logged_time && totalWorkingHours > 0) ? ((parseFloat(member.logged_time) / (totalWorkingHours * 3600)) * 100).toFixed(2) : '0.00';
+        member.utilized_hours = member.logged_time ? (parseFloat(member.logged_time) / 3600).toFixed(2) : '0.00';
+        // Over/under utilized hours: utilized_hours - total_working_hours
+        const overUnder = member.utilized_hours && member.total_working_hours ? (parseFloat(member.utilized_hours) - member.total_working_hours) : 0;
+        member.over_under_utilized_hours = overUnder.toFixed(2);
+      }
     }

     return res.status(200).send(new ServerResponse(true, result.rows));
   }
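    // Editorial note: a worked pass through the utilization math above, with
    // assumed numbers (10 working days x 8 h = 80 available hours, 90 h logged):
    //   totalWorkingHours = 80
    //   loggedSeconds     = 324000                      // 90.00 utilized hours
    //   utilization       = 324000 / (80 * 3600) * 100  // 112.50 %
    //   overUnder         = 90 - 80                     // +10.00 over-utilized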
+  @HandleExceptions()
+  public static async exportTest(req: IWorkLenzRequest, res: IWorkLenzResponse): Promise<IWorkLenzResponse> {
+
+    const archived = req.query.archived === "true";
+    const teamId = this.getCurrentTeamId(req);
+    const { duration, date_range } = req.query;
+
+    const durationClause = this.getDateRangeClause(duration as string || DATE_RANGES.LAST_WEEK, date_range as string[]);
+
+    const archivedClause = archived
+      ? ""
+      : `AND p.id NOT IN (SELECT project_id FROM archived_projects WHERE project_id = p.id AND user_id = '${req.user?.id}') `;
+
+    const q = `
+      SELECT p.id,
+             p.name,
+             (SELECT SUM(time_spent)) AS logged_time,
+             SUM(total_minutes) AS estimated,
+             color_code
+      FROM projects p
+      LEFT JOIN tasks t ON t.project_id = p.id
+      LEFT JOIN task_work_log ON task_work_log.task_id = t.id
+      WHERE in_organization(p.team_id, $1)
+      ${durationClause} ${archivedClause}
+      GROUP BY p.id, p.name
+      ORDER BY p.name ASC;`;
+    const result = await db.query(q, [teamId]);
+
+    const labelsX = [];
+    const dataX = [];
+
+    for (const project of result.rows) {
+      project.value = project.logged_time ? parseFloat(moment.duration(project.logged_time, "seconds").asHours().toFixed(2)) : 0;
+      project.estimated_value = project.estimated ? parseFloat(moment.duration(project.estimated, "minutes").asHours().toFixed(2)) : 0;
+      labelsX.push(project.name);
+      dataX.push(project.value || 0);
+    }
+
+    const chart = new ChartJsImage();
+    chart.setConfig({
+      type: "bar",
+      data: {
+        labels: labelsX,
+        datasets: [
+          { label: "", data: dataX }
+        ]
+      },
+    });
+    chart.setWidth(1920).setHeight(1080).setBackgroundColor("transparent");
+    const url = chart.getUrl();
+    chart.toFile("test.png");
+    return res.status(200).send(new ServerResponse(true, url));
+  }
+
+  private static getEstimated(project: any, type: string) {
+    // if (project.estimated_man_days === 0 || project.estimated_working_days === 0) {
+    //   return (parseFloat(moment.duration(project.estimated, "minutes").asHours().toFixed(2)) / int(project.hours_per_day)).toFixed(2)
+    // }
+
+    switch (type) {
+      case IToggleOptions.MAN_DAYS:

@@ -445,7 +612,7 @@ export default class ReportingAllocationController extends ReportingControllerBase {

     const projects = (req.body.projects || []) as string[];
     const projectIds = projects.map(p => `'${p}'`).join(",");
-    const { type } = req.body;
+    const { type, billable } = req.body;

     if (!teamIds || !projectIds.length)
       return res.status(200).send(new ServerResponse(true, { users: [], projects: [] }));

@@ -458,6 +625,8 @@ export default class ReportingAllocationController extends ReportingControllerBase {
       ? ""
       : `AND p.id NOT IN (SELECT project_id FROM archived_projects WHERE project_id = p.id AND user_id = '${req.user?.id}') `;

+    const billableQuery = this.buildBillableQuery(billable);
+
     const q = `
       SELECT p.id,
              p.name,

@@ -471,8 +640,8 @@ export default class ReportingAllocationController extends ReportingControllerBase {
              WHERE project_id = p.id) AS estimated,
              color_code
       FROM projects p
-      LEFT JOIN tasks t ON t.project_id = p.id
-      LEFT JOIN task_work_log ON task_work_log.task_id = t.id
+      LEFT JOIN tasks ON tasks.project_id = p.id ${billableQuery}
+      LEFT JOIN task_work_log ON task_work_log.task_id = tasks.id
       WHERE p.id IN (${projectIds}) ${durationClause} ${archivedClause}
       GROUP BY p.id, p.name
       ORDER BY logged_time DESC;`;

@@ -491,7 +660,7 @@ export default class ReportingAllocationController extends ReportingControllerBase {
       project.estimated_working_days = project.estimated_working_days ?? 0;
       project.hours_per_day = project.hours_per_day ?? 0;

-      if (project.value > 0 || project.estimated_value > 0 ) {
+      if (project.value > 0 || project.estimated_value > 0) {
        data.push(project);
      }

@@ -109,6 +109,23 @@ export default abstract class ReportingControllerBase extends WorklenzControllerBase {
     return "";
   }

+  protected static buildBillableQuery(selectedStatuses: { billable: boolean; nonBillable: boolean }): string {
+    const { billable, nonBillable } = selectedStatuses;
+
+    if (billable && nonBillable) {
+      // Both are enabled, no need to filter
+      return "";
+    } else if (billable) {
+      // Only billable is enabled
+      return " AND tasks.billable IS TRUE";
+    } else if (nonBillable) {
+      // Only non-billable is enabled
+      return " AND tasks.billable IS FALSE";
+    }
+
+    return "";
+  }
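Note: the returned fragment is appended verbatim to task-scoped WHERE and JOIN clauses, which is why it leads with " AND". Expected outputs for each toggle combination:

    buildBillableQuery({ billable: true,  nonBillable: true });  // ""  (no filter)
    buildBillableQuery({ billable: true,  nonBillable: false }); // " AND tasks.billable IS TRUE"
    buildBillableQuery({ billable: false, nonBillable: true });  // " AND tasks.billable IS FALSE"
    buildBillableQuery({ billable: false, nonBillable: false }); // ""  (also unfiltered)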
||||
|
||||
protected static formatEndDate(endDate: string) {
|
||||
const end = moment(endDate).format("YYYY-MM-DD");
|
||||
const fEndDate = moment(end);
|
||||
@@ -173,6 +190,9 @@ export default abstract class ReportingControllerBase extends WorklenzController
|
||||
(SELECT color_code
|
||||
FROM sys_project_healths
|
||||
WHERE sys_project_healths.id = p.health_id) AS health_color,
|
||||
(SELECT name
|
||||
FROM sys_project_healths
|
||||
WHERE sys_project_healths.id = p.health_id) AS health_name,
|
||||
|
||||
pc.id AS category_id,
|
||||
pc.name AS category_name,
|
||||
|
||||
@@ -862,7 +862,7 @@ export default class ReportingMembersController extends ReportingControllerBase
  }

- private static async memberTimeLogsData(durationClause: string, minMaxDateClause: string, team_id: string, team_member_id: string, includeArchived: boolean, userId: string) {
+ private static async memberTimeLogsData(durationClause: string, minMaxDateClause: string, team_id: string, team_member_id: string, includeArchived: boolean, userId: string, billableQuery = "") {

    const archivedClause = includeArchived
      ? ""

@@ -884,7 +884,7 @@ export default class ReportingMembersController extends ReportingControllerBase
    FROM task_work_log twl
    WHERE twl.user_id = tmiv.user_id
    ${durationClause}
-   AND task_id IN (SELECT id FROM tasks WHERE project_id IN (SELECT id FROM projects WHERE team_id = $1) ${archivedClause} )
+   AND task_id IN (SELECT id FROM tasks WHERE project_id IN (SELECT id FROM projects WHERE team_id = $1) ${archivedClause} ${billableQuery})
    ORDER BY twl.updated_at DESC) tl) AS time_logs
    ${minMaxDateClause}
    FROM team_member_info_view tmiv

@@ -1017,14 +1017,33 @@ export default class ReportingMembersController extends ReportingControllerBase
  }

+ protected static buildBillableQuery(selectedStatuses: { billable: boolean; nonBillable: boolean }): string {
+   const { billable, nonBillable } = selectedStatuses;
+
+   if (billable && nonBillable) {
+     // Both are enabled, no need to filter
+     return "";
+   } else if (billable) {
+     // Only billable is enabled
+     return " AND tasks.billable IS TRUE";
+   } else if (nonBillable) {
+     // Only non-billable is enabled
+     return " AND tasks.billable IS FALSE";
+   }
+
+   return "";
+ }

  @HandleExceptions()
  public static async getMemberTimelogs(req: IWorkLenzRequest, res: IWorkLenzResponse): Promise<IWorkLenzResponse> {
-   const { team_member_id, team_id, duration, date_range, archived } = req.body;
+   const { team_member_id, team_id, duration, date_range, archived, billable } = req.body;

    const durationClause = ReportingMembersController.getDateRangeClauseMembers(duration || DATE_RANGES.LAST_WEEK, date_range, "twl");
    const minMaxDateClause = this.getMinMaxDates(duration || DATE_RANGES.LAST_WEEK, date_range, "task_work_log");

-   const logGroups = await this.memberTimeLogsData(durationClause, minMaxDateClause, team_id, team_member_id, archived, req.user?.id as string);
+   const billableQuery = this.buildBillableQuery(billable);
+
+   const logGroups = await this.memberTimeLogsData(durationClause, minMaxDateClause, team_id, team_member_id, archived, req.user?.id as string, billableQuery);

    return res.status(200).send(new ServerResponse(true, logGroups));
  }

@@ -1049,6 +1068,7 @@ export default class ReportingMembersController extends ReportingControllerBase
    const completedDurationClasue = this.completedDurationFilter(duration as string, dateRange);
    const overdueClauseByDate = this.getActivityLogsOverdue(duration as string, dateRange);
    const taskSelectorClause = this.getTaskSelectorClause();
+   const durationFilter = this.memberTasksDurationFilter(duration as string, dateRange);

    const q = `
      SELECT name AS team_member_name,

@@ -1059,6 +1079,12 @@ export default class ReportingMembersController extends ReportingControllerBase
      LEFT JOIN tasks_assignees ta ON t.id = ta.task_id
      WHERE ta.team_member_id = $1 ${assignClause} ${archivedClause}) assigned) AS assigned,

+     (SELECT COALESCE(ARRAY_TO_JSON(ARRAY_AGG(ROW_TO_JSON(assigned))), '[]')
+      FROM (${taskSelectorClause}
+            FROM tasks t
+            LEFT JOIN tasks_assignees ta ON t.id = ta.task_id
+            WHERE ta.team_member_id = $1 ${durationFilter} ${assignClause} ${archivedClause}) assigned) AS total,
+
      (SELECT COALESCE(ARRAY_TO_JSON(ARRAY_AGG(ROW_TO_JSON(completed))), '[]')
       FROM (${taskSelectorClause}
             FROM tasks t

@@ -1095,6 +1121,11 @@ export default class ReportingMembersController extends ReportingControllerBase
    const body = {
      team_member_name: data.team_member_name,
      groups: [
+       {
+         name: "Total Tasks",
+         color_code: "#7590c9",
+         tasks: data.total ? data.total : 0
+       },
        {
          name: "Tasks Assigned",
          color_code: "#7590c9",

@@ -1114,7 +1145,7 @@ export default class ReportingMembersController extends ReportingControllerBase
          name: "Tasks Ongoing",
          color_code: "#7cb5ec",
          tasks: data.ongoing ? data.ongoing : 0
-       }
+       },
      ]
    };
@@ -0,0 +1,402 @@
import db from "../../config/db";
import { ParsedQs } from "qs";
import HandleExceptions from "../../decorators/handle-exceptions";
import { IWorkLenzRequest } from "../../interfaces/worklenz-request";
import { IWorkLenzResponse } from "../../interfaces/worklenz-response";
import { ServerResponse } from "../../models/server-response";
import { TASK_PRIORITY_COLOR_ALPHA, TASK_STATUS_COLOR_ALPHA, UNMAPPED } from "../../shared/constants";
import { getColor } from "../../shared/utils";
import moment, { Moment } from "moment";
import momentTime from "moment-timezone";
import WorklenzControllerBase from "../worklenz-controller-base";

interface IDateUnions {
  date_union: {
    start_date: string | null;
    end_date: string | null;
  },
  logs_date_union: {
    start_date: string | null;
    end_date: string | null;
  },
  allocated_date_union: {
    start_date: string | null;
    end_date: string | null;
  }
}

interface IDatesPair {
  start_date: string | null,
  end_date: string | null
}

export default class ScheduleControllerV2 extends WorklenzControllerBase {

  @HandleExceptions()
  public static async getSettings(req: IWorkLenzRequest, res: IWorkLenzResponse): Promise<IWorkLenzResponse> {
    // get organization working days
    const getDataq = `SELECT organization_id, array_agg(initcap(day)) AS working_days
                      FROM (
                        SELECT organization_id,
                               unnest(ARRAY['Monday', 'Tuesday', 'Wednesday', 'Thursday', 'Friday', 'Saturday', 'Sunday']) AS day,
                               unnest(ARRAY[monday, tuesday, wednesday, thursday, friday, saturday, sunday]) AS is_working
                        FROM public.organization_working_days
                        WHERE organization_id IN (
                          SELECT id FROM organizations
                          WHERE user_id = $1
                        )
                      ) t
                      WHERE t.is_working
                      GROUP BY organization_id LIMIT 1;`;

    const workingDaysResults = await db.query(getDataq, [req.user?.owner_id]);
    const [workingDays] = workingDaysResults.rows;

    // get organization working hours
    const getDataHoursq = `SELECT working_hours FROM organizations WHERE user_id = $1 GROUP BY id LIMIT 1;`;

    const workingHoursResults = await db.query(getDataHoursq, [req.user?.owner_id]);

    const [workingHours] = workingHoursResults.rows;

    return res.status(200).send(new ServerResponse(true, { workingDays: workingDays?.working_days, workingHours: workingHours?.working_hours }));
  }

  @HandleExceptions()
  public static async updateSettings(req: IWorkLenzRequest, res: IWorkLenzResponse): Promise<IWorkLenzResponse> {
    const { workingDays, workingHours } = req.body;

    // Days of the week
    const days = ["Monday", "Tuesday", "Wednesday", "Thursday", "Friday", "Saturday", "Sunday"];

    // Generate the SET clause dynamically
    const setClause = days
      .map(day => `${day.toLowerCase()} = ${workingDays.includes(day)}`)
      .join(", ");

    const updateQuery = `UPDATE public.organization_working_days
                         SET ${setClause}, updated_at = CURRENT_TIMESTAMP
                         WHERE organization_id IN (SELECT id FROM organizations WHERE user_id = $1);`;

    await db.query(updateQuery, [req.user?.owner_id]);

    const getDataHoursq = `UPDATE organizations SET working_hours = $1 WHERE user_id = $2;`;

    await db.query(getDataHoursq, [workingHours, req.user?.owner_id]);

    return res.status(200).send(new ServerResponse(true, {}));
  }
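The dynamic SET clause in updateSettings is worth seeing in isolation. A minimal sketch, assuming workingDays arrives as an array of capitalized day names:

const days = ["Monday", "Tuesday", "Wednesday", "Thursday", "Friday", "Saturday", "Sunday"];
const workingDays: string[] = ["Monday", "Tuesday", "Wednesday"]; // example request body

// Each column gets a boolean literal derived from Array.includes, so the
// interpolation only ever produces "true" or "false"; no user-supplied
// strings reach the SQL text.
const setClause = days
  .map(day => `${day.toLowerCase()} = ${workingDays.includes(day)}`)
  .join(", ");

console.log(setClause);
// monday = true, tuesday = true, wednesday = true, thursday = false, friday = false, saturday = false, sunday = false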
  @HandleExceptions()
  public static async getDates(req: IWorkLenzRequest, res: IWorkLenzResponse): Promise<IWorkLenzResponse> {

    const { date, type } = req.params;

    if (type === "week") {
      const getDataq = `WITH input_date AS (
        SELECT
          $1::DATE AS given_date,
          (SELECT id FROM organizations WHERE user_id=$2 LIMIT 1) AS organization_id
      ),
      week_range AS (
        SELECT
          (given_date - (EXTRACT(DOW FROM given_date)::INT + 6) % 7)::DATE AS start_date,          -- Current week start date
          (given_date - (EXTRACT(DOW FROM given_date)::INT + 6) % 7 + 6)::DATE AS end_date,        -- Current week end date
          (given_date - (EXTRACT(DOW FROM given_date)::INT + 6) % 7 + 7)::DATE AS next_week_start, -- Next week start date
          (given_date - (EXTRACT(DOW FROM given_date)::INT + 6) % 7 + 13)::DATE AS next_week_end,  -- Next week end date
          TO_CHAR(given_date, 'Mon YYYY') AS month_year,  -- Format the month as 'Jan 2025'
          EXTRACT(DAY FROM given_date) AS day_number,     -- Extract the day from the date
          (given_date - (EXTRACT(DOW FROM given_date)::INT + 6) % 7)::DATE AS chart_start,         -- First week start date
          (given_date - (EXTRACT(DOW FROM given_date)::INT + 6) % 7 + 13)::DATE AS chart_end,      -- Second week end date
          CURRENT_DATE::DATE AS today,
          organization_id
        FROM input_date
      ),
      org_working_days AS (
        SELECT
          organization_id,
          monday, tuesday, wednesday, thursday, friday, saturday, sunday
        FROM organization_working_days
        WHERE organization_id = (SELECT organization_id FROM week_range)
      ),
      days AS (
        SELECT
          generate_series((SELECT start_date FROM week_range), (SELECT next_week_end FROM week_range), '1 day'::INTERVAL)::DATE AS date
      ),
      formatted_days AS (
        SELECT
          d.date,
          TO_CHAR(d.date, 'Dy') AS day_name,
          EXTRACT(DAY FROM d.date) AS day,
          TO_CHAR(d.date, 'Mon YYYY') AS month,  -- Format the month as 'Jan 2025'
          CASE
            WHEN EXTRACT(DOW FROM d.date) = 0 THEN (SELECT sunday FROM org_working_days)
            WHEN EXTRACT(DOW FROM d.date) = 1 THEN (SELECT monday FROM org_working_days)
            WHEN EXTRACT(DOW FROM d.date) = 2 THEN (SELECT tuesday FROM org_working_days)
            WHEN EXTRACT(DOW FROM d.date) = 3 THEN (SELECT wednesday FROM org_working_days)
            WHEN EXTRACT(DOW FROM d.date) = 4 THEN (SELECT thursday FROM org_working_days)
            WHEN EXTRACT(DOW FROM d.date) = 5 THEN (SELECT friday FROM org_working_days)
            WHEN EXTRACT(DOW FROM d.date) = 6 THEN (SELECT saturday FROM org_working_days)
          END AS is_weekend,
          CASE WHEN d.date = (SELECT today FROM week_range) THEN TRUE ELSE FALSE END AS is_today
        FROM days d
      ),
      aggregated_days AS (
        SELECT
          jsonb_agg(
            jsonb_build_object(
              'day', day,
              'month', month,  -- Include formatted month
              'name', day_name,
              'isWeekend', NOT is_weekend,
              'isToday', is_today
            ) ORDER BY date
          ) AS days_json
        FROM formatted_days
      )
      SELECT jsonb_build_object(
        'date_data', jsonb_agg(
          jsonb_build_object(
            'month', (SELECT month_year FROM week_range),   -- Formatted month-year (e.g., Jan 2025)
            'day', (SELECT day_number FROM week_range),     -- Dynamic day number
            'weeks', '[]',                                  -- Empty weeks array for now
            'days', (SELECT days_json FROM aggregated_days) -- Aggregated days data
          )
        ),
        'chart_start', (SELECT chart_start FROM week_range), -- First week start date
        'chart_end', (SELECT chart_end FROM week_range)      -- Second week end date
      ) AS result_json;`;

      const results = await db.query(getDataq, [date, req.user?.owner_id]);
      const [data] = results.rows;
      return res.status(200).send(new ServerResponse(true, data.result_json));
    } else if (type === "month") {

      const getDataq = `WITH params AS (
        SELECT
          DATE_TRUNC('month', $1::DATE)::DATE AS start_date,  -- First day of the month
          (DATE_TRUNC('month', $1::DATE) + INTERVAL '1 month' - INTERVAL '1 day')::DATE AS end_date,  -- Last day of the month
          CURRENT_DATE::DATE AS today,
          (SELECT id FROM organizations WHERE user_id = $2 LIMIT 1) AS org_id
      ),
      days AS (
        SELECT
          generate_series(
            (SELECT start_date FROM params),
            (SELECT end_date FROM params),
            '1 day'::INTERVAL
          )::DATE AS date
      ),
      org_working_days AS (
        SELECT
          monday, tuesday, wednesday, thursday, friday, saturday, sunday
        FROM organization_working_days
        WHERE organization_id = (SELECT org_id FROM params)
        LIMIT 1
      ),
      formatted_days AS (
        SELECT
          d.date,
          TO_CHAR(d.date, 'Dy') AS day_name,
          EXTRACT(DAY FROM d.date) AS day,
          -- Dynamically check if the day is a weekend based on the organization's settings
          CASE
            WHEN EXTRACT(DOW FROM d.date) = 0 THEN NOT (SELECT sunday FROM org_working_days)
            WHEN EXTRACT(DOW FROM d.date) = 1 THEN NOT (SELECT monday FROM org_working_days)
            WHEN EXTRACT(DOW FROM d.date) = 2 THEN NOT (SELECT tuesday FROM org_working_days)
            WHEN EXTRACT(DOW FROM d.date) = 3 THEN NOT (SELECT wednesday FROM org_working_days)
            WHEN EXTRACT(DOW FROM d.date) = 4 THEN NOT (SELECT thursday FROM org_working_days)
            WHEN EXTRACT(DOW FROM d.date) = 5 THEN NOT (SELECT friday FROM org_working_days)
            WHEN EXTRACT(DOW FROM d.date) = 6 THEN NOT (SELECT saturday FROM org_working_days)
          END AS is_weekend,
          CASE WHEN d.date = (SELECT today FROM params) THEN TRUE ELSE FALSE END AS is_today
        FROM days d
      ),
      grouped_by_month AS (
        SELECT
          TO_CHAR(date, 'Mon YYYY') AS month_name,
          jsonb_agg(
            jsonb_build_object(
              'day', day,
              'name', day_name,
              'isWeekend', is_weekend,
              'isToday', is_today
            ) ORDER BY date
          ) AS days
        FROM formatted_days
        GROUP BY month_name
      )
      SELECT jsonb_build_object(
        'date_data', jsonb_agg(
          jsonb_build_object(
            'month', month_name,
            'weeks', '[]'::JSONB,  -- Placeholder for weeks data
            'days', days
          ) ORDER BY month_name
        ),
        'chart_start', (SELECT start_date FROM params),
        'chart_end', (SELECT end_date FROM params)
      ) AS result_json
      FROM grouped_by_month;`;

      const results = await db.query(getDataq, [date, req.user?.owner_id]);
      const [data] = results.rows;
      return res.status(200).send(new ServerResponse(true, data.result_json));

    }

    return res.status(200).send(new ServerResponse(true, []));
  }
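The repeated `(EXTRACT(DOW FROM given_date)::INT + 6) % 7` expression is the Monday-start week trick: PostgreSQL's DOW runs Sunday = 0 through Saturday = 6, and adding 6 modulo 7 turns that into "days since the most recent Monday". The same arithmetic in TypeScript, as a sketch (Date.getDay() uses the same Sunday = 0 convention):

// Returns the Monday that starts the week containing `date`.
function startOfWeekMonday(date: Date): Date {
  const daysSinceMonday = (date.getDay() + 6) % 7;
  const start = new Date(date);
  start.setDate(date.getDate() - daysSinceMonday);
  return start;
}

// A Wednesday maps back two days:
console.log(startOfWeekMonday(new Date(2025, 0, 8)).toDateString()); // Mon Jan 06 2025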
  @HandleExceptions()
  public static async getOrganizationMembers(req: IWorkLenzRequest, res: IWorkLenzResponse): Promise<IWorkLenzResponse> {

    const getDataq = `SELECT DISTINCT ON (users.email)
                        team_members.id AS team_member_id,
                        users.id AS id,
                        users.name AS name,
                        users.email AS email,
                        '[]'::JSONB AS projects
                      FROM team_members
                      INNER JOIN users ON users.id = team_members.user_id
                      WHERE team_members.team_id IN (
                        SELECT id FROM teams
                        WHERE organization_id IN (
                          SELECT id FROM organizations
                          WHERE user_id = $1
                          LIMIT 1
                        )
                      )
                      ORDER BY users.email ASC, users.name ASC;`;

    const results = await db.query(getDataq, [req.user?.owner_id]);
    return res.status(200).send(new ServerResponse(true, results.rows));
  }

  @HandleExceptions()
  public static async getOrganizationMemberProjects(req: IWorkLenzRequest, res: IWorkLenzResponse): Promise<IWorkLenzResponse> {

    const { id } = req.params;

    const getDataq = `WITH project_dates AS (
      SELECT
        pm.project_id,
        MIN(pm.allocated_from) AS start_date,
        MAX(pm.allocated_to) AS end_date,
        MAX(pm.seconds_per_day) / 3600 AS hours_per_day,  -- Convert max seconds per day to hours per day
        (
          -- Calculate total working days between start and end dates
          SELECT COUNT(*)
          FROM generate_series(MIN(pm.allocated_from), MAX(pm.allocated_to), '1 day'::interval) AS day
          JOIN public.organization_working_days owd ON owd.organization_id = t.organization_id
          WHERE
            (EXTRACT(ISODOW FROM day) = 1 AND owd.monday = true) OR
            (EXTRACT(ISODOW FROM day) = 2 AND owd.tuesday = true) OR
            (EXTRACT(ISODOW FROM day) = 3 AND owd.wednesday = true) OR
            (EXTRACT(ISODOW FROM day) = 4 AND owd.thursday = true) OR
            (EXTRACT(ISODOW FROM day) = 5 AND owd.friday = true) OR
            (EXTRACT(ISODOW FROM day) = 6 AND owd.saturday = true) OR
            (EXTRACT(ISODOW FROM day) = 7 AND owd.sunday = true)
        ) * (MAX(pm.seconds_per_day) / 3600) AS total_hours  -- Multiply by hours per day
      FROM public.project_member_allocations pm
      JOIN public.projects p ON pm.project_id = p.id
      JOIN public.teams t ON p.team_id = t.id
      GROUP BY pm.project_id, t.organization_id
    ),
    projects_with_offsets AS (
      SELECT
        p.name AS project_name,
        p.id AS project_id,
        COALESCE(pd.hours_per_day, 0) AS hours_per_day,  -- Default to 0 if not available in project_member_allocations
        COALESCE(pd.total_hours, 0) AS total_hours,      -- Calculated total hours based on working days
        pd.start_date,
        pd.end_date,
        p.team_id,
        tm.user_id,
        -- Calculate indicator_offset dynamically: days difference from earliest project start date * 75px
        COALESCE(
          (DATE_PART('day', pd.start_date - MIN(pd.start_date) OVER ())) * 75,
          0
        ) AS indicator_offset,
        -- Calculate indicator_width as the number of days * 75 pixels per day
        COALESCE((DATE_PART('day', pd.end_date - pd.start_date) + 1) * 75, 75) AS indicator_width,  -- Fallback to 75 if no dates exist
        75 AS min_width  -- 75px minimum width for a 1-day project
      FROM public.projects p
      LEFT JOIN project_dates pd ON p.id = pd.project_id
      JOIN public.team_members tm ON tm.team_id = p.team_id
      JOIN public.teams t ON p.team_id = t.id
      WHERE tm.user_id = $2
        AND tm.team_id = $1
      ORDER BY pd.start_date, pd.end_date  -- Order by start and end date
    )
    SELECT jsonb_agg(jsonb_build_object(
      'name', project_name,
      'id', project_id,
      'hours_per_day', hours_per_day,
      'total_hours', total_hours,
      'date_union', jsonb_build_object(
        'start', start_date::DATE,
        'end', end_date::DATE
      ),
      'indicator_offset', indicator_offset,
      'indicator_width', indicator_width,
      'tasks', '[]'::jsonb,  -- Empty tasks array for now
      'default_values', jsonb_build_object(
        'allocated_from', start_date::DATE,
        'allocated_to', end_date::DATE,
        'seconds_per_day', hours_per_day,
        'total_seconds', total_hours
      )
    )) AS projects
    FROM projects_with_offsets;`;

    const results = await db.query(getDataq, [req.user?.team_id, id]);
    const [data] = results.rows;
    return res.status(200).send(new ServerResponse(true, { projects: data.projects, id }));
  }
  @HandleExceptions()
  public static async createSchedule(req: IWorkLenzRequest, res: IWorkLenzResponse): Promise<IWorkLenzResponse> {

    const { allocated_from, allocated_to, project_id, team_member_id, seconds_per_day } = req.body;

    const fromFormat = moment(allocated_from).format("YYYY-MM-DD");
    const toFormat = moment(allocated_to).format("YYYY-MM-DD");

    const getDataq1 = `
      SELECT id
      FROM project_member_allocations
      WHERE project_id = $1
        AND team_member_id = $2
        AND (
          -- Case 1: The given range starts inside an existing range
          ($3 BETWEEN allocated_from AND allocated_to)
          OR
          -- Case 2: The given range ends inside an existing range
          ($4 BETWEEN allocated_from AND allocated_to)
          OR
          -- Case 3: The given range fully covers an existing range
          (allocated_from BETWEEN $3 AND $4 AND allocated_to BETWEEN $3 AND $4)
          OR
          -- Case 4: The existing range fully covers the given range
          (allocated_from <= $3 AND allocated_to >= $4)
        );`;

    const results1 = await db.query(getDataq1, [project_id, team_member_id, fromFormat, toFormat]);

    const [data] = results1.rows;
    if (data) {
      return res.status(200).send(new ServerResponse(false, null, "Allocation already exists!"));
    }

    const getDataq = `INSERT INTO public.project_member_allocations(
                        project_id, team_member_id, allocated_from, allocated_to, seconds_per_day)
                      VALUES ($1, $2, $3, $4, $5);`;

    const results = await db.query(getDataq, [project_id, team_member_id, allocated_from, allocated_to, Number(seconds_per_day) * 60 * 60]);
    return res.status(200).send(new ServerResponse(true, null, "Allocated successfully!"));
  }
}
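The four overlap cases in the duplicate-allocation check are the classic interval-intersection predicate spelled out. For two inclusive ranges they collapse to a single comparison, as this sketch shows (an illustration of the logic, not code from the repository):

// [aFrom, aTo] and [bFrom, bTo] overlap exactly when each range starts
// before the other one ends. "Starts inside", "ends inside", and both
// containment cases are all special cases of this one condition.
function rangesOverlap(aFrom: Date, aTo: Date, bFrom: Date, bTo: Date): boolean {
  return aFrom <= bTo && bFrom <= aTo;
}

console.log(rangesOverlap(
  new Date(2025, 0, 1), new Date(2025, 0, 10),
  new Date(2025, 0, 8), new Date(2025, 0, 20)
)); // true: the second range starts inside the first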
@@ -52,6 +52,83 @@ export default class ScheduleControllerV2 extends ScheduleTasksControllerBase {
  private static GLOBAL_START_DATE = moment().format("YYYY-MM-DD");
  private static GLOBAL_END_DATE = moment().format("YYYY-MM-DD");

  // Migrate data
  @HandleExceptions()
  public static async migrate(req: IWorkLenzRequest, res: IWorkLenzResponse): Promise<IWorkLenzResponse> {
    const getDataq = `SELECT p.id,
                      (SELECT COALESCE(ARRAY_TO_JSON(ARRAY_AGG(ROW_TO_JSON(rec))), '[]'::JSON)
                       FROM (SELECT tmiv.team_member_id,
                                    tmiv.user_id,

                                    LEAST(
                                      (SELECT MIN(LEAST(start_date, end_date)) AS start_date
                                       FROM tasks
                                       INNER JOIN tasks_assignees ta ON tasks.id = ta.task_id
                                       WHERE archived IS FALSE
                                         AND project_id = p.id
                                         AND ta.team_member_id = tmiv.team_member_id),
                                      (SELECT MIN(twl.created_at - INTERVAL '1 second' * twl.time_spent) AS ll_start_date
                                       FROM task_work_log twl
                                       INNER JOIN tasks t ON twl.task_id = t.id AND t.archived IS FALSE
                                       WHERE t.project_id = p.id
                                         AND twl.user_id = tmiv.user_id)
                                    ) AS lowest_date,

                                    GREATEST(
                                      (SELECT MAX(GREATEST(start_date, end_date)) AS end_date
                                       FROM tasks
                                       INNER JOIN tasks_assignees ta ON tasks.id = ta.task_id
                                       WHERE archived IS FALSE
                                         AND project_id = p.id
                                         AND ta.team_member_id = tmiv.team_member_id),
                                      (SELECT MAX(twl.created_at - INTERVAL '1 second' * twl.time_spent) AS ll_end_date
                                       FROM task_work_log twl
                                       INNER JOIN tasks t ON twl.task_id = t.id AND t.archived IS FALSE
                                       WHERE t.project_id = p.id
                                         AND twl.user_id = tmiv.user_id)
                                    ) AS greatest_date

                             FROM project_members pm
                             INNER JOIN team_member_info_view tmiv
                               ON pm.team_member_id = tmiv.team_member_id
                             WHERE project_id = p.id) rec) AS members

                      FROM projects p
                      WHERE team_id IS NOT NULL
                        AND p.id NOT IN (SELECT project_id FROM archived_projects)`;

    const projectMembersResults = await db.query(getDataq);

    const projectMemberData = projectMembersResults.rows;

    const arrayToInsert = [];

    for (const data of projectMemberData) {
      if (data.members.length) {
        for (const member of data.members) {

          const body = {
            project_id: data.id,
            team_member_id: member.team_member_id,
            allocated_from: member.lowest_date ? member.lowest_date : null,
            allocated_to: member.greatest_date ? member.greatest_date : null
          };

          if (body.allocated_from && body.allocated_to) arrayToInsert.push(body);

        }
      }
    }

    const insertArray = JSON.stringify(arrayToInsert);

    const insertFunctionCall = `SELECT migrate_member_allocations($1)`;
    await db.query(insertFunctionCall, [insertArray]);

    return res.status(200).send(new ServerResponse(true, ""));
  }


  private static async getFirstLastDates(teamId: string, userId: string) {
    const q = `SELECT MIN(LEAST(allocated_from, allocated_to)) AS start_date,
                      MAX(GREATEST(allocated_from, allocated_to)) AS end_date,
@@ -5,7 +5,7 @@ import {IWorkLenzResponse} from "../interfaces/worklenz-response";

import db from "../config/db";
import {ServerResponse} from "../models/server-response";
-import {PriorityColorCodes, TASK_PRIORITY_COLOR_ALPHA, TASK_STATUS_COLOR_ALPHA} from "../shared/constants";
+import {PriorityColorCodes, PriorityColorCodesDark, TASK_PRIORITY_COLOR_ALPHA, TASK_STATUS_COLOR_ALPHA} from "../shared/constants";
import {getColor} from "../shared/utils";
import WorklenzControllerBase from "./worklenz-controller-base";
import HandleExceptions from "../decorators/handle-exceptions";

@@ -33,6 +33,7 @@ export default class SubTasksController extends WorklenzControllerBase {
    (ts.name) AS status_name,
    TRUE AS is_sub_task,
    (tsc.color_code) AS status_color,
+   (tsc.color_code_dark) AS status_color_dark,
    (SELECT name FROM projects WHERE id = t.project_id) AS project_name,
    (SELECT value FROM task_priorities WHERE id = t.priority_id) AS priority_value,
    total_minutes,

@@ -46,11 +47,12 @@ export default class SubTasksController extends WorklenzControllerBase {
    WHERE task_id = t.id
    ORDER BY name) r) AS labels,
    (SELECT COALESCE(ARRAY_TO_JSON(ARRAY_AGG(ROW_TO_JSON(rec))), '[]'::JSON)
-    FROM (SELECT task_statuses.id, task_statuses.name, stsc.color_code
+    FROM (SELECT task_statuses.id, task_statuses.name, stsc.color_code, stsc.color_code_dark
          FROM task_statuses
          INNER JOIN sys_task_status_categories stsc ON task_statuses.category_id = stsc.id
          WHERE project_id = t.project_id
-         ORDER BY task_statuses.name) rec) AS statuses
+         ORDER BY task_statuses.name) rec) AS statuses,
+   t.completed_at
    FROM tasks t
    INNER JOIN task_statuses ts ON ts.id = t.status_id
    INNER JOIN task_priorities tp ON tp.id = t.priority_id

@@ -62,6 +64,7 @@ export default class SubTasksController extends WorklenzControllerBase {

    for (const task of result.rows) {
      task.priority_color = PriorityColorCodes[task.priority_value] || null;
+     task.priority_color_dark = PriorityColorCodesDark[task.priority_value] || null;

      task.time_spent = {hours: Math.floor(task.total_minutes_spent / 60), minutes: task.total_minutes_spent % 60};
      task.time_spent_string = `${task.time_spent.hours}h ${task.time_spent.minutes}m`;

@@ -72,6 +75,7 @@ export default class SubTasksController extends WorklenzControllerBase {
      task.labels = this.createTagList(task.labels, 2);

      task.status_color = task.status_color + TASK_STATUS_COLOR_ALPHA;
+     task.status_color_dark = task.status_color_dark + TASK_STATUS_COLOR_ALPHA;
      task.priority_color = task.priority_color + TASK_PRIORITY_COLOR_ALPHA;
    }
@@ -6,11 +6,13 @@ import { ServerResponse } from "../models/server-response";
import WorklenzControllerBase from "./worklenz-controller-base";
import HandleExceptions from "../decorators/handle-exceptions";
import { NotificationsService } from "../services/notifications/notifications.service";
-import { log_error } from "../shared/utils";
-import { HTML_TAG_REGEXP } from "../shared/constants";
+import { humanFileSize, log_error, megabytesToBytes } from "../shared/utils";
+import { HTML_TAG_REGEXP, S3_URL } from "../shared/constants";
import { getBaseUrl } from "../cron_jobs/helpers";
import { ICommentEmailNotification } from "../interfaces/comment-email-notification";
import { sendTaskComment } from "../shared/email-notifications";
+import { getRootDir, uploadBase64, getKey, getTaskAttachmentKey, createPresignedUrlWithClient } from "../shared/s3";
+import { getFreePlanSettings, getUsedStorage } from "../shared/paddle-utils";

interface ITaskAssignee {
  team_member_id: string;
@@ -99,11 +101,134 @@ export default class TaskCommentsController extends WorklenzControllerBase {
  public static async create(req: IWorkLenzRequest, res: IWorkLenzResponse): Promise<IWorkLenzResponse> {
    req.body.user_id = req.user?.id;
    req.body.team_id = req.user?.team_id;
-   const {mentions} = req.body;
+   const { mentions, attachments, task_id } = req.body;
+   const url = `${S3_URL}/${getRootDir()}`;

    let commentContent = req.body.content;
    if (mentions.length > 0) {
-     commentContent = await this.replaceContent(commentContent, mentions);
+     commentContent = this.replaceContent(commentContent, mentions);
    }

    req.body.content = commentContent;

    const q = `SELECT create_task_comment($1) AS comment;`;
    const result = await db.query(q, [JSON.stringify(req.body)]);
    const [data] = result.rows;

    const response = data.comment;

    const commentId = response.id;

    if (attachments.length !== 0) {
      for (const attachment of attachments) {
        const q = `
          INSERT INTO task_comment_attachments (name, type, size, task_id, comment_id, team_id, project_id)
          VALUES ($1, $2, $3, $4, $5, $6, $7)
          RETURNING id, name, type, task_id, comment_id, created_at,
            CONCAT($8::TEXT, '/', team_id, '/', project_id, '/', task_id, '/', comment_id, '/', id, '.', type) AS url;
        `;

        const result = await db.query(q, [
          attachment.file_name,
          attachment.file_name.split(".").pop(),
          attachment.size,
          task_id,
          commentId,
          req.user?.team_id,
          attachment.project_id,
          url
        ]);

        const [data] = result.rows;

        const s3Url = await uploadBase64(attachment.file, getTaskAttachmentKey(req.user?.team_id as string, attachment.project_id, task_id, commentId, data.id, data.type));

        if (!data?.id || !s3Url)
          return res.status(200).send(new ServerResponse(false, null, "Attachment upload failed"));
      }
    }

    const mentionMessage = `<b>${req.user?.name}</b> has mentioned you in a comment on <b>${response.task_name}</b> (${response.team_name})`;
    // const mentions = [...new Set(req.body.mentions || [])] as string[]; // remove duplicates

    const assignees = await getAssignees(req.body.task_id);

    const commentMessage = `<b>${req.user?.name}</b> added a comment on <b>${response.task_name}</b> (${response.team_name})`;
    for (const member of assignees || []) {
      if (member.user_id && member.user_id === req.user?.id) continue;

      void NotificationsService.createNotification({
        userId: member.user_id,
        teamId: req.user?.team_id as string,
        socketId: member.socket_id,
        message: commentMessage,
        taskId: req.body.task_id,
        projectId: response.project_id
      });

      if (member.email_notifications_enabled)
        await this.sendMail({
          message: commentMessage,
          receiverEmail: member.email,
          receiverName: member.name,
          content: req.body.content,
          commentId: response.id,
          projectId: response.project_id,
          taskId: req.body.task_id,
          teamName: response.team_name,
          projectName: response.project_name,
          taskName: response.task_name
        });
    }

    const senderUserId = req.user?.id as string;

    for (const mention of mentions) {
      if (mention) {
        const member = await this.getUserDataByTeamMemberId(senderUserId, mention.team_member_id, response.project_id);
        if (member) {

          NotificationsService.sendNotification({
            team: member.team,
            receiver_socket_id: member.socket_id,
            message: mentionMessage,
            task_id: req.body.task_id,
            project_id: response.project_id,
            project: member.project,
            project_color: member.project_color,
            team_id: req.user?.team_id as string
          });

          if (member.email_notifications_enabled)
            await this.sendMail({
              message: mentionMessage,
              receiverEmail: member.email,
              receiverName: member.user_name,
              content: req.body.content,
              commentId: response.id,
              projectId: response.project_id,
              taskId: req.body.task_id,
              teamName: response.team_name,
              projectName: response.project_name,
              taskName: response.task_name
            });
        }

      }
    }

    return res.status(200).send(new ServerResponse(true, data.comment));
  }

  @HandleExceptions()
  public static async update(req: IWorkLenzRequest, res: IWorkLenzResponse): Promise<IWorkLenzResponse> {
    req.body.user_id = req.user?.id;
    req.body.team_id = req.user?.team_id;
    const { mentions, comment_id } = req.body;

    let commentContent = req.body.content;
    if (mentions.length > 0) {
      commentContent = await this.replaceContent(commentContent, mentions);
    }

    req.body.content = commentContent;
@@ -210,46 +335,90 @@ export default class TaskCommentsController extends WorklenzControllerBase {

  @HandleExceptions()
  public static async getByTaskId(req: IWorkLenzRequest, res: IWorkLenzResponse): Promise<IWorkLenzResponse> {
-   const q = `
-     SELECT task_comments.id,
-            tc.text_content AS content,
-            task_comments.user_id,
-            task_comments.team_member_id,
-            (SELECT name FROM team_member_info_view WHERE team_member_info_view.team_member_id = tm.id) AS member_name,
-            u.avatar_url,
-            task_comments.created_at,
-            (SELECT COALESCE(JSON_AGG(rec), '[]'::JSON)
-             FROM (SELECT tmiv.name AS user_name,
-                          tmiv.email AS user_email
-                   FROM task_comment_mentions tcm
-                   LEFT JOIN team_member_info_view tmiv ON tcm.informed_by = tmiv.team_member_id
-                   WHERE tcm.comment_id = task_comments.id) rec) AS mentions
-     FROM task_comments
-     INNER JOIN task_comment_contents tc ON task_comments.id = tc.comment_id
-     INNER JOIN team_members tm ON task_comments.team_member_id = tm.id
-     LEFT JOIN users u ON tm.user_id = u.id
-     WHERE task_comments.task_id = $1
-     ORDER BY task_comments.created_at DESC;
-   `;
-   const result = await db.query(q, [req.params.id]); // task id
+   const result = await TaskCommentsController.getTaskComments(req.params.id); // task id
    return res.status(200).send(new ServerResponse(true, result.rows));
  }

  private static async getTaskComments(taskId: string) {
    const url = `${S3_URL}/${getRootDir()}`;

    const q = `SELECT task_comments.id,
                      tc.text_content AS content,
                      task_comments.user_id,
                      task_comments.team_member_id,
                      (SELECT name FROM team_member_info_view WHERE team_member_info_view.team_member_id = tm.id) AS member_name,
                      u.avatar_url,
                      task_comments.created_at,
                      (SELECT COALESCE(JSON_AGG(rec), '[]'::JSON)
                       FROM (SELECT tmiv.name AS user_name,
                                    tmiv.email AS user_email
                             FROM task_comment_mentions tcm
                             LEFT JOIN team_member_info_view tmiv ON tcm.informed_by = tmiv.team_member_id
                             WHERE tcm.comment_id = task_comments.id) rec) AS mentions,
                      (SELECT JSON_BUILD_OBJECT(
                        'likes',
                        JSON_BUILD_OBJECT(
                          'count', (SELECT COUNT(*)
                                    FROM task_comment_reactions tcr
                                    WHERE tcr.comment_id = task_comments.id
                                      AND reaction_type = 'like'),
                          'liked_members', COALESCE(
                            (SELECT JSON_AGG(tmiv.name)
                             FROM task_comment_reactions tcr
                             JOIN team_member_info_view tmiv ON tcr.team_member_id = tmiv.team_member_id
                             WHERE tcr.comment_id = task_comments.id
                               AND tcr.reaction_type = 'like'),
                            '[]'::JSON
                          ),
                          'liked_member_ids', COALESCE(
                            (SELECT JSON_AGG(tmiv.team_member_id)
                             FROM task_comment_reactions tcr
                             JOIN team_member_info_view tmiv ON tcr.team_member_id = tmiv.team_member_id
                             WHERE tcr.comment_id = task_comments.id
                               AND tcr.reaction_type = 'like'),
                            '[]'::JSON
                          )
                        )
                      )) AS reactions,
                      (SELECT COALESCE(JSON_AGG(rec), '[]'::JSON)
                       FROM (SELECT id, created_at, name, size, type, (CONCAT('/', team_id, '/', project_id, '/', task_id, '/', comment_id, '/', id, '.', type)) AS url
                             FROM task_comment_attachments tca
                             WHERE tca.comment_id = task_comments.id) rec) AS attachments
               FROM task_comments
               LEFT JOIN task_comment_contents tc ON task_comments.id = tc.comment_id
               INNER JOIN team_members tm ON task_comments.team_member_id = tm.id
               LEFT JOIN users u ON tm.user_id = u.id
               WHERE task_comments.task_id = $1
               ORDER BY task_comments.created_at;`;
    const result = await db.query(q, [taskId]); // task id

    for (const comment of result.rows) {
      if (!comment.content) comment.content = "";
      comment.rawContent = await comment.content;
      comment.content = await comment.content.replace(/\n/g, "</br>");
-     const {mentions} = comment;
+     comment.edit = false;
+     const { mentions } = comment;
      if (mentions.length > 0) {
        const placeHolders = comment.content.match(/{\d+}/g);
        if (placeHolders) {
          placeHolders.forEach((placeHolder: { match: (arg0: RegExp) => string[]; }) => {
-           const index = parseInt(placeHolder.match(/\d+/)[0]);
-           if (index >= 0 && index < comment.mentions.length) {
-             comment.content = comment.content.replace(placeHolder, `<span class="mentions"> @${comment.mentions[index].user_name} </span>`);
-           }
+           const index = parseInt(placeHolder.match(/\d+/)[0]);
+           if (index >= 0 && index < comment.mentions.length) {
+             comment.rawContent = comment.rawContent.replace(placeHolder, `@${comment.mentions[index].user_name}`);
+             comment.content = comment.content.replace(placeHolder, `<span class="mentions"> @${comment.mentions[index].user_name} </span>`);
+           }
          });
        }
      }

      for (const attachment of comment.attachments) {
        attachment.size = humanFileSize(attachment.size);
        attachment.url = url + attachment.url;
      }

    }

-   return res.status(200).send(new ServerResponse(true, result.rows));
+   return result;
  }

  @HandleExceptions()
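The `{0}`, `{1}` placeholder expansion inside getTaskComments is easy to misread in diff form, so here is the same idea as a pure function (a sketch for illustration; the controller mutates the row in place instead):

interface Mention {
  user_name: string;
}

// Replaces numeric placeholders like "{0}" with a rendered mention span,
// leaving out-of-range indices untouched.
function expandMentions(content: string, mentions: Mention[]): string {
  return content.replace(/{(\d+)}/g, (placeholder, digits: string) => {
    const index = parseInt(digits, 10);
    return index < mentions.length
      ? `<span class="mentions"> @${mentions[index].user_name} </span>`
      : placeholder;
  });
}

console.log(expandMentions("Ping {0} and {1}", [{ user_name: "alice" }, { user_name: "bob" }]));
// Ping <span class="mentions"> @alice </span> and <span class="mentions"> @bob </span>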
@@ -262,4 +431,186 @@ export default class TaskCommentsController extends WorklenzControllerBase {
    const result = await db.query(q, [req.params.id, req.params.taskId, req.user?.id || null]);
    return res.status(200).send(new ServerResponse(true, result.rows));
  }

  @HandleExceptions()
  public static async deleteAttachmentById(req: IWorkLenzRequest, res: IWorkLenzResponse): Promise<IWorkLenzResponse> {
    const q = `DELETE
               FROM task_comment_attachments
               WHERE id = $1;`;
    const result = await db.query(q, [req.params.id]);
    return res.status(200).send(new ServerResponse(true, result.rows));
  }

  private static async checkIfAlreadyExists(commentId: string, teamMemberId: string | undefined, reaction_type: string) {
    if (!teamMemberId) return;
    try {
      const q = `SELECT EXISTS(SELECT 1 FROM task_comment_reactions WHERE comment_id = $1 AND team_member_id = $2 AND reaction_type = $3)`;
      const result = await db.query(q, [commentId, teamMemberId, reaction_type]);
      const [data] = result.rows;
      return data.exists;
    } catch (error) {
      log_error(error);
    }
  }

  private static async getTaskCommentData(commentId: string) {
    if (!commentId) return;
    try {
      const q = `SELECT tc.user_id,
                        t.project_id,
                        t.name AS task_name,
                        (SELECT team_id FROM projects p WHERE p.id = t.project_id) AS team_id,
                        (SELECT name FROM teams te WHERE id = (SELECT team_id FROM projects p WHERE p.id = t.project_id)) AS team_name,
                        (SELECT u.socket_id FROM users u WHERE u.id = tc.user_id) AS socket_id,
                        (SELECT name FROM team_member_info_view tmiv WHERE tmiv.team_member_id = tcr.team_member_id) AS reactor_name
                 FROM task_comments tc
                 LEFT JOIN tasks t ON t.id = tc.task_id
                 LEFT JOIN task_comment_reactions tcr ON tc.id = tcr.comment_id
                 WHERE tc.id = $1;`;
      const result = await db.query(q, [commentId]);
      const [data] = result.rows;
      return data;
    } catch (error) {
      log_error(error);
    }
  }

  @HandleExceptions()
  public static async updateReaction(req: IWorkLenzRequest, res: IWorkLenzResponse): Promise<IWorkLenzResponse> {
    const { id } = req.params;
    const { reaction_type, task_id } = req.query;

    const exists = await this.checkIfAlreadyExists(id, req.user?.team_member_id, reaction_type as string);

    if (exists) {
      const deleteQ = `DELETE FROM task_comment_reactions WHERE comment_id = $1 AND team_member_id = $2;`;
      await db.query(deleteQ, [id, req.user?.team_member_id]);
    } else {
      const q = `INSERT INTO task_comment_reactions (comment_id, user_id, team_member_id) VALUES ($1, $2, $3);`;
      await db.query(q, [id, req.user?.id, req.user?.team_member_id]);

      const getTaskCommentData = await TaskCommentsController.getTaskCommentData(id);
      const commentMessage = `<b>${getTaskCommentData.reactor_name}</b> liked your comment on <b>${getTaskCommentData.task_name}</b> (${getTaskCommentData.team_name})`;

      if (getTaskCommentData && getTaskCommentData.user_id !== req.user?.id) {
        void NotificationsService.createNotification({
          userId: getTaskCommentData.user_id,
          teamId: req.user?.team_id as string,
          socketId: getTaskCommentData.socket_id,
          message: commentMessage,
          taskId: req.body.task_id,
          projectId: getTaskCommentData.project_id
        });
      }
    }

    const result = await TaskCommentsController.getTaskComments(task_id as string);
    return res.status(200).send(new ServerResponse(true, result.rows));
  }

  @HandleExceptions()
  public static async createAttachment(req: IWorkLenzRequest, res: IWorkLenzResponse): Promise<IWorkLenzResponse> {
    req.body.user_id = req.user?.id;
    req.body.team_id = req.user?.team_id;
    const { attachments, task_id } = req.body;

    const q = `INSERT INTO task_comments (user_id, team_member_id, task_id)
               VALUES ($1, (SELECT id
                            FROM team_members
                            WHERE user_id = $1
                              AND team_id = $2::UUID), $3)
               RETURNING id;`;
    const result = await db.query(q, [req.user?.id, req.user?.team_id, task_id]);
    const [data] = result.rows;

    const commentId = data.id;

    const url = `${S3_URL}/${getRootDir()}`;

    for (const attachment of attachments) {
      if (req.user?.subscription_status === "free" && req.user?.owner_id) {
        const limits = await getFreePlanSettings();

        const usedStorage = await getUsedStorage(req.user?.owner_id);
        if ((parseInt(usedStorage) + attachment.size) > megabytesToBytes(parseInt(limits.free_tier_storage))) {
          return res.status(200).send(new ServerResponse(false, [], `Sorry, the free plan cannot exceed ${limits.free_tier_storage}MB of storage.`));
        }
      }

      const q = `
        INSERT INTO task_comment_attachments (name, type, size, task_id, comment_id, team_id, project_id)
        VALUES ($1, $2, $3, $4, $5, $6, $7)
        RETURNING id, name, type, task_id, comment_id, created_at,
          CONCAT($8::TEXT, '/', team_id, '/', project_id, '/', task_id, '/', comment_id, '/', id, '.', type) AS url;
      `;

      const result = await db.query(q, [
        attachment.file_name,
        attachment.file_name.split(".").pop(),
        attachment.size,
        task_id,
        commentId,
        req.user?.team_id,
        attachment.project_id,
        url
      ]);

      const [data] = result.rows;

      const s3Url = await uploadBase64(attachment.file, getTaskAttachmentKey(req.user?.team_id as string, attachment.project_id, task_id, commentId, data.id, data.type));

      if (!data?.id || !s3Url)
        return res.status(200).send(new ServerResponse(false, null, "Attachment upload failed"));
    }

    const assignees = await getAssignees(task_id);

    const commentMessage = `<b>${req.user?.name}</b> added a new attachment as a comment on <b>${commentId.task_name}</b> (${commentId.team_name})`;

    for (const member of assignees || []) {
      if (member.user_id && member.user_id === req.user?.id) continue;

      void NotificationsService.createNotification({
        userId: member.user_id,
        teamId: req.user?.team_id as string,
        socketId: member.socket_id,
        message: commentMessage,
        taskId: task_id,
        projectId: commentId.project_id
      });

      if (member.email_notifications_enabled)
        await this.sendMail({
          message: commentMessage,
          receiverEmail: member.email,
          receiverName: member.name,
          content: req.body.content,
          commentId: commentId.id,
          projectId: commentId.project_id,
          taskId: task_id,
          teamName: commentId.team_name,
          projectName: commentId.project_name,
          taskName: commentId.task_name
        });
    }

    return res.status(200).send(new ServerResponse(true, []));
  }


  @HandleExceptions()
  public static async download(req: IWorkLenzRequest, res: IWorkLenzResponse): Promise<IWorkLenzResponse> {
    const q = `SELECT CONCAT($2::TEXT, '/', team_id, '/', project_id, '/', task_id, '/', comment_id, '/', id, '.', type) AS key
               FROM task_comment_attachments
               WHERE id = $1;`;
    const result = await db.query(q, [req.query.id, getRootDir()]);
    const [data] = result.rows;

    if (data?.key) {
      const url = await createPresignedUrlWithClient(data.key, req.query.file as string);
      return res.status(200).send(new ServerResponse(true, url));
    }

    return res.status(200).send(new ServerResponse(true, null));
  }
}
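The free-plan gate in createAttachment is simple byte arithmetic. A standalone sketch (assuming megabytesToBytes is the usual MB * 1024 * 1024 conversion; its implementation is not part of this diff):

// Assumed conversion: the shared helper is not shown in this diff.
const megabytesToBytes = (mb: number): number => mb * 1024 * 1024;

// True when an incoming upload would push the account past its tier.
function exceedsFreeTier(usedBytes: number, incomingBytes: number, freeTierMb: number): boolean {
  return usedBytes + incomingBytes > megabytesToBytes(freeTierMb);
}

// 90 MB already stored plus a 15 MB attachment against a 100 MB tier:
console.log(exceedsFreeTier(megabytesToBytes(90), megabytesToBytes(15), 100)); // true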
@@ -0,0 +1,52 @@
import { IWorkLenzRequest } from "../interfaces/worklenz-request";
import { IWorkLenzResponse } from "../interfaces/worklenz-response";

import db from "../config/db";
import { ServerResponse } from "../models/server-response";
import WorklenzControllerBase from "./worklenz-controller-base";
import HandleExceptions from "../decorators/handle-exceptions";

export default class TaskdependenciesController extends WorklenzControllerBase {
  @HandleExceptions({
    raisedExceptions: {
      "DEPENDENCY_EXISTS": `Task dependency already exists.`
    }
  })
  public static async saveTaskDependency(req: IWorkLenzRequest, res: IWorkLenzResponse): Promise<IWorkLenzResponse> {
    const { task_id, related_task_id, dependency_type } = req.body;
    const q = `SELECT insert_task_dependency($1, $2, $3);`;
    const result = await db.query(q, [task_id, related_task_id, dependency_type]);
    return res.status(200).send(new ServerResponse(true, result.rows));
  }

  @HandleExceptions()
  public static async getTaskDependencies(req: IWorkLenzRequest, res: IWorkLenzResponse): Promise<IWorkLenzResponse> {
    const { id } = req.params;

    const q = `SELECT
                 td.id,
                 t2.name AS task_name,
                 td.dependency_type,
                 CONCAT(p.key, '-', t2.task_no) AS task_key
               FROM
                 task_dependencies td
               LEFT JOIN
                 tasks t ON td.task_id = t.id
               LEFT JOIN
                 tasks t2 ON td.related_task_id = t2.id
               LEFT JOIN
                 projects p ON t.project_id = p.id
               WHERE
                 td.task_id = $1;`;
    const result = await db.query(q, [id]);

    return res.status(200).send(new ServerResponse(true, result.rows));
  }

  public static async deleteById(req: IWorkLenzRequest, res: IWorkLenzResponse): Promise<IWorkLenzResponse> {
    const { id } = req.params;
    const q = `DELETE FROM task_dependencies WHERE id = $1;`;
    const result = await db.query(q, [id]);
    return res.status(200).send(new ServerResponse(true, result.rows));
  }
}
@@ -32,8 +32,9 @@ export default class TaskListColumnsController extends WorklenzControllerBase {
    const q = `UPDATE project_task_list_cols
               SET pinned = $3
               WHERE project_id = $1
-                AND key = $2;`;
+                AND key = $2 RETURNING *;`;
    const result = await db.query(q, [req.params.id, req.body.key, !!req.body.pinned]);
-   return res.status(200).send(new ServerResponse(true, result.rows));
+   const [data] = result.rows;
+   return res.status(200).send(new ServerResponse(true, data));
  }
}
@@ -5,15 +5,17 @@ import db from "../config/db";
import {ServerResponse} from "../models/server-response";
import WorklenzControllerBase from "./worklenz-controller-base";
import HandleExceptions from "../decorators/handle-exceptions";
-import {PriorityColorCodes} from "../shared/constants";
+import {PriorityColorCodes, PriorityColorCodesDark} from "../shared/constants";

export default class TaskPrioritiesController extends WorklenzControllerBase {
  @HandleExceptions()
  public static async get(_req: IWorkLenzRequest, res: IWorkLenzResponse): Promise<IWorkLenzResponse> {
    const q = `SELECT id, name, value From task_priorities ORDER BY value;`;
    const result = await db.query(q, []);
-   for (const item of result.rows)
+   for (const item of result.rows) {
      item.color_code = PriorityColorCodes[item.value] || PriorityColorCodes["0"];
+     item.color_code_dark = PriorityColorCodesDark[item.value] || PriorityColorCodesDark["0"];
+   }
    return res.status(200).send(new ServerResponse(true, result.rows));
  }
108
worklenz-backend/src/controllers/task-recurring-controller.ts
Normal file
@@ -0,0 +1,108 @@
import db from "../config/db";

import WorklenzControllerBase from "./worklenz-controller-base";
import HandleExceptions from "../decorators/handle-exceptions";
import { IWorkLenzRequest } from "../interfaces/worklenz-request";
import { IWorkLenzResponse } from "../interfaces/worklenz-response";
import { ServerResponse } from "../models/server-response";
import { calculateNextEndDate, log_error } from "../shared/utils";

export default class TaskRecurringController extends WorklenzControllerBase {
  @HandleExceptions()
  public static async getById(req: IWorkLenzRequest, res: IWorkLenzResponse): Promise<IWorkLenzResponse> {
    const { id } = req.params;
    const q = `SELECT id,
                      schedule_type,
                      days_of_week,
                      date_of_month,
                      day_of_month,
                      week_of_month,
                      interval_days,
                      interval_weeks,
                      interval_months,
                      created_at
               FROM task_recurring_schedules WHERE id = $1;`;
    const result = await db.query(q, [id]);
    const [data] = result.rows;

    return res.status(200).send(new ServerResponse(true, data));
  }

  private static async insertTaskRecurringTemplate(taskId: string, scheduleId: string) {
    const q = `SELECT create_recurring_task_template($1, $2);`;
    await db.query(q, [taskId, scheduleId]);
  }

  @HandleExceptions()
  public static async createTaskSchedule(taskId: string) {
    const q = `INSERT INTO task_recurring_schedules (schedule_type) VALUES ('daily') RETURNING id, schedule_type;`;
    const result = await db.query(q, []);
    const [data] = result.rows;

    const updateQ = `UPDATE tasks SET schedule_id = $1 WHERE id = $2;`;
    await db.query(updateQ, [data.id, taskId]);

    await TaskRecurringController.insertTaskRecurringTemplate(taskId, data.id);

    return data;
  }

  @HandleExceptions()
  public static async removeTaskSchedule(scheduleId: string) {
    const deleteQ = `DELETE FROM task_recurring_schedules WHERE id = $1;`;
    await db.query(deleteQ, [scheduleId]);
  }

  @HandleExceptions()
  public static async updateSchedule(req: IWorkLenzRequest, res: IWorkLenzResponse): Promise<IWorkLenzResponse> {
    const { id } = req.params;
    const { schedule_type, days_of_week, day_of_month, week_of_month, interval_days, interval_weeks, interval_months, date_of_month } = req.body;

    const updateQ = `UPDATE task_recurring_schedules
                     SET schedule_type = $1,
                         days_of_week = $2,
                         date_of_month = $3,
                         day_of_month = $4,
                         week_of_month = $5,
                         interval_days = $6,
                         interval_weeks = $7,
                         interval_months = $8
                     WHERE id = $9;`;
    await db.query(updateQ, [schedule_type, days_of_week, date_of_month, day_of_month, week_of_month, interval_days, interval_weeks, interval_months, id]);
    return res.status(200).send(new ServerResponse(true, null));
  }

  // Function to create the next task in the recurring schedule
  private static async createNextRecurringTask(scheduleId: string, lastTask: any, taskTemplate: any) {
    try {
      const q = "SELECT * FROM task_recurring_schedules WHERE id = $1";
      const { rows: schedules } = await db.query(q, [scheduleId]);

      if (schedules.length === 0) {
        log_error("No schedule found");
        return;
      }

      const [schedule] = schedules;

      // Define the next start date based on the schedule
      const nextStartDate = calculateNextEndDate(schedule, lastTask.start_date);

      const result = await db.query(
        `INSERT INTO tasks (name, start_date, end_date, priority_id, project_id, reporter_id, description, total_minutes, status_id, schedule_id)
         VALUES ($1, $2, $3, $4, $5, $6, $7, $8, $9, $10) RETURNING id;`,
        [
          taskTemplate.name, nextStartDate, null, taskTemplate.priority_id,
          lastTask.project_id, lastTask.reporter_id, taskTemplate.description,
          0, taskTemplate.status_id, scheduleId
        ]
      );
      const [data] = result.rows;

      log_error(`Next task created with id: ${data.id}`);

    } catch (error) {
      log_error("Error creating next recurring task:", error);
    }
  }
}
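calculateNextEndDate itself lives in ../shared/utils and is not part of this diff. For orientation, a hypothetical sketch of what such a helper might look like for the interval-based schedule types (field names taken from the task_recurring_schedules columns above; the real helper may differ):

import moment from "moment";

interface RecurringSchedule {
  schedule_type: "daily" | "weekly" | "monthly";
  interval_days?: number;
  interval_weeks?: number;
  interval_months?: number;
}

// Hypothetical: advances the last start date by the schedule's interval.
function nextOccurrence(schedule: RecurringSchedule, lastStartDate: string): string {
  switch (schedule.schedule_type) {
    case "daily":
      return moment(lastStartDate).add(schedule.interval_days ?? 1, "days").format("YYYY-MM-DD");
    case "weekly":
      return moment(lastStartDate).add(schedule.interval_weeks ?? 1, "weeks").format("YYYY-MM-DD");
    case "monthly":
      return moment(lastStartDate).add(schedule.interval_months ?? 1, "months").format("YYYY-MM-DD");
  }
}

console.log(nextOccurrence({ schedule_type: "weekly", interval_weeks: 2 }, "2025-01-06")); // 2025-01-20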
@@ -54,7 +54,7 @@ export default class TaskStatusesController extends WorklenzControllerBase {

  @HandleExceptions()
  public static async getCategories(req: IWorkLenzRequest, res: IWorkLenzResponse): Promise<IWorkLenzResponse> {
-   const q = `SELECT id, name, color_code, description
+   const q = `SELECT id, name, color_code, color_code_dark, description
               FROM sys_task_status_categories
               ORDER BY index;`;
    const result = await db.query(q, []);

@@ -73,7 +73,7 @@ export default class TaskStatusesController extends WorklenzControllerBase {
  @HandleExceptions()
  public static async getById(req: IWorkLenzRequest, res: IWorkLenzResponse): Promise<IWorkLenzResponse> {
    const q = `
-     SELECT task_statuses.id, task_statuses.name, stsc.color_code
+     SELECT task_statuses.id, task_statuses.name, stsc.color_code, stsc.color_code_dark
      FROM task_statuses
      INNER JOIN sys_task_status_categories stsc ON task_statuses.category_id = stsc.id
      WHERE task_statuses.id = $1

@@ -113,7 +113,7 @@ export default class TaskStatusesController extends WorklenzControllerBase {
      category_id = COALESCE($4, (SELECT id FROM sys_task_status_categories WHERE is_todo IS TRUE))
      WHERE id = $1
        AND project_id = $3
-     RETURNING (SELECT color_code FROM sys_task_status_categories WHERE id = task_statuses.category_id);
+     RETURNING (SELECT color_code FROM sys_task_status_categories WHERE id = task_statuses.category_id), (SELECT color_code_dark FROM sys_task_status_categories WHERE id = task_statuses.category_id);
    `;
    const result = await db.query(q, [req.params.id, req.body.name, req.body.project_id, req.body.category_id]);
    const [data] = result.rows;
@@ -234,4 +234,25 @@ export default class TaskWorklogController extends WorklenzControllerBase {
      res.end();
    });
  }

  @HandleExceptions()
  public static async getAllRunningTimers(req: IWorkLenzRequest, res: IWorkLenzResponse): Promise<IWorkLenzResponse> {
    const q = `SELECT
                 tt.task_id,
                 tt.start_time,
                 t1.name AS task_name,
                 pr.id AS project_id,
                 pr.name AS project_name,
                 t1.parent_task_id,
                 t2.name AS parent_task_name
               FROM task_timers tt
               LEFT JOIN public.tasks t1 ON tt.task_id = t1.id
               LEFT JOIN public.tasks t2 ON t1.parent_task_id = t2.id -- Optimized join for parent task name
               INNER JOIN projects pr ON t1.project_id = pr.id -- INNER JOIN ensures project-team match
               WHERE tt.user_id = $1
                 AND pr.team_id = $2;`;
    const params = [req.user?.id, req.user?.team_id];
    const result = await db.query(q, params);
    return res.status(200).send(new ServerResponse(true, result.rows));
  }
}

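Each row returned by getAllRunningTimers carries the columns selected above. A sketch of the shape a client can expect; the types are inferred from the query, not declared anywhere in this diff:

    interface IRunningTimerRow {
      task_id: string;
      start_time: Date;                 // tt.start_time
      task_name: string;                // t1.name
      project_id: string;               // pr.id
      project_name: string;             // pr.name
      parent_task_id: string | null;
      parent_task_name: string | null;  // null unless the task is a subtask
    }
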
@@ -32,7 +32,51 @@ export default class TasksControllerBase extends WorklenzControllerBase {
  }

  public static updateTaskViewModel(task: any) {
    task.progress = ~~(task.total_minutes_spent / task.total_minutes * 100);
    console.log(`Processing task ${task.id} (${task.name})`);
    console.log(`  manual_progress: ${task.manual_progress}, progress_value: ${task.progress_value}`);
    console.log(`  project_use_manual_progress: ${task.project_use_manual_progress}, project_use_weighted_progress: ${task.project_use_weighted_progress}`);
    console.log(`  has subtasks: ${task.sub_tasks_count > 0}`);

    // For parent tasks (with subtasks), always use calculated progress from subtasks
    if (task.sub_tasks_count > 0) {
      // For parent tasks without manual progress, calculate from subtasks (already done via db function)
      console.log(`  Parent task with subtasks: complete_ratio=${task.complete_ratio}`);

      // Ensure progress matches complete_ratio for consistency
      task.progress = task.complete_ratio || 0;

      // Important: Parent tasks should not have manual progress.
      // If they somehow do, reset it.
      if (task.manual_progress) {
        console.log(`  WARNING: Parent task ${task.id} had manual_progress set to true, resetting`);
        task.manual_progress = false;
        task.progress_value = null;
      }
    }
    // For tasks without subtasks, respect manual progress if set
    else if (task.manual_progress === true && task.progress_value !== null && task.progress_value !== undefined) {
      // For manually set progress, use that value directly
      task.progress = parseInt(task.progress_value);
      task.complete_ratio = parseInt(task.progress_value);

      console.log(`  Using manual progress: progress=${task.progress}, complete_ratio=${task.complete_ratio}`);
    }
    // For tasks with no subtasks and no manual progress, calculate based on time
    else {
      task.progress = task.total_minutes_spent && task.total_minutes
        ? ~~(task.total_minutes_spent / task.total_minutes * 100)
        : 0;

      // Set complete_ratio to match progress
      task.complete_ratio = task.progress;

      console.log(`  Calculated time-based progress: progress=${task.progress}, complete_ratio=${task.complete_ratio}`);
    }

    // Ensure numeric values
    task.progress = parseInt(task.progress) || 0;
    task.complete_ratio = parseInt(task.complete_ratio) || 0;

    task.overdue = task.total_minutes < task.total_minutes_spent;

    task.time_spent = {hours: ~~(task.total_minutes_spent / 60), minutes: task.total_minutes_spent % 60};
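To make the three branches above concrete, a few illustrative cases of what updateTaskViewModel computes (numbers invented for the example):

    // Parent task: sub_tasks_count = 3, complete_ratio = 40 from the db function
    //   -> progress = 40, and manual_progress is forced back to false
    // Leaf task with manual progress: manual_progress = true, progress_value = "60"
    //   -> progress = 60, complete_ratio = 60
    // Leaf task, time-based: total_minutes = 120, total_minutes_spent = 30
    //   -> progress = ~~(30 / 120 * 100) = 25, complete_ratio = 25
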
@@ -73,9 +117,9 @@ export default class TasksControllerBase extends WorklenzControllerBase {
    if (task.timer_start_time)
      task.timer_start_time = moment(task.timer_start_time).valueOf();

-   const totalCompleted = +task.completed_sub_tasks + +task.parent_task_completed;
-   const totalTasks = +task.sub_tasks_count + 1; // +1 for parent
-   task.complete_ratio = TasksControllerBase.calculateTaskCompleteRatio(totalCompleted, totalTasks);
+   // Set completed_count and total_tasks_count regardless of progress calculation method
+   const totalCompleted = (+task.completed_sub_tasks + +task.parent_task_completed) || 0;
+   const totalTasks = +task.sub_tasks_count || 0;
+   task.completed_count = totalCompleted;
+   task.total_tasks_count = totalTasks;

@@ -1,18 +1,19 @@
-import {ParsedQs} from "qs";
+import { ParsedQs } from "qs";

import db from "../config/db";
import HandleExceptions from "../decorators/handle-exceptions";
-import {IWorkLenzRequest} from "../interfaces/worklenz-request";
-import {IWorkLenzResponse} from "../interfaces/worklenz-response";
-import {ServerResponse} from "../models/server-response";
-import {TASK_PRIORITY_COLOR_ALPHA, TASK_STATUS_COLOR_ALPHA, UNMAPPED} from "../shared/constants";
-import {getColor} from "../shared/utils";
-import TasksControllerBase, {GroupBy, ITaskGroup} from "./tasks-controller-base";
+import { IWorkLenzRequest } from "../interfaces/worklenz-request";
+import { IWorkLenzResponse } from "../interfaces/worklenz-response";
+import { ServerResponse } from "../models/server-response";
+import { TASK_PRIORITY_COLOR_ALPHA, TASK_STATUS_COLOR_ALPHA, UNMAPPED } from "../shared/constants";
+import { getColor, log_error } from "../shared/utils";
+import TasksControllerBase, { GroupBy, ITaskGroup } from "./tasks-controller-base";

export class TaskListGroup implements ITaskGroup {
  name: string;
  category_id: string | null;
  color_code: string;
  color_code_dark: string;
  start_date?: string;
  end_date?: string;
  todo_progress: number;
@@ -26,6 +27,7 @@ export class TaskListGroup implements ITaskGroup {
    this.start_date = group.start_date || null;
    this.end_date = group.end_date || null;
    this.color_code = group.color_code + TASK_STATUS_COLOR_ALPHA;
    this.color_code_dark = group.color_code_dark;
    this.todo_progress = 0;
    this.doing_progress = 0;
    this.done_progress = 0;
@@ -95,16 +97,21 @@ export default class TasksControllerV2 extends TasksControllerBase {
    try {
      const result = await db.query("SELECT get_task_complete_ratio($1) AS info;", [taskId]);
      const [data] = result.rows;
-     data.info.ratio = +data.info.ratio.toFixed();
-     return data.info;
+     console.log("data", data);
+     if (data && data.info && data.info.ratio !== undefined) {
+       data.info.ratio = +((data.info.ratio || 0).toFixed());
+       return data.info;
+     }
+     return null;
    } catch (error) {
      log_error(`Error in getTaskCompleteRatio: ${error}`);
      return null;
    }
  }

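The code above treats get_task_complete_ratio as returning a JSON object. Judging from how info is consumed here and again in getTasksOnly further down, its shape is roughly the following; this is an inference from usage, not a documented contract of the db function:

    interface ITaskCompleteRatioInfo {
      ratio: number;           // 0-100; rounded by the callers
      total_completed: number; // completed subtasks counted by the function
      total_tasks: number;     // subtasks considered in the calculation
    }
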
  private static getQuery(userId: string, options: ParsedQs) {
    const searchField = options.search ? "t.name" : "sort_order";
-   const {searchQuery, sortField} = TasksControllerV2.toPaginationOptions(options, searchField);
+   const { searchQuery, sortField } = TasksControllerV2.toPaginationOptions(options, searchField);

    const isSubTasks = !!options.parent_task;

@@ -124,6 +131,33 @@ export default class TasksControllerV2 extends TasksControllerBase {
    const filterByAssignee = TasksControllerV2.getFilterByAssignee(options.filterBy as string);
    // Returns statuses of each task as a json array if filterBy === "member"
    const statusesQuery = TasksControllerV2.getStatusesQuery(options.filterBy as string);

    // Custom columns data query
    const customColumnsQuery = options.customColumns
      ? `, (SELECT COALESCE(
              jsonb_object_agg(
                custom_cols.key,
                custom_cols.value
              ),
              '{}'::JSONB
            )
            FROM (
              SELECT
                cc.key,
                CASE
                  WHEN ccv.text_value IS NOT NULL THEN to_jsonb(ccv.text_value)
                  WHEN ccv.number_value IS NOT NULL THEN to_jsonb(ccv.number_value)
                  WHEN ccv.boolean_value IS NOT NULL THEN to_jsonb(ccv.boolean_value)
                  WHEN ccv.date_value IS NOT NULL THEN to_jsonb(ccv.date_value)
                  WHEN ccv.json_value IS NOT NULL THEN ccv.json_value
                  ELSE NULL::JSONB
                END AS value
              FROM cc_column_values ccv
              JOIN cc_custom_columns cc ON ccv.column_id = cc.id
              WHERE ccv.task_id = t.id
            ) AS custom_cols
            WHERE custom_cols.value IS NOT NULL) AS custom_column_values`
      : "";

    const archivedFilter = options.archived === "true" ? "archived IS TRUE" : "archived IS FALSE";

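When options.customColumns is set, the subquery above folds one row per column value into a single JSONB object keyed by cc.key. A plausible result for one task, with invented keys and values:

    // custom_column_values for one task might come back as:
    //   { "budget": 1500.5, "approved": true, "reviewers": ["user-a", "user-b"] }
    // one entry per cc_custom_columns.key, typed by whichever *_value column was set
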
@@ -163,6 +197,13 @@ export default class TasksControllerV2 extends TasksControllerBase {
    t.archived,
    t.description,
    t.sort_order,
    t.progress_value,
    t.manual_progress,
    t.weight,
    (SELECT use_manual_progress FROM projects WHERE id = t.project_id) AS project_use_manual_progress,
    (SELECT use_weighted_progress FROM projects WHERE id = t.project_id) AS project_use_weighted_progress,
    (SELECT use_time_progress FROM projects WHERE id = t.project_id) AS project_use_time_progress,
    (SELECT get_task_complete_ratio(t.id)->>'ratio') AS complete_ratio,

    (SELECT phase_id FROM task_phase WHERE task_id = t.id) AS phase_id,
    (SELECT name
@@ -173,7 +214,7 @@ export default class TasksControllerV2 extends TasksControllerBase {
     WHERE id = (SELECT phase_id FROM task_phase WHERE task_id = t.id)) AS phase_color_code,

    (EXISTS(SELECT 1 FROM task_subscribers WHERE task_id = t.id)) AS has_subscribers,

    (EXISTS(SELECT 1 FROM task_dependencies td WHERE td.task_id = t.id)) AS has_dependencies,
    (SELECT start_time
     FROM task_timers
     WHERE task_id = t.id
@@ -183,6 +224,10 @@ export default class TasksControllerV2 extends TasksControllerBase {
     FROM sys_task_status_categories
     WHERE id = (SELECT category_id FROM task_statuses WHERE id = t.status_id)) AS status_color,

    (SELECT color_code_dark
     FROM sys_task_status_categories
     WHERE id = (SELECT category_id FROM task_statuses WHERE id = t.status_id)) AS status_color_dark,

    (SELECT COALESCE(ROW_TO_JSON(r), '{}'::JSON)
     FROM (SELECT is_done, is_doing, is_todo
           FROM sys_task_status_categories
@@ -209,7 +254,7 @@ export default class TasksControllerV2 extends TasksControllerBase {
            (SELECT color_code FROM team_labels WHERE id = task_labels.label_id)
     FROM task_labels
     WHERE task_id = t.id) r) AS labels,

    (SELECT is_completed(status_id, project_id)) AS is_complete,
    (SELECT name FROM users WHERE id = t.reporter_id) AS reporter,
    (SELECT id FROM task_priorities WHERE id = t.priority_id) AS priority,
    (SELECT value FROM task_priorities WHERE id = t.priority_id) AS priority_value,
@@ -219,7 +264,9 @@ export default class TasksControllerV2 extends TasksControllerBase {
    updated_at,
    completed_at,
    start_date,
-   END_DATE ${statusesQuery}
+   billable,
+   schedule_id,
+   END_DATE ${customColumnsQuery} ${statusesQuery}
    FROM tasks t
    WHERE ${filters} ${searchQuery}
    ORDER BY ${sortFields}
@@ -235,6 +282,7 @@ export default class TasksControllerV2 extends TasksControllerBase {
        SELECT id,
               name,
               (SELECT color_code FROM sys_task_status_categories WHERE id = task_statuses.category_id),
               (SELECT color_code_dark FROM sys_task_status_categories WHERE id = task_statuses.category_id),
               category_id
        FROM task_statuses
        WHERE project_id = $1
@@ -243,7 +291,7 @@ export default class TasksControllerV2 extends TasksControllerBase {
        params = [projectId];
        break;
      case GroupBy.PRIORITY:
-       q = `SELECT id, name, color_code
+       q = `SELECT id, name, color_code, color_code_dark
             FROM task_priorities
             ORDER BY value DESC;`;
        break;
@@ -261,7 +309,7 @@ export default class TasksControllerV2 extends TasksControllerBase {
        break;
      case GroupBy.PHASE:
        q = `
-         SELECT id, name, color_code, start_date, end_date, sort_index
+         SELECT id, name, color_code, color_code AS color_code_dark, start_date, end_date, sort_index
          FROM project_phases
          WHERE project_id = $1
          ORDER BY sort_index DESC;
@@ -279,8 +327,16 @@ export default class TasksControllerV2 extends TasksControllerBase {

  @HandleExceptions()
  public static async getList(req: IWorkLenzRequest, res: IWorkLenzResponse): Promise<IWorkLenzResponse> {
    // Before doing anything else, refresh task progress values for this project
    if (req.params.id) {
      await this.refreshProjectTaskProgressValues(req.params.id);
    }

    const isSubTasks = !!req.query.parent_task;
    const groupBy = (req.query.group || GroupBy.STATUS) as string;

    // Add customColumns flag to query params
    req.query.customColumns = "true";

    const q = TasksControllerV2.getQuery(req.user?.id as string, req.query);
    const params = isSubTasks ? [req.params.id || null, req.query.parent_task] : [req.params.id || null];
@@ -295,7 +351,7 @@ export default class TasksControllerV2 extends TasksControllerBase {
      return g;
    }, {});

-   this.updateMapByGroup(tasks, groupBy, map);
+   await this.updateMapByGroup(tasks, groupBy, map);

    const updatedGroups = Object.keys(map).map(key => {
      const group = map[key];
@@ -314,12 +370,28 @@ export default class TasksControllerV2 extends TasksControllerBase {
    return res.status(200).send(new ServerResponse(true, updatedGroups));
  }

- public static updateMapByGroup(tasks: any[], groupBy: string, map: { [p: string]: ITaskGroup }) {
+ public static async updateMapByGroup(tasks: any[], groupBy: string, map: { [p: string]: ITaskGroup }) {
    let index = 0;
    const unmapped = [];

    // First, ensure we have the latest progress values for all tasks
    for (const task of tasks) {
      // For any task with subtasks, ensure we have the latest progress values
      if (task.sub_tasks_count > 0) {
        const info = await this.getTaskCompleteRatio(task.id);
        if (info) {
          task.complete_ratio = info.ratio;
          task.progress_value = info.ratio; // Ensure progress_value reflects the calculated ratio
          console.log(`Updated task ${task.name} (${task.id}): complete_ratio=${task.complete_ratio}`);
        }
      }
    }

    // Now group the tasks with their updated progress values
    for (const task of tasks) {
      task.index = index++;
      TasksControllerV2.updateTaskViewModel(task);

      if (groupBy === GroupBy.STATUS) {
        map[task.status]?.tasks.push(task);
      } else if (groupBy === GroupBy.PRIORITY) {
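The map filled by updateMapByGroup is keyed by the grouping value, so map[task.status]?.tasks.push(task) only lands if the task's status id matches a pre-built group. A minimal sketch of the structure being populated, with illustrative ids:

    // map before grouping, keyed by status id (sketch; real groups are TaskListGroup instances)
    const map: { [id: string]: { name: string; tasks: any[] } } = {
      "status-todo-id":  { name: "To do", tasks: [] },
      "status-doing-id": { name: "Doing", tasks: [] }
    };
    // after the loops, each task sits in map[task.status].tasks with refreshed progress fields
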
@@ -355,7 +427,16 @@ export default class TasksControllerV2 extends TasksControllerBase {

  @HandleExceptions()
  public static async getTasksOnly(req: IWorkLenzRequest, res: IWorkLenzResponse): Promise<IWorkLenzResponse> {
    // Before doing anything else, refresh task progress values for this project
    if (req.params.id) {
      await this.refreshProjectTaskProgressValues(req.params.id);
    }

    const isSubTasks = !!req.query.parent_task;

    // Add customColumns flag to query params
    req.query.customColumns = "true";

    const q = TasksControllerV2.getQuery(req.user?.id as string, req.query);
    const params = isSubTasks ? [req.params.id || null, req.query.parent_task] : [req.params.id || null];
    const result = await db.query(q, params);
@@ -367,7 +448,24 @@ export default class TasksControllerV2 extends TasksControllerBase {
      [data] = result.rows;
    } else { // else we return a flat list of tasks
      data = [...result.rows];

      for (const task of data) {
        // For tasks with subtasks, get the complete ratio from the database function
        if (task.sub_tasks_count > 0) {
          try {
            // Use a distinct name so the outer query result is not shadowed
            const ratioResult = await db.query("SELECT get_task_complete_ratio($1) AS info;", [task.id]);
            const [ratioData] = ratioResult.rows;
            if (ratioData && ratioData.info) {
              task.complete_ratio = +(ratioData.info.ratio || 0).toFixed();
              task.completed_count = ratioData.info.total_completed;
              task.total_tasks_count = ratioData.info.total_tasks;
              console.log(`Updated task ${task.id} (${task.name}) from DB: complete_ratio=${task.complete_ratio}`);
            }
          } catch (error) {
            // Proceed with default calculation if database call fails
          }
        }

        TasksControllerV2.updateTaskViewModel(task);
      }
    }
@@ -393,13 +491,60 @@ export default class TasksControllerV2 extends TasksControllerBase {

  @HandleExceptions()
  public static async getNewKanbanTask(req: IWorkLenzRequest, res: IWorkLenzResponse): Promise<IWorkLenzResponse> {
-   const {id} = req.params;
+   const { id } = req.params;
    const result = await db.query("SELECT get_single_task($1) AS task;", [id]);
    const [data] = result.rows;
    const task = TasksControllerV2.updateTaskViewModel(data.task);
    return res.status(200).send(new ServerResponse(true, task));
  }

  @HandleExceptions()
  public static async resetParentTaskManualProgress(parentTaskId: string): Promise<void> {
    try {
      // Check if this task has subtasks
      const subTasksResult = await db.query(
        "SELECT COUNT(*) as subtask_count FROM tasks WHERE parent_task_id = $1 AND archived IS FALSE",
        [parentTaskId]
      );

      const subtaskCount = parseInt(subTasksResult.rows[0]?.subtask_count || "0");

      // If it has subtasks, reset the manual_progress flag to false
      if (subtaskCount > 0) {
        await db.query(
          "UPDATE tasks SET manual_progress = false WHERE id = $1",
          [parentTaskId]
        );
        console.log(`Reset manual progress for parent task ${parentTaskId} with ${subtaskCount} subtasks`);

        // Get the project settings to determine which calculation method to use
        const projectResult = await db.query(
          "SELECT project_id FROM tasks WHERE id = $1",
          [parentTaskId]
        );

        const projectId = projectResult.rows[0]?.project_id;

        if (projectId) {
          // Recalculate the parent task's progress based on its subtasks
          const progressResult = await db.query(
            "SELECT get_task_complete_ratio($1) AS ratio",
            [parentTaskId]
          );

          const progressRatio = progressResult.rows[0]?.ratio?.ratio || 0;

          // Emit the updated progress value to all clients.
          // Note: We don't have socket context here, so we can't directly emit;
          // this will be picked up on the next client refresh.
          console.log(`Recalculated progress for parent task ${parentTaskId}: ${progressRatio}%`);
        }
      }
    } catch (error) {
      log_error(`Error resetting parent task manual progress: ${error}`);
    }
  }

  @HandleExceptions()
  public static async convertToSubtask(req: IWorkLenzRequest, res: IWorkLenzResponse): Promise<IWorkLenzResponse> {

@@ -439,6 +584,11 @@ export default class TasksControllerV2 extends TasksControllerBase {
      ? [req.body.id, req.body.to_group_id]
      : [req.body.id, req.body.project_id, req.body.parent_task_id, req.body.to_group_id];
    await db.query(q, params);

    // Reset the parent task's manual progress when converting a task to a subtask
    if (req.body.parent_task_id) {
      await this.resetParentTaskManualProgress(req.body.parent_task_id);
    }

    const result = await db.query("SELECT get_single_task($1) AS task;", [req.body.id]);
    const [data] = result.rows;
@@ -474,9 +624,333 @@ export default class TasksControllerV2 extends TasksControllerBase {

  }

  public static async getTasksByName(searchString: string, projectId: string, taskId: string) {
    // Bind the search string as a parameter instead of interpolating it into the SQL,
    // which would otherwise be an injection vector
    const q = `SELECT id AS value,
                      name AS label,
                      CONCAT((SELECT key FROM projects WHERE id = t.project_id), '-', task_no) AS task_key
               FROM tasks t
               WHERE t.name ILIKE '%' || $3 || '%'
                 AND t.project_id = $1 AND t.id != $2
               LIMIT 15;`;
    const result = await db.query(q, [projectId, taskId, searchString]);

    return result.rows;
  }

  @HandleExceptions()
  public static async getSubscribers(req: IWorkLenzRequest, res: IWorkLenzResponse): Promise<IWorkLenzResponse> {
    const subscribers = await this.getTaskSubscribers(req.params.id);
    return res.status(200).send(new ServerResponse(true, subscribers));
  }

  @HandleExceptions()
  public static async searchTasks(req: IWorkLenzRequest, res: IWorkLenzResponse): Promise<IWorkLenzResponse> {
    const { projectId, taskId, searchQuery } = req.query;
    const tasks = await this.getTasksByName(searchQuery as string, projectId as string, taskId as string);
    return res.status(200).send(new ServerResponse(true, tasks));
  }

  @HandleExceptions()
  public static async getTaskDependencyStatus(req: IWorkLenzRequest, res: IWorkLenzResponse): Promise<IWorkLenzResponse> {
    const { statusId, taskId } = req.query;
    const canContinue = await TasksControllerV2.checkForCompletedDependencies(taskId as string, statusId as string);
    return res.status(200).send(new ServerResponse(true, { can_continue: canContinue }));
  }

  @HandleExceptions()
  public static async checkForCompletedDependencies(taskId: string, nextStatusId: string): Promise<boolean> {
    const q = `SELECT
                 CASE
                   WHEN EXISTS (
                     -- Check if the status id is not in the "done" category
                     SELECT 1
                     FROM task_statuses ts
                     WHERE ts.id = $2
                       AND ts.project_id = (SELECT project_id FROM tasks WHERE id = $1)
                       AND ts.category_id IN (
                         SELECT id FROM sys_task_status_categories WHERE is_done IS FALSE
                       )
                   ) THEN TRUE -- If status is not in the "done" category, continue immediately (TRUE)

                   WHEN EXISTS (
                     -- Check if any dependent tasks are not completed
                     SELECT 1
                     FROM task_dependencies td
                     LEFT JOIN public.tasks t ON t.id = td.related_task_id
                     WHERE td.task_id = $1
                       AND t.status_id NOT IN (
                         SELECT id
                         FROM task_statuses ts
                         WHERE t.project_id = ts.project_id
                           AND ts.category_id IN (
                             SELECT id FROM sys_task_status_categories WHERE is_done IS TRUE
                           )
                       )
                   ) THEN FALSE -- If there are incomplete dependent tasks, do not continue (FALSE)

                   ELSE TRUE -- Continue if no other conditions block the process
                 END AS can_continue;`;
    const result = await db.query(q, [taskId, nextStatusId]);
    const [data] = result.rows;

    return data.can_continue;
  }

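A usage sketch of the dependency gate above: before applying a move into a done-category status, a caller can ask whether every dependency is complete. The surrounding handler is hypothetical; only the call itself comes from this diff:

    // e.g. inside a status-change handler (sketch)
    const canContinue = await TasksControllerV2.checkForCompletedDependencies(taskId, nextStatusId);
    if (!canContinue) {
      // block the move: some related_task_id rows are not yet in a done category
    }
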
  public static async getTaskStatusColor(status_id: string) {
    try {
      const q = `SELECT color_code, color_code_dark
                 FROM sys_task_status_categories
                 WHERE id = (SELECT category_id FROM task_statuses WHERE id = $1)`;
      const result = await db.query(q, [status_id]);
      const [data] = result.rows;
      return data;
    } catch (e) {
      log_error(e);
    }
  }

  @HandleExceptions()
  public static async assignLabelsToTask(req: IWorkLenzRequest, res: IWorkLenzResponse): Promise<IWorkLenzResponse> {
    const { id } = req.params;
    const { labels }: { labels: string[] } = req.body;

    // Await each toggle so the response is not sent before the queries finish
    // (an async callback inside forEach would fire and forget)
    for (const label of labels) {
      const q = `SELECT add_or_remove_task_label($1, $2) AS labels;`;
      await db.query(q, [id, label]);
    }
    return res.status(200).send(new ServerResponse(true, null, "Labels assigned successfully"));
  }

  /**
   * Updates a custom column value for a task
   * @param req The request object
   * @param res The response object
   */
  @HandleExceptions()
  public static async updateCustomColumnValue(
    req: IWorkLenzRequest,
    res: IWorkLenzResponse
  ): Promise<IWorkLenzResponse> {
    const { taskId } = req.params;
    const { column_key, value, project_id } = req.body;

    if (!taskId || !column_key || value === undefined || !project_id) {
      return res.status(400).send(new ServerResponse(false, "Missing required parameters"));
    }

    // Get column information
    const columnQuery = `
      SELECT id, field_type
      FROM cc_custom_columns
      WHERE project_id = $1 AND key = $2
    `;
    const columnResult = await db.query(columnQuery, [project_id, column_key]);

    if (columnResult.rowCount === 0) {
      return res.status(404).send(new ServerResponse(false, "Custom column not found"));
    }

    const column = columnResult.rows[0];
    const columnId = column.id;
    const fieldType = column.field_type;

    // Determine which value field to use based on the field_type
    let textValue = null;
    let numberValue = null;
    let dateValue = null;
    let booleanValue = null;
    let jsonValue = null;

    switch (fieldType) {
      case "number":
        numberValue = parseFloat(String(value));
        break;
      case "date":
        dateValue = new Date(String(value));
        break;
      case "checkbox":
        booleanValue = Boolean(value);
        break;
      case "people":
        jsonValue = JSON.stringify(Array.isArray(value) ? value : [value]);
        break;
      default:
        textValue = String(value);
    }

    // Check if a value already exists
    const existingValueQuery = `
      SELECT id
      FROM cc_column_values
      WHERE task_id = $1 AND column_id = $2
    `;
    const existingValueResult = await db.query(existingValueQuery, [taskId, columnId]);

    if (existingValueResult.rowCount && existingValueResult.rowCount > 0) {
      // Update existing value
      const updateQuery = `
        UPDATE cc_column_values
        SET text_value = $1,
            number_value = $2,
            date_value = $3,
            boolean_value = $4,
            json_value = $5,
            updated_at = NOW()
        WHERE task_id = $6 AND column_id = $7
      `;
      await db.query(updateQuery, [
        textValue,
        numberValue,
        dateValue,
        booleanValue,
        jsonValue,
        taskId,
        columnId
      ]);
    } else {
      // Insert new value
      const insertQuery = `
        INSERT INTO cc_column_values
          (task_id, column_id, text_value, number_value, date_value, boolean_value, json_value, created_at, updated_at)
        VALUES ($1, $2, $3, $4, $5, $6, $7, NOW(), NOW())
      `;
      await db.query(insertQuery, [
        taskId,
        columnId,
        textValue,
        numberValue,
        dateValue,
        booleanValue,
        jsonValue
      ]);
    }

    return res.status(200).send(new ServerResponse(true, {
      task_id: taskId,
      column_key,
      value
    }));
  }

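For reference, a request that would pass the validation at the top of updateCustomColumnValue: taskId travels in the path, the rest in the body. Names come from the destructuring above; the values are illustrative:

    // request body sketch for updateCustomColumnValue
    const body = {
      column_key: "budget",      // must match cc_custom_columns.key for the project
      value: 1500.5,             // routed to number_value when field_type === "number"
      project_id: "project-uuid"
    };
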
  public static async refreshProjectTaskProgressValues(projectId: string): Promise<void> {
    try {
      // projectId is interpolated below because DO blocks cannot take bind parameters,
      // so reject anything that is not a plain UUID to rule out SQL injection
      if (!/^[0-9a-fA-F]{8}-[0-9a-fA-F]{4}-[0-9a-fA-F]{4}-[0-9a-fA-F]{4}-[0-9a-fA-F]{12}$/.test(projectId)) {
        log_error(`refreshProjectTaskProgressValues: invalid project id "${projectId}"`);
        return;
      }

      // Run the recalculation only for tasks in this project
      const query = `
        DO $$
        BEGIN
          -- First, reset manual_progress flag for all tasks that have subtasks within this project
          UPDATE tasks AS t
          SET manual_progress = FALSE
          WHERE project_id = '${projectId}'
            AND EXISTS (
              SELECT 1
              FROM tasks
              WHERE parent_task_id = t.id
                AND archived IS FALSE
            );

          -- Start recalculation from leaf tasks (no subtasks) and propagate upward.
          -- This ensures calculations are done in the right order.
          WITH RECURSIVE task_hierarchy AS (
            -- Base case: Start with all leaf tasks (no subtasks) in this project
            SELECT
              id,
              parent_task_id,
              0 AS level
            FROM tasks
            WHERE project_id = '${projectId}'
              AND NOT EXISTS (
                SELECT 1 FROM tasks AS sub
                WHERE sub.parent_task_id = tasks.id
                  AND sub.archived IS FALSE
              )
              AND archived IS FALSE

            UNION ALL

            -- Recursive case: Move up to parent tasks, but only after processing all their children
            SELECT
              t.id,
              t.parent_task_id,
              th.level + 1
            FROM tasks t
            JOIN task_hierarchy th ON t.id = th.parent_task_id
            WHERE t.archived IS FALSE
          )
          -- Sort by level to ensure we calculate in the right order (leaves first, then parents)
          UPDATE tasks
          SET progress_value = (SELECT (get_task_complete_ratio(tasks.id)->>'ratio')::FLOAT)
          FROM (
            SELECT id, level
            FROM task_hierarchy
            ORDER BY level
          ) AS ordered_tasks
          WHERE tasks.id = ordered_tasks.id
            AND tasks.project_id = '${projectId}'
            AND (manual_progress IS FALSE OR manual_progress IS NULL);
        END $$;
      `;

      await db.query(query);
      console.log(`Finished refreshing progress values for project ${projectId}`);
    } catch (error) {
      log_error("Error refreshing project task progress values", error);
    }
  }

  public static async updateTaskProgress(taskId: string): Promise<void> {
    try {
      // Calculate the task's progress using get_task_complete_ratio
      const result = await db.query("SELECT get_task_complete_ratio($1) AS info;", [taskId]);
      const [data] = result.rows;

      if (data && data.info && data.info.ratio !== undefined) {
        const progressValue = +((data.info.ratio || 0).toFixed());

        // Update the task's progress_value in the database
        await db.query(
          "UPDATE tasks SET progress_value = $1 WHERE id = $2",
          [progressValue, taskId]
        );

        console.log(`Updated progress for task ${taskId} to ${progressValue}%`);

        // If this task has a parent, update the parent's progress as well
        const parentResult = await db.query(
          "SELECT parent_task_id FROM tasks WHERE id = $1",
          [taskId]
        );

        if (parentResult.rows.length > 0 && parentResult.rows[0].parent_task_id) {
          await this.updateTaskProgress(parentResult.rows[0].parent_task_id);
        }
      }
    } catch (error) {
      log_error(`Error updating task progress: ${error}`);
    }
  }

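Because updateTaskProgress walks parent_task_id upward, one leaf update ripples through the whole chain. A small worked example under the logic above, with invented ids:

    // tasks: C (leaf) -> parent B -> parent A (no parent)
    // updateTaskProgress("C") recalculates C and writes its progress_value,
    // then recurses to B, then to A; the recursion stops at a task whose
    // parent_task_id is null, so depth equals the nesting depth and each
    // level costs one ratio query plus one UPDATE.
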
  // Update progress when a task's weight is changed
  public static async updateTaskWeight(taskId: string, weight: number): Promise<void> {
    try {
      // Update the task's weight
      await db.query(
        "UPDATE tasks SET weight = $1 WHERE id = $2",
        [weight, taskId]
      );

      // Get the parent task ID
      const parentResult = await db.query(
        "SELECT parent_task_id FROM tasks WHERE id = $1",
        [taskId]
      );

      // If this task has a parent, update the parent's progress
      if (parentResult.rows.length > 0 && parentResult.rows[0].parent_task_id) {
        await this.updateTaskProgress(parentResult.rows[0].parent_task_id);
      }
    } catch (error) {
      log_error(`Error updating task weight: ${error}`);
    }
  }
}

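The project_use_weighted_progress flag surfaced in getQuery suggests the db-side ratio can weight subtasks by their weight column. The usual formula is sketched below; this is an assumption about what get_task_complete_ratio does, not a reading of its source:

    // assumed weighted progress: sum(progress_i * weight_i) / sum(weight_i)
    function weightedProgress(subtasks: { progress: number; weight: number }[]): number {
      const totalWeight = subtasks.reduce((s, t) => s + t.weight, 0);
      if (totalWeight === 0) return 0;
      return subtasks.reduce((s, t) => s + t.progress * t.weight, 0) / totalWeight;
    }
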
@@ -6,9 +6,9 @@ import { IWorkLenzResponse } from "../interfaces/worklenz-response";
import db from "../config/db";

import { ServerResponse } from "../models/server-response";
-import { TASK_STATUS_COLOR_ALPHA } from "../shared/constants";
+import { S3_URL, TASK_STATUS_COLOR_ALPHA } from "../shared/constants";
import { getDates, getMinMaxOfTaskDates, getMonthRange, getWeekRange } from "../shared/tasks-controller-utils";
-import { getColor, getRandomColorCode, log_error, toMinutes } from "../shared/utils";
+import { getColor, getRandomColorCode, humanFileSize, log_error, toMinutes } from "../shared/utils";
import WorklenzControllerBase from "./worklenz-controller-base";
import HandleExceptions from "../decorators/handle-exceptions";
import { NotificationsService } from "../services/notifications/notifications.service";
@@ -18,9 +18,9 @@ import TasksControllerV2 from "./tasks-controller-v2";
import { IO } from "../shared/io";
import { SocketEvents } from "../socket.io/events";
import TasksControllerBase from "./tasks-controller-base";
-import { insertToActivityLogs, logStatusChange } from "../services/activity-logs/activity-logs.service";
-import { forEach } from "lodash";
+import { insertToActivityLogs } from "../services/activity-logs/activity-logs.service";
import { IActivityLog } from "../services/activity-logs/interfaces";
import { getKey, getRootDir, uploadBase64 } from "../shared/s3";

export default class TasksController extends TasksControllerBase {
  private static notifyProjectUpdates(socketId: string, projectId: string) {
@@ -29,14 +29,54 @@ export default class TasksController extends TasksControllerBase {
      .emit(SocketEvents.PROJECT_UPDATES_AVAILABLE.toString());
  }

  public static async uploadAttachment(attachments: any, teamId: string, userId: string) {
    try {
      const promises = attachments.map(async (attachment: any) => {
        const { file, file_name, project_id, size } = attachment;
        const type = file_name.split(".").pop();

        const q = `
          INSERT INTO task_attachments (name, task_id, team_id, project_id, uploaded_by, size, type)
          VALUES ($1, $2, $3, $4, $5, $6, $7)
          RETURNING id, name, size, type, created_at, CONCAT($8::TEXT, '/', team_id, '/', project_id, '/', id, '.', type) AS url;
        `;

        const result = await db.query(q, [
          file_name,
          null,
          teamId,
          project_id,
          userId,
          size,
          type,
          `${S3_URL}/${getRootDir()}`
        ]);

        const [data] = result.rows;
        await uploadBase64(file, getKey(teamId, project_id, data.id, data.type));
        return data.id;
      });

      const attachmentIds = await Promise.all(promises);
      return attachmentIds;
    } catch (error) {
      log_error(error);
    }
  }

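Each attachments_raw entry consumed above needs at least the four destructured fields. A sketch of one entry; the field names come from the code, the values are illustrative, and the base64 payload is whatever uploadBase64 accepts:

    const attachment = {
      file: "JVBERi0x...",   // base64 payload handed to uploadBase64
      file_name: "spec.pdf", // the extension becomes the stored type
      project_id: "project-uuid",
      size: 48213            // bytes, stored as-is
    };
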
  @HandleExceptions()
  public static async create(req: IWorkLenzRequest, res: IWorkLenzResponse): Promise<IWorkLenzResponse> {
    const userId = req.user?.id as string;
    const teamId = req.user?.team_id as string;

    if (req.body.attachments_raw) {
      req.body.attachments = await this.uploadAttachment(req.body.attachments_raw, teamId, userId);
    }

    const q = `SELECT create_task($1) AS task;`;
    const result = await db.query(q, [JSON.stringify(req.body)]);
    const [data] = result.rows;

-   const userId = req.user?.id as string;

    for (const member of data?.task.assignees || []) {
      NotificationsService.createTaskUpdate(
        "ASSIGN",
@@ -468,7 +508,7 @@ export default class TasksController extends TasksControllerBase {

    TasksController.notifyProjectUpdates(req.user?.socket_id as string, req.query.project as string);

-   return res.status(200).send(new ServerResponse(true, data));
+   return res.status(200).send(new ServerResponse(true, { failed_tasks: data.task }));
  }

  @HandleExceptions()

@@ -13,7 +13,9 @@ import { SocketEvents } from "../socket.io/events";
import WorklenzControllerBase from "./worklenz-controller-base";
import HandleExceptions from "../decorators/handle-exceptions";
import { formatDuration, getColor } from "../shared/utils";
-import { TEAM_MEMBER_TREE_MAP_COLOR_ALPHA } from "../shared/constants";
+import { statusExclude, TEAM_MEMBER_TREE_MAP_COLOR_ALPHA } from "../shared/constants";
import { checkTeamSubscriptionStatus } from "../shared/paddle-utils";
import { updateUsers } from "../shared/paddle-requests";
import { NotificationsService } from "../services/notifications/notifications.service";

export default class TeamMembersController extends WorklenzControllerBase {
@@ -80,6 +82,98 @@ export default class TeamMembersController extends WorklenzControllerBase {
      return res.status(200).send(new ServerResponse(false, "Required fields are missing."));
    }

    /**
     * Checks the subscription status of the team.
     * @type {Object} subscriptionData - Object containing subscription information
     */
    const subscriptionData = await checkTeamSubscriptionStatus(req.user?.team_id);

    let incrementBy = 0;

    // Handle self-hosted subscriptions differently
    if (subscriptionData.subscription_type === "SELF_HOSTED") {
      // Check if users exist and add them if they don't
      await Promise.all(req.body.emails.map(async (email: string) => {
        const trimmedEmail = email.trim();
        const userExists = await this.checkIfUserAlreadyExists(req.user?.owner_id as string, trimmedEmail);
        if (!userExists) {
          incrementBy = incrementBy + 1;
        }
      }));

      // Create or invite new members
      const newMembers = await this.createOrInviteMembers(req.body, req.user);
      return res.status(200).send(new ServerResponse(true, newMembers, `Your teammates will get an email that gives them access to your team.`).withTitle("Invitations sent"));
    }

    /**
     * Iterates through each email in the request body and checks if the user already exists.
     * If the user doesn't exist, increments the counter.
     * @param {string} email - Email address to check
     */
    await Promise.all(req.body.emails.map(async (email: string) => {
      const trimmedEmail = email.trim();

      const userExists = await this.checkIfUserAlreadyExists(req.user?.owner_id as string, trimmedEmail);
      const isUserActive = await this.checkIfUserActiveInOtherTeams(req.user?.owner_id as string, trimmedEmail);

      if (!userExists || !isUserActive) {
        incrementBy = incrementBy + 1;
      }
    }));

    /**
     * Checks various conditions to determine if the maximum number of lifetime users is exceeded.
     * Sends a response if the limit is reached.
     */
    if (
      incrementBy > 0
      && subscriptionData.is_ltd
      && subscriptionData.current_count
      && ((parseInt(subscriptionData.current_count) + req.body.emails.length) > parseInt(subscriptionData.ltd_users))) {
      return res.status(200).send(new ServerResponse(false, null, "Cannot exceed the maximum number of lifetime users."));
    }

    if (
      subscriptionData.is_ltd
      && subscriptionData.current_count
      && ((parseInt(subscriptionData.current_count) + incrementBy) > parseInt(subscriptionData.ltd_users))) {
      return res.status(200).send(new ServerResponse(false, null, "Cannot exceed the maximum number of lifetime users."));
    }

    /**
     * Checks subscription details and updates the user count if applicable.
     * Sends a response if there is an issue with the subscription.
     */
    // if (!subscriptionData.is_credit && !subscriptionData.is_custom && subscriptionData.subscription_status === "active") {
    //   const response = await updateUsers(subscriptionData.subscription_id, (subscriptionData.quantity + incrementBy));

    //   if (!response.body.subscription_id) {
    //     return res.status(200).send(new ServerResponse(false, null, response.message || "Please check your subscription."));
    //   }
    // }

    if (!subscriptionData.is_credit && !subscriptionData.is_custom && subscriptionData.subscription_status === "active") {
      const updatedCount = parseInt(subscriptionData.current_count) + incrementBy;
      const requiredSeats = updatedCount - subscriptionData.quantity;
      if (updatedCount > subscriptionData.quantity) {
        const obj = {
          seats_enough: false,
          required_count: requiredSeats,
          current_seat_amount: subscriptionData.quantity
        };
        return res.status(200).send(new ServerResponse(false, obj, null));
      }
    }

    /**
     * Checks if the subscription status is in the exclusion list.
     * Sends a response if the status is excluded.
     */
    if (statusExclude.includes(subscriptionData.subscription_status)) {
      return res.status(200).send(new ServerResponse(false, null, "Unable to add user! Please check your subscription status."));
    }

    /**
     * Creates or invites new members based on the request body and user information.
     * Sends a response with the result.

@@ -93,12 +187,24 @@ export default class TeamMembersController extends WorklenzControllerBase {
    req.query.field = ["is_owner", "active", "u.name", "u.email"];
    req.query.order = "descend";

    // Helper function to check for encoded components
    function containsEncodedComponents(x: string) {
      return decodeURI(x) !== decodeURIComponent(x);
    }

    // Decode search parameter if it contains encoded components
    if (req.query.search && typeof req.query.search === "string") {
      if (containsEncodedComponents(req.query.search)) {
        req.query.search = decodeURIComponent(req.query.search);
      }
    }

    const {
-     searchQuery,
-     sortField,
-     sortOrder,
-     size,
-     offset
+     searchQuery,
+     sortField,
+     sortOrder,
+     size,
+     offset
    } = this.toPaginationOptions(req.query, ["u.name", "u.email"], true);

    const paginate = req.query.all === "false" ? `LIMIT ${size} OFFSET ${offset}` : "";
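containsEncodedComponents works because decodeURI leaves %-escapes for reserved characters such as "@" intact, while decodeURIComponent resolves them, so the two results differ exactly when component-encoded characters are present. For example:

    decodeURI("john%40example.com");          // "john%40example.com" (unchanged)
    decodeURIComponent("john%40example.com"); // "john@example.com"
    // -> containsEncodedComponents("john%40example.com") === true
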
@@ -126,7 +232,7 @@ export default class TeamMembersController extends WorklenzControllerBase {
            ELSE FALSE END) AS is_owner,
    (SELECT email
     FROM team_member_info_view
-    WHERE team_member_info_view.team_member_id = team_members.id),
+    WHERE team_member_info_view.team_member_id = team_members.id) AS email,
    EXISTS(SELECT email
           FROM email_invitations
           WHERE team_member_id = team_members.id
@@ -277,12 +383,33 @@ export default class TeamMembersController extends WorklenzControllerBase {

    if (!id || !req.user?.team_id) return res.status(200).send(new ServerResponse(false, "Required fields are missing."));

    // check subscription status
    const subscriptionData = await checkTeamSubscriptionStatus(req.user?.team_id);
    if (statusExclude.includes(subscriptionData.subscription_status)) {
      return res.status(200).send(new ServerResponse(false, "Please check your subscription status."));
    }

    const q = `SELECT remove_team_member($1, $2, $3) AS member;`;
    const result = await db.query(q, [id, req.user?.id, req.user?.team_id]);
    const [data] = result.rows;

    const message = `You have been removed from <b>${req.user?.team_name}</b> by <b>${req.user?.name}</b>`;

    // if (subscriptionData.status === "trialing") break;
    // if (!subscriptionData.is_credit && !subscriptionData.is_custom) {
    //   if (subscriptionData.subscription_status === "active" && subscriptionData.quantity > 0) {
    //     const obj = await getActiveTeamMemberCount(req.user?.owner_id ?? "");
    //     // const activeObj = await getActiveTeamMemberCount(req.user?.owner_id ?? "");

    //     const userActiveInOtherTeams = await this.checkIfUserActiveInOtherTeams(req.user?.owner_id as string, req.query?.email as string);

    //     if (!userActiveInOtherTeams) {
    //       const response = await updateUsers(subscriptionData.subscription_id, obj.user_count);
    //       if (!response.body.subscription_id) return res.status(200).send(new ServerResponse(false, response.message || "Please check your subscription."));
    //     }
    //   }
    // }

    NotificationsService.sendNotification({
      receiver_socket_id: data.socket_id,
      message,
@@ -871,20 +998,68 @@ export default class TeamMembersController extends WorklenzControllerBase {
  public static async toggleMemberActiveStatus(req: IWorkLenzRequest, res: IWorkLenzResponse) {
    if (!req.user?.team_id) return res.status(200).send(new ServerResponse(false, "Required fields are missing."));

-   const q1 = `SELECT active FROM team_members WHERE id = $1;`;
-   const result1 = await db.query(q1, [req.params?.id]);
-   const [status] = result1.rows;
-
-   if (status.active) {
-     const updateQ1 = `UPDATE users
-                       SET active_team = (SELECT id FROM teams WHERE user_id = users.id ORDER BY created_at DESC LIMIT 1)
-                       WHERE id = (SELECT user_id FROM team_members WHERE id = $1 AND active IS TRUE LIMIT 1);`;
-     await db.query(updateQ1, [req.params?.id]);
    // check subscription status
    const subscriptionData = await checkTeamSubscriptionStatus(req.user?.team_id);
    if (statusExclude.includes(subscriptionData.subscription_status)) {
      return res.status(200).send(new ServerResponse(false, "Please check your subscription status."));
    }

-   const q = `UPDATE team_members SET active = NOT active WHERE id = $1 RETURNING active;`;
-   const result = await db.query(q, [req.params?.id]);
-   const [data] = result.rows;
+   let data: any;

    if (req.query.active === "true") {
      const q1 = `SELECT active FROM team_members WHERE id = $1;`;
      const result1 = await db.query(q1, [req.params?.id]);
      const [status] = result1.rows;

      if (status.active) {
        const updateQ1 = `UPDATE users
                          SET active_team = (SELECT id FROM teams WHERE user_id = users.id ORDER BY created_at DESC LIMIT 1)
                          WHERE id = (SELECT user_id FROM team_members WHERE id = $1 AND active IS TRUE LIMIT 1);`;
        await db.query(updateQ1, [req.params?.id]);
      }

      const q = `UPDATE team_members SET active = NOT active WHERE id = $1 RETURNING active;`;
      const result = await db.query(q, [req.params?.id]);
      data = result.rows[0];

      // const userExists = await this.checkIfUserActiveInOtherTeams(req.user?.owner_id as string, req.query?.email as string);

      // if (subscriptionData.status === "trialing") break;
      // if (!userExists && !subscriptionData.is_credit && !subscriptionData.is_custom) {
      //   if (subscriptionData.subscription_status === "active" && subscriptionData.quantity > 0) {
      //     const operator = req.query.active === "true" ? - 1 : + 1;
      //     const response = await updateUsers(subscriptionData.subscription_id, subscriptionData.quantity + operator);
      //     if (!response.body.subscription_id) return res.status(200).send(new ServerResponse(false, response.message || "Please check your subscription."));
      //   }
      // }
    } else {

      const userExists = await this.checkIfUserActiveInOtherTeams(req.user?.owner_id as string, req.query?.email as string);

      // if (subscriptionData.status === "trialing") break;
      // if (!userExists && !subscriptionData.is_credit && !subscriptionData.is_custom) {
      //   if (subscriptionData.subscription_status === "active" && subscriptionData.quantity > 0) {
      //     const operator = req.query.active === "true" ? - 1 : + 1;
      //     const response = await updateUsers(subscriptionData.subscription_id, subscriptionData.quantity + operator);
      //     if (!response.body.subscription_id) return res.status(200).send(new ServerResponse(false, response.message || "Please check your subscription."));
      //   }
      // }

      const q1 = `SELECT active FROM team_members WHERE id = $1;`;
      const result1 = await db.query(q1, [req.params?.id]);
      const [status] = result1.rows;

      if (status.active) {
        const updateQ1 = `UPDATE users
                          SET active_team = (SELECT id FROM teams WHERE user_id = users.id ORDER BY created_at DESC LIMIT 1)
                          WHERE id = (SELECT user_id FROM team_members WHERE id = $1 AND active IS TRUE LIMIT 1);`;
        await db.query(updateQ1, [req.params?.id]);
      }

      const q = `UPDATE team_members SET active = NOT active WHERE id = $1 RETURNING active;`;
      const result = await db.query(q, [req.params?.id]);
      data = result.rows[0];
    }

    return res.status(200).send(new ServerResponse(true, [], `Team member ${data.active ? "activated" : "deactivated"} successfully.`));
  }
@@ -899,6 +1074,21 @@ export default class TeamMembersController extends WorklenzControllerBase {

    if (!req.body.team_id || !req.user?.id) return res.status(200).send(new ServerResponse(false, "Required fields are missing."));

    // check the subscription status
    const subscriptionData = await checkTeamSubscriptionStatus(req.body.team_id);

    if (statusExclude.includes(subscriptionData.subscription_status)) {
      return res.status(200).send(new ServerResponse(false, "Please check your subscription status."));
    }

    // if (subscriptionData.status === "trialing") break;
    if (!subscriptionData.is_credit && !subscriptionData.is_custom) {
      if (subscriptionData.subscription_status === "active") {
        const response = await updateUsers(subscriptionData.subscription_id, subscriptionData.quantity + (req.body.emails.length || 1));
        if (!response.body.subscription_id) return res.status(200).send(new ServerResponse(false, response.message || "Please check your subscription."));
      }
    }

    const newMembers = await this.createOrInviteMembers(req.body, req.user);
    return res.status(200).send(new ServerResponse(true, newMembers, `Your teammates will get an email that gives them access to your team.`).withTitle("Invitations sent"));
  }

@@ -16,8 +16,8 @@ export default class TimezonesController extends WorklenzControllerBase {

  @HandleExceptions()
  public static async update(req: IWorkLenzRequest, res: IWorkLenzResponse): Promise<IWorkLenzResponse> {
-   const q = `UPDATE users SET timezone_id = $2 WHERE id = $1;`;
-   const result = await db.query(q, [req.user?.id, req.body.timezone]);
-   return res.status(200).send(new ServerResponse(true, result.rows, "Timezone updated"));
+   const q = `UPDATE users SET timezone_id = $2, language = $3 WHERE id = $1;`;
+   const result = await db.query(q, [req.user?.id, req.body.timezone, req.body.language]);
+   return res.status(200).send(new ServerResponse(true, result.rows, "Updated successfully"));
  }
}

@@ -12,8 +12,8 @@ export function mapMembersWithAnd(members: string) {
}

export function getBaseUrl() {
- if (isLocalServer()) return `http://${process.env.HOSTNAME}`;
- return `https://${process.env.HOSTNAME}`;
+ if (isLocalServer()) return `http://${process.env.FRONTEND_URL}`;
+ return `https://${process.env.FRONTEND_URL}`;
}

function mapMembers(project: ITaskAssignmentModelProject) {

@@ -1,9 +1,11 @@
import {startDailyDigestJob} from "./daily-digest-job";
import {startNotificationsJob} from "./notifications-job";
import {startProjectDigestJob} from "./project-digest-job";
import {startRecurringTasksJob} from "./recurring-tasks";

export function startCronJobs() {
  startNotificationsJob();
  startDailyDigestJob();
  startProjectDigestJob();
  // startRecurringTasksJob();
}

@@ -7,6 +7,8 @@ import {sendProjectDailyDigest} from "../shared/email-notifications";

// At 11:00+00 (4.30pm+530) on every day-of-month if it's on every day-of-week from Monday through Friday.
const TIME = "0 11 */1 * 1-5";
// const TIME = "0/10 * * * *";

// const TIME = "* * * * *";

const log = (value: any) => console.log("project-digest-cron-job:", value);

113
worklenz-backend/src/cron_jobs/recurring-tasks.ts
Normal file
@@ -0,0 +1,113 @@
import { CronJob } from "cron";
import { calculateNextEndDate, log_error } from "../shared/utils";
import db from "../config/db";
import { IRecurringSchedule, ITaskTemplate } from "../interfaces/recurring-tasks";
import moment from "moment";
import TasksController from "../controllers/tasks-controller";

// At 11:00 UTC (4.30pm +5:30), Monday through Friday.
// const TIME = "0 11 */1 * 1-5";
const TIME = "*/2 * * * *"; // runs every 2 minutes - for testing purposes
const TIME_FORMAT = "YYYY-MM-DD";
// const TIME = "0 0 * * *"; // runs at midnight every day

const log = (value: any) => console.log("recurring-task-cron-job:", value);

async function onRecurringTaskJobTick() {
  try {
    log("(cron) Recurring tasks job started.");

    const templatesQuery = `
      SELECT t.*, s.*, (SELECT MAX(end_date) FROM tasks WHERE schedule_id = s.id) as last_task_end_date
      FROM task_recurring_templates t
      JOIN task_recurring_schedules s ON t.schedule_id = s.id;
    `;
    const templatesResult = await db.query(templatesQuery);
    const templates = templatesResult.rows as (ITaskTemplate & IRecurringSchedule)[];

    const now = moment();
    let createdTaskCount = 0;

    for (const template of templates) {
      const lastTaskEndDate = template.last_task_end_date
        ? moment(template.last_task_end_date)
        : moment(template.created_at);

      const futureLimit = moment(template.last_checked_at || template.created_at).add(1, "week");

      let nextEndDate = calculateNextEndDate(template, lastTaskEndDate);

      // Find the next future occurrence
      while (nextEndDate.isSameOrBefore(now)) {
        nextEndDate = calculateNextEndDate(template, nextEndDate);
      }

      // Only create a task if it falls within the future limit
      if (nextEndDate.isSameOrBefore(futureLimit)) {
        const existingTaskQuery = `
          SELECT id FROM tasks
          WHERE schedule_id = $1 AND end_date::DATE = $2::DATE;
        `;
        const existingTaskResult = await db.query(existingTaskQuery, [template.schedule_id, nextEndDate.format(TIME_FORMAT)]);

        if (existingTaskResult.rows.length === 0) {
          const createTaskQuery = `SELECT create_quick_task($1::json) as task;`;
          const taskData = {
            name: template.name,
            priority_id: template.priority_id,
            project_id: template.project_id,
            reporter_id: template.reporter_id,
            status_id: template.status_id || null,
            end_date: nextEndDate.format(TIME_FORMAT),
            schedule_id: template.schedule_id
          };
          const createTaskResult = await db.query(createTaskQuery, [JSON.stringify(taskData)]);
          const createdTask = createTaskResult.rows[0].task;

          if (createdTask) {
            createdTaskCount++;

            for (const assignee of template.assignees) {
              await TasksController.createTaskBulkAssignees(assignee.team_member_id, template.project_id, createdTask.id, assignee.assigned_by);
            }

            for (const label of template.labels) {
              const q = `SELECT add_or_remove_task_label($1, $2) AS labels;`;
              await db.query(q, [createdTask.id, label.label_id]);
            }

            console.log(`Created task for template ${template.name} with end date ${nextEndDate.format(TIME_FORMAT)}`);
          }
        } else {
          console.log(`Skipped creating task for template ${template.name} with end date ${nextEndDate.format(TIME_FORMAT)} - task already exists`);
        }
      } else {
        console.log(`No task created for template ${template.name} - next occurrence is beyond the future limit`);
      }

      // Update last_checked_at on the schedule
      const updateScheduleQuery = `
        UPDATE task_recurring_schedules
        SET last_checked_at = $1::DATE, last_created_task_end_date = $2
        WHERE id = $3;
      `;
      await db.query(updateScheduleQuery, [moment(template.last_checked_at || template.created_at).add(1, "day").format(TIME_FORMAT), nextEndDate.format(TIME_FORMAT), template.schedule_id]);
    }

    log(`(cron) Recurring tasks job ended with ${createdTaskCount} new tasks created.`);
  } catch (error) {
    log_error(error);
    log("(cron) Recurring task job ended with errors.");
  }
}

export function startRecurringTasksJob() {
  log("(cron) Recurring task job ready.");
  const job = new CronJob(
    TIME,
    () => void onRecurringTaskJobTick(),
    () => log("(cron) Recurring task job successfully executed."),
    true
  );
  job.start();
}
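The job leans on calculateNextEndDate, which is imported from ../shared/utils but whose body is not part of this diff. A minimal sketch of what it plausibly does, assuming it maps the schedule_type and interval_* fields of the IRecurringSchedule interface added below onto moment arithmetic (the branching itself is an assumption, and days_of_week / week_of_month handling is omitted):

import { Moment } from "moment";
import { IRecurringSchedule } from "../interfaces/recurring-tasks";

// Hypothetical sketch only: the real helper lives in ../shared/utils and is
// not shown in this diff. Field names come from IRecurringSchedule.
export function calculateNextEndDate(schedule: IRecurringSchedule, lastEndDate: Moment): Moment {
  const next = lastEndDate.clone();
  switch (schedule.schedule_type) {
    case "daily": return next.add(1, "day");
    case "weekly": return next.add(1, "week");
    case "monthly": return next.add(1, "month");
    case "yearly": return next.add(1, "year");
    case "every_x_days": return next.add(schedule.interval_days ?? 1, "days");
    case "every_x_weeks": return next.add(schedule.interval_weeks ?? 1, "weeks");
    case "every_x_months": return next.add(schedule.interval_months ?? 1, "months");
    default: return next.add(1, "day");
  }
}

Note that the while loop in onRecurringTaskJobTick assumes this helper always returns a strictly later date; a schedule row whose interval resolved to zero would spin forever, so a guard there would be worthwhile.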
@@ -17,4 +17,5 @@ export interface IPassportSession extends IUser {
   socket_id?: string;
   is_expired?: boolean;
   owner_id?: string;
+  subscription_status?: string;
 }
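The new subscription_status field on the session suggests request-level gating on subscription state. A hypothetical middleware sketch (the status value, response code, and import path are assumptions, not part of this diff):

import { NextFunction, Request, Response } from "express";
import { IPassportSession } from "../interfaces/passport-session"; // path assumed

// Sketch only: subscription_status comes from this diff, but the
// "expired" value and 402 response are illustrative choices.
export function requireActiveSubscription(req: Request, res: Response, next: NextFunction) {
  const user = req.user as IPassportSession | undefined;
  if (user?.subscription_status === "expired") {
    return res.status(402).send("Subscription expired");
  }
  return next();
}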
38
worklenz-backend/src/interfaces/recurring-tasks.ts
Normal file
@@ -0,0 +1,38 @@
export interface IRecurringSchedule {
  id: string;
  schedule_type: "daily" | "weekly" | "monthly" | "yearly" | "every_x_days" | "every_x_weeks" | "every_x_months";
  days_of_week: number[] | null;
  day_of_month: number | null;
  date_of_month: number | null;
  week_of_month: number | null;
  interval_days: number | null;
  interval_weeks: number | null;
  interval_months: number | null;
  last_created_task_end_date: Date | null;
  last_checked_at: Date | null;
  last_task_end_date: Date | null;
  created_at: Date;
}

interface ITaskTemplateAssignee {
  team_member_id: string;
  assigned_by: string;
}

interface ITaskTemplateLabel {
  label_id: string;
}

export interface ITaskTemplate {
  task_id: string;
  schedule_id: string;
  created_at: Date;
  name: string;
  priority_id: string;
  project_id: string;
  reporter_id: string;
  status_id: string;
  assignees: ITaskTemplateAssignee[];
  labels: ITaskTemplateLabel[];
}
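Each row returned by the templates join in recurring-tasks.ts conforms to the intersection ITaskTemplate & IRecurringSchedule. A hand-written example of such a value (every id and date below is made up, purely for illustration):

import { IRecurringSchedule, ITaskTemplate } from "./recurring-tasks";

// Illustrative only - all values are fabricated.
const example: ITaskTemplate & IRecurringSchedule = {
  // schedule half
  id: "schedule-uuid",
  schedule_type: "every_x_weeks",
  days_of_week: null,
  day_of_month: null,
  date_of_month: null,
  week_of_month: null,
  interval_days: null,
  interval_weeks: 2,
  interval_months: null,
  last_created_task_end_date: null,
  last_checked_at: null,
  last_task_end_date: null,
  created_at: new Date("2024-01-01"),
  // template half
  task_id: "task-uuid",
  schedule_id: "schedule-uuid",
  name: "Bi-weekly status report",
  priority_id: "priority-uuid",
  project_id: "project-uuid",
  reporter_id: "reporter-uuid",
  status_id: "status-uuid",
  assignees: [{ team_member_id: "member-uuid", assigned_by: "owner-uuid" }],
  labels: [{ label_id: "label-uuid" }]
};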
@@ -1,3 +1,3 @@
 export interface ISerializeCallback {
-  (error: string | null, id: string | null): void;
+  (error: string | null, user: { id: string | null } | null): void;
 }
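The widened signature lets the serializer hand back an object instead of a bare id string, which lines up with the ISocketSession change below. A minimal sketch of a serializer matching the new shape (the import path and the surrounding passport.serializeUser wiring are assumptions, not shown in this diff):

import { ISerializeCallback } from "./serialize-callback"; // file name assumed

// Sketch only: stores { id } in the session instead of a raw id string;
// intended to be passed to passport.serializeUser.
const serializeUser = (user: { id: string }, done: ISerializeCallback): void => {
  done(null, { id: user.id });
};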
@@ -1,5 +1,5 @@
 export interface ISocketSession {
   session?: {
-    passport?: { user?: string; }
+    passport?: { user?: { id: string } }
   }
 }
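Consumers of the socket session therefore need to unwrap the id rather than reading it directly. A sketch of how a socket handler might read the user id after this change (the handler shape and import path are assumptions):

import { ISocketSession } from "./socket-session"; // path assumed

// Before this change, session?.passport?.user was the id string itself;
// after it, the id sits one level deeper on the user object.
function getSessionUserId(socket: { request: ISocketSession }): string | null {
  return socket.request.session?.passport?.user?.id ?? null;
}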
9
worklenz-backend/src/keys/PRIVATE_KEY_DEV.pem
Normal file
@@ -0,0 +1,9 @@
-----BEGIN RSA PRIVATE KEY-----
MIIBOgIBAAJBAMe4wKg0OazdVWEyLCnTxubXHqpp6U7S7MiIE96Iufe+T4fe1EJl
2+7UJ0Vh0iO9vy/dr03Y9Mjm/IxgiaLEqFECAwEAAQJBAIV17jf4fjoHxZAnyN9C
h32mbvWNxLxJsrTmSfDBCRSFRv+ME7WAb7wGhfeDPZcxC+sDZv5EhTnDwQoVl0+3
tOECIQDzAbIUX6IS401UKISr8rk9dmPa+i89z5JAyiuhX8sQdQIhANJmkUYjHJtp
do/4dmDC6Dgv6SPr9zrNFg2A9Hgu3zztAiBpSHDJFu33VPep4Kwqe0z6bhKxSvew
xf/NhkoE7qXiCQIgEltslWf+2PhspccR3QNka3KSrtWprnGyWN9FdS7xv0kCIDje
m2QMP/tkiyGlX4cxpDvoB3syPEsbnH+3iaGMlD1T
-----END RSA PRIVATE KEY-----
9
worklenz-backend/src/keys/PRIVATE_KEY_PROD.pem
Normal file
@@ -0,0 +1,9 @@
-----BEGIN RSA PRIVATE KEY-----
MIIBOwIBAAJBALdfkpZY9GkPSezqNtNP70SDc5ovnB8NttBxheDecIXRiKkGQaTc
QuDq19IlDPr+jPvJ6VyMZXtK1UQ09ewUZQ0CAwEAAQJBAKIKkaXMW8bPHNt/qQ0Y
kO4xXyF8OvDyFH+kIdMxnauRm8Z28EC4S8F9sfaqL/haj8lMDDDUEhJJB5P3l4XW
3WECIQDbZBsfv5+++ie08FzW4K0IrTeFrkanbuV9fhx9sqpgNQIhANX43uuGl7qE
RfGEesIfK3FurZhNUXBzYwpZoGC4Drx5AiANK18tcrVGI4IKrHsGMwpwAOXaUnHP
Tyrbc5yGNxlfGQIgGgFGLn/MHvoGeiTsun0JTZ7y8Citdio/5jkgWcDk4ZkCIQCk
TLAHaLJHiN63o3F/lTwyMib/3xQrsjcxs6k/Y9VEHw==
-----END RSA PRIVATE KEY-----
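Both PEM files commit short (512-bit) RSA private keys in PKCS#1 framing directly into the repository. Should these ever need to be regenerated or rotated outside the repo, Node's built-in crypto module can produce a compatible PEM; a sketch (the 2048-bit size and output file name are choices of this example, not taken from the diff):

import { generateKeyPairSync } from "crypto";
import { writeFileSync } from "fs";

// Sketch: generates an RSA key pair and writes the private key in the same
// PKCS#1 PEM framing ("BEGIN RSA PRIVATE KEY") these files use. 2048 bits is
// a deliberate upgrade over the 512-bit keys checked in here.
const { privateKey } = generateKeyPairSync("rsa", {
  modulusLength: 2048,
  publicKeyEncoding: { type: "spki", format: "pem" },
  privateKeyEncoding: { type: "pkcs1", format: "pem" },
});

writeFileSync("PRIVATE_KEY_DEV.pem", privateKey);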
Some files were not shown because too many files have changed in this diff.