Compare commits

...

72 Commits

Author SHA1 Message Date
Chamika J
f80ec9797e Merge pull request #216 from Dev-Tanaay/bug/Documentation
Solved the documentation error with issue number
2025-07-02 09:09:27 +05:30
Dev-Tanaay
fbbd820512 Updation in Documentation 2025-07-01 17:29:02 +05:30
Chamika J
5d0777f67c Merge pull request #183 from Worklenz/fix/added-i18n-alb-and-de
Update LANGUAGE_TYPE enum to include 'alb' and 'de' for Albanian and …
2025-06-24 22:39:29 +05:30
chamiakJ
f1d504f985 Update LANGUAGE_TYPE enum to include 'alb' and 'de' for Albanian and German languages 2025-06-24 22:28:33 +05:30
Chamika J
6a4d77d904 Merge pull request #182 from Worklenz/feature/german-i18n
Feature/german i18n
2025-06-24 21:47:45 +05:30
Chamika J
c35d53266a Merge pull request #179 from OminduHirushka/language/de
German Language
2025-06-24 21:47:06 +05:30
Omindu Hirushka
dc096f5e12 german language switch 2025-06-24 14:17:23 +05:30
Omindu Hirushka
a681aadcfa german language 2025-06-24 14:17:08 +05:30
Chamika J
f15f3f5110 Merge pull request #176 from Worklenz/feature/albanian-i18n
Feature/albanian i18n
2025-06-23 07:24:12 +05:30
Chamika J
07ae71fd23 Merge pull request #168 from OminduHirushka/upstream/language/alb
albanian language
2025-06-23 07:23:42 +05:30
Chamika J
26270b2842 Merge pull request #166 from kithmina1999/chore/docker-image-size-reduction
Chore/docker image size reduction
2025-06-23 06:56:14 +05:30
kithmina1999
0d0596b767 ci(docker): update postgres healthcheck and retry settings
- Modify healthcheck command to use DB_NAME and DB_USER environment variables
- Reduce retries from 10 to 5 to fail faster if database is unavailable
2025-06-20 08:48:51 +05:30
kithmina1999
eca7af2d6f chore: remove obsolete git file with process information
The file contained outdated process information that was no longer relevant to the project. This cleanup helps maintain a cleaner codebase.
2025-06-19 22:32:17 +05:30
kithmina1999
3ace14fcdb build(docker): optimize docker setup and compose configuration
- Split Dockerfile into multi-stage build to reduce final image size
- Update docker-compose.yml with restart policies and health checks
- Improve .dockerignore files for both frontend and backend
- Fix MinIO bucket creation script to use 'alias set' instead of deprecated command
- Enhance PostgreSQL healthcheck configuration
2025-06-19 22:26:20 +05:30
Omindu Hirushka
99bec6c7f9 albanian language switch 2025-06-18 17:04:58 +05:30
Omindu Hirushka
ef299f1f4a albanian language 2025-06-17 16:03:08 +05:30
Chamika J
66b0709e6e Merge pull request #154 from Worklenz/chore/added-google-analytics
Chore/added google analytics
2025-06-12 10:10:00 +05:30
chamiakJ
a2ed33214d Merge branch 'main' of https://github.com/Worklenz/worklenz into chore/added-google-analytics 2025-06-12 09:49:32 +05:30
Chamika J
a3dccd690d Merge pull request #141 from gdevenyi/patch-1
Fix use of deprecated mc command for bucket creation
2025-06-12 09:46:45 +05:30
chamiakJ
69313fba34 Enhance privacy notice functionality in index.html
- Updated the privacy notice button to use an ID for easier event handling.
- Added an event listener to the button to manage the display of the notice and store user consent in localStorage.
2025-06-12 09:38:24 +05:30
chamiakJ
1889c58598 Refactor privacy notice implementation in index.html
- Introduced a dedicated function to display the privacy notice for users of the open source version.
- Updated the styling and content of the privacy notice for improved visibility and clarity.
- Added a DOMContentLoaded event listener to conditionally show the notice based on the environment and previous interactions.
2025-06-12 09:34:53 +05:30
chamiakJ
e9f0162439 Enhance documentation and integrate Google Analytics
- Added an Analytics section to the README.md, detailing what is tracked, privacy measures, and opt-out instructions.
- Implemented Google Analytics in index.html, including a privacy notice for open source users and environment-specific tracking IDs.
- Updated worklenz-frontend README.md to include a License section.
2025-06-12 09:26:00 +05:30
Chamika J
323b17185c Merge pull request #148 from kithmina1999/fix/postgres-password-auth
fix: change DB_PASSWORD to static value
2025-06-05 13:18:41 +05:30
kithmina1999
09f44a5685 fix: change DB_PASSWORD to static value for development
Using a static password simplifies development environment setup. The previous random password generation caused issues during local testing and debugging.
2025-06-05 10:40:06 +05:30
Chamika J
f4ab7841fb Merge pull request #105 from MRNafisiA/main
increase the memory limit to prevent crashing during build time.
2025-05-30 10:15:47 +05:30
Chamika J
3de4f69a62 Merge pull request #142 from gdevenyi/patch-2
Generate random passwords in update-docker-env.sh
2025-05-30 09:42:46 +05:30
Gabriel A. Devenyi
102be2c24a Generate random passwords in update-docker-env.sh 2025-05-29 15:56:56 -04:00
Chamika J
3a39b25e64 Merge pull request #144 from kithmina1999/docs/add-video-guides
Update README.md to include video guides for local and remote deployment
2025-05-28 13:01:16 +05:30
kithmina1999
32248f8424 Update README.md to include video guides for local and remote deployment
- Added a section for a video guide on local Docker deployment.
- Included a video guide for deploying Worklenz to a remote server.
2025-05-28 09:32:32 +05:30
Gabriel A. Devenyi
a1f8776743 Fix use of deprecated mc command for bucket creation 2025-05-27 22:32:36 -04:00
Chamika J
7e431d645a Merge pull request #134 from Worklenz/chamikaJ-patch-1
Update README.md
2025-05-21 08:28:32 +05:30
Chamika J
cef4bffd69 Update README.md
updated logo URL
2025-05-21 08:28:09 +05:30
Chamika J
51767ebbdb Merge pull request #115 from kithmina1999/fix/minio-createbuckets-entrypoint
correct MinIO createbuckets entrypoint script syntax
2025-05-09 15:41:02 +05:30
kithmina1999
ad91148616 Add Google OAuth variables to Docker environment setup script 2025-05-09 15:34:27 +05:30
kithmina1999
38df66044d Fix entrypoint syntax for MinIO bucket creation script 2025-05-09 15:29:36 +05:30
MRNafisiA
75391641fd increase the memory limit to prevent crashing during build time. 2025-05-02 15:53:48 +03:30
Chamika J
24dc99a19a Merge pull request #96 from Worklenz/development
Development
2025-04-28 22:34:45 +05:30
Chamika J
907075f51d Merge pull request #95 from chamikaJ/fix/docker-compose-fix
Refactor Dockerfile to streamline environment configuration
2025-04-28 22:05:21 +05:30
chamiakJ
b48ac45085 Refactor Dockerfile to streamline environment configuration
- Introduced a new env-config.sh script to handle runtime environment variable updates for VITE_API_URL and VITE_SOCKET_URL.
- Updated start.sh to execute env-config.sh, improving script organization and clarity.
- Enhanced the overall structure of the Dockerfile for better maintainability.
2025-04-28 22:04:00 +05:30
Chamika J
b115d0a772 Merge pull request #94 from chamikaJ/fix/docker-compose-fix
Enhance startup scripts with environment update checks and service UR…
2025-04-28 21:56:04 +05:30
chamiakJ
ad0cdfe1d9 Enhance startup scripts with environment update checks and service URL clarity
- Added checks for the existence of the update-docker-env.sh script in both start.bat and start.sh, providing guidance for updating environment variables.
- Removed legacy .env file checks from both scripts to streamline the startup process.
- Updated service URL outputs to clarify SSL options for both frontend and backend services.
- Improved user instructions for stopping services and updating environment variables.
2025-04-28 21:54:31 +05:30
Chamika J
a50ef47a52 Merge pull request #93 from Worklenz/development
Development
2025-04-28 17:23:04 +05:30
Chamika J
db4240d99b Merge pull request #92 from chamikaJ/fix/docker-compose-fix
Enhance backend API with health check endpoint and update environment…
2025-04-28 16:28:01 +05:30
chamiakJ
bf1d48709c Enhance backend API with health check endpoint and update environment script
- Added a new health check endpoint to the public router in index.ts, returning a simple status response.
- Updated update-docker-env.sh to include the MinIO Dashboard URL in the environment configuration and output messages.
2025-04-28 16:22:54 +05:30
Chamika J
c3c0c288a8 Merge pull request #91 from chamikaJ/fix/docker-compose-fix
Refactor start.sh script creation in Dockerfile
2025-04-28 15:40:07 +05:30
chamiakJ
79e8bb3734 Refactor start.sh script creation in Dockerfile
- Updated the Dockerfile to create the start.sh script in a more structured manner, improving readability and maintainability.
- Ensured that the script dynamically updates env-config.js with runtime environment variables for API and WebSocket URLs.
2025-04-28 15:36:52 +05:30
Chamika J
a6884440a0 Merge pull request #90 from chamikaJ/fix/docker-compose-fix
Refactor frontend service configuration in docker-compose.yml
2025-04-28 15:22:39 +05:30
chamiakJ
b9e5f396fd Refactor frontend service configuration in docker-compose.yml
- Changed the frontend service to build from a local context instead of using a pre-built image.
- Specified the Dockerfile for the frontend build process.
2025-04-28 15:20:39 +05:30
Chamika J
fc40ebcaba Merge pull request #89 from chamikaJ/fix/docker-compose-fix
Enhance environment configuration for frontend and CORS support
2025-04-28 13:04:16 +05:30
chamiakJ
54642037d3 Enhance environment configuration for frontend and CORS support
- Updated update-docker-env.sh to define FRONTEND_URL and streamline CORS settings.
- Modified app.ts to include SERVER_CORS and FRONTEND_URL in allowed origins for both production and development environments.
- Improved output messages in update-docker-env.sh to provide clearer information on URLs and CORS configuration.
2025-04-28 13:02:29 +05:30
Chamika J
0778089ff3 Merge pull request #88 from chamikaJ/fix/docker-compose-fix
Remove legacy environment files and update Docker configuration for e…
2025-04-28 12:44:15 +05:30
chamiakJ
ac2afd6949 Remove legacy environment files and update Docker configuration for environment management
- Deleted .env and .env.example files to streamline environment variable management.
- Updated docker-compose.yml to utilize env_file for frontend and backend services.
- Enhanced update-docker-env.sh to create separate environment files for development and production.
- Revised README.md to reflect the new environment file structure and setup instructions.
2025-04-28 12:37:41 +05:30
Chamika J
8162ce65cb Merge pull request #87 from chamikaJ/fix/docker-compose-fix
Enhance WebSocket support and update environment configuration
2025-04-28 11:43:52 +05:30
chamiakJ
6e4bdea1c2 Enhance WebSocket support and update environment configuration
- Added VITE_SOCKET_URL to docker-compose.yml for WebSocket connection.
- Updated update-docker-env.sh to handle SSL options for WebSocket URLs.
- Modified Dockerfile to include VITE_SOCKET_URL in env-config.js.
- Implemented getSocketUrl function in frontend to retrieve WebSocket URL.
- Refactored socket configuration to use centralized socket URL from environment settings.
2025-04-28 11:43:16 +05:30
Chamika J
daf8ec2e0a Merge pull request #86 from chamikaJ/fix/docker-compose-fix
Fix/docker compose fix
2025-04-28 11:37:14 +05:30
chamiakJ
2a3ae31e4e Enhance Docker deployment with environment variable configuration
- Added environment variable setup in docker-compose.yml for VITE_API_URL.
- Introduced update-docker-env.sh script to create/update .env file for local and remote deployments.
- Updated Dockerfile to dynamically create env-config.js during build.
- Modified frontend to load environment configuration from env-config.js.
- Refactored API client to use centralized config for API URL.
2025-04-28 11:32:44 +05:30
chamikaJ
9c27c41a5e Add script to inject environment variables in Dockerfile
- Created a start.sh script to set environment variables for the application.
- Updated CMD to execute the new script instead of directly serving the build.
2025-04-25 16:42:45 +05:30
chamikaJ
a328da679c Update docker-compose.yml to use bind mount for database initialization and add script execution for dos2unix; modify CORS origin check in app.ts for production environment 2025-04-25 12:30:15 +05:30
Chamika J
122496513b Merge pull request #82 from chamikaJ/fix/socket-event-not-updating
Refactor socket session user structure and update user ID retrieval
2025-04-22 21:21:23 +05:30
chamiakJ
7363c4c692 Refactor socket session user structure and update user ID retrieval
- Changed the user structure in ISocketSession to include an object with an 'id' property.
- Updated the getLoggedInUserIdFromSocket function to return the user ID directly from the new structure.
2025-04-22 21:19:13 +05:30
Chamika J
012e683240 Merge pull request #80 from chamikaJ/react-version
Update security contact email in README.md
2025-04-21 22:11:09 +05:30
chamiakJ
b3a37df4be Update security contact email in README.md 2025-04-21 22:10:41 +05:30
Chamika J
cb94b19e61 Merge pull request #79 from chamikaJ/react-version
Update README.md and scripts for improved setup and service management
2025-04-21 20:56:40 +05:30
chamiakJ
50c4f1a6ac Update README.md and scripts for improved setup and service management
- Changed repository URLs in README.md for consistency.
- Enhanced start.sh with a service health check function to verify if services are running and responding.
- Improved output messages in start.sh and stop.sh for better user experience.
- Added checks for port conflicts and ensured proper stopping of services in stop.sh.
2025-04-21 20:54:50 +05:30
Chamika J
1d02313585 Merge pull request #77 from chamikaJ/react-version
Comment out project template functions in SQL script to prevent execu…
2025-04-21 10:53:52 +05:30
chamiakJ
04ffc049b0 Comment out project template functions in SQL script to prevent execution during database initialization 2025-04-21 09:15:16 +05:30
Chamika J
ca3db02ce8 Merge pull request #76 from chamikaJ/react-version
React version
2025-04-21 08:00:20 +05:30
chamiakJ
ad7c2b20a2 Update README.md and .env.example for Node.js version and redirect URLs
- Updated Node.js version requirement in README.md to v18 or newer.
- Modified .env.example to change Google callback and redirect URLs for consistency in authentication flow.
2025-04-21 07:43:35 +05:30
chamiakJ
89e39520ba Update README.md and .env.template for improved clarity and configuration
- Enhanced README.md with a new header, centered links, and an updated project description.
- Modified .env.template to change Google callback and redirect URLs for consistency in authentication flow.
2025-04-21 07:43:17 +05:30
chamiakJ
6efaeb3ff6 Enhance database schema and functions
- Added 'PHASE' value to the WL_TASK_LIST_COL_KEY enum type.
- Introduced sys_insert_license_types function to populate license types in the database.
- Updated sys_insert_project_templates function to include a new project template and adjusted associated task statuses.
- Modified organization insertion logic to set a longer trial period and change subscription status to 'active' for self-hosted licenses.
2025-04-21 00:12:10 +05:30
chamikaJ
e42819ef64 Update environment configuration, Docker setup, and frontend/backend dependencies
- Updated .env.example and .env files for backend and frontend with placeholder values.
- Enhanced .gitignore to include additional files and directories.
- Modified docker-compose.yml to change image names and improve service health checks.
- Updated README.md and SETUP_THE_PROJECT.md for clearer setup instructions.
- Added database initialization scripts and SQL files for structured database setup.
- Updated frontend Dockerfile to use Node.js 22 and adjusted package.json scripts.
- Improved error handling and logging in start scripts for better debugging.
- Added reCAPTCHA support in the signup page with conditional loading based on environment variables.
2025-04-18 17:10:56 +05:30
chamikaJ
8825b0410a init 2025-04-17 18:28:54 +05:30
2977 changed files with 246894 additions and 129236 deletions

.gitignore (79 changed lines)

@@ -1,4 +1,79 @@
.idea
.vscode
# Dependencies
node_modules/
.pnp/
.pnp.js
# Build outputs
dist/
build/
out/
.next/
.nuxt/
.cache/
# Environment variables
.env
.env.local
.env.development.local
.env.test.local
.env.production.local
.env.development
.env.production
.env.*
!.env.example
!.env.template
# Logs
logs
*.log
npm-debug.log*
yarn-debug.log*
yarn-error.log*
pnpm-debug.log*
lerna-debug.log*
# Editor directories and files
.vscode/*
!.vscode/extensions.json
.idea/
.DS_Store
*.suo
*.ntvs*
*.njsproj
*.sln
*.sw?
*.sublime-workspace
# Testing
coverage/
.nyc_output/
# Temp files
.temp/
.tmp/
temp/
tmp/
# Debug
.debug/
# Misc
.DS_Store
Thumbs.db
.thumbs.db
ehthumbs.db
Desktop.ini
$RECYCLE.BIN/
# Yarn
.yarn/*
!.yarn/patches
!.yarn/plugins
!.yarn/releases
!.yarn/sdks
!.yarn/versions
# TypeScript
*.tsbuildinfo

CONTRIBUTING.md

@@ -19,7 +19,7 @@ We have adopted a Code of Conduct to ensure a welcoming and inclusive environmen
## Coding Standards
- Follow the [Angular Style Guide](https://angular.io/guide/styleguide) for the frontend code.
- Follow the [React Documentation](https://react.dev/learn) for best practices in React development.
- Use [TypeScript](https://www.typescriptlang.org/) for both frontend and backend code.
- Follow the [Conventional Commits](https://www.conventionalcommits.org/) specification for commit messages.

README.md (386 changed lines)

@@ -1,6 +1,6 @@
<h1 align="center">
<a href="https://worklenz.com" target="_blank" rel="noopener noreferrer">
<img src="https://app.worklenz.com/assets/icons/icon-144x144.png" alt="Worklenz Logo" width="75">
<img src="https://s3.us-west-2.amazonaws.com/worklenz.com/assets/icon-144x144.png" alt="Worklenz Logo" width="75">
</a>
<br>
Worklenz
@@ -39,20 +39,178 @@ comprehensive solution for managing projects, tasks, and collaboration within te
This repository contains the frontend and backend code for Worklenz.
- **Frontend**: Built using Angular, with [Ant Design of Angular](https://ng.ant.design/docs/introduce/en) as the UI
library.
- **Backend**: Built using a custom TypeScript implementation of ExpressJS, with PostgreSQL as the database, providing a
robust, scalable, and type-safe backend.
- **Frontend**: Built using React with Ant Design as the UI library.
- **Backend**: Built using TypeScript, Express.js, with PostgreSQL as the database.
## Requirements
- Node.js version v18 or newer
- Postgres version v15.6
- Redis version v4.6.7 (not used yet; setup only)
- PostgreSQL version v15 or newer
- Docker and Docker Compose (for containerized setup)
## Getting started with Worklenz.
- Containerized Installation - Use Docker to deploy Worklenz in production or development environments.
- Manual installation - To get started with Worklenz, please follow this guide [worklenz setup guidelines](SETUP_THE_PROJECT.md).
## Getting Started
These instructions will help you set up and run the Worklenz project on your local machine for development and testing purposes.
### Prerequisites
- Node.js (version 18 or higher)
- PostgreSQL database
- An S3-compatible storage service (like MinIO) or Azure Blob Storage
### Option 1: Manual Installation
1. Clone the repository
```bash
git clone https://github.com/Worklenz/worklenz.git
cd worklenz
```
2. Set up environment variables
- Copy the example environment files
```bash
cp worklenz-backend/.env.template worklenz-backend/.env
```
- Update the environment variables with your configuration
3. Install dependencies
```bash
# Install backend dependencies
cd worklenz-backend
npm install
# Install frontend dependencies
cd ../worklenz-frontend
npm install
```
4. Set up the database
```bash
# Create a PostgreSQL database named worklenz_db
cd worklenz-backend
# Execute the SQL setup files in the correct order
psql -U your_username -d worklenz_db -f database/sql/0_extensions.sql
psql -U your_username -d worklenz_db -f database/sql/1_tables.sql
psql -U your_username -d worklenz_db -f database/sql/indexes.sql
psql -U your_username -d worklenz_db -f database/sql/4_functions.sql
psql -U your_username -d worklenz_db -f database/sql/triggers.sql
psql -U your_username -d worklenz_db -f database/sql/3_views.sql
psql -U your_username -d worklenz_db -f database/sql/2_dml.sql
psql -U your_username -d worklenz_db -f database/sql/5_database_user.sql
```
5. Start the development servers
```bash
# In one terminal, start the backend
cd worklenz-backend
npm run dev
# In another terminal, start the frontend
cd worklenz-frontend
npm run dev
```
6. Access the application at http://localhost:5000
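The eight `psql` commands in step 4 can also be applied in a single loop — a minimal sketch, assuming a bash shell and the same file order listed above:
```bash
# Apply the SQL setup files in the documented order; stop at the first failure
for f in 0_extensions.sql 1_tables.sql indexes.sql 4_functions.sql \
         triggers.sql 3_views.sql 2_dml.sql 5_database_user.sql; do
  psql -U your_username -d worklenz_db -f "database/sql/$f" || break
done
```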
### Option 2: Docker Setup
The project includes a fully configured Docker setup with:
- Frontend React application
- Backend server
- PostgreSQL database
- MinIO for S3-compatible storage
1. Clone the repository:
```bash
git clone https://github.com/Worklenz/worklenz.git
cd worklenz
```
2. Start the Docker containers (choose one option):
**Using Docker Compose directly**
```bash
docker-compose up -d
```
3. The application will be available at:
- Frontend: http://localhost:5000
- Backend API: http://localhost:3000
- MinIO Console: http://localhost:9001 (login with minioadmin/minioadmin)
4. To stop the services:
```bash
docker-compose down
```
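To confirm the stack is healthy after step 3, you can probe the containers and the backend's health route (a sketch; `/public/health` is the health check endpoint the startup scripts poll):
```bash
# Post-start smoke test for the Docker setup
docker-compose ps                                 # all services should show "Up"
curl -fsS http://localhost:3000/public/health     # backend health check endpoint
curl -fsS -o /dev/null -w "%{http_code}\n" http://localhost:5000   # frontend should return 200
```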
## Configuration
### Environment Variables
Worklenz requires several environment variables to be configured for proper operation. These include:
- Database credentials
- Session secrets
- Storage configuration (S3 or Azure)
- Authentication settings
Please refer to the `.env.example` files for a full list of required variables.
### MinIO Integration
The project uses MinIO as an S3-compatible object storage service, which provides an open-source alternative to AWS S3 for development and production.
- **MinIO Console**: http://localhost:9001
- Username: minioadmin
- Password: minioadmin
- **Default Bucket**: worklenz-bucket (created automatically when the containers start)
### Security Considerations
For production deployments:
1. Use strong, unique passwords and keys for all services
2. Do not commit `.env` files to version control
3. Use a production-grade PostgreSQL setup with proper backup procedures
4. Enable HTTPS for all public endpoints
5. Review and update dependencies regularly
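For item 1, the same pattern used by the bundled `update-docker-env.sh` script works well — a minimal sketch using `openssl`:
```bash
# Generate strong random secrets before deploying to production
SESSION_SECRET=$(openssl rand -base64 48)
COOKIE_SECRET=$(openssl rand -base64 48)
DB_PASSWORD=$(openssl rand -base64 24)
# Write these into worklenz-backend/.env rather than committing them to version control
echo "SESSION_SECRET=${SESSION_SECRET}"
```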
## Contributing
We welcome contributions from the community! If you'd like to contribute, please follow our [contributing guidelines](CONTRIBUTING.md).
## Security
If you believe you have found a security vulnerability in Worklenz, we encourage you to responsibly disclose this and not open a public issue. We will investigate all legitimate reports.
Email [info@worklenz.com](mailto:info@worklenz.com) to disclose any security vulnerabilities.
## License
This project is licensed under the [MIT License](LICENSE).
## Analytics
Worklenz uses Google Analytics to understand how the application is being used. This helps us improve the application and make better decisions about future development.
### What We Track
- Anonymous usage statistics
- Page views and navigation patterns
- Feature usage
- Browser and device information
### Privacy
- Analytics is opt-in only
- No personal information is collected
- Users can opt out at any time
- Data is stored according to Google's privacy policy
### How to Opt Out
If you've previously opted in and want to opt out:
1. Clear your browser's local storage for the Worklenz domain
2. Or click the "Decline" button in the analytics notice if it appears
## Screenshots
@@ -107,16 +265,210 @@ This repository contains the frontend and backend code for Worklenz.
We welcome contributions from the community! If you'd like to contribute, please follow
our [contributing guidelines](CONTRIBUTING.md).
### Security
If you believe you have found a security vulnerability in Worklenz, we encourage you to responsibly disclose this and
not open a public issue. We will investigate all legitimate reports.
Email [info@worklenz.com](mailto:info@worklenz.com) to disclose any security vulnerabilities.
### License
Worklenz is open source and released under the [GNU Affero General Public License Version 3 (AGPLv3)](LICENSE).
By contributing to Worklenz, you agree that your contributions will be licensed under its AGPL.
# Worklenz React
This repository contains the React version of Worklenz with a Docker setup for easy development and deployment.
## Getting Started with Docker
The project includes a fully configured Docker setup with:
- Frontend React application
- Backend server
- PostgreSQL database
- MinIO for S3-compatible storage
### Prerequisites
- Docker and Docker Compose installed on your system
- Git
### Quick Start
1. Clone the repository:
```bash
git clone https://github.com/Worklenz/worklenz.git
cd worklenz
```
2. Start the Docker containers (choose one option):
**Option 1: Using the provided scripts (easiest)**
- On Windows:
```
start.bat
```
- On Linux/macOS:
```bash
./start.sh
```
**Option 2: Using Docker Compose directly**
```bash
docker-compose up -d
```
3. The application will be available at:
- Frontend: http://localhost:5000
- Backend API: http://localhost:3000
- MinIO Console: http://localhost:9001 (login with minioadmin/minioadmin)
4. To stop the services (choose one option):
**Option 1: Using the provided scripts**
- On Windows:
```
stop.bat
```
- On Linux/macOS:
```bash
./stop.sh
```
**Option 2: Using Docker Compose directly**
```bash
docker-compose down
```
## MinIO Integration
The project uses MinIO as an S3-compatible object storage service, which provides an open-source alternative to AWS S3 for development and production.
### Working with MinIO
MinIO provides an S3-compatible API, so any code that works with S3 will work with MinIO by simply changing the endpoint URL. The backend has been configured to use MinIO by default, with no additional configuration required.
- **MinIO Console**: http://localhost:9001
- Username: minioadmin
- Password: minioadmin
- **Default Bucket**: worklenz-bucket (created automatically when the containers start)
### Backend Storage Configuration
The backend is pre-configured to use MinIO with the following settings:
```javascript
// S3 credentials with MinIO defaults
export const REGION = process.env.AWS_REGION || "us-east-1";
export const BUCKET = process.env.AWS_BUCKET || "worklenz-bucket";
export const S3_URL = process.env.S3_URL || "http://minio:9000/worklenz-bucket";
export const S3_ACCESS_KEY_ID = process.env.AWS_ACCESS_KEY_ID || "minioadmin";
export const S3_SECRET_ACCESS_KEY = process.env.AWS_SECRET_ACCESS_KEY || "minioadmin";
```
The S3 client is initialized with special MinIO configuration:
```javascript
const s3Client = new S3Client({
region: REGION,
credentials: {
accessKeyId: S3_ACCESS_KEY_ID || "",
secretAccessKey: S3_SECRET_ACCESS_KEY || "",
},
endpoint: getEndpointFromUrl(), // Extracts endpoint from S3_URL
forcePathStyle: true, // Required for MinIO
});
```
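Because the API is S3-compatible, the bucket can also be inspected from the host with the same `mc` client the `createbuckets` helper container uses — a sketch assuming the default credentials and the mapped port 9000:
```bash
# Point an mc alias at the local MinIO instance and inspect the default bucket
mc alias set local http://localhost:9000 minioadmin minioadmin
mc ls local/worklenz-bucket
# Upload a test object (assumes a local test.txt exists)
mc cp ./test.txt local/worklenz-bucket/
```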
### Environment Configuration
The project uses the following environment file structure:
- **Frontend**:
- `worklenz-frontend/.env.development` - Development environment variables
- `worklenz-frontend/.env.production` - Production build variables
- **Backend**:
- `worklenz-backend/.env` - Backend environment variables
### Setting Up Environment Files
The Docker environment script will create or overwrite all environment files:
```bash
# For HTTP/WS
./update-docker-env.sh your-hostname
# For HTTPS/WSS
./update-docker-env.sh your-hostname true
```
This script generates properly configured environment files for both development and production environments.
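A quick way to sanity-check the generated files is to grep them for the URLs the script writes — a sketch:
```bash
# Confirm the frontend and backend env files point at the intended host
grep -H '^VITE_' worklenz-frontend/.env.production
grep -H 'FRONTEND_URL\|SOCKET_IO_CORS' worklenz-backend/.env
```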
## Docker Deployment
### Local Development with Docker
1. Set up the environment files:
```bash
# For HTTP/WS
./update-docker-env.sh
# For HTTPS/WSS
./update-docker-env.sh localhost true
```
2. Run the application using Docker Compose:
```bash
docker-compose up -d
```
3. Access the application:
- Frontend: http://localhost:5000
- Backend API: http://localhost:3000 (or https://localhost:3000 with SSL)
4. Video Guide
For a visual walkthrough of the local Docker deployment process, check out our [step-by-step video guide](https://www.youtube.com/watch?v=AfwAKxJbqLg).
### Remote Server Deployment
When deploying to a remote server:
1. Set up the environment files with your server's hostname:
```bash
# For HTTP/WS
./update-docker-env.sh your-server-hostname
# For HTTPS/WSS
./update-docker-env.sh your-server-hostname true
```
This ensures that the frontend correctly connects to the backend API.
2. Pull and run the latest Docker images:
```bash
docker-compose pull
docker-compose up -d
```
3. Access the application through your server's hostname:
- Frontend: http://your-server-hostname:5000
- Backend API: http://your-server-hostname:3000
4. Video Guide
For a complete walkthrough of deploying Worklenz to a remote server, check out our [deployment video guide](https://www.youtube.com/watch?v=CAZGu2iOXQs&t=10s).
### Environment Configuration
The Docker setup uses environment variables to configure the services:
- Frontend:
- `VITE_API_URL`: URL of the backend API (default: http://backend:3000 for container networking)
- `VITE_SOCKET_URL`: WebSocket URL for real-time communication (default: ws://backend:3000)
- Backend:
- Database connection parameters
- Storage configuration
- Other backend settings
For custom configuration, edit the `.env` file or the `update-docker-env.sh` script.
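For example, to point the frontend at a backend on a different host, regenerate the env files and recreate the frontend container — a sketch, with `api.example.com` as a placeholder hostname:
```bash
# Regenerate environment files for an HTTPS host
./update-docker-env.sh api.example.com true
# Rebuild and recreate the frontend so it picks up the new values
docker-compose up -d --build frontend
```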

SETUP_THE_PROJECT.md

@@ -4,21 +4,20 @@ Getting started with development is a breeze! Follow these steps and you'll be c
## Requirements
- Node.js version v18 or newer - [Node.js](https://nodejs.org/en/download/current)
- Postgres version v15.6 - [PostgreSQL](https://www.postgresql.org/download/)
- Redis version v4.6.7 (not used yet; setup only)
- Node.js version v16 or newer - [Node.js](https://nodejs.org/en/download/)
- PostgreSQL version v15 or newer - [PostgreSQL](https://www.postgresql.org/download/)
- S3-compatible storage (like MinIO) for file storage
## Prerequisites
- `$ npm install -g ts-node`
- `$ npm install -g typescript`
- `$ npm install -g grunt grunt-cli`
- `$ npm install -g typescript` (optional, but recommended)
## Installation
**Clone the repository:**
```bash
git clone https://github.com/Worklenz/worklenz.git
cd worklenz
```
### Frontend installation
@@ -32,13 +31,14 @@ Getting started with development is a breeze! Follow these steps and you'll be c
```bash
npm install
```
3. **Run the frontend:**
```bash
npm start
```
4. Navigate to [http://localhost:4200](http://localhost:4200)
4. Navigate to [http://localhost:5173](http://localhost:5173)
### Backend installation
@@ -54,13 +54,34 @@ Getting started with development is a breeze! Follow these steps and you'll be c
3. **Configure Environment Variables:**
- Create a copy of the `.env.template` file and name it `.env`.
- Update the required fields in `.env` with the specific information.
- Create a copy of the `.env.example` file and name it `.env`.
- Update the required fields in `.env` with your specific configuration.
4. **Restore Database**
4. **Set up Database**
- Create a new database named `worklenz_db` on your local PostgreSQL server.
- Update the `DATABASE_NAME` and `PASSWORD` in the `database/6_user_permission.sql` with your DB credentials.
- Open a query console and execute the queries from the .sql files in the `database` directories, following the provided order.
- Update the database connection details in your `.env` file.
- Execute the SQL setup files in the correct order:
```bash
# From your PostgreSQL client or command line
psql -U your_username -d worklenz_db -f database/sql/0_extensions.sql
psql -U your_username -d worklenz_db -f database/sql/1_tables.sql
psql -U your_username -d worklenz_db -f database/sql/indexes.sql
psql -U your_username -d worklenz_db -f database/sql/4_functions.sql
psql -U your_username -d worklenz_db -f database/sql/triggers.sql
psql -U your_username -d worklenz_db -f database/sql/3_views.sql
psql -U your_username -d worklenz_db -f database/sql/2_dml.sql
psql -U your_username -d worklenz_db -f database/sql/5_database_user.sql
```
Alternatively, you can use the provided shell script:
```bash
# Make sure the script is executable
chmod +x database/00-init-db.sh
# Run the script (may need modifications for local execution)
./database/00-init-db.sh
```
5. **Install Dependencies:**
@@ -68,48 +89,49 @@ Getting started with development is a breeze! Follow these steps and you'll be c
npm install
```
This command installs all the necessary libraries required to run the project.
6. **Run the Development Server:**
**a. Start the TypeScript compiler:**
Open a new terminal window and run the following command:
```bash
grunt dev
```
This starts the `grunt` task runner, which compiles TypeScript code into JavaScript.
**b. Start the development server:**
Open another separate terminal window and run the following command:
```bash
npm start
```
```bash
npm run dev
```
This starts the development server allowing you to work on the project.
7. **Run the Production Server:**
**a. Compile TypeScript to JavaScript:**
**a. Build the project:**
Open a new terminal window and run the following command:
```bash
npm run build
```
```bash
grunt build
```
This starts the `grunt` task runner, which compiles TypeScript code into JavaScript for production use.
This will compile the TypeScript code into JavaScript for production use.
**b. Start the production server:**
Once the compilation is complete, run the following command in the same terminal window:
```bash
npm start
```
```bash
npm start
```
## Docker Setup (Alternative)
This starts the production server for your application.
For an easier setup, you can use Docker and Docker Compose:
1. Make sure you have Docker and Docker Compose installed on your system.
2. From the root directory, run:
```bash
docker-compose up -d
```
3. Access the application:
- Frontend: http://localhost:5000
- Backend API: http://localhost:3000
- MinIO Console: http://localhost:9001 (login with minioadmin/minioadmin)
4. To stop the services:
```bash
docker-compose down
```

docker-compose.yml

@@ -1,20 +1,20 @@
services:
frontend:
image: ghcr.io/worklenz/worklenz-frontend
build:
context: ./worklenz-frontend
dockerfile: Dockerfile
container_name: worklenz_frontend
ports:
- "4200:4200"
- "5000:5000"
depends_on:
backend:
condition: service_started
- backend
restart: unless-stopped
env_file:
- ./worklenz-frontend/.env.production
networks:
- worklenz
- worklenz
backend:
image: ghcr.io/worklenz/worklenz-backend
build:
context: ./worklenz-backend
dockerfile: Dockerfile
@@ -24,60 +24,92 @@ services:
depends_on:
db:
condition: service_healthy
environment:
- ANGULAR_DIST_DIR
- ANGULAR_SRC_DIR
- AWS_REGION
- BACKEND_PUBLIC_DIR
- BACKEND_VIEWS_DIR
- COMMIT_BUILD_IMMEDIATELY
- COOKIE_SECRET
- DB_HOST
- DB_MAX_CLIENTS
- DB_NAME
- DB_PASSWORD
- DB_PORT
- DB_USER
- GOOGLE_CALLBACK_URL
- GOOGLE_CLIENT_ID
- GOOGLE_CLIENT_SECRET
- HOSTNAME
- LOGIN_FAILURE_REDIRECT
- NODE_ENV
- PORT
- SESSION_NAME
- SESSION_SECRET
- SLACK_WEBHOOK
- SOCKET_IO_CORS
- SOURCE_EMAIL
- USE_PG_NATIVE
- BUCKET
- REGION
- S3_URL
- S3_ACCESS_KEY_ID
- S3_SECRET_ACCESS_KEY
minio:
condition: service_started
restart: unless-stopped
env_file:
- ./worklenz-backend/.env
networks:
- worklenz
- worklenz
minio:
image: minio/minio:latest
container_name: worklenz_minio
ports:
- "9000:9000"
- "9001:9001"
restart: unless-stopped
environment:
MINIO_ROOT_USER: ${S3_ACCESS_KEY_ID:-minioadmin}
MINIO_ROOT_PASSWORD: ${S3_SECRET_ACCESS_KEY:-minioadmin}
volumes:
- worklenz_minio_data:/data
command: server /data --console-address ":9001"
networks:
- worklenz
# MinIO setup helper - creates default bucket on startup
createbuckets:
image: minio/mc
container_name: worklenz_createbuckets
depends_on:
- minio
restart: on-failure
entrypoint: >
/bin/sh -c '
echo "Waiting for MinIO to start...";
sleep 15;
for i in 1 2 3 4 5; do
echo "Attempt $i to connect to MinIO...";
if /usr/bin/mc alias set myminio http://minio:9000 minioadmin minioadmin; then
echo "Successfully connected to MinIO!";
/usr/bin/mc mb --ignore-existing myminio/worklenz-bucket;
/usr/bin/mc policy set public myminio/worklenz-bucket;
exit 0;
fi
echo "Connection failed, retrying in 5 seconds...";
sleep 5;
done;
echo "Failed to connect to MinIO after 5 attempts";
exit 1;
'
networks:
- worklenz
db:
image: postgres:15
container_name: worklenz_db
environment:
POSTGRES_DB: "${DB_NAME}"
POSTGRES_PASSWORD: "${DB_PASSWORD}"
POSTGRES_USER: ${DB_USER:-postgres}
POSTGRES_DB: ${DB_NAME:-worklenz_db}
POSTGRES_PASSWORD: ${DB_PASSWORD:-password}
healthcheck:
test: ["CMD-SHELL", "pg_isready -d ${DB_NAME} -U ${DB_USER}"]
test: [ "CMD-SHELL", "pg_isready -d ${DB_NAME:-worklenz_db} -U ${DB_USER:-postgres}" ]
interval: 10s
timeout: 5s
retries: 5
restart: unless-stopped
networks:
- worklenz
- worklenz
volumes:
- worklenz_postgres_data:/var/lib/postgresql/data
- ./worklenz-backend/database/:/docker-entrypoint-initdb.d
- type: bind
source: ./worklenz-backend/database
target: /docker-entrypoint-initdb.d
consistency: cached
command: >
bash -c ' if command -v apt-get >/dev/null 2>&1; then
apt-get update && apt-get install -y dos2unix
elif command -v apk >/dev/null 2>&1; then
apk add --no-cache dos2unix
fi && find /docker-entrypoint-initdb.d -type f -name "*.sh" -exec sh -c '\''
dos2unix "{}" 2>/dev/null || true
chmod +x "{}"
'\'' \; && exec docker-entrypoint.sh postgres '
volumes:
worklenz_postgres_data:
worklenz_minio_data:
networks:
worklenz:
worklenz:

start.bat (new file, 185 lines)

@@ -0,0 +1,185 @@
@echo off
echo Starting Worklenz setup... > worklenz_startup.log
echo %DATE% %TIME% >> worklenz_startup.log
echo.
echo " __ __ _ _"
echo " \ \ / / | | | |"
echo " \ \ /\ / /__ _ __| | _| | ___ _ __ ____"
echo " \ \/ \/ / _ \| '__| |/ / |/ _ \ '_ \|_ /"
echo " \ /\ / (_) | | | <| | __/ | | |/ /"
echo " \/ \/ \___/|_| |_|\_\_|\___|_| |_/___|"
echo.
echo W O R K L E N Z
echo.
echo Starting Worklenz Docker Environment...
echo.
REM Check for Docker installation
echo Checking for Docker installation...
where docker >nul 2>>worklenz_startup.log
IF %ERRORLEVEL% NEQ 0 (
echo [91mWarning: Docker is not installed or not in PATH[0m
echo Warning: Docker is not installed or not in PATH >> worklenz_startup.log
echo Please install Docker first: https://docs.docker.com/get-docker/
echo [93mContinuing for debugging purposes...[0m
) ELSE (
echo [92m^✓[0m Docker is installed
echo Docker is installed >> worklenz_startup.log
)
REM Check for docker-compose installation
echo Checking for docker-compose...
where docker-compose >nul 2>>worklenz_startup.log
IF %ERRORLEVEL% NEQ 0 (
echo [91mWarning: docker-compose is not installed or not in PATH[0m
echo Warning: docker-compose is not installed or not in PATH >> worklenz_startup.log
echo [93mContinuing for debugging purposes...[0m
) ELSE (
echo [92m^✓[0m docker-compose is installed
echo docker-compose is installed >> worklenz_startup.log
)
REM Check for update-docker-env.sh
IF EXIST update-docker-env.sh (
echo [94mFound update-docker-env.sh script. You can use it to update environment variables.[0m
echo Found update-docker-env.sh script >> worklenz_startup.log
)
REM Run preflight checks
echo Running Docker daemon check...
docker info >nul 2>>worklenz_startup.log
IF %ERRORLEVEL% NEQ 0 (
echo [91mWarning: Docker daemon is not running[0m
echo Warning: Docker daemon is not running >> worklenz_startup.log
echo Please start Docker and try again
echo [93mContinuing for debugging purposes...[0m
) ELSE (
echo [92m^✓[0m Docker daemon is running
echo Docker daemon is running >> worklenz_startup.log
)
REM Stop any running containers
echo Stopping any running containers...
docker-compose down > nul 2>>worklenz_startup.log
IF %ERRORLEVEL% NEQ 0 (
echo [91mWarning: Error stopping containers[0m
echo Warning: Error stopping containers >> worklenz_startup.log
echo [93mContinuing anyway...[0m
)
REM Start the containers
echo Starting containers...
echo Attempting to start containers... >> worklenz_startup.log
REM Start with docker-compose
docker-compose up -d > docker_up_output.txt 2>&1
type docker_up_output.txt >> worklenz_startup.log
REM Check for errors in output
findstr /C:"Error" docker_up_output.txt > nul
IF %ERRORLEVEL% EQU 0 (
echo [91mErrors detected during startup[0m
echo Errors detected during startup >> worklenz_startup.log
type docker_up_output.txt
)
del docker_up_output.txt > nul 2>&1
REM Wait for services to be ready
echo Waiting for services to start...
timeout /t 10 /nobreak > nul
echo After timeout, checking services >> worklenz_startup.log
REM Check service status using docker-compose
echo Checking service status...
echo Checking service status... >> worklenz_startup.log
docker-compose ps --services --filter "status=running" > running_services.txt 2>>worklenz_startup.log
REM Log services output
type running_services.txt >> worklenz_startup.log
echo.
echo Checking individual services:
echo Checking individual services: >> worklenz_startup.log
REM Check frontend
findstr /C:"frontend" running_services.txt > nul
IF %ERRORLEVEL% EQU 0 (
echo [92m^✓[0m Frontend is running
echo Frontend URL: http://localhost:5000 (or https://localhost:5000 if SSL is enabled)
echo Frontend is running >> worklenz_startup.log
) ELSE (
echo [91m^✗[0m Frontend service failed to start
echo Frontend service failed to start >> worklenz_startup.log
)
REM Check backend
findstr /C:"backend" running_services.txt > nul
IF %ERRORLEVEL% EQU 0 (
echo [92m^✓[0m Backend is running
echo Backend URL: http://localhost:3000 (or https://localhost:3000 if SSL is enabled)
echo Backend is running >> worklenz_startup.log
) ELSE (
echo [91m^✗[0m Backend service failed to start
echo Backend service failed to start >> worklenz_startup.log
)
REM Check MinIO
findstr /C:"minio" running_services.txt > nul
IF %ERRORLEVEL% EQU 0 (
echo [92m^✓[0m MinIO is running
echo MinIO Console URL: http://localhost:9001 (login: minioadmin/minioadmin)
echo MinIO is running >> worklenz_startup.log
) ELSE (
echo [91m^✗[0m MinIO service failed to start
echo MinIO service failed to start >> worklenz_startup.log
REM Check MinIO logs
echo Checking MinIO logs for errors:
docker-compose logs minio --tail=20 > minio_logs.txt
type minio_logs.txt
type minio_logs.txt >> worklenz_startup.log
del minio_logs.txt > nul 2>&1
)
REM Check Database
findstr /C:"db" running_services.txt > nul
IF %ERRORLEVEL% EQU 0 (
echo [92m^✓[0m Database is running
echo Database is running >> worklenz_startup.log
) ELSE (
echo [91m^✗[0m Database service failed to start
echo Database service failed to start >> worklenz_startup.log
)
REM Check if all services are running
set allRunning=1
docker-compose ps --services | findstr /V /C:"frontend" /C:"backend" /C:"minio" /C:"db" > remaining_services.txt
FOR /F "tokens=*" %%s IN (remaining_services.txt) DO (
findstr /C:"%%s" running_services.txt > nul || set allRunning=0
)
del remaining_services.txt > nul 2>&1
del running_services.txt > nul 2>&1
IF %allRunning% EQU 1 (
echo.
echo [92mWorklenz setup completed![0m
echo Setup completed successfully >> worklenz_startup.log
) ELSE (
echo.
echo [93mWarning: Some services may not be running correctly.[0m
echo Warning: Some services may not be running correctly >> worklenz_startup.log
echo Run 'docker-compose logs' to check for errors.
)
echo You can access the application at: http://localhost:5000
echo To stop the services, run: stop.bat
echo To update environment variables, run: update-docker-env.sh
echo.
echo Note: To enable SSL, set ENABLE_SSL=true in your .env file and run update-docker-env.sh
echo.
echo For any errors, check worklenz_startup.log file
echo.
echo Press any key to exit...
pause > nul

start.sh (new executable file, 151 lines)

@@ -0,0 +1,151 @@
#!/bin/bash
# Colors for terminal output
GREEN='\033[0;32m'
YELLOW='\033[1;33m'
RED='\033[0;31m'
BLUE='\033[0;34m'
NC='\033[0m' # No Color
# Print banner
echo -e "${GREEN}"
echo " __ __ _ _"
echo " \ \ / / | | | |"
echo " \ \ /\ / /__ _ __| | _| | ___ _ __ ____"
echo " \ \/ \/ / _ \| '__| |/ / |/ _ \ '_ \|_ /"
echo " \ /\ / (_) | | | <| | __/ | | |/ /"
echo " \/ \/ \___/|_| |_|\_\_|\___|_| |_/___|"
echo ""
echo " W O R K L E N Z "
echo -e "${NC}"
echo "Starting Worklenz Docker Environment..."
# Check if update-docker-env.sh exists and is executable
if [ -f update-docker-env.sh ] && [ -x update-docker-env.sh ]; then
echo -e "${BLUE}Found update-docker-env.sh script. You can use it to update environment variables.${NC}"
fi
# Function to check if a service is running
check_service() {
local service_name=$1
local container_name=$2
local url=$3
local max_attempts=30
local attempt=1
echo -e "${BLUE}Checking ${service_name} service...${NC}"
# First check if the container is running
while [ $attempt -le $max_attempts ]; do
if docker ps | grep -q "${container_name}"; then
# Container is running
if [ -z "$url" ]; then
# No URL to check, assume service is up
echo -e "${GREEN}${NC} ${service_name} is running"
return 0
else
# Check if service endpoint is responding
if curl -s -f -o /dev/null "$url"; then
echo -e "${GREEN}${NC} ${service_name} is running and responding at ${url}"
return 0
else
if [ $attempt -eq $max_attempts ]; then
echo -e "${YELLOW}${NC} ${service_name} container is running but not responding at ${url}"
return 1
fi
fi
fi
else
if [ $attempt -eq $max_attempts ]; then
echo -e "${RED}${NC} ${service_name} failed to start"
return 1
fi
fi
echo -n "."
attempt=$((attempt+1))
sleep 1
done
return 1
}
# Check if Docker is installed
if ! command -v docker &> /dev/null; then
echo -e "${RED}Error: Docker is not installed or not in PATH${NC}"
echo "Please install Docker first: https://docs.docker.com/get-docker/"
exit 1
fi
# Check if Docker daemon is running
echo -e "${BLUE}Running preflight checks...${NC}"
if ! docker info &> /dev/null; then
echo -e "${RED}Error: Docker daemon is not running${NC}"
echo "Please start Docker and try again"
exit 1
fi
echo -e "${GREEN}${NC} Docker is running"
# Determine Docker Compose command to use
DOCKER_COMPOSE_CMD=""
if docker compose version &> /dev/null; then
DOCKER_COMPOSE_CMD="docker compose"
echo -e "${GREEN}${NC} Using Docker Compose V2"
elif command -v docker-compose &> /dev/null; then
DOCKER_COMPOSE_CMD="docker-compose"
echo -e "${YELLOW}${NC} Using legacy Docker Compose"
else
echo -e "${RED}Error: Docker Compose is not installed or not in PATH${NC}"
echo "Please install Docker Compose: https://docs.docker.com/compose/install/"
exit 1
fi
# Check if any of the ports are already in use
ports=(3000 5000 9000 9001 5432)
for port in "${ports[@]}"; do
if lsof -i:"$port" > /dev/null 2>&1; then
echo -e "${YELLOW}⚠ Warning: Port $port is already in use. This may cause conflicts.${NC}"
fi
done
# Start the containers
echo -e "${BLUE}Starting Worklenz services...${NC}"
$DOCKER_COMPOSE_CMD down
$DOCKER_COMPOSE_CMD up -d
# Wait for services to fully initialize
echo -e "${BLUE}Waiting for services to initialize...${NC}"
echo "This may take a minute or two depending on your system..."
# Check each service
check_service "Database" "worklenz_db" ""
DB_STATUS=$?
check_service "MinIO" "worklenz_minio" "http://localhost:9000/minio/health/live"
MINIO_STATUS=$?
check_service "Backend" "worklenz_backend" "http://localhost:3000/public/health"
BACKEND_STATUS=$?
check_service "Frontend" "worklenz_frontend" "http://localhost:5000"
FRONTEND_STATUS=$?
# Display service URLs
echo -e "\n${BLUE}Service URLs:${NC}"
[ $FRONTEND_STATUS -eq 0 ] && echo " • Frontend: http://localhost:5000 (or https://localhost:5000 if SSL is enabled)"
[ $BACKEND_STATUS -eq 0 ] && echo " • Backend API: http://localhost:3000 (or https://localhost:3000 if SSL is enabled)"
[ $MINIO_STATUS -eq 0 ] && echo " • MinIO Console: http://localhost:9001 (login: minioadmin/minioadmin)"
# Check if all services are up
if [ $DB_STATUS -eq 0 ] && [ $MINIO_STATUS -eq 0 ] && [ $BACKEND_STATUS -eq 0 ] && [ $FRONTEND_STATUS -eq 0 ]; then
echo -e "\n${GREEN}✅ All Worklenz services are running successfully!${NC}"
else
echo -e "\n${YELLOW}⚠ Some services may not be running properly. Check the logs for more details:${NC}"
echo " $DOCKER_COMPOSE_CMD logs"
fi
echo -e "\n${BLUE}Useful commands:${NC}"
echo " • View logs: $DOCKER_COMPOSE_CMD logs -f"
echo " • Stop services: ./stop.sh"
echo " • Update environment variables: ./update-docker-env.sh"
echo -e "\n${YELLOW}Note:${NC} To enable SSL, set ENABLE_SSL=true in your .env file and run ./update-docker-env.sh"

stop.bat (new file, 7 lines)

@@ -0,0 +1,7 @@
@echo off
echo [91mStopping Worklenz Docker Environment...[0m
REM Stop the containers
docker-compose down
echo [92mWorklenz services have been stopped.[0m

stop.sh (new executable file, 50 lines)

@@ -0,0 +1,50 @@
#!/bin/bash
# Colors for terminal output
GREEN='\033[0;32m'
YELLOW='\033[1;33m'
RED='\033[0;31m'
BLUE='\033[0;34m'
NC='\033[0m' # No Color
# Print banner
echo -e "${RED}"
echo " __ __ _ _"
echo " \ \ / / | | | |"
echo " \ \ /\ / /__ _ __| | _| | ___ _ __ ____"
echo " \ \/ \/ / _ \| '__| |/ / |/ _ \ '_ \|_ /"
echo " \ /\ / (_) | | | <| | __/ | | |/ /"
echo " \/ \/ \___/|_| |_|\_\_|\___|_| |_/___|"
echo ""
echo " W O R K L E N Z "
echo -e "${NC}"
echo -e "${BLUE}Stopping Worklenz Docker Environment...${NC}"
# Determine Docker Compose command to use
DOCKER_COMPOSE_CMD=""
if docker compose version &> /dev/null; then
DOCKER_COMPOSE_CMD="docker compose"
echo -e "${GREEN}${NC} Using Docker Compose V2"
elif command -v docker-compose &> /dev/null; then
DOCKER_COMPOSE_CMD="docker-compose"
echo -e "${YELLOW}${NC} Using legacy Docker Compose"
else
echo -e "${RED}Error: Docker Compose is not installed or not in PATH${NC}"
echo "Please install Docker Compose: https://docs.docker.com/compose/install/"
exit 1
fi
# Stop the containers
echo -e "${BLUE}Stopping all services...${NC}"
$DOCKER_COMPOSE_CMD down
# Check if containers are still running
if docker ps | grep -q "worklenz_"; then
echo -e "${YELLOW}⚠ Some Worklenz containers are still running. Forcing stop...${NC}"
docker stop $(docker ps -q --filter "name=worklenz_")
echo -e "${GREEN}${NC} Forced stop completed."
else
echo -e "${GREEN}${NC} All Worklenz services have been stopped successfully."
fi
echo -e "\n${BLUE}To start Worklenz again, run:${NC} ./start.sh"

update-docker-env.sh (new executable file, 141 lines)

@@ -0,0 +1,141 @@
#!/bin/bash
# Script to set environment variables for Docker deployment
# Usage: ./update-docker-env.sh [hostname] [use_ssl]
# Default hostname if not provided
DEFAULT_HOSTNAME="localhost"
HOSTNAME=${1:-$DEFAULT_HOSTNAME}
# Check if SSL should be used
USE_SSL=${2:-false}
# Set protocol prefixes based on SSL flag
if [ "$USE_SSL" = "true" ]; then
HTTP_PREFIX="https://"
WS_PREFIX="wss://"
else
HTTP_PREFIX="http://"
WS_PREFIX="ws://"
fi
# Frontend URLs
FRONTEND_URL="${HTTP_PREFIX}${HOSTNAME}:5000"
MINIO_DASHBOARD_URL="${HTTP_PREFIX}${HOSTNAME}:9001"
# Create or overwrite frontend .env.development file
mkdir -p worklenz-frontend
cat > worklenz-frontend/.env.development << EOL
# API Connection
VITE_API_URL=http://localhost:3000
VITE_SOCKET_URL=ws://localhost:3000
# Application Environment
VITE_APP_TITLE=Worklenz
VITE_APP_ENV=development
# Mixpanel
VITE_MIXPANEL_TOKEN=
# Recaptcha
VITE_ENABLE_RECAPTCHA=false
VITE_RECAPTCHA_SITE_KEY=
# Session ID
VITE_WORKLENZ_SESSION_ID=worklenz-session-id
EOL
# Create frontend .env.production file
cat > worklenz-frontend/.env.production << EOL
# API Connection
VITE_API_URL=${HTTP_PREFIX}${HOSTNAME}:3000
VITE_SOCKET_URL=${WS_PREFIX}${HOSTNAME}:3000
# Application Environment
VITE_APP_TITLE=Worklenz
VITE_APP_ENV=production
# Mixpanel
VITE_MIXPANEL_TOKEN=
# Recaptcha
VITE_ENABLE_RECAPTCHA=false
VITE_RECAPTCHA_SITE_KEY=
# Session ID
VITE_WORKLENZ_SESSION_ID=worklenz-session-id
EOL
# Create backend environment file
mkdir -p worklenz-backend
cat > worklenz-backend/.env << EOL
# Server
NODE_ENV=production
PORT=3000
SESSION_NAME=worklenz.sid
SESSION_SECRET=$(openssl rand -base64 48)
COOKIE_SECRET=$(openssl rand -base64 48)
# CORS
SOCKET_IO_CORS=${FRONTEND_URL}
SERVER_CORS=${FRONTEND_URL}
# Google Login
GOOGLE_CLIENT_ID="your_google_client_id"
GOOGLE_CLIENT_SECRET="your_google_client_secret"
GOOGLE_CALLBACK_URL="${FRONTEND_URL}/secure/google/verify"
LOGIN_FAILURE_REDIRECT="${FRONTEND_URL}/auth/authenticating"
LOGIN_SUCCESS_REDIRECT="${FRONTEND_URL}/auth/authenticating"
# Database
DB_HOST=db
DB_PORT=5432
DB_USER=postgres
DB_PASSWORD=password
DB_NAME=worklenz_db
DB_MAX_CLIENTS=50
USE_PG_NATIVE=true
# Storage Configuration
STORAGE_PROVIDER=s3
AWS_REGION=us-east-1
AWS_BUCKET=worklenz-bucket
AWS_ACCESS_KEY_ID=minioadmin
AWS_SECRET_ACCESS_KEY=minioadmin
S3_URL=http://minio:9000
# Backend Directories
BACKEND_PUBLIC_DIR=./public
BACKEND_VIEWS_DIR=./views
# Host
HOSTNAME=${HOSTNAME}
FRONTEND_URL=${FRONTEND_URL}
# Email
SOURCE_EMAIL=no-reply@example.com
# Notifications
SLACK_WEBHOOK=
# Other Settings
COMMIT_BUILD_IMMEDIATELY=true
# JWT Secret
JWT_SECRET=$(openssl rand -base64 48)
EOL
echo "Environment configuration updated for ${HOSTNAME} with" $([ "$USE_SSL" = "true" ] && echo "HTTPS/WSS" || echo "HTTP/WS")
echo "Created/updated environment files:"
echo "- worklenz-frontend/.env.development (development)"
echo "- worklenz-frontend/.env.production (production build)"
echo "- worklenz-backend/.env"
echo
echo "To run with Docker Compose, use: docker-compose up -d"
echo
echo "Frontend URL: ${FRONTEND_URL}"
echo "API URL: ${HTTP_PREFIX}${HOSTNAME}:3000"
echo "Socket URL: ${WS_PREFIX}${HOSTNAME}:3000"
echo "MinIO Dashboard URL: ${MINIO_DASHBOARD_URL}"
echo "CORS is configured to allow requests from: ${FRONTEND_URL}"

.dockerignore

@@ -1,5 +1,9 @@
node_modules
npm-debug.log
build
.scannerwork
coverage
.dockerignore
.git
*.md
tests

worklenz-backend/.env.example

@@ -2,56 +2,75 @@
NODE_ENV=development
PORT=3000
SESSION_NAME=worklenz.sid
SESSION_SECRET="YOUR_SESSION_SECRET_HERE"
COOKIE_SECRET="YOUR_COOKIE_SECRET_HERE"
SESSION_SECRET="your_session_secret"
COOKIE_SECRET="your_cookie_secret"
# CORS
SOCKET_IO_CORS=http://localhost:4200
SOCKET_IO_CORS=http://localhost:5000
SERVER_CORS=*
# Database
DB_USER=DATABASE_USER_HERE # default : worklenz_backend (update "user-permission.sql" if needed)
DB_PASSWORD=DATABASE_PASSWORD_HERE
DB_NAME=DATABASE_NAME_HERE # default : worklenz_db
DB_HOST=DATABASE_HOST_HERE # default : localhost
DB_PORT=DATABASE_PORT_HERE # default : 5432
DB_USER=postgres
DB_PASSWORD=your_db_password
DB_NAME=worklenz_db
DB_HOST=localhost
DB_PORT=5432
DB_MAX_CLIENTS=50
# Google Login
GOOGLE_CLIENT_ID="GOOGLE_CLIENT_ID_HERE"
GOOGLE_CLIENT_SECRET="GOOGLE_CLIENT_SECRET_HERE"
GOOGLE_CALLBACK_URL="http://localhost:3000/secure/google/verify"
LOGIN_FAILURE_REDIRECT="/"
LOGIN_SUCCESS_REDIRECT="http://localhost:4200/auth/authenticate"
GOOGLE_CLIENT_ID="your_google_client_id"
GOOGLE_CLIENT_SECRET="your_google_client_secret"
GOOGLE_CALLBACK_URL="http://localhost:5000/secure/google/verify"
LOGIN_FAILURE_REDIRECT="http://localhost:5000/auth/authenticating"
LOGIN_SUCCESS_REDIRECT="http://localhost:5000/auth/authenticating"
# CLI
ANGULAR_DIST_DIR="/path/worklenz_frontend/dist/worklenz"
ANGULAR_SRC_DIR="/path/worklenz_frontend"
BACKEND_PUBLIC_DIR="/path/worklenz_backend/src/public"
BACKEND_VIEWS_DIR="/path/worklenz_backend/src/views/admin"
COMMIT_BUILD_IMMEDIATELY=true
ANGULAR_DIST_DIR="path/to/frontend/dist"
ANGULAR_SRC_DIR="path/to/frontend"
BACKEND_PUBLIC_DIR="path/to/backend/public"
BACKEND_VIEWS_DIR="path/to/backend/views"
COMMIT_BUILD_IMMEDIATELY=false
# HOST
HOSTNAME=localhost:4200
HOSTNAME=localhost:5000
# SLACK
SLACK_WEBHOOK=SLACK_WEBHOOK_HERE
USE_PG_NATIVE=true
SLACK_WEBHOOK=your_slack_webhook_url
USE_PG_NATIVE=false
# JWT SECRET
JWT_SECRET=JWT_SECRET_CODE_HERE
JWT_SECRET=your_jwt_secret
# FRONTEND_URL
FRONTEND_URL=http://localhost:5000
# STORAGE
STORAGE_PROVIDER=s3 # values s3 or azure
# AWS
AWS_REGION="us-west-2"
AWS_ACCESS_KEY_ID="AWS_ACCESS_KEY_ID_HERE"
AWS_SECRET_ACCESS_KEY="AWS_SECRET_ACCESS_KEY_HERE"
AWS_REGION="your_aws_region"
AWS_ACCESS_KEY_ID="your_aws_access_key_id"
AWS_SECRET_ACCESS_KEY="your_aws_secret_access_key"
AWS_BUCKET="your_s3_bucket"
S3_URL="your_s3_url"
# S3 Credentials
REGION="us-west-2"
BUCKET="BUCKET_NAME_HERE"
S3_URL="S3_URL_HERE"
S3_ACCESS_KEY_ID="S3_ACCESS_KEY_ID_HERE"
S3_SECRET_ACCESS_KEY="S3_SECRET_ACCESS_KEY_HERE"
# Azure Storage
AZURE_STORAGE_ACCOUNT_NAME="your_storage_account_name"
AZURE_STORAGE_CONTAINER="your_storage_container"
AZURE_STORAGE_ACCOUNT_KEY="your_storage_account_key"
AZURE_STORAGE_URL="your_storage_url"
# SES email
SOURCE_EMAIL="SOURCE_EMAIL_HERE" #Worklenz <noreply@worklenz.com>
# DIRECTPAY
DP_STAGE=DEV
DP_URL=your_url
DP_MERCHANT_ID=your_merchant_id
DP_SECRET_KEY=your_secret_key
DP_API_KEY=your_api_key
CONTACT_US_EMAIL=support@example.com
GOOGLE_CAPTCHA_SECRET_KEY=your_captcha_secret_key
GOOGLE_CAPTCHA_PASS_SCORE=0.8
# Email Cronjobs
ENABLE_EMAIL_CRONJOBS=true

worklenz-backend/Dockerfile

@@ -1,26 +1,39 @@
# Use the official Node.js 18 image as a base
FROM node:18
# Create and set the working directory
WORKDIR /usr/src/app
# Install global dependencies
RUN npm install -g ts-node typescript grunt grunt-cli
# Copy package.json and package-lock.json (if available)
COPY package*.json ./
# Install app dependencies
RUN npm ci
# Copy the rest of the application code
COPY . .
# Run the build script to compile TypeScript to JavaScript
RUN npm run build
# Expose the port the app runs on
EXPOSE 3000
# Start the application
CMD ["npm", "start"]
# --- Stage 1: Build ---
FROM node:20-slim AS builder
ARG RELEASE_VERSION
RUN apt-get update && apt-get install -y \
python3 \
make \
g++ \
curl \
postgresql-server-dev-all \
&& rm -rf /var/lib/apt/lists/*
WORKDIR /usr/src/app
RUN npm install -g ts-node typescript grunt grunt-cli
COPY package*.json ./
RUN npm ci
COPY . .
RUN npm run build
RUN echo "$RELEASE_VERSION" > release
# --- Stage 2: Production Image ---
FROM node:20-slim
RUN apt-get update && apt-get install -y libpq5 && rm -rf /var/lib/apt/lists/*
WORKDIR /app
COPY --from=builder /usr/src/app/package*.json ./
COPY --from=builder /usr/src/app/build ./build
COPY --from=builder /usr/src/app/node_modules ./node_modules
COPY --from=builder /usr/src/app/release ./release
COPY --from=builder /usr/src/app/worklenz-email-templates ./worklenz-email-templates
EXPOSE 3000
CMD ["node", "build/bin/www"]

View File

@@ -1,81 +1,96 @@
# Worklenz Backend
1. **Open your IDE:**
Open the project directory in your preferred code editor or IDE like Visual Studio Code.
2. **Configure Environment Variables:**
- Create a copy of the `.env.template` file and name it `.env`.
- Update the required fields in `.env` with the specific information.
3. **Restore Database**
- Create a new database named `worklenz_db` on your local PostgreSQL server.
- Update the `DATABASE_NAME` and `PASSWORD` in the `database/6_user_permission.sql` with your DB credentials.
- Open a query console and execute the queries from the .sql files in the `database` directories, following the provided order.
4. **Install Dependencies:**
```bash
npm install
```
This command installs all the necessary libraries required to run the project.
5. **Run the Development Server:**
**a. Start the TypeScript compiler:**
Open a new terminal window and run the following command:
```bash
grunt dev
```
This starts the `grunt` task runner, which compiles TypeScript code into JavaScript.
**b. Start the development server:**
Open another separate terminal window and run the following command:
```bash
npm start
```
This starts the development server allowing you to work on the project.
6. **Run the Production Server:**
**a. Compile TypeScript to JavaScript:**
Open a new terminal window and run the following command:
```bash
grunt build
```
This starts the `grunt` task runner, which compiles TypeScript code into JavaScript for production use.
**b. Start the production server:**
Once the compilation is complete, run the following command in the same terminal window:
```bash
npm start
```
This starts the production server for your application.
### CLI
- Create controller: `$ node new controller Test`
- Create angular release: `$ node new release`
### Developement Rules
- Controllers should only generate/create using the CLI (`node new controller Projects`)
- Validations should only be done using a middleware placed under src/validators/ and used inside the routers (E.g., api-router.ts)
- Validators should only generate/create using the CLI (`node new vaidator projects-params`)
## Pull submodules
- git submodule update --init --recursive
# Worklenz Backend
This is the Express.js backend for the Worklenz project management application.
## Getting Started
Follow these steps to set up the backend for development:
1. **Configure Environment Variables:**
- Create a copy of the `.env.example` file and name it `.env`.
- Update the required fields in `.env` with your specific configuration.
2. **Set up Database:**
- Create a new database named `worklenz_db` on your local PostgreSQL server.
- Update the database connection details in your `.env` file.
- Execute the SQL setup files in the correct order:
```bash
# From your PostgreSQL client or command line
psql -U your_username -d worklenz_db -f database/sql/0_extensions.sql
psql -U your_username -d worklenz_db -f database/sql/1_tables.sql
psql -U your_username -d worklenz_db -f database/sql/indexes.sql
psql -U your_username -d worklenz_db -f database/sql/4_functions.sql
psql -U your_username -d worklenz_db -f database/sql/triggers.sql
psql -U your_username -d worklenz_db -f database/sql/3_views.sql
psql -U your_username -d worklenz_db -f database/sql/2_dml.sql
psql -U your_username -d worklenz_db -f database/sql/5_database_user.sql
```
Alternatively, you can use the provided shell script:
```bash
# Make sure the script is executable
chmod +x database/00-init-db.sh
# Run the script (may need modifications for local execution)
./database/00-init-db.sh
```
3. **Install Dependencies:**
```bash
npm install
```
This command installs all the necessary libraries required to run the project.
4. **Run the Development Server:**
```bash
npm run dev
```
This starts the development server with hot reloading enabled.
5. **Build for Production:**
```bash
npm run build
```
This will compile the TypeScript code into JavaScript for production use.
6. **Start Production Server:**
```bash
npm start
```
## API Documentation
The API endpoints are organized into logical controllers and follow RESTful design principles. The main API routes are prefixed with `/api/v1`.
### Authentication
Authentication is handled via JWT tokens. Protected routes require a valid token in the Authorization header.
### File Storage
The application supports both S3-compatible storage and Azure Blob Storage for file uploads. Configure your preferred storage option in the `.env` file.
## Development Guidelines
- Code should be written in TypeScript
- Follow the established patterns for controllers, services, and middlewares
- Add proper error handling for all API endpoints
- Write unit tests for critical functionality
- Document API endpoints with clear descriptions and examples
## Running Tests
```bash
npm test
```
## Docker Support
The backend can be run in a Docker container. See the main project README for Docker setup instructions.
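To illustrate the Authentication note above, a protected endpoint could be exercised from the command line roughly like this; the `/api/v1/projects` path and the token variable are illustrative only:
```bash
# Call a protected /api/v1 route with a token in the Authorization header
curl -H "Authorization: Bearer $WORKLENZ_TOKEN" http://localhost:3000/api/v1/projects
```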

View File

@@ -0,0 +1,55 @@
#!/bin/bash
set -e
# This script controls the order of SQL file execution during database initialization
echo "Starting database initialization..."
# Check if we have SQL files in expected locations
if [ -f "/docker-entrypoint-initdb.d/sql/0_extensions.sql" ]; then
SQL_DIR="/docker-entrypoint-initdb.d/sql"
echo "Using SQL files from sql/ subdirectory"
elif [ -f "/docker-entrypoint-initdb.d/0_extensions.sql" ]; then
# First time setup - copy files to subdirectory
echo "Copying SQL files to sql/ subdirectory..."
mkdir -p /docker-entrypoint-initdb.d/sql
# Copy all SQL files (except this script) to the subdirectory
for f in /docker-entrypoint-initdb.d/*.sql; do
if [ -f "$f" ]; then
cp "$f" /docker-entrypoint-initdb.d/sql/
echo "Copied $f to sql/ subdirectory"
fi
done
SQL_DIR="/docker-entrypoint-initdb.d/sql"
else
echo "SQL files not found in expected locations!"
exit 1
fi
# Execute SQL files in the correct order
echo "Executing 0_extensions.sql..."
psql -v ON_ERROR_STOP=1 --username "$POSTGRES_USER" --dbname "$POSTGRES_DB" -f "$SQL_DIR/0_extensions.sql"
echo "Executing 1_tables.sql..."
psql -v ON_ERROR_STOP=1 --username "$POSTGRES_USER" --dbname "$POSTGRES_DB" -f "$SQL_DIR/1_tables.sql"
echo "Executing indexes.sql..."
psql -v ON_ERROR_STOP=1 --username "$POSTGRES_USER" --dbname "$POSTGRES_DB" -f "$SQL_DIR/indexes.sql"
echo "Executing 4_functions.sql..."
psql -v ON_ERROR_STOP=1 --username "$POSTGRES_USER" --dbname "$POSTGRES_DB" -f "$SQL_DIR/4_functions.sql"
echo "Executing triggers.sql..."
psql -v ON_ERROR_STOP=1 --username "$POSTGRES_USER" --dbname "$POSTGRES_DB" -f "$SQL_DIR/triggers.sql"
echo "Executing 3_views.sql..."
psql -v ON_ERROR_STOP=1 --username "$POSTGRES_USER" --dbname "$POSTGRES_DB" -f "$SQL_DIR/3_views.sql"
echo "Executing 2_dml.sql..."
psql -v ON_ERROR_STOP=1 --username "$POSTGRES_USER" --dbname "$POSTGRES_DB" -f "$SQL_DIR/2_dml.sql"
echo "Executing 5_database_user.sql..."
psql -v ON_ERROR_STOP=1 --username "$POSTGRES_USER" --dbname "$POSTGRES_DB" -f "$SQL_DIR/5_database_user.sql"
echo "Database initialization completed successfully"

File diff suppressed because it is too large

View File

@@ -1,77 +0,0 @@
CREATE OR REPLACE FUNCTION sys_insert_task_priorities() RETURNS VOID AS
$$
BEGIN
INSERT INTO task_priorities (name, value, color_code) VALUES ('Low', 0, '#75c997');
INSERT INTO task_priorities (name, value, color_code) VALUES ('Medium', 1, '#fbc84c');
INSERT INTO task_priorities (name, value, color_code) VALUES ('High', 2, '#f37070');
END;
$$ LANGUAGE plpgsql;
CREATE OR REPLACE FUNCTION sys_insert_project_access_levels() RETURNS VOID AS
$$
BEGIN
INSERT INTO project_access_levels (name, key)
VALUES ('Admin', 'ADMIN');
INSERT INTO project_access_levels (name, key)
VALUES ('Member', 'MEMBER');
INSERT INTO project_access_levels (name, key)
VALUES ('Project Manager', 'PROJECT_MANAGER');
END;
$$ LANGUAGE plpgsql;
CREATE OR REPLACE FUNCTION sys_insert_task_status_categories() RETURNS VOID AS
$$
BEGIN
INSERT INTO sys_task_status_categories (name, color_code, index, is_todo)
VALUES ('To do', '#a9a9a9', 0, TRUE);
INSERT INTO sys_task_status_categories (name, color_code, index, is_doing)
VALUES ('Doing', '#70a6f3', 1, TRUE);
INSERT INTO sys_task_status_categories (name, color_code, index, is_done)
VALUES ('Done', '#75c997', 2, TRUE);
END;
$$ LANGUAGE plpgsql;
CREATE OR REPLACE FUNCTION sys_insert_project_statuses() RETURNS VOID AS
$$
BEGIN
INSERT INTO sys_project_statuses (name, color_code, icon, sort_order, is_default)
VALUES ('Cancelled', '#f37070', 'close-circle', 0, FALSE),
('Blocked', '#cbc8a1', 'stop', 1, FALSE),
('On Hold', '#cbc8a1', 'stop', 2, FALSE),
('Proposed', '#cbc8a1', 'clock-circle', 3, TRUE),
('In Planning', '#cbc8a1', 'clock-circle', 4, FALSE),
('In Progress', '#80ca79', 'clock-circle', 5, FALSE),
('Completed', '#80ca79', 'check-circle', 6, FALSE);
END;
$$ LANGUAGE plpgsql;
CREATE OR REPLACE FUNCTION sys_insert_project_healths() RETURNS VOID AS
$$
BEGIN
INSERT INTO sys_project_healths (name, color_code, sort_order, is_default)
VALUES ('Not Set', '#a9a9a9', 0, TRUE);
INSERT INTO sys_project_healths (name, color_code, sort_order, is_default)
VALUES ('Needs Attention', '#fbc84c', 1, FALSE);
INSERT INTO sys_project_healths (name, color_code, sort_order, is_default)
VALUES ('At Risk', '#f37070', 2, FALSE);
INSERT INTO sys_project_healths (name, color_code, sort_order, is_default)
VALUES ('Good', '#75c997', 3, FALSE);
END;
$$ LANGUAGE plpgsql;
SELECT sys_insert_task_priorities();
SELECT sys_insert_project_access_levels();
SELECT sys_insert_task_status_categories();
SELECT sys_insert_project_statuses();
SELECT sys_insert_project_healths();
DROP FUNCTION sys_insert_task_priorities();
DROP FUNCTION sys_insert_project_access_levels();
DROP FUNCTION sys_insert_task_status_categories();
DROP FUNCTION sys_insert_project_statuses();
DROP FUNCTION sys_insert_project_healths();
INSERT INTO timezones (name, abbrev, utc_offset)
SELECT name, abbrev, utc_offset
FROM pg_timezone_names;

View File

@@ -1,34 +0,0 @@
CREATE VIEW task_labels_view(name, task_id, label_id) AS
SELECT (SELECT team_labels.name
FROM team_labels
WHERE team_labels.id = task_labels.label_id) AS name,
task_labels.task_id,
task_labels.label_id
FROM task_labels;
CREATE VIEW tasks_with_status_view(task_id, parent_task_id, is_todo, is_doing, is_done) AS
SELECT tasks.id AS task_id,
tasks.parent_task_id,
stsc.is_todo,
stsc.is_doing,
stsc.is_done
FROM tasks
JOIN task_statuses ts ON tasks.status_id = ts.id
JOIN sys_task_status_categories stsc ON ts.category_id = stsc.id
WHERE tasks.archived IS FALSE;
CREATE VIEW team_member_info_view(avatar_url, email, name, user_id, team_member_id, team_id) AS
SELECT u.avatar_url,
COALESCE(u.email, (SELECT email_invitations.email
FROM email_invitations
WHERE email_invitations.team_member_id = team_members.id)) AS email,
COALESCE(u.name, (SELECT email_invitations.name
FROM email_invitations
WHERE email_invitations.team_member_id = team_members.id)) AS name,
u.id AS user_id,
team_members.id AS team_member_id,
team_members.team_id
FROM team_members
LEFT JOIN users u ON team_members.user_id = u.id;

File diff suppressed because it is too large

View File

@@ -1,35 +0,0 @@
-- Default ROLE : worklenz_client
-- Default USER : worklenz_backend
-- Change DATABASE_NAME, ROLE, PASSWORD and USER as needed.
REVOKE CREATE ON SCHEMA public FROM PUBLIC;
CREATE ROLE worklenz_client;
GRANT CONNECT ON DATABASE 'DATABASE_NAME' TO worklenz_client;
GRANT INSERT, SELECT, UPDATE, DELETE ON ALL TABLES IN SCHEMA public TO worklenz_client;
GRANT USAGE, SELECT ON ALL SEQUENCES IN SCHEMA public TO worklenz_client;
REVOKE ALL PRIVILEGES ON task_priorities FROM worklenz_client;
GRANT SELECT ON task_priorities TO worklenz_client;
REVOKE ALL PRIVILEGES ON project_access_levels FROM worklenz_client;
GRANT SELECT ON project_access_levels TO worklenz_client;
REVOKE ALL PRIVILEGES ON timezones FROM worklenz_client;
GRANT SELECT ON timezones TO worklenz_client;
REVOKE ALL PRIVILEGES ON worklenz_alerts FROM worklenz_client;
GRANT SELECT ON worklenz_alerts TO worklenz_client;
REVOKE ALL PRIVILEGES ON sys_task_status_categories FROM worklenz_client;
GRANT SELECT ON sys_task_status_categories TO worklenz_client;
REVOKE ALL PRIVILEGES ON sys_project_statuses FROM worklenz_client;
GRANT SELECT ON sys_project_statuses TO worklenz_client;
REVOKE ALL PRIVILEGES ON sys_project_healths FROM worklenz_client;
GRANT SELECT ON sys_project_healths TO worklenz_client;
CREATE USER worklenz_backend WITH PASSWORD 'PASSWORD';
GRANT worklenz_client TO worklenz_backend;

View File

@@ -1 +1,36 @@
All database DDLs, DMLs and migrations relates to the application should be stored here as well.
# Worklenz Database
## Directory Structure
- `sql/` - Contains all SQL files needed for database initialization
- `migrations/` - Contains database migration scripts
- `00-init-db.sh` - Initialization script that executes SQL files in the correct order
## SQL File Execution Order
The database initialization files should be executed in the following order:
1. `sql/0_extensions.sql` - PostgreSQL extensions
2. `sql/1_tables.sql` - Table definitions and constraints
3. `sql/indexes.sql` - All database indexes
4. `sql/4_functions.sql` - Database functions
5. `sql/triggers.sql` - Database triggers
6. `sql/3_views.sql` - Database views
7. `sql/2_dml.sql` - Data Manipulation Language statements (inserts, updates)
8. `sql/5_database_user.sql` - Database user setup
## Docker-based Setup
In the Docker environment, we use a shell script called `00-init-db.sh` to control the SQL file execution order:
1. The shell script creates a `sql/` subdirectory if it doesn't exist
2. It copies all .sql files into this subdirectory
3. It executes the SQL files from the subdirectory in the correct order
This approach prevents the SQL files from being executed twice by Docker's automatic initialization mechanism, which would cause errors for objects that already exist.
## Manual Setup
If you're setting up the database manually, please follow the execution order listed above. Ensure your SQL files are in the `sql/` subdirectory before executing the script.
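For the manual path, a small loop that follows the order above (reusing the `ON_ERROR_STOP` flag from the init script) might look like this; `your_username` is a placeholder:
```bash
# Execute the SQL files in the documented order, stopping on the first error
for f in 0_extensions.sql 1_tables.sql indexes.sql 4_functions.sql triggers.sql 3_views.sql 2_dml.sql 5_database_user.sql; do
  psql -v ON_ERROR_STOP=1 -U your_username -d worklenz_db -f "database/sql/$f"
done
```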

View File

@@ -0,0 +1,2 @@
Migrations should be executed in the sequence specified by the filename. They should be removed once the migrations
have been released to all databases.
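Because the order comes from the filenames, a lexicographically sorted glob is enough to apply them; a sketch, assuming the migrations are plain `.sql` files under `database/migrations/` and `your_username` is a placeholder:
```bash
# Shell globs expand in lexicographic order, matching the intended sequence
for f in database/migrations/*.sql; do
  psql -v ON_ERROR_STOP=1 -U your_username -d worklenz_db -f "$f"
done
```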

View File

@@ -0,0 +1,3 @@
-- Extensions
CREATE EXTENSION IF NOT EXISTS "uuid-ossp";
CREATE EXTENSION IF NOT EXISTS "unaccent";

File diff suppressed because it is too large

View File

@@ -0,0 +1,144 @@
CREATE OR REPLACE FUNCTION sys_insert_task_priorities() RETURNS VOID AS
$$
BEGIN
INSERT INTO task_priorities (name, value, color_code, color_code_dark) VALUES ('Medium', 1, '#fbc84c', '#FFC227');
INSERT INTO task_priorities (name, value, color_code, color_code_dark) VALUES ('Low', 0, '#75c997', '#46D980');
INSERT INTO task_priorities (name, value, color_code, color_code_dark) VALUES ('High', 2, '#f37070', '#FF4141');
END;
$$ LANGUAGE plpgsql;
CREATE OR REPLACE FUNCTION sys_insert_project_access_levels() RETURNS VOID AS
$$
BEGIN
INSERT INTO project_access_levels (name, key)
VALUES ('Admin', 'ADMIN');
INSERT INTO project_access_levels (name, key)
VALUES ('Member', 'MEMBER');
INSERT INTO project_access_levels (name, key)
VALUES ('Project Manager', 'PROJECT_MANAGER');
END;
$$ LANGUAGE plpgsql;
CREATE OR REPLACE FUNCTION sys_insert_task_status_categories() RETURNS VOID AS
$$
BEGIN
INSERT INTO public.sys_task_status_categories (name, color_code, index, is_todo, is_doing, is_done, description,
color_code_dark)
VALUES ('To do', '#a9a9a9', 1, TRUE, FALSE, FALSE,
'For tasks that have not been started.', '#989898');
INSERT INTO public.sys_task_status_categories (name, color_code, index, is_todo, is_doing, is_done, description,
color_code_dark)
VALUES ('Doing', '#70a6f3', 2, FALSE, TRUE, FALSE,
'For tasks that have been started.', '#4190FF');
INSERT INTO public.sys_task_status_categories (name, color_code, index, is_todo, is_doing, is_done, description,
color_code_dark)
VALUES ('Done', '#75c997', 3, FALSE, FALSE, TRUE,
'For tasks that have been completed.', '#46D980');
END;
$$ LANGUAGE plpgsql;
CREATE OR REPLACE FUNCTION sys_insert_project_statuses() RETURNS VOID AS
$$
BEGIN
INSERT INTO public.sys_project_statuses (name, color_code, icon, sort_order, is_default)
VALUES ('Cancelled', '#f37070', 'close-circle', 0, FALSE),
('Blocked', '#cbc8a1', 'stop', 1, FALSE),
('On Hold', '#cbc8a1', 'stop', 2, FALSE),
('Proposed', '#cbc8a1', 'clock-circle', 3, TRUE),
('In Planning', '#cbc8a1', 'clock-circle', 4, FALSE),
('In Progress', '#80ca79', 'clock-circle', 5, FALSE),
('Completed', '#80ca79', 'check-circle', 6, FALSE),
('Continuous', '#80ca79', 'clock-circle', 7, FALSE);
END;
$$ LANGUAGE plpgsql;
CREATE OR REPLACE FUNCTION sys_insert_project_healths() RETURNS VOID AS
$$
BEGIN
INSERT INTO sys_project_healths (name, color_code, sort_order, is_default)
VALUES ('Not Set', '#a9a9a9', 0, TRUE);
INSERT INTO sys_project_healths (name, color_code, sort_order, is_default)
VALUES ('Needs Attention', '#fbc84c', 1, FALSE);
INSERT INTO sys_project_healths (name, color_code, sort_order, is_default)
VALUES ('At Risk', '#f37070', 2, FALSE);
INSERT INTO sys_project_healths (name, color_code, sort_order, is_default)
VALUES ('Good', '#75c997', 3, FALSE);
END;
$$ LANGUAGE plpgsql;
CREATE OR REPLACE FUNCTION sys_insert_license_types() RETURNS VOID AS
$$
BEGIN
INSERT INTO public.sys_license_types (name, key)
VALUES ('Custom Subscription', 'CUSTOM'),
('Free Trial', 'TRIAL'),
('Paddle Subscription', 'PADDLE'),
('Credit Subscription', 'CREDIT'),
('Free Plan', 'FREE'),
('Life Time Deal', 'LIFE_TIME_DEAL'),
('Self Hosted', 'SELF_HOSTED');
END;
$$ LANGUAGE plpgsql;
CREATE OR REPLACE FUNCTION sys_insert_project_templates() RETURNS VOID AS
$$
DECLARE
medium_priority_id UUID;
todo_category_id UUID;
doing_category_id UUID;
done_category_id UUID;
BEGIN
-- Fetch IDs to avoid repeated subqueries
SELECT id INTO medium_priority_id FROM task_priorities WHERE name = 'Medium' LIMIT 1;
SELECT id INTO todo_category_id FROM public.sys_task_status_categories WHERE name = 'To do' LIMIT 1;
SELECT id INTO doing_category_id FROM public.sys_task_status_categories WHERE name = 'Doing' LIMIT 1;
SELECT id INTO done_category_id FROM public.sys_task_status_categories WHERE name = 'Done' LIMIT 1;
INSERT INTO public.pt_project_templates (id, name, key, description, phase_label, image_url, color_code)
VALUES ('39db59be-1dba-448b-87f4-3b955ea699d2', 'Bug Tracking', 'BT', 'The "Bug Tracking" project template is a versatile solution meticulously designed to streamline and enhance the bug management processes of businesses across diverse industries. This template is especially valuable for organizations that rely on software development, IT services, or digital product management. It provides a structured and efficient approach to tracking, resolving, and improving software issues.', 'Phase', 'https://worklenz.s3.amazonaws.com/project-template-gifs/bug-tracking.gif', '#3b7ad4');
INSERT INTO public.pt_statuses (id, name, template_id, category_id)
VALUES ('c3242606-5a24-48aa-8320-cc90a05c2589', 'To Do', '39db59be-1dba-448b-87f4-3b955ea699d2', todo_category_id),
('05ed8d04-92b1-4c44-bd06-abee29641f31', 'Doing', '39db59be-1dba-448b-87f4-3b955ea699d2', doing_category_id),
('66e80bc8-6b29-4e72-a484-1593eb1fb44b', 'Done', '39db59be-1dba-448b-87f4-3b955ea699d2', done_category_id);
INSERT INTO public.pt_tasks (id, name, description, total_minutes, sort_order, priority_id, template_id, parent_task_id, status_id)
VALUES ('a75993d9-3fb3-4d0b-a5d4-cab53b60462c', 'Testing and Verification', NULL, 0, 0, medium_priority_id, '39db59be-1dba-448b-87f4-3b955ea699d2', NULL, 'c3242606-5a24-48aa-8320-cc90a05c2589'),
('3fdb6801-bc09-4d71-8273-987cd3d1e0f6', 'Bug Prioritization', NULL, 0, 6, medium_priority_id, '39db59be-1dba-448b-87f4-3b955ea699d2', NULL, '05ed8d04-92b1-4c44-bd06-abee29641f31'),
('ca64f247-a186-4edb-affd-738f1c2a4d60', 'Bug reporting', NULL, 0, 2, medium_priority_id, '39db59be-1dba-448b-87f4-3b955ea699d2', NULL, 'c3242606-5a24-48aa-8320-cc90a05c2589'),
('1e493de8-38cf-4e6e-8f0b-5e1f6f3b07f4', 'Bug Assignment', NULL, 0, 5, medium_priority_id, '39db59be-1dba-448b-87f4-3b955ea699d2', NULL, '05ed8d04-92b1-4c44-bd06-abee29641f31'),
('67b2ab3c-53e5-428c-bbad-8bdc19dc88de', 'Bug Closure', NULL, 0, 4, medium_priority_id, '39db59be-1dba-448b-87f4-3b955ea699d2', NULL, '66e80bc8-6b29-4e72-a484-1593eb1fb44b'),
('9311ff84-1052-4989-8192-0fea20204fbe', 'Documentation', NULL, 0, 3, medium_priority_id, '39db59be-1dba-448b-87f4-3b955ea699d2', NULL, '66e80bc8-6b29-4e72-a484-1593eb1fb44b'),
('7d0697cd-868c-4b41-9f4f-f9a8c1131b24', 'Reporting', NULL, 0, 1, medium_priority_id, '39db59be-1dba-448b-87f4-3b955ea699d2', NULL, '66e80bc8-6b29-4e72-a484-1593eb1fb44b');
INSERT INTO public.pt_task_phases (task_id, phase_id)
VALUES ('a75993d9-3fb3-4d0b-a5d4-cab53b60462c', '4b4a8fe0-4f35-464a-a337-848e5b432ab5'),
('3fdb6801-bc09-4d71-8273-987cd3d1e0f6', '557b58ca-3335-4b41-9880-fdd0f990deb9'),
('ca64f247-a186-4edb-affd-738f1c2a4d60', '62097027-979f-4b00-afb8-f70fba533f80'),
('1e493de8-38cf-4e6e-8f0b-5e1f6f3b07f4', 'e3128891-4873-4795-ad8a-880474280045'),
('67b2ab3c-53e5-428c-bbad-8bdc19dc88de', '77204bf3-fcb3-4e39-a843-14458b2f659d'),
('9311ff84-1052-4989-8192-0fea20204fbe', '62097027-979f-4b00-afb8-f70fba533f80'),
('7d0697cd-868c-4b41-9f4f-f9a8c1131b24', '62097027-979f-4b00-afb8-f70fba533f80');
END;
$$ LANGUAGE plpgsql;
SELECT sys_insert_task_priorities();
SELECT sys_insert_project_access_levels();
SELECT sys_insert_task_status_categories();
SELECT sys_insert_project_statuses();
SELECT sys_insert_project_healths();
SELECT sys_insert_license_types();
-- SELECT sys_insert_project_templates();
DROP FUNCTION sys_insert_task_priorities();
DROP FUNCTION sys_insert_project_access_levels();
DROP FUNCTION sys_insert_task_status_categories();
DROP FUNCTION sys_insert_project_statuses();
DROP FUNCTION sys_insert_project_healths();
DROP FUNCTION sys_insert_license_types();
-- DROP FUNCTION sys_insert_project_templates();
INSERT INTO timezones (name, abbrev, utc_offset)
SELECT name, abbrev, utc_offset
FROM pg_timezone_names;

View File

@@ -0,0 +1,34 @@
CREATE OR REPLACE VIEW task_labels_view(name, task_id, label_id) AS
SELECT (SELECT team_labels.name
FROM team_labels
WHERE team_labels.id = task_labels.label_id) AS name,
task_labels.task_id,
task_labels.label_id
FROM task_labels;
CREATE OR REPLACE VIEW tasks_with_status_view(task_id, parent_task_id, is_todo, is_doing, is_done) AS
SELECT tasks.id AS task_id,
tasks.parent_task_id,
stsc.is_todo,
stsc.is_doing,
stsc.is_done
FROM tasks
JOIN task_statuses ts ON tasks.status_id = ts.id
JOIN sys_task_status_categories stsc ON ts.category_id = stsc.id
WHERE tasks.archived IS FALSE;
CREATE OR REPLACE VIEW team_member_info_view(avatar_url, email, name, user_id, team_member_id, team_id, active) AS
SELECT u.avatar_url,
COALESCE(u.email, (SELECT email_invitations.email
FROM email_invitations
WHERE email_invitations.team_member_id = team_members.id)) AS email,
COALESCE(u.name, (SELECT email_invitations.name
FROM email_invitations
WHERE email_invitations.team_member_id = team_members.id)) AS name,
u.id AS user_id,
team_members.id AS team_member_id,
team_members.team_id,
team_members.active
FROM team_members
LEFT JOIN users u ON team_members.user_id = u.id;

File diff suppressed because it is too large

View File

@@ -0,0 +1,31 @@
REVOKE CREATE ON SCHEMA public FROM PUBLIC;
CREATE ROLE worklenz_client;
GRANT CONNECT ON DATABASE worklenz_db TO worklenz_client;
GRANT INSERT, SELECT, UPDATE, DELETE ON ALL TABLES IN SCHEMA public TO worklenz_client;
GRANT USAGE, SELECT ON ALL SEQUENCES IN SCHEMA public TO worklenz_client;
REVOKE ALL PRIVILEGES ON task_priorities FROM worklenz_client;
GRANT SELECT ON task_priorities TO worklenz_client;
REVOKE ALL PRIVILEGES ON project_access_levels FROM worklenz_client;
GRANT SELECT ON project_access_levels TO worklenz_client;
REVOKE ALL PRIVILEGES ON timezones FROM worklenz_client;
GRANT SELECT ON timezones TO worklenz_client;
REVOKE ALL PRIVILEGES ON worklenz_alerts FROM worklenz_client;
GRANT SELECT ON worklenz_alerts TO worklenz_client;
REVOKE ALL PRIVILEGES ON sys_task_status_categories FROM worklenz_client;
GRANT SELECT ON sys_task_status_categories TO worklenz_client;
REVOKE ALL PRIVILEGES ON sys_project_statuses FROM worklenz_client;
GRANT SELECT ON sys_project_statuses TO worklenz_client;
REVOKE ALL PRIVILEGES ON sys_project_healths FROM worklenz_client;
GRANT SELECT ON sys_project_healths TO worklenz_client;
CREATE USER worklenz_backend WITH PASSWORD 'n?&bb24=aWmnw+G@';
GRANT worklenz_client TO worklenz_backend;

View File

@@ -0,0 +1,134 @@
-- Indexes
CREATE UNIQUE INDEX IF NOT EXISTS permissions_name_uindex
ON permissions (name);
CREATE UNIQUE INDEX IF NOT EXISTS bounced_emails_email_uindex
ON bounced_emails (email);
CREATE INDEX IF NOT EXISTS clients_id_team_id_index
ON clients (id, team_id);
CREATE UNIQUE INDEX IF NOT EXISTS clients_name_team_id_uindex
ON clients (name, team_id);
CREATE UNIQUE INDEX IF NOT EXISTS cpt_phases_name_project_uindex
ON cpt_phases (name, template_id);
CREATE UNIQUE INDEX IF NOT EXISTS cpt_task_phase_cpt_task_phase_uindex
ON cpt_task_phases (task_id, phase_id);
CREATE UNIQUE INDEX IF NOT EXISTS cpt_task_phase_task_id_uindex
ON cpt_task_phases (task_id);
CREATE UNIQUE INDEX IF NOT EXISTS cpt_task_statuses_template_id_name_uindex
ON cpt_task_statuses (template_id, name);
CREATE UNIQUE INDEX IF NOT EXISTS custom_project_templates_name_team_id_uindex
ON custom_project_templates (name, team_id);
-- Create index on expire field
CREATE INDEX IF NOT EXISTS idx_pg_sessions_expire
ON pg_sessions (expire);
CREATE UNIQUE INDEX IF NOT EXISTS job_titles_name_team_id_uindex
ON job_titles (name, team_id);
CREATE INDEX IF NOT EXISTS job_titles_team_id_index
ON job_titles (team_id);
CREATE UNIQUE INDEX IF NOT EXISTS licensing_admin_users_name_uindex
ON licensing_admin_users (name);
CREATE UNIQUE INDEX IF NOT EXISTS licensing_admin_users_phone_no_uindex
ON licensing_admin_users (phone_no);
CREATE UNIQUE INDEX IF NOT EXISTS licensing_admin_users_username_uindex
ON licensing_admin_users (username);
CREATE UNIQUE INDEX IF NOT EXISTS licensing_coupon_codes_coupon_code_uindex
ON licensing_coupon_codes (coupon_code);
CREATE INDEX IF NOT EXISTS licensing_coupon_codes_redeemed_by_index
ON licensing_coupon_codes (redeemed_by);
CREATE UNIQUE INDEX IF NOT EXISTS licensing_pricing_plans_uindex
ON licensing_pricing_plans (id);
CREATE UNIQUE INDEX IF NOT EXISTS licensing_user_plans_uindex
ON licensing_user_subscriptions (id);
CREATE INDEX IF NOT EXISTS licensing_user_subscriptions_user_id_index
ON licensing_user_subscriptions (user_id);
CREATE INDEX IF NOT EXISTS notification_settings_team_user_id_index
ON notification_settings (team_id, user_id);
CREATE UNIQUE INDEX IF NOT EXISTS personal_todo_list_index_uindex
ON personal_todo_list (user_id, index);
CREATE UNIQUE INDEX IF NOT EXISTS project_access_levels_key_uindex
ON project_access_levels (key);
CREATE UNIQUE INDEX IF NOT EXISTS project_access_levels_name_uindex
ON project_access_levels (name);
CREATE UNIQUE INDEX IF NOT EXISTS project_categories_name_team_id_uindex
ON project_categories (name, team_id);
CREATE INDEX IF NOT EXISTS project_comments_project_id_index
ON project_comments (project_id);
CREATE UNIQUE INDEX IF NOT EXISTS project_folders_team_id_key_uindex
ON project_folders (team_id, key);
CREATE UNIQUE INDEX IF NOT EXISTS project_folders_team_id_name_uindex
ON project_folders (team_id, name);
CREATE INDEX IF NOT EXISTS project_members_project_id_index
ON project_members (project_id);
CREATE INDEX IF NOT EXISTS project_members_project_id_member_id_index
ON project_members (project_id, team_member_id);
CREATE INDEX IF NOT EXISTS project_members_team_member_id_index
ON project_members (team_member_id);
CREATE UNIQUE INDEX IF NOT EXISTS project_members_team_member_project_uindex
ON project_members (team_member_id, project_id);
CREATE UNIQUE INDEX IF NOT EXISTS project_phases_name_project_uindex
ON project_phases (name, project_id);
CREATE UNIQUE INDEX IF NOT EXISTS project_subscribers_user_task_team_member_uindex
ON project_subscribers (user_id, project_id, team_member_id);
CREATE INDEX IF NOT EXISTS project_task_list_cols_index
ON project_task_list_cols (project_id, index);
CREATE UNIQUE INDEX IF NOT EXISTS project_task_list_cols_key_project_uindex
ON project_task_list_cols (key, project_id);
CREATE INDEX IF NOT EXISTS projects_folder_id_index
ON projects (folder_id);
CREATE INDEX IF NOT EXISTS projects_id_team_id_index
ON projects (id, team_id);
CREATE UNIQUE INDEX IF NOT EXISTS projects_key_team_id_uindex
ON projects (key, team_id);
CREATE INDEX IF NOT EXISTS projects_name_index
ON projects (name);
CREATE UNIQUE INDEX IF NOT EXISTS projects_name_team_id_uindex
ON projects (name, team_id);
CREATE INDEX IF NOT EXISTS projects_team_id_folder_id_index
ON projects (team_id, folder_id);
CREATE INDEX IF NOT EXISTS projects_team_id_index
ON projects (team_id);
CREATE INDEX IF NOT EXISTS projects_team_id_name_index
ON projects (team_id, name);

View File

@@ -0,0 +1,47 @@
ALTER TABLE teams
ADD CONSTRAINT teams_name_check CHECK (CHAR_LENGTH(name) <= 55);
ALTER TABLE clients
ADD CONSTRAINT clients_name_check CHECK (CHAR_LENGTH(name) <= 60);
ALTER TABLE job_titles
ADD CONSTRAINT job_titles_name_check CHECK (CHAR_LENGTH(name) <= 55);
ALTER TABLE users
ADD CONSTRAINT users_name_check CHECK (CHAR_LENGTH(name) <= 55);
ALTER TABLE users
ADD CONSTRAINT users_email_check CHECK (CHAR_LENGTH(email) <= 255);
ALTER TABLE projects
ADD CONSTRAINT projects_name_check CHECK (CHAR_LENGTH(name) <= 100);
ALTER TABLE projects
ADD CONSTRAINT projects_notes_check CHECK (CHAR_LENGTH(notes) <= 500);
ALTER TABLE task_statuses
ADD CONSTRAINT task_statuses_name_check CHECK (CHAR_LENGTH(name) <= 50);
ALTER TABLE tasks
ADD CONSTRAINT tasks_name_check CHECK (CHAR_LENGTH(name) <= 500);
ALTER TABLE tasks
ADD CONSTRAINT tasks_description_check CHECK (CHAR_LENGTH(description) <= 500000);
ALTER TABLE team_labels
ADD CONSTRAINT team_labels_name_check CHECK (CHAR_LENGTH(name) <= 40);
ALTER TABLE personal_todo_list
ADD CONSTRAINT personal_todo_list_name_check CHECK (CHAR_LENGTH(name) <= 100);
ALTER TABLE personal_todo_list
ADD CONSTRAINT personal_todo_list_description_check CHECK (CHAR_LENGTH(description) <= 200);
ALTER TABLE task_work_log
ADD CONSTRAINT task_work_log_description_check CHECK (CHAR_LENGTH(description) <= 500);
ALTER TABLE task_comment_contents
ADD CONSTRAINT task_comment_contents_name_check CHECK (CHAR_LENGTH(text_content) <= 500);
ALTER TABLE task_attachments
ADD CONSTRAINT task_attachments_name_check CHECK (CHAR_LENGTH(name) <= 110);

View File

@@ -0,0 +1,75 @@
TRUNCATE TABLE public.pg_sessions RESTART IDENTITY CASCADE;
TRUNCATE TABLE public.email_invitations RESTART IDENTITY CASCADE;
TRUNCATE TABLE public.task_labels RESTART IDENTITY CASCADE;
TRUNCATE TABLE public.team_labels RESTART IDENTITY CASCADE;
TRUNCATE TABLE public.tasks_assignees RESTART IDENTITY CASCADE;
TRUNCATE TABLE public.project_members RESTART IDENTITY CASCADE;
TRUNCATE TABLE public.project_access_levels RESTART IDENTITY CASCADE;
TRUNCATE TABLE public.role_permissions RESTART IDENTITY CASCADE;
TRUNCATE TABLE public.permissions RESTART IDENTITY CASCADE;
TRUNCATE TABLE public.project_logs RESTART IDENTITY CASCADE;
TRUNCATE TABLE public.personal_todo_list RESTART IDENTITY CASCADE;
TRUNCATE TABLE public.user_notifications RESTART IDENTITY CASCADE;
TRUNCATE TABLE public.task_work_log RESTART IDENTITY CASCADE;
TRUNCATE TABLE public.task_comment_contents RESTART IDENTITY CASCADE;
TRUNCATE TABLE public.task_comments RESTART IDENTITY CASCADE;
TRUNCATE TABLE public.team_members RESTART IDENTITY CASCADE;
TRUNCATE TABLE public.job_titles RESTART IDENTITY CASCADE;
TRUNCATE TABLE public.roles RESTART IDENTITY CASCADE;
TRUNCATE TABLE public.task_attachments RESTART IDENTITY CASCADE;
TRUNCATE TABLE public.worklenz_alerts RESTART IDENTITY CASCADE;
TRUNCATE TABLE public.favorite_projects RESTART IDENTITY CASCADE;
TRUNCATE TABLE public.archived_projects RESTART IDENTITY CASCADE;
TRUNCATE TABLE public.shared_projects RESTART IDENTITY CASCADE;
TRUNCATE TABLE public.task_templates_tasks RESTART IDENTITY CASCADE;
TRUNCATE TABLE public.task_templates RESTART IDENTITY CASCADE;
TRUNCATE TABLE public.notification_settings RESTART IDENTITY CASCADE;
TRUNCATE TABLE public.task_updates RESTART IDENTITY CASCADE;
TRUNCATE TABLE public.task_timers RESTART IDENTITY CASCADE;
TRUNCATE TABLE public.tasks RESTART IDENTITY CASCADE;
TRUNCATE TABLE public.task_priorities RESTART IDENTITY CASCADE;
TRUNCATE TABLE public.task_statuses RESTART IDENTITY CASCADE;
TRUNCATE TABLE public.sys_task_status_categories RESTART IDENTITY CASCADE;
TRUNCATE TABLE public.project_task_list_cols RESTART IDENTITY CASCADE;
TRUNCATE TABLE public.projects RESTART IDENTITY CASCADE;
TRUNCATE TABLE public.clients RESTART IDENTITY CASCADE;
TRUNCATE TABLE public.teams, public.users RESTART IDENTITY CASCADE;
TRUNCATE TABLE public.timezones RESTART IDENTITY CASCADE;
TRUNCATE TABLE public.sys_project_statuses RESTART IDENTITY CASCADE;

File diff suppressed because it is too large

[Image file added: 265 KiB]

File diff suppressed because it is too large

[Image file added: 737 KiB]

File diff suppressed because it is too large

View File

@@ -15,6 +15,7 @@
"tcs": "grunt build:tsc",
"build": "grunt build",
"watch": "grunt watch",
"dev": "grunt dev",
"es": "esbuild `find src -type f -name '*.ts'` --platform=node --minify=true --watch=true --target=esnext --format=cjs --tsconfig=tsconfig.prod.json --outdir=dist",
"copy": "grunt copy",
"sonar": "sonar-scanner -Dproject.settings=sonar-project-dev.properties",
@@ -32,15 +33,18 @@
"@aws-sdk/client-ses": "^3.378.0",
"@aws-sdk/s3-request-presigner": "^3.378.0",
"@aws-sdk/util-format-url": "^3.357.0",
"@azure/storage-blob": "^12.27.0",
"axios": "^1.6.0",
"bcrypt": "^5.1.0",
"bluebird": "^3.7.2",
"chartjs-to-image": "^1.2.1",
"compression": "^1.7.4",
"connect-flash": "^0.1.1",
"connect-pg-simple": "^7.0.0",
"cookie-parser": "~1.4.4",
"cors": "^2.8.5",
"cron": "^2.4.0",
"crypto-js": "^4.1.1",
"csurf": "^1.11.0",
"debug": "^4.3.4",
"dotenv": "^16.3.1",
@@ -60,13 +64,13 @@
"moment-timezone": "^0.5.43",
"morgan": "^1.10.0",
"nanoid": "^3.3.6",
"passport": "^0.5.3",
"passport": "^0.7.0",
"passport-google-oauth2": "^0.2.0",
"passport-google-oauth20": "^2.0.0",
"passport-local": "^1.0.0",
"path": "^0.12.7",
"pg": "^8.11.1",
"pg-native": "^3.0.1",
"pg": "^8.14.1",
"pg-native": "^3.3.0",
"pug": "^3.0.2",
"redis": "^4.6.7",
"sanitize-html": "^2.11.0",
@@ -87,13 +91,12 @@
"@types/connect-flash": "^0.0.37",
"@types/cookie-parser": "^1.4.3",
"@types/cron": "^2.0.1",
"@types/crypto-js": "^4.2.2",
"@types/csurf": "^1.11.2",
"@types/express": "^4.17.17",
"@types/express-brute": "^1.0.2",
"@types/express-brute-redis": "^0.0.4",
"@types/express-rate-limit": "^6.0.0",
"@types/express-session": "^1.17.7",
"@types/express-validator": "^3.0.0",
"@types/fs-extra": "^9.0.13",
"@types/hpp": "^0.2.2",
"@types/http-errors": "^1.8.2",
@@ -103,15 +106,13 @@
"@types/mime-types": "^2.1.1",
"@types/morgan": "^1.9.4",
"@types/node": "^18.17.1",
"@types/passport": "^1.0.12",
"@types/passport-google-oauth20": "^2.0.11",
"@types/passport-local": "^1.0.35",
"@types/pg": "^8.10.2",
"@types/passport": "^1.0.17",
"@types/passport-google-oauth20": "^2.0.16",
"@types/passport-local": "^1.0.38",
"@types/pg": "^8.11.11",
"@types/pug": "^2.0.6",
"@types/redis": "^4.0.11",
"@types/sanitize-html": "^2.9.0",
"@types/sharp": "^0.31.1",
"@types/socket.io": "^3.0.2",
"@types/swagger-jsdoc": "^6.0.1",
"@types/toobusy-js": "^0.5.2",
"@types/uglify-js": "^3.17.1",
@@ -126,7 +127,6 @@
"eslint-plugin-security": "^1.7.1",
"fs-extra": "^10.1.0",
"grunt": "^1.6.1",
"grunt-cli": "^1.4.3",
"grunt-contrib-clean": "^2.0.1",
"grunt-contrib-compress": "^2.0.0",
"grunt-contrib-copy": "^1.0.0",
@@ -134,6 +134,7 @@
"grunt-contrib-watch": "^1.1.0",
"grunt-shell": "^4.0.0",
"grunt-sync": "^0.8.2",
"highcharts": "^11.1.0",
"jest": "^28.1.3",
"jest-sonar-reporter": "^2.0.0",
"ncp": "^2.0.0",

View File

@@ -1 +1 @@
901
954

View File

@@ -1,5 +1,5 @@
import createError from "http-errors";
import express, {NextFunction, Request, Response} from "express";
import express, { NextFunction, Request, Response } from "express";
import path from "path";
import cookieParser from "cookie-parser";
import logger from "morgan";
@@ -9,101 +9,181 @@ import passport from "passport";
import csurf from "csurf";
import rateLimit from "express-rate-limit";
import cors from "cors";
import uglify from "uglify-js";
import flash from "connect-flash";
import hpp from "hpp";
import passportConfig from "./passport";
import indexRouter from "./routes/index";
import apiRouter from "./routes/apis";
import authRouter from "./routes/auth";
import emailTemplatesRouter from "./routes/email-templates";
import public_router from "./routes/public";
import {isInternalServer, isProduction} from "./shared/utils";
import { isInternalServer, isProduction } from "./shared/utils";
import sessionMiddleware from "./middlewares/session-middleware";
import {send_to_slack} from "./shared/slack";
import {CSP_POLICIES} from "./shared/csp";
import safeControllerFunction from "./shared/safe-controller-function";
import AwsSesController from "./controllers/aws-ses-controller";
import { CSP_POLICIES } from "./shared/csp";
const app = express();
app.use(compression());
app.use(helmet({crossOriginResourcePolicy: false, crossOriginEmbedderPolicy: false}));
// Trust first proxy if behind reverse proxy
app.set("trust proxy", 1);
// Basic middleware setup
app.use(compression());
app.use(logger("dev"));
app.use(express.json({ limit: "50mb" }));
app.use(express.urlencoded({ extended: false, limit: "50mb" }));
app.use(cookieParser(process.env.COOKIE_SECRET));
app.use(hpp());
// Helmet security headers
app.use(helmet({
crossOriginEmbedderPolicy: false,
crossOriginResourcePolicy: false,
}));
// Custom security headers
app.use((_req: Request, res: Response, next: NextFunction) => {
res.setHeader("X-XSS-Protection", "1; mode=block");
res.removeHeader("server");
res.setHeader("Content-Security-Policy", CSP_POLICIES);
next();
});
// CORS configuration
const allowedOrigins = [
isProduction()
? [
`http://localhost:5000`,
`http://127.0.0.1:5000`,
process.env.SERVER_CORS || "", // Add hostname from env
process.env.FRONTEND_URL || "" // Support FRONTEND_URL as well
].filter(Boolean) // Remove empty strings
: [
"http://localhost:3000",
"http://localhost:5173",
"http://127.0.0.1:5173",
"http://127.0.0.1:3000",
"http://127.0.0.1:5000",
`http://localhost:5000`,
process.env.SERVER_CORS || "", // Add hostname from env
process.env.FRONTEND_URL || "" // Support FRONTEND_URL as well
].filter(Boolean) // Remove empty strings
].flat();
app.use(cors({
origin: (origin, callback) => {
if (!isProduction() || !origin || allowedOrigins.includes(origin)) {
callback(null, true);
} else {
console.log("Blocked origin:", origin, process.env.NODE_ENV);
callback(new Error("Not allowed by CORS"));
}
},
credentials: true,
methods: ["GET", "POST", "PUT", "DELETE", "OPTIONS", "PATCH"],
allowedHeaders: [
"Origin",
"X-Requested-With",
"Content-Type",
"Accept",
"Authorization",
"X-CSRF-Token"
],
exposedHeaders: ["Set-Cookie", "X-CSRF-Token"]
}));
// Handle preflight requests
app.options("*", cors());
// Session setup - must be before passport and CSRF
app.use(sessionMiddleware);
// Passport initialization
passportConfig(passport);
app.use(passport.initialize());
app.use(passport.session());
// Flash messages
app.use(flash());
// Auth check middleware
function isLoggedIn(req: Request, _res: Response, next: NextFunction) {
return req.user ? next() : next(createError(401));
}
passportConfig(passport);
// eslint-disable-next-line @typescript-eslint/no-var-requires
require("pug").filters = {
/**
* ```pug
* script
* :minify_js
* // JavaScript Syntax
* ```
* @param {String} text
* @param {Object} options
*/
minify_js(text: string) {
if (!text) return;
// return text;
return uglify.minify({"script.js": text}).code;
}
};
// view engine setup
app.set("views", path.join(__dirname, "views"));
app.set("view engine", "pug");
app.use(logger("dev"));
app.use(express.json({limit: "50mb"}));
app.use(express.urlencoded({extended: false, limit: "50mb"}));
// Prevent HTTP Parameter Pollution
app.use(hpp());
app.use(cookieParser(process.env.COOKIE_SECRET));
app.use(cors({
origin: [`https://${process.env.HOSTNAME}`],
methods: "GET,PUT,POST,DELETE",
preflightContinue: false,
credentials: true
}));
app.post("/-/csp", (req: express.Request, res: express.Response) => {
send_to_slack({
type: "⚠️ CSP Report",
body: req.body
});
res.sendStatus(200);
// CSRF configuration
const csrfProtection = csurf({
cookie: {
key: "XSRF-TOKEN",
path: "/",
httpOnly: false,
secure: isProduction(), // Only secure in production
sameSite: isProduction() ? "none" : "lax", // Different settings for dev vs prod
domain: isProduction() ? ".worklenz.com" : undefined // Only set domain in production
},
ignoreMethods: ["HEAD", "OPTIONS"]
});
// Apply CSRF selectively (exclude webhooks and public routes)
app.use((req, res, next) => {
if (
req.path.startsWith("/webhook/") ||
req.path.startsWith("/secure/") ||
req.path.startsWith("/api/") ||
req.path.startsWith("/public/")
) {
next();
} else {
csrfProtection(req, res, next);
}
});
// Set CSRF token cookie
app.use((req: Request, res: Response, next: NextFunction) => {
if (req.csrfToken) {
const token = req.csrfToken();
res.cookie("XSRF-TOKEN", token, {
httpOnly: false,
secure: isProduction(),
sameSite: isProduction() ? "none" : "lax",
domain: isProduction() ? ".worklenz.com" : undefined,
path: "/"
});
}
next();
});
// CSRF token refresh endpoint
app.get("/csrf-token", (req: Request, res: Response) => {
if (req.csrfToken) {
const token = req.csrfToken();
res.cookie("XSRF-TOKEN", token, {
httpOnly: false,
secure: isProduction(),
sameSite: isProduction() ? "none" : "lax",
domain: isProduction() ? ".worklenz.com" : undefined,
path: "/"
});
res.status(200).json({ done: true, message: "CSRF token refreshed" });
} else {
res.status(500).json({ done: false, message: "Failed to generate CSRF token" });
}
});
// Webhook endpoints (no CSRF required)
app.post("/webhook/emails/bounce", safeControllerFunction(AwsSesController.handleBounceResponse));
app.post("/webhook/emails/complaints", safeControllerFunction(AwsSesController.handleComplaintResponse));
app.post("/webhook/emails/reply", safeControllerFunction(AwsSesController.handleReplies));
app.use(flash());
app.use(csurf({cookie: true}));
app.use((req: Request, res: Response, next: NextFunction) => {
res.setHeader("Content-Security-Policy", CSP_POLICIES);
const token = req.csrfToken();
res.cookie("XSRF-TOKEN", token);
res.locals.csrf = token;
next();
});
// Static file serving
if (isProduction()) {
app.use(express.static(path.join(__dirname, "build"), {
maxAge: "1y",
etag: false,
}));
// Handle compressed files
app.get("*.js", (req, res, next) => {
if (req.header("Accept-Encoding")?.includes("br")) {
req.url = `${req.url}.br`;
@@ -116,61 +196,62 @@ if (isProduction()) {
}
next();
});
} else {
app.use(express.static(path.join(__dirname, "public")));
}
app.use(express.static(path.join(__dirname, "public")));
app.set("trust proxy", 1);
app.use(sessionMiddleware);
app.use(passport.initialize());
app.use(passport.session());
// API rate limiting
const apiLimiter = rateLimit({
windowMs: 15 * 60 * 1000, // 15 minutes
max: 1500, // Limit each IP to 2000 requests per `window` (here, per 15 minutes)
standardHeaders: false, // Return rate limit info in the `RateLimit-*` headers
legacyHeaders: false, // Disable the `X-RateLimit-*` headers
});
app.use((req, res, next) => {
const {send} = res;
res.send = function (obj) {
if (req.headers.accept?.includes("application/json"))
return send.call(this, `)]}',\n${JSON.stringify(obj)}`);
return send.call(this, obj);
};
next();
windowMs: 15 * 60 * 1000,
max: 1500,
standardHeaders: false,
legacyHeaders: false,
});
// Routes
app.use("/api/v1", apiLimiter, isLoggedIn, apiRouter);
app.use("/secure", authRouter);
app.use("/public", public_router);
app.use("/api/v1", isLoggedIn, apiRouter);
app.use("/", indexRouter);
if (isInternalServer())
if (isInternalServer()) {
app.use("/email-templates", emailTemplatesRouter);
}
// catch 404 and forward to error handler
app.use((req: Request, res: Response) => {
res.locals.error_title = "404 Not Found.";
res.locals.error_message = `The requested URL ${req.url} was not found on this server.`;
res.locals.error_image = "/assets/images/404.webp";
res.status(400);
res.render("error");
// CSRF error handler
app.use((err: any, req: Request, res: Response, next: NextFunction) => {
if (err.code === "EBADCSRFTOKEN") {
return res.status(403).json({
done: false,
message: "Invalid CSRF token",
body: null
});
}
next(err);
});
// error handler
app.use((err: { message: string; status: number; }, _req: Request, res: Response) => {
// set locals, only providing error in development
res.locals.error_title = "500 Internal Server Error.";
res.locals.error_message = "Oops, something went wrong.";
res.locals.error_message2 = "Try to refresh this page or feel free to contact us if the problem persists.";
res.locals.error_image = "/assets/images/500.png";
// render the error page
res.status(err.status || 500);
res.render("error");
// React app handling - serve index.html for all non-API routes
app.get("*", (req: Request, res: Response, next: NextFunction) => {
if (req.path.startsWith("/api/")) return next();
res.sendFile(path.join(__dirname, isProduction() ? "build" : "public", "index.html"));
});
export default app;
// Global error handler
app.use((err: any, _req: Request, res: Response, _next: NextFunction) => {
const status = err.status || 500;
if (res.headersSent) {
return;
}
res.status(status);
// Send structured error response
res.json({
done: false,
message: isProduction() ? "Internal Server Error" : err.message,
body: null,
...(process.env.NODE_ENV === "development" ? { stack: err.stack } : {})
});
});
export default app;
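Given the CSRF wiring above (a token issued at `/csrf-token` through the `XSRF-TOKEN` cookie and replayed via the `X-CSRF-Token` header), a client round-trip could look like the sketch below; `/example-route` stands in for any route not on the exempt list:
```bash
# Fetch a CSRF token; the server sets the XSRF-TOKEN cookie in the jar
curl -s -c cookies.txt http://localhost:3000/csrf-token
# Pull the token out of the Netscape-format cookie jar (name = field 6, value = field 7)
TOKEN=$(awk '$6 == "XSRF-TOKEN" {print $7}' cookies.txt)
# Replay it on a state-changing request
curl -b cookies.txt -H "X-CSRF-Token: $TOKEN" -X POST http://localhost:3000/example-route
```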

View File

@@ -95,8 +95,7 @@ function onListening() {
? `pipe ${addr}`
: `port ${addr.port}`;
startCronJobs();
// TODO - uncomment initRedis()
process.env.ENABLE_EMAIL_CRONJOBS === "true" && startCronJobs();
// void initRedis();
FileConstants.init();
void DbTaskStatusChangeListener.connect();

View File

@@ -5,8 +5,19 @@ import db from "../config/db";
import {ServerResponse} from "../models/server-response";
import WorklenzControllerBase from "./worklenz-controller-base";
import HandleExceptions from "../decorators/handle-exceptions";
import {getColor} from "../shared/utils";
import {calculateMonthDays, getColor, megabytesToBytes} from "../shared/utils";
import moment from "moment";
import {calculateStorage} from "../shared/s3";
import {checkTeamSubscriptionStatus, getActiveTeamMemberCount, getCurrentProjectsCount, getFreePlanSettings, getOwnerIdByTeam, getTeamMemberCount, getUsedStorage} from "../shared/paddle-utils";
import {
addModifier,
cancelSubscription,
changePlan,
generatePayLinkRequest,
pauseOrResumeSubscription,
updateUsers
} from "../shared/paddle-requests";
import {statusExclude} from "../shared/constants";
import {NotificationsService} from "../services/notifications/notifications.service";
import {SocketEvents} from "../socket.io/events";
import {IO} from "../shared/io";
@@ -262,6 +273,384 @@ export default class AdminCenterController extends WorklenzControllerBase {
return res.status(200).send(new ServerResponse(true, [], "Team updated successfully"));
}
@HandleExceptions()
public static async getBillingInfo(req: IWorkLenzRequest, res: IWorkLenzResponse): Promise<IWorkLenzResponse> {
const q = `SELECT get_billing_info($1) AS billing_info;`;
const result = await db.query(q, [req.user?.owner_id]);
const [data] = result.rows;
const validTillDate = moment(data.billing_info.trial_expire_date);
const daysDifference = validTillDate.diff(moment(), "days");
const dateString = calculateMonthDays(moment().format("YYYY-MM-DD"), data.billing_info.trial_expire_date);
data.billing_info.expire_date_string = dateString;
if (daysDifference < 0) {
data.billing_info.expire_date_string = `Your trial plan expired ${dateString} ago`;
} else if (daysDifference === 0) {
data.billing_info.expire_date_string = `Your trial plan expires today`;
} else {
data.billing_info.expire_date_string = `Your trial plan expires in ${dateString}.`;
}
if (data.billing_info.billing_type === "year") data.billing_info.unit_price_per_month = data.billing_info.unit_price / 12;
const teamMemberData = await getTeamMemberCount(req.user?.owner_id ?? "");
const subscriptionData = await checkTeamSubscriptionStatus(req.user?.team_id ?? "");
data.billing_info.total_used = teamMemberData.user_count;
data.billing_info.total_seats = subscriptionData.quantity;
return res.status(200).send(new ServerResponse(true, data.billing_info));
}
@HandleExceptions()
public static async getBillingTransactions(req: IWorkLenzRequest, res: IWorkLenzResponse): Promise<IWorkLenzResponse> {
const q = `SELECT subscription_payment_id,
event_time::date,
(next_bill_date::DATE - INTERVAL '1 day')::DATE AS next_bill_date,
currency,
receipt_url,
payment_method,
status,
payment_status
FROM licensing_payment_details
WHERE user_id = $1
ORDER BY created_at DESC;`;
const result = await db.query(q, [req.user?.owner_id]);
return res.status(200).send(new ServerResponse(true, result.rows));
}
@HandleExceptions()
public static async getBillingCharges(req: IWorkLenzRequest, res: IWorkLenzResponse): Promise<IWorkLenzResponse> {
const q = `SELECT (SELECT name FROM licensing_pricing_plans lpp WHERE id = lus.plan_id),
unit_price::numeric,
currency,
status,
quantity,
unit_price::numeric * quantity AS amount,
(SELECT event_time
FROM licensing_payment_details lpd
WHERE lpd.user_id = lus.user_id
ORDER BY created_at DESC
LIMIT 1)::DATE AS start_date,
(next_bill_date::DATE - INTERVAL '1 day')::DATE AS end_date
FROM licensing_user_subscriptions lus
WHERE user_id = $1;`;
const result = await db.query(q, [req.user?.owner_id]);
const countQ = `SELECT subscription_id
FROM licensing_user_subscription_modifiers
WHERE subscription_id = (SELECT subscription_id
FROM licensing_user_subscriptions
WHERE user_id = $1
AND status != 'deleted'
LIMIT 1)::INT;`;
const countResult = await db.query(countQ, [req.user?.owner_id]);
return res.status(200).send(new ServerResponse(true, {plan_charges: result.rows, modifiers: countResult.rows}));
}
@HandleExceptions()
public static async getBillingModifiers(req: IWorkLenzRequest, res: IWorkLenzResponse): Promise<IWorkLenzResponse> {
const q = `SELECT created_at
FROM licensing_user_subscription_modifiers
WHERE subscription_id = (SELECT subscription_id
FROM licensing_user_subscriptions
WHERE user_id = $1
AND status != 'deleted'
LIMIT 1)::INT;`;
const result = await db.query(q, [req.user?.owner_id]);
return res.status(200).send(new ServerResponse(true, result.rows));
}
@HandleExceptions()
public static async getBillingConfiguration(req: IWorkLenzRequest, res: IWorkLenzResponse): Promise<IWorkLenzResponse> {
const q = `SELECT name,
email,
organization_name AS company_name,
contact_number AS phone,
address_line_1,
address_line_2,
city,
state,
postal_code,
country
FROM organizations
LEFT JOIN users u ON organizations.user_id = u.id
WHERE u.id = $1;`;
const result = await db.query(q, [req.user?.owner_id]);
const [data] = result.rows;
return res.status(200).send(new ServerResponse(true, data));
}
@HandleExceptions()
public static async updateBillingConfiguration(req: IWorkLenzRequest, res: IWorkLenzResponse): Promise<IWorkLenzResponse> {
const {company_name, phone, address_line_1, address_line_2, city, state, postal_code, country} = req.body;
const q = `UPDATE organizations
SET organization_name = $1,
contact_number = $2,
address_line_1 = $3,
address_line_2 = $4,
city = $5,
state = $6,
postal_code = $7,
country = $8
WHERE user_id = $9;`;
const result = await db.query(q, [company_name, phone, address_line_1, address_line_2, city, state, postal_code, country, req.user?.owner_id]);
const [data] = result.rows;
return res.status(200).send(new ServerResponse(true, data, "Configuration Updated"));
}
@HandleExceptions()
public static async upgradePlan(req: IWorkLenzRequest, res: IWorkLenzResponse): Promise<IWorkLenzResponse> {
const {plan} = req.query;
const obj = await getTeamMemberCount(req.user?.owner_id ?? "");
const axiosResponse = await generatePayLinkRequest(obj, plan as string, req.user?.owner_id, req.user?.id);
return res.status(200).send(new ServerResponse(true, axiosResponse.body));
}
@HandleExceptions()
public static async getPlans(req: IWorkLenzRequest, res: IWorkLenzResponse): Promise<IWorkLenzResponse> {
const q = `SELECT
ls.default_monthly_plan AS monthly_plan_id,
lp_monthly.name AS monthly_plan_name,
ls.default_annual_plan AS annual_plan_id,
lp_monthly.recurring_price AS monthly_price,
lp_annual.name AS annual_plan_name,
lp_annual.recurring_price AS annual_price,
ls.team_member_limit,
ls.projects_limit,
ls.free_tier_storage
FROM
licensing_settings ls
JOIN
licensing_pricing_plans lp_monthly ON ls.default_monthly_plan = lp_monthly.id
JOIN
licensing_pricing_plans lp_annual ON ls.default_annual_plan = lp_annual.id;`;
const result = await db.query(q, []);
const [data] = result.rows;
const obj = await getTeamMemberCount(req.user?.owner_id ?? "");
data.team_member_limit = data.team_member_limit === 0 ? "Unlimited" : data.team_member_limit;
data.projects_limit = data.projects_limit === 0 ? "Unlimited" : data.projects_limit;
data.free_tier_storage = `${data.free_tier_storage}MB`;
data.current_user_count = obj.user_count;
data.annual_price = (data.annual_price / 12).toFixed(2);
return res.status(200).send(new ServerResponse(true, data));
}
@HandleExceptions()
public static async purchaseStorage(req: IWorkLenzRequest, res: IWorkLenzResponse): Promise<IWorkLenzResponse> {
const q = `SELECT subscription_id
FROM licensing_user_subscriptions lus
WHERE user_id = $1;`;
const result = await db.query(q, [req.user?.owner_id]);
const [data] = result.rows;
await addModifier(data.subscription_id);
return res.status(200).send(new ServerResponse(true, data));
}
@HandleExceptions()
public static async changePlan(req: IWorkLenzRequest, res: IWorkLenzResponse): Promise<IWorkLenzResponse> {
const {plan} = req.query;
const q = `SELECT subscription_id
FROM licensing_user_subscriptions lus
WHERE user_id = $1;`;
const result = await db.query(q, [req.user?.owner_id]);
const [data] = result.rows;
const axiosResponse = await changePlan(plan as string, data.subscription_id);
return res.status(200).send(new ServerResponse(true, axiosResponse.body));
}
@HandleExceptions()
public static async cancelPlan(req: IWorkLenzRequest, res: IWorkLenzResponse): Promise<IWorkLenzResponse> {
if (!req.user?.owner_id) return res.status(200).send(new ServerResponse(false, "Invalid Request."));
const q = `SELECT subscription_id
FROM licensing_user_subscriptions lus
WHERE user_id = $1;`;
const result = await db.query(q, [req.user?.owner_id]);
const [data] = result.rows;
const axiosResponse = await cancelSubscription(data.subscription_id, req.user?.owner_id);
return res.status(200).send(new ServerResponse(true, axiosResponse.body));
}
@HandleExceptions()
public static async pauseSubscription(req: IWorkLenzRequest, res: IWorkLenzResponse): Promise<IWorkLenzResponse> {
if (!req.user?.owner_id) return res.status(200).send(new ServerResponse(false, "Invalid Request."));
const q = `SELECT subscription_id
FROM licensing_user_subscriptions lus
WHERE user_id = $1;`;
const result = await db.query(q, [req.user?.owner_id]);
const [data] = result.rows;
const axiosResponse = await pauseOrResumeSubscription(data.subscription_id, req.user?.owner_id, true);
return res.status(200).send(new ServerResponse(true, axiosResponse.body));
}
@HandleExceptions()
public static async resumeSubscription(req: IWorkLenzRequest, res: IWorkLenzResponse): Promise<IWorkLenzResponse> {
if (!req.user?.owner_id) return res.status(200).send(new ServerResponse(false, "Invalid Request."));
const q = `SELECT subscription_id
FROM licensing_user_subscriptions lus
WHERE user_id = $1;`;
const result = await db.query(q, [req.user?.owner_id]);
const [data] = result.rows;
const axiosResponse = await pauseOrResumeSubscription(data.subscription_id, req.user?.owner_id, false);
return res.status(200).send(new ServerResponse(true, axiosResponse.body));
}
@HandleExceptions()
public static async getBillingStorageInfo(req: IWorkLenzRequest, res: IWorkLenzResponse): Promise<IWorkLenzResponse> {
const q = `SELECT trial_in_progress,
trial_expire_date,
ud.storage,
(SELECT name AS plan_name FROM licensing_pricing_plans WHERE id = lus.plan_id),
(SELECT default_trial_storage FROM licensing_settings),
(SELECT storage_addon_size FROM licensing_settings),
(SELECT storage_addon_price FROM licensing_settings)
FROM organizations ud
LEFT JOIN users u ON ud.user_id = u.id
LEFT JOIN licensing_user_subscriptions lus ON u.id = lus.user_id
WHERE ud.user_id = $1;`;
const result = await db.query(q, [req.user?.owner_id]);
const [data] = result.rows;
return res.status(200).send(new ServerResponse(true, data));
}
@HandleExceptions()
public static async getAccountStorage(req: IWorkLenzRequest, res: IWorkLenzResponse): Promise<IWorkLenzResponse> {
const teamsQ = `SELECT id
FROM teams
WHERE user_id = $1;`;
const teamsResponse = await db.query(teamsQ, [req.user?.owner_id]);
const storageQ = `SELECT storage
FROM organizations
WHERE user_id = $1;`;
const result = await db.query(storageQ, [req.user?.owner_id]);
const [data] = result.rows;
const storage: any = {};
storage.used = 0; // bytes, accumulated per team below
storage.total = data.storage; // treated as gigabytes by the conversions below
for (const team of teamsResponse.rows) {
storage.used += await calculateStorage(team.id);
}
// Convert the GB total to bytes before subtracting the byte count in `used`.
storage.remaining = (storage.total * 1024 * 1024 * 1024) - storage.used;
// Percentage used, rounded up to two decimal places.
storage.used_percent = Math.ceil((storage.used / (storage.total * 1024 * 1024 * 1024)) * 10000) / 100;
return res.status(200).send(new ServerResponse(true, storage));
}
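// Editor's sketch: the percentage math above in isolation (assumes, as the
// controller does, that the stored total is in GB and usage is in bytes).
private static storageUsedPercent(totalGb: number, usedBytes: number): number {
const totalBytes = totalGb * 1024 * 1024 * 1024;
return Math.ceil((usedBytes / totalBytes) * 10000) / 100; // e.g. 4 GB total, 1 GB used -> 25
}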
@HandleExceptions()
public static async getCountries(req: IWorkLenzRequest, res: IWorkLenzResponse): Promise<IWorkLenzResponse> {
const q = `SELECT id, name, code
FROM countries
ORDER BY name;`;
const result = await db.query(q, []);
return res.status(200).send(new ServerResponse(true, result.rows || []));
}
@HandleExceptions()
public static async switchToFreePlan(req: IWorkLenzRequest, res: IWorkLenzResponse): Promise<IWorkLenzResponse> {
const { id: teamId } = req.params;
const limits = await getFreePlanSettings();
const ownerId = await getOwnerIdByTeam(teamId);
if (limits && ownerId) {
if (parseInt(limits.team_member_limit) !== 0) {
const teamMemberCount = await getTeamMemberCount(ownerId);
// getTeamMemberCount returns an object elsewhere in this controller (see
// upgradePlan/getPlans), so compare its user_count rather than the object itself.
if (parseInt(teamMemberCount.user_count) > parseInt(limits.team_member_limit)) {
return res.status(200).send(new ServerResponse(false, [], `Sorry, the free plan cannot have more than ${limits.team_member_limit} members.`));
}
}
const projectsCount = await getCurrentProjectsCount(ownerId);
if (parseInt(projectsCount) > parseInt(limits.projects_limit)) {
return res.status(200).send(new ServerResponse(false, [], `Sorry, the free plan cannot have more than ${limits.projects_limit} projects.`));
}
const usedStorage = await getUsedStorage(ownerId);
if (parseInt(usedStorage) > megabytesToBytes(parseInt(limits.free_tier_storage))) {
return res.status(200).send(new ServerResponse(false, [], `Sorry, the free plan cannot exceed ${limits.free_tier_storage}MB of storage.`));
}
const update_q = `UPDATE organizations
SET license_type_id = (SELECT id FROM sys_license_types WHERE key = 'FREE'),
trial_in_progress = FALSE,
subscription_status = 'free',
storage = (SELECT free_tier_storage FROM licensing_settings)
WHERE user_id = $1;`;
await db.query(update_q, [ownerId]);
return res.status(200).send(new ServerResponse(true, [], "Your plan has been successfully switched to the Free Plan."));
}
return res.status(200).send(new ServerResponse(false, [], "Failed to switch to the Free Plan. Please try again later."));
}
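// Editor's note: `megabytesToBytes` above comes from the shared utilities; its
// assumed shape (hypothetical sketch, not the shared implementation):
//   const megabytesToBytes = (mb: number): number => mb * 1024 * 1024;
// e.g. a 100 MB free tier allows up to 104,857,600 bytes of used storage.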
@HandleExceptions()
public static async redeem(req: IWorkLenzRequest, res: IWorkLenzResponse): Promise<IWorkLenzResponse> {
const { code } = req.body;
const q = `SELECT * FROM licensing_coupon_codes WHERE coupon_code = $1 AND is_redeemed IS FALSE AND is_refunded IS FALSE;`;
const result = await db.query(q, [code]);
const [data] = result.rows;
if (!result.rows.length)
return res.status(200).send(new ServerResponse(false, [], "Redeem code verification failed! Please try again."));
const checkQ = `SELECT sum(team_members_limit) AS team_member_total FROM licensing_coupon_codes WHERE redeemed_by = $1 AND is_redeemed IS TRUE;`;
const checkResult = await db.query(checkQ, [req.user?.owner_id]);
const [total] = checkResult.rows;
if (parseInt(total.team_member_total) > 50)
return res.status(200).send(new ServerResponse(false, [], "Maximum number of codes redeemed!"));
const updateQ = `UPDATE licensing_coupon_codes
SET is_redeemed = TRUE, redeemed_at = CURRENT_TIMESTAMP,
redeemed_by = $1
WHERE id = $2;`;
await db.query(updateQ, [req.user?.owner_id, data.id]);
const updateQ2 = `UPDATE organizations
SET subscription_status = 'life_time_deal',
trial_in_progress = FALSE,
storage = (SELECT sum(storage_limit) FROM licensing_coupon_codes WHERE redeemed_by = $1),
license_type_id = (SELECT id FROM sys_license_types WHERE key = 'LIFE_TIME_DEAL')
WHERE user_id = $1;`;
await db.query(updateQ2, [req.user?.owner_id]);
return res.status(200).send(new ServerResponse(true, [], "Code redeemed successfully!"));
}
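// Editor's sketch: the 50-seat cap above sums only codes already redeemed, so
// the incoming code's seats are not counted. A variant that includes them
// (hypothetical helper; assumes the coupon row carries team_members_limit):
private static exceedsLifetimeCap(previouslyRedeemedSeats: number, codeSeats: number, cap = 50): boolean {
return previouslyRedeemedSeats + codeSeats > cap;
}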
@HandleExceptions()
public static async deleteTeam(req: IWorkLenzRequest, res: IWorkLenzResponse): Promise<IWorkLenzResponse> {
const {id} = req.params;
@@ -284,6 +673,11 @@ export default class AdminCenterController extends WorklenzControllerBase {
if (!id || !teamId) return res.status(200).send(new ServerResponse(false, "Required fields are missing."));
// check subscription status
const subscriptionData = await checkTeamSubscriptionStatus(teamId);
if (statusExclude.includes(subscriptionData.subscription_status)) {
return res.status(200).send(new ServerResponse(false, "Please check your subscription status."));
}
const q = `SELECT remove_team_member($1, $2, $3) AS member;`;
const result = await db.query(q, [id, req.user?.id, teamId]);
@@ -291,6 +685,22 @@ export default class AdminCenterController extends WorklenzControllerBase {
const message = `You have been removed from <b>${req.user?.team_name}</b> by <b>${req.user?.name}</b>`;
// if (subscriptionData.status === "trialing") break;
if (!subscriptionData.is_credit && !subscriptionData.is_custom) {
if (subscriptionData.subscription_status === "active" && subscriptionData.quantity > 0) {
const obj = await getActiveTeamMemberCount(req.user?.owner_id ?? "");
const userActiveInOtherTeams = await this.checkIfUserActiveInOtherTeams(req.user?.owner_id as string, req.query?.email as string);
if (!userActiveInOtherTeams) {
const response = await updateUsers(subscriptionData.subscription_id, obj.user_count);
if (!response.body.subscription_id) return res.status(200).send(new ServerResponse(false, response.message || "Please check your subscription."));
}
}
}
NotificationsService.sendNotification({
receiver_socket_id: data.socket_id,
message,
@@ -305,5 +715,49 @@ export default class AdminCenterController extends WorklenzControllerBase {
return res.status(200).send(new ServerResponse(true, result.rows));
}
@HandleExceptions()
public static async getFreePlanLimits(req: IWorkLenzRequest, res: IWorkLenzResponse): Promise<IWorkLenzResponse> {
const limits = await getFreePlanSettings();
return res.status(200).send(new ServerResponse(true, limits || {}));
}
@HandleExceptions()
public static async getOrganizationProjects(req: IWorkLenzRequest, res: IWorkLenzResponse): Promise<IWorkLenzResponse> {
const { searchQuery, size, offset } = this.toPaginationOptions(req.query, ["p.name"]);
const countQ = `SELECT COUNT(*) AS total
FROM projects p
JOIN teams t ON p.team_id = t.id
JOIN organizations o ON t.organization_id = o.id
WHERE o.user_id = $1;`;
const countResult = await db.query(countQ, [req.user?.owner_id]);
// Query to get the project data
const dataQ = `SELECT p.id,
p.name,
t.name AS team_name,
p.created_at,
pm.member_count
FROM projects p
JOIN teams t ON p.team_id = t.id
JOIN organizations o ON t.organization_id = o.id
LEFT JOIN (
SELECT project_id, COUNT(*) AS member_count
FROM project_members
GROUP BY project_id
) pm ON p.id = pm.project_id
WHERE o.user_id = $1 ${searchQuery}
ORDER BY p.name
OFFSET $2 LIMIT $3;`;
const result = await db.query(dataQ, [req.user?.owner_id, offset, size]);
const response = {
total: countResult.rows[0]?.total ?? 0,
data: result.rows ?? []
};
return res.status(200).send(new ServerResponse(true, response));
}
}
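// Editor's sketch of the pagination values consumed by getOrganizationProjects
// above (hypothetical; the real helper is WorklenzControllerBase.toPaginationOptions,
// which should parameterize the search term rather than interpolate it):
function toPaginationOptionsSketch(query: { index?: string; size?: string; search?: string }) {
const size = Math.max(parseInt(query.size || "10", 10), 1);
const index = Math.max(parseInt(query.index || "1", 10), 1);
const offset = (index - 1) * size;
const searchQuery = query.search ? `AND (p.name ILIKE '%${query.search}%')` : "";
return { size, offset, searchQuery };
}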

View File

@@ -2,7 +2,8 @@ import { IWorkLenzRequest } from "../interfaces/worklenz-request";
import { IWorkLenzResponse } from "../interfaces/worklenz-response";
import db from "../config/db";
import { humanFileSize, log_error, smallId } from "../shared/utils";
import { humanFileSize, smallId } from "../shared/utils";
import { getStorageUrl } from "../shared/constants";
import { ServerResponse } from "../models/server-response";
import {
createPresignedUrlWithClient,
@@ -12,16 +13,10 @@ import {
getRootDir,
uploadBase64,
uploadBuffer
} from "../shared/s3";
} from "../shared/storage";
import WorklenzControllerBase from "./worklenz-controller-base";
import HandleExceptions from "../decorators/handle-exceptions";
const {S3_URL} = process.env;
if (!S3_URL) {
log_error("Invalid S3_URL. Please check .env file.");
}
export default class AttachmentController extends WorklenzControllerBase {
@HandleExceptions()
@@ -42,7 +37,7 @@ export default class AttachmentController extends WorklenzControllerBase {
req.user?.id,
size,
type,
`${S3_URL}/${getRootDir()}`
`${getStorageUrl()}/${getRootDir()}`
]);
const [data] = result.rows;
@@ -86,7 +81,7 @@ export default class AttachmentController extends WorklenzControllerBase {
FROM task_attachments
WHERE task_id = $1;
`;
const result = await db.query(q, [req.params.id, `${S3_URL}/${getRootDir()}`]);
const result = await db.query(q, [req.params.id, `${getStorageUrl()}/${getRootDir()}`]);
for (const item of result.rows)
item.size = humanFileSize(item.size);
@@ -121,7 +116,7 @@ export default class AttachmentController extends WorklenzControllerBase {
LEFT JOIN tasks t ON task_attachments.task_id = t.id
WHERE task_attachments.project_id = $1) rec;
`;
const result = await db.query(q, [req.params.id, `${S3_URL}/${getRootDir()}`, size, offset]);
const result = await db.query(q, [req.params.id, `${getStorageUrl()}/${getRootDir()}`, size, offset]);
const [data] = result.rows;
for (const item of data?.attachments.data || [])
@@ -135,26 +130,29 @@ export default class AttachmentController extends WorklenzControllerBase {
const q = `DELETE
FROM task_attachments
WHERE id = $1
RETURNING CONCAT($2::TEXT, '/', team_id, '/', project_id, '/', id, '.', type) AS key;`;
const result = await db.query(q, [req.params.id, getRootDir()]);
RETURNING team_id, project_id, id, type;`;
const result = await db.query(q, [req.params.id]);
const [data] = result.rows;
if (data?.key)
void deleteObject(data.key);
if (data) {
const key = getKey(data.team_id, data.project_id, data.id, data.type);
void deleteObject(key);
}
return res.status(200).send(new ServerResponse(true, result.rows));
}
@HandleExceptions()
public static async download(req: IWorkLenzRequest, res: IWorkLenzResponse): Promise<IWorkLenzResponse> {
const q = `SELECT CONCAT($2::TEXT, '/', team_id, '/', project_id, '/', id, '.', type) AS key
const q = `SELECT team_id, project_id, id, type
FROM task_attachments
WHERE id = $1;`;
const result = await db.query(q, [req.query.id, getRootDir()]);
const result = await db.query(q, [req.query.id]);
const [data] = result.rows;
if (data?.key) {
const url = await createPresignedUrlWithClient(data.key, req.query.file as string);
if (data) {
const key = getKey(data.team_id, data.project_id, data.id, data.type);
const url = await createPresignedUrlWithClient(key, req.query.file as string);
return res.status(200).send(new ServerResponse(true, url));
}

View File

@@ -12,6 +12,9 @@ import WorklenzControllerBase from "./worklenz-controller-base";
import HandleExceptions from "../decorators/handle-exceptions";
import {PasswordStrengthChecker} from "../shared/password-strength-check";
import FileConstants from "../shared/file-constants";
import axios from "axios";
import {log_error} from "../shared/utils";
import {DEFAULT_ERROR_MESSAGE} from "../shared/constants";
export default class AuthController extends WorklenzControllerBase {
/** This just send ok response to the client when the request came here through the sign-up-validator */
@@ -42,11 +45,20 @@ export default class AuthController extends WorklenzControllerBase {
}
public static logout(req: IWorkLenzRequest, res: IWorkLenzResponse) {
req.logout(() => true);
req.session.destroy(() => {
res.redirect("/");
req.logout((err) => {
if (err) {
console.error("Logout error:", err);
return res.status(500).send(new AuthResponse(null, true, {}, "Logout failed", null));
}
req.session.destroy((destroyErr) => {
if (destroyErr) {
console.error("Session destroy error:", destroyErr);
}
res.status(200).send(new AuthResponse(null, req.isAuthenticated(), {}, null, null));
});
});
}
}
private static async destroyOtherSessions(userId: string, sessionId: string) {
try {
@@ -138,4 +150,25 @@ export default class AuthController extends WorklenzControllerBase {
}
return res.status(200).send(new ServerResponse(false, null, "Invalid Request. Please try again."));
}
@HandleExceptions({logWithError: "body"})
public static async verifyCaptcha(req: IWorkLenzRequest, res: IWorkLenzResponse) {
const {token} = req.body;
const secretKey = process.env.GOOGLE_CAPTCHA_SECRET_KEY;
try {
const response = await axios.post(
`https://www.google.com/recaptcha/api/siteverify?secret=${secretKey}&response=${token}`
);
const {success, score} = response.data;
// reCAPTCHA v3 returns a score in [0, 1]; treat anything above 0.5 as human.
if (success && score > 0.5) {
return res.status(200).send(new ServerResponse(true, null, null));
}
return res.status(400).send(new ServerResponse(false, null, "Captcha verification failed. Please try again.").withTitle("Error"));
} catch (error) {
log_error(error);
return res.status(500).send(new ServerResponse(false, null, DEFAULT_ERROR_MESSAGE));
}
}
}
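// Editor's sketch: an equivalent check that keeps the secret out of the URL by
// sending URL-encoded form params (same siteverify endpoint and 0.5 threshold):
async function verifyRecaptchaSketch(token: string, secretKey: string): Promise<boolean> {
const params = new URLSearchParams({ secret: secretKey, response: token });
const { data } = await axios.post("https://www.google.com/recaptcha/api/siteverify", params);
return Boolean(data.success) && (data.score ?? 0) > 0.5;
}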

View File

@@ -0,0 +1,288 @@
import { IWorkLenzRequest } from "../interfaces/worklenz-request";
import { IWorkLenzResponse } from "../interfaces/worklenz-response";
import db from "../config/db";
import { ServerResponse } from "../models/server-response";
import WorklenzControllerBase from "./worklenz-controller-base";
import HandleExceptions from "../decorators/handle-exceptions";
import { getTeamMemberCount } from "../shared/paddle-utils";
import { generatePayLinkRequest, updateUsers } from "../shared/paddle-requests";
import CryptoJS from "crypto-js";
import moment from "moment";
import axios from "axios";
import crypto from "crypto";
import fs from "fs";
import path from "path";
import { log_error } from "../shared/utils";
import { sendEmail } from "../shared/email";
export default class BillingController extends WorklenzControllerBase {
public static async getInitialCharge(count: number) {
if (!count) throw new Error("No seat count provided.");
// Tiered pricing (amounts in the gateway's currency, LKR): the base rate
// covers the first 15 seats, seats 16-200 add 300 each, and every seat
// beyond 200 adds 200.
const baseRate = 4990;
const firstTier = 15;
const secondTierEnd = 200;
if (count <= firstTier) {
return baseRate;
} else if (count <= secondTierEnd) {
return baseRate + (count - firstTier) * 300;
}
return baseRate + (secondTierEnd - firstTier) * 300 + (count - secondTierEnd) * 200;
}
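// Worked example of the tiers above, for 230 seats:
//   4990 (first 15) + 185 * 300 (seats 16-200) + 30 * 200 (seats 201-230)
//   = 4990 + 55500 + 6000 = 66490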
public static async getBillingMonth() {
// One-month window starting today, e.g. "20250115 - 20250214".
const startDate = moment().format("YYYYMMDD");
const endDate = moment().add(1, "month").subtract(1, "day").format("YYYYMMDD");
return `${startDate} - ${endDate}`;
}
public static async chargeInitialPayment(signature: string, data: any) {
const config = {
method: "post",
maxBodyLength: Infinity,
url: process.env.DP_URL,
headers: {
"Content-Type": "application/json",
"Signature": signature,
"x-api-key": process.env.DP_API_KEY
},
data
};
axios.request(config)
.then((response) => {
console.log(JSON.stringify(response.data));
})
.catch((error) => {
log_error(error);
});
}
public static async saveLocalTransaction(signature: string, data: any) {
try {
// The placeholder list must match the eight columns; the field names read
// off `data` below are assumptions based on the DP payloads used elsewhere here.
const q = `INSERT INTO transactions (status, transaction_id, transaction_status, description, date_time, reference, amount, card_number)
VALUES ($1, $2, $3, $4, $5, $6, $7, $8);`;
await db.query(q, [data?.status, data?.transaction?.id, data?.transaction?.status,
data?.transaction?.description, data?.transaction?.dateTime, data?.reference,
data?.amount ?? 0, data?.card?.number]);
} catch (error) {
log_error(error);
}
}
@HandleExceptions()
public static async upgradeToPaidPlan(req: IWorkLenzRequest, res: IWorkLenzResponse): Promise<IWorkLenzResponse> {
const { plan, seatCount } = req.query;
const teamMemberData = await getTeamMemberCount(req.user?.owner_id ?? "");
teamMemberData.user_count = seatCount as string;
const axiosResponse = await generatePayLinkRequest(teamMemberData, plan as string, req.user?.owner_id, req.user?.id);
return res.status(200).send(new ServerResponse(true, axiosResponse.body));
}
@HandleExceptions()
public static async addMoreSeats(req: IWorkLenzRequest, res: IWorkLenzResponse): Promise<IWorkLenzResponse> {
const { seatCount } = req.body;
const q = `SELECT subscription_id
FROM licensing_user_subscriptions lus
WHERE user_id = $1;`;
const result = await db.query(q, [req.user?.owner_id]);
const [data] = result.rows;
const response = await updateUsers(data.subscription_id, seatCount);
if (!response.body.subscription_id) {
return res.status(200).send(new ServerResponse(false, null, response.message || "Please check your subscription."));
}
return res.status(200).send(new ServerResponse(true, null, "Your purchase has been successfully completed!").withTitle("Done"));
}
@HandleExceptions()
public static async getDirectPayObject(req: IWorkLenzRequest, res: IWorkLenzResponse): Promise<IWorkLenzResponse> {
const { seatCount } = req.query;
if (!seatCount) return res.status(200).send(new ServerResponse(false, null));
const email = req.user?.email;
const name = req.user?.name;
const amount = await this.getInitialCharge(parseInt(seatCount as string));
const uniqueTimestamp = moment().format("YYYYMMDDHHmmss");
const billingMonth = await this.getBillingMonth();
const { DP_MERCHANT_ID, DP_SECRET_KEY, DP_STAGE } = process.env;
const payload = {
merchant_id: DP_MERCHANT_ID,
amount: 10, // NOTE: fixed amount for the initial gateway request; the tiered total from getInitialCharge is charged later via chargeInitialPayment
type: "RECURRING",
order_id: `WORKLENZ_${email}_${uniqueTimestamp}`,
currency: "LKR",
return_url: null,
response_url: null,
first_name: name,
last_name: null,
phone: null,
email,
description: `${name} (${email})`,
page_type: "IN_APP",
logo: "https://app.worklenz.com/assets/icons/icon-96x96.png",
start_date: moment().format("YYYY-MM-DD"),
do_initial_payment: 1,
interval: 1,
};
const encodePayload = CryptoJS.enc.Base64.stringify(CryptoJS.enc.Utf8.parse(JSON.stringify(payload)));
const signature = CryptoJS.HmacSHA256(encodePayload, DP_SECRET_KEY as string);
return res.status(200).send(new ServerResponse(true, { signature: signature.toString(CryptoJS.enc.Hex), dataString: encodePayload, stage: DP_STAGE }));
}
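// Editor's sketch: the receiving side can validate the pay object by recomputing
// the HMAC over the encoded payload (assumes the same shared secret on both ends):
private static verifyDirectPaySignature(dataString: string, signatureHex: string, secret: string): boolean {
return CryptoJS.HmacSHA256(dataString, secret).toString(CryptoJS.enc.Hex) === signatureHex;
}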
@HandleExceptions()
public static async saveTransactionData(req: IWorkLenzRequest, res: IWorkLenzResponse): Promise<IWorkLenzResponse> {
const { status, card, transaction, seatCount } = req.body;
const { DP_MERCHANT_ID, DP_STAGE } = process.env;
const email = req.user?.email;
const amount = await this.getInitialCharge(parseInt(seatCount as string));
const uniqueTimestamp = moment().format("YYYYMMDDHHmmss");
const billingMonth = await this.getBillingMonth();
const values = [
status,
card?.id,
card?.number,
card?.brand,
card?.type,
card?.issuer,
card?.expiry?.year,
card?.expiry?.month,
card?.walletId,
transaction?.id,
transaction?.status,
transaction?.amount || 0,
transaction?.currency || null,
transaction?.channel || null,
transaction?.dateTime || null,
transaction?.message || null,
transaction?.description || null,
req.user?.id,
req.user?.owner_id,
];
const q = `INSERT INTO licensing_lkr_payments (
status, card_id, card_number, card_brand, card_type, card_issuer,
card_expiry_year, card_expiry_month, wallet_id,
transaction_id, transaction_status, transaction_amount,
transaction_currency, transaction_channel, transaction_datetime,
transaction_message, transaction_description, user_id, owner_id
)
VALUES (
$1, $2, $3, $4, $5, $6, $7, $8, $9, $10, $11, $12, $13, $14, $15, $16, $17, $18, $19
);`;
await db.query(q, values);
if (transaction?.status === "SUCCESS") {
const payload = {
"merchantId": DP_MERCHANT_ID,
"reference": `WORKLENZ_${email}_${uniqueTimestamp}`,
"type": "CARD_PAY",
"cardId": card.id,
"refCode": req.user?.id,
amount,
"currency": "LKR"
};
const dataString = Object.values(payload).join("");
const pemFile = DP_STAGE === "PROD" ? "src/keys/PRIVATE_KEY_PROD.pem" : "src/keys/PRIVATE_KEY_DEV.pem";
const privateKey = fs.readFileSync(path.resolve(pemFile), "utf8");
const sign = crypto.createSign("SHA256");
sign.update(dataString);
sign.end();
const signature = sign.sign(privateKey);
const base64Signature = signature.toString("base64"); // Buffer -> base64 directly
this.chargeInitialPayment(base64Signature, payload);
}
return res.status(200).send(new ServerResponse(true, null, "Your purchase has been successfully completed!").withTitle("Done"));
}
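// Editor's sketch: counterpart verification for the RSA-SHA256 signature above,
// assuming the matching public-key PEM is available to the verifier:
private static verifyDpSignature(dataString: string, base64Signature: string, publicKeyPem: string): boolean {
const verifier = crypto.createVerify("SHA256");
verifier.update(dataString);
verifier.end();
return verifier.verify(publicKeyPem, Buffer.from(base64Signature, "base64"));
}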
@HandleExceptions()
public static async getCardList(req: IWorkLenzRequest, res: IWorkLenzResponse): Promise<IWorkLenzResponse> {
const payload = {
"merchantId": "RT02300",
"reference": "1234",
"type": "LIST_CARD"
};
const { DP_STAGE } = process.env;
// Concatenation of the payload values above (merchantId + reference + type).
const dataString = "RT023001234LIST_CARD";
const pemFile = DP_STAGE === "PROD" ? "src/keys/PRIVATE_KEY_PROD.pem" : "src/keys/PRIVATE_KEY_DEV.pem";
const privateKey = fs.readFileSync(path.resolve(pemFile), "utf8");
const sign = crypto.createSign("SHA256");
sign.update(dataString);
sign.end();
const base64Signature = sign.sign(privateKey).toString("base64");
// const signature = CryptoJS.HmacSHA256(dataString, DP_SECRET_KEY as string).toString(CryptoJS.enc.Hex);
return res.status(200).send(new ServerResponse(true, { signature: base64Signature, dataString }));
}
@HandleExceptions()
public static async contactUs(req: IWorkLenzRequest, res: IWorkLenzResponse): Promise<IWorkLenzResponse> {
const { contactNo } = req.query;
if (!contactNo) {
return res.status(200).send(new ServerResponse(false, null, "Contact number is required!"));
}
const html = `<!DOCTYPE html>
<html lang="en">
<head>
<meta charset="UTF-8">
<meta name="viewport" content="width=device-width, initial-scale=1.0">
<title>Worklenz Local Billing - Contact Information</title>
</head>
<body>
<div>
<h1 style="text-align: center; margin-bottom: 20px;">Worklenz Local Billing - Contact Information</h1>
<p><strong>Name:</strong> ${req.user?.name}</p>
<p><strong>Contact No:</strong> ${contactNo as string}</p>
<p><strong>Email:</strong> ${req.user?.email}</p>
</div>
</body>
</html>`;
const to = [process.env.CONTACT_US_EMAIL || "chamika@ceydigital.com"];
sendEmail({
to,
subject: "Worklenz - Local billing contact.",
html
});
return res.status(200).send(new ServerResponse(true, null, "Your contact information has been sent successfully."));
}
}

View File

@@ -12,7 +12,7 @@ export default class ClientsController extends WorklenzControllerBase {
@HandleExceptions()
public static async create(req: IWorkLenzRequest, res: IWorkLenzResponse): Promise<IWorkLenzResponse> {
const q = `INSERT INTO clients (name, team_id) VALUES ($1, $2);`;
const q = `INSERT INTO clients (name, team_id) VALUES ($1, $2) RETURNING id, name;`;
const result = await db.query(q, [req.body.name, req.user?.team_id || null]);
const [data] = result.rows;
return res.status(200).send(new ServerResponse(true, data));

View File

@@ -0,0 +1,531 @@
import { IWorkLenzRequest } from "../interfaces/worklenz-request";
import { IWorkLenzResponse } from "../interfaces/worklenz-response";
import db from "../config/db";
import { ServerResponse } from "../models/server-response";
import WorklenzControllerBase from "./worklenz-controller-base";
import HandleExceptions from "../decorators/handle-exceptions";
export default class CustomcolumnsController extends WorklenzControllerBase {
@HandleExceptions()
public static async create(
req: IWorkLenzRequest,
res: IWorkLenzResponse
): Promise<IWorkLenzResponse> {
const {
project_id,
name,
key,
field_type,
width = 150,
is_visible = true,
configuration,
} = req.body;
// Start a transaction since we're inserting into multiple tables
const client = await db.pool.connect();
try {
await client.query("BEGIN");
// 1. Insert the main custom column
const columnQuery = `
INSERT INTO cc_custom_columns (
project_id, name, key, field_type, width, is_visible, is_custom_column
) VALUES ($1, $2, $3, $4, $5, $6, true)
RETURNING id;
`;
const columnResult = await client.query(columnQuery, [
project_id,
name,
key,
field_type,
width,
is_visible,
]);
const columnId = columnResult.rows[0].id;
// 2. Insert the column configuration
const configQuery = `
INSERT INTO cc_column_configurations (
column_id, field_title, field_type, number_type,
decimals, label, label_position, preview_value,
expression, first_numeric_column_key, second_numeric_column_key
) VALUES ($1, $2, $3, $4, $5, $6, $7, $8, $9, $10, $11)
RETURNING id;
`;
await client.query(configQuery, [
columnId,
configuration.field_title,
configuration.field_type,
configuration.number_type || null,
configuration.decimals || null,
configuration.label || null,
configuration.label_position || null,
configuration.preview_value || null,
configuration.expression || null,
configuration.first_numeric_column_key || null,
configuration.second_numeric_column_key || null,
]);
// 3. Insert selection options if present
if (
configuration.selections_list &&
configuration.selections_list.length > 0
) {
const selectionQuery = `
INSERT INTO cc_selection_options (
column_id, selection_id, selection_name, selection_color, selection_order
) VALUES ($1, $2, $3, $4, $5);
`;
for (const [
index,
selection,
] of configuration.selections_list.entries()) {
await client.query(selectionQuery, [
columnId,
selection.selection_id,
selection.selection_name,
selection.selection_color,
index,
]);
}
}
// 4. Insert label options if present
if (configuration.labels_list && configuration.labels_list.length > 0) {
const labelQuery = `
INSERT INTO cc_label_options (
column_id, label_id, label_name, label_color, label_order
) VALUES ($1, $2, $3, $4, $5);
`;
for (const [index, label] of configuration.labels_list.entries()) {
await client.query(labelQuery, [
columnId,
label.label_id,
label.label_name,
label.label_color,
index,
]);
}
}
await client.query("COMMIT");
// Fetch the complete column data
const getColumnQuery = `
SELECT
cc.*,
cf.field_title,
cf.number_type,
cf.decimals,
cf.label,
cf.label_position,
cf.preview_value,
cf.expression,
cf.first_numeric_column_key,
cf.second_numeric_column_key,
(
SELECT json_agg(
json_build_object(
'selection_id', so.selection_id,
'selection_name', so.selection_name,
'selection_color', so.selection_color
)
)
FROM cc_selection_options so
WHERE so.column_id = cc.id
) as selections_list,
(
SELECT json_agg(
json_build_object(
'label_id', lo.label_id,
'label_name', lo.label_name,
'label_color', lo.label_color
)
)
FROM cc_label_options lo
WHERE lo.column_id = cc.id
) as labels_list
FROM cc_custom_columns cc
LEFT JOIN cc_column_configurations cf ON cf.column_id = cc.id
WHERE cc.id = $1;
`;
const result = await client.query(getColumnQuery, [columnId]);
const [data] = result.rows;
return res.status(200).send(new ServerResponse(true, data));
} catch (error) {
await client.query("ROLLBACK");
throw error;
} finally {
client.release();
}
}
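// Editor's sketch: the BEGIN/COMMIT/ROLLBACK/release pattern above (and in
// update() below) could be factored into a wrapper; hypothetical helper,
// assuming db.pool is a node-postgres Pool:
private static async withTransaction<T>(fn: (client: any) => Promise<T>): Promise<T> {
const client = await db.pool.connect();
try {
await client.query("BEGIN");
const result = await fn(client);
await client.query("COMMIT");
return result;
} catch (error) {
await client.query("ROLLBACK");
throw error;
} finally {
client.release();
}
}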
@HandleExceptions()
public static async get(
req: IWorkLenzRequest,
res: IWorkLenzResponse
): Promise<IWorkLenzResponse> {
const { project_id } = req.query;
const q = `
SELECT
cc.*,
cf.field_title,
cf.number_type,
cf.decimals,
cf.label,
cf.label_position,
cf.preview_value,
cf.expression,
cf.first_numeric_column_key,
cf.second_numeric_column_key,
(
SELECT json_agg(
json_build_object(
'selection_id', so.selection_id,
'selection_name', so.selection_name,
'selection_color', so.selection_color
)
)
FROM cc_selection_options so
WHERE so.column_id = cc.id
) as selections_list,
(
SELECT json_agg(
json_build_object(
'label_id', lo.label_id,
'label_name', lo.label_name,
'label_color', lo.label_color
)
)
FROM cc_label_options lo
WHERE lo.column_id = cc.id
) as labels_list
FROM cc_custom_columns cc
LEFT JOIN cc_column_configurations cf ON cf.column_id = cc.id
WHERE cc.project_id = $1
ORDER BY cc.created_at DESC;
`;
const result = await db.query(q, [project_id]);
return res.status(200).send(new ServerResponse(true, result.rows));
}
@HandleExceptions()
public static async getById(
req: IWorkLenzRequest,
res: IWorkLenzResponse
): Promise<IWorkLenzResponse> {
const { id } = req.params;
const q = `
SELECT
cc.*,
cf.field_title,
cf.number_type,
cf.decimals,
cf.label,
cf.label_position,
cf.preview_value,
cf.expression,
cf.first_numeric_column_key,
cf.second_numeric_column_key,
(
SELECT json_agg(
json_build_object(
'selection_id', so.selection_id,
'selection_name', so.selection_name,
'selection_color', so.selection_color
)
)
FROM cc_selection_options so
WHERE so.column_id = cc.id
) as selections_list,
(
SELECT json_agg(
json_build_object(
'label_id', lo.label_id,
'label_name', lo.label_name,
'label_color', lo.label_color
)
)
FROM cc_label_options lo
WHERE lo.column_id = cc.id
) as labels_list
FROM cc_custom_columns cc
LEFT JOIN cc_column_configurations cf ON cf.column_id = cc.id
WHERE cc.id = $1;
`;
const result = await db.query(q, [id]);
const [data] = result.rows;
return res.status(200).send(new ServerResponse(true, data));
}
@HandleExceptions()
public static async update(
req: IWorkLenzRequest,
res: IWorkLenzResponse
): Promise<IWorkLenzResponse> {
const { id } = req.params;
const { name, field_type, width, is_visible, configuration } = req.body;
const client = await db.pool.connect();
try {
await client.query("BEGIN");
// 1. Update the main custom column
const columnQuery = `
UPDATE cc_custom_columns
SET name = $1, field_type = $2, width = $3, is_visible = $4, updated_at = CURRENT_TIMESTAMP
WHERE id = $5
RETURNING id;
`;
await client.query(columnQuery, [
name,
field_type,
width,
is_visible,
id,
]);
// 2. Update the configuration
const configQuery = `
UPDATE cc_column_configurations
SET
field_title = $1,
field_type = $2,
number_type = $3,
decimals = $4,
label = $5,
label_position = $6,
preview_value = $7,
expression = $8,
first_numeric_column_key = $9,
second_numeric_column_key = $10,
updated_at = CURRENT_TIMESTAMP
WHERE column_id = $11;
`;
await client.query(configQuery, [
configuration.field_title,
configuration.field_type,
configuration.number_type || null,
configuration.decimals || null,
configuration.label || null,
configuration.label_position || null,
configuration.preview_value || null,
configuration.expression || null,
configuration.first_numeric_column_key || null,
configuration.second_numeric_column_key || null,
id,
]);
// 3. Update selections if present
if (configuration.selections_list) {
// Delete existing selections
await client.query(
"DELETE FROM cc_selection_options WHERE column_id = $1",
[id]
);
// Insert new selections
if (configuration.selections_list.length > 0) {
const selectionQuery = `
INSERT INTO cc_selection_options (
column_id, selection_id, selection_name, selection_color, selection_order
) VALUES ($1, $2, $3, $4, $5);
`;
for (const [
index,
selection,
] of configuration.selections_list.entries()) {
await client.query(selectionQuery, [
id,
selection.selection_id,
selection.selection_name,
selection.selection_color,
index,
]);
}
}
}
// 4. Update labels if present
if (configuration.labels_list) {
// Delete existing labels
await client.query("DELETE FROM cc_label_options WHERE column_id = $1", [
id,
]);
// Insert new labels
if (configuration.labels_list.length > 0) {
const labelQuery = `
INSERT INTO cc_label_options (
column_id, label_id, label_name, label_color, label_order
) VALUES ($1, $2, $3, $4, $5);
`;
for (const [index, label] of configuration.labels_list.entries()) {
await client.query(labelQuery, [
id,
label.label_id,
label.label_name,
label.label_color,
index,
]);
}
}
}
await client.query("COMMIT");
// Fetch the updated column data
const getColumnQuery = `
SELECT
cc.*,
cf.field_title,
cf.number_type,
cf.decimals,
cf.label,
cf.label_position,
cf.preview_value,
cf.expression,
cf.first_numeric_column_key,
cf.second_numeric_column_key,
(
SELECT json_agg(
json_build_object(
'selection_id', so.selection_id,
'selection_name', so.selection_name,
'selection_color', so.selection_color
)
)
FROM cc_selection_options so
WHERE so.column_id = cc.id
) as selections_list,
(
SELECT json_agg(
json_build_object(
'label_id', lo.label_id,
'label_name', lo.label_name,
'label_color', lo.label_color
)
)
FROM cc_label_options lo
WHERE lo.column_id = cc.id
) as labels_list
FROM cc_custom_columns cc
LEFT JOIN cc_column_configurations cf ON cf.column_id = cc.id
WHERE cc.id = $1;
`;
const result = await client.query(getColumnQuery, [id]);
const [data] = result.rows;
return res.status(200).send(new ServerResponse(true, data));
} catch (error) {
await client.query("ROLLBACK");
throw error;
} finally {
client.release();
}
}
@HandleExceptions()
public static async deleteById(
req: IWorkLenzRequest,
res: IWorkLenzResponse
): Promise<IWorkLenzResponse> {
const { id } = req.params;
const q = `
DELETE FROM cc_custom_columns
WHERE id = $1
RETURNING id;
`;
const result = await db.query(q, [id]);
return res.status(200).send(new ServerResponse(true, result.rows));
}
@HandleExceptions()
public static async getProjectColumns(req: IWorkLenzRequest, res: IWorkLenzResponse): Promise<IWorkLenzResponse> {
const { project_id } = req.params;
const q = `
WITH column_data AS (
SELECT
cc.id,
cc.key,
cc.name,
cc.field_type,
cc.width,
cc.is_visible,
cf.field_title,
cf.number_type,
cf.decimals,
cf.label,
cf.label_position,
cf.preview_value,
cf.expression,
cf.first_numeric_column_key,
cf.second_numeric_column_key,
(
SELECT json_agg(
json_build_object(
'selection_id', so.selection_id,
'selection_name', so.selection_name,
'selection_color', so.selection_color
)
)
FROM cc_selection_options so
WHERE so.column_id = cc.id
) as selections_list,
(
SELECT json_agg(
json_build_object(
'label_id', lo.label_id,
'label_name', lo.label_name,
'label_color', lo.label_color
)
)
FROM cc_label_options lo
WHERE lo.column_id = cc.id
) as labels_list
FROM cc_custom_columns cc
LEFT JOIN cc_column_configurations cf ON cf.column_id = cc.id
WHERE cc.project_id = $1
)
SELECT
json_agg(
json_build_object(
'key', cd.key,
'id', cd.id,
'name', cd.name,
'width', cd.width,
'pinned', cd.is_visible,
'custom_column', true,
'custom_column_obj', json_build_object(
'fieldType', cd.field_type,
'fieldTitle', cd.field_title,
'numberType', cd.number_type,
'decimals', cd.decimals,
'label', cd.label,
'labelPosition', cd.label_position,
'previewValue', cd.preview_value,
'expression', cd.expression,
'firstNumericColumnKey', cd.first_numeric_column_key,
'secondNumericColumnKey', cd.second_numeric_column_key,
'selectionsList', COALESCE(cd.selections_list, '[]'::json),
'labelsList', COALESCE(cd.labels_list, '[]'::json)
)
)
) as columns
FROM column_data cd;
`;
const result = await db.query(q, [project_id]);
const columns = result.rows[0]?.columns || [];
return res.status(200).send(new ServerResponse(true, columns));
}
}

View File

@@ -114,7 +114,7 @@ export default class HomePageController extends WorklenzControllerBase {
p.team_id,
p.name AS project_name,
p.color_code AS project_color,
(SELECT id FROM task_statuses WHERE id = t.status_id) AS status,
(SELECT name FROM task_statuses WHERE id = t.status_id) AS status,
(SELECT color_code
FROM sys_task_status_categories
WHERE id = (SELECT category_id FROM task_statuses WHERE id = t.status_id)) AS status_color,

View File

@@ -59,7 +59,7 @@ export default class IndexController extends WorklenzControllerBase {
if (req.user && !req.user.is_member)
return res.redirect("/teams");
return res.redirect("/auth");
return res.redirect(301, "/auth");
}
public static redirectToLogin(req: IWorkLenzRequest, res: IWorkLenzResponse) {

View File

@@ -195,7 +195,7 @@ export default class ProjectCommentsController extends WorklenzControllerBase {
pc.created_at,
pc.updated_at
FROM project_comments pc
WHERE pc.project_id = $1 ORDER BY pc.updated_at DESC
WHERE pc.project_id = $1 ORDER BY pc.updated_at
`;
const result = await db.query(q, [req.params.id]);

View File

@@ -7,6 +7,9 @@ import WorklenzControllerBase from "./worklenz-controller-base";
import HandleExceptions from "../decorators/handle-exceptions";
import {getColor} from "../shared/utils";
import TeamMembersController from "./team-members-controller";
import {checkTeamSubscriptionStatus} from "../shared/paddle-utils";
import {updateUsers} from "../shared/paddle-requests";
import {statusExclude} from "../shared/constants";
import {NotificationsService} from "../services/notifications/notifications.service";
export default class ProjectMembersController extends WorklenzControllerBase {
@@ -69,6 +72,70 @@ export default class ProjectMembersController extends WorklenzControllerBase {
if (!req.user?.team_id) return res.status(200).send(new ServerResponse(false, "Required fields are missing."));
// check the subscription status
const subscriptionData = await checkTeamSubscriptionStatus(req.user?.team_id);
const userExists = await this.checkIfUserAlreadyExists(req.user?.owner_id as string, req.body.email);
// Return error if user already exists
if (userExists) {
return res.status(200).send(new ServerResponse(false, null, "User already exists in the team."));
}
// Handle self-hosted subscriptions differently
if (subscriptionData.subscription_type === 'SELF_HOSTED') {
// Adding as a team member
const teamMemberReq: { team_id?: string; emails: string[], project_id?: string; } = {
team_id: req.user?.team_id,
emails: [req.body.email]
};
if (req.body.project_id)
teamMemberReq.project_id = req.body.project_id;
const [member] = await TeamMembersController.createOrInviteMembers(teamMemberReq, req.user);
if (!member)
return res.status(200).send(new ServerResponse(true, null, "Failed to add the member to the project. Please try again."));
// Adding to the project
const projectMemberReq = {
team_member_id: member.team_member_id,
team_id: req.user?.team_id,
project_id: req.body.project_id,
user_id: req.user?.id,
access_level: req.body.access_level ? req.body.access_level : "MEMBER"
};
const data = await this.createOrInviteMembers(projectMemberReq);
return res.status(200).send(new ServerResponse(true, data.member));
}
if (statusExclude.includes(subscriptionData.subscription_status)) {
return res.status(200).send(new ServerResponse(false, null, "Unable to add user! Please check your subscription status."));
}
if (!userExists && subscriptionData.is_ltd && subscriptionData.current_count && (parseInt(subscriptionData.current_count) + 1 > parseInt(subscriptionData.ltd_users))) {
return res.status(200).send(new ServerResponse(false, null, "Maximum number of life time users reached."));
}
// if (subscriptionData.status === "trialing") break;
if (!userExists && !subscriptionData.is_credit && !subscriptionData.is_custom && subscriptionData.subscription_status !== "trialing") {
// if (subscriptionData.subscription_status === "active") {
// const response = await updateUsers(subscriptionData.subscription_id, (subscriptionData.quantity + 1));
// if (!response.body.subscription_id) return res.status(200).send(new ServerResponse(false, null, response.message || "Unable to add user! Please check your subscription."));
// }
const updatedCount = parseInt(subscriptionData.current_count) + 1;
const requiredSeats = updatedCount - subscriptionData.quantity;
if (updatedCount > subscriptionData.quantity) {
const obj = {
seats_enough: false,
required_count: requiredSeats,
current_seat_amount: subscriptionData.quantity
};
return res.status(200).send(new ServerResponse(false, obj, null));
}
}
// Adding as a team member
const teamMemberReq: { team_id?: string; emails: string[], project_id?: string; } = {
team_id: req.user?.team_id,

View File

@@ -8,6 +8,7 @@ import { templateData } from "./project-templates";
import ProjectTemplatesControllerBase from "./project-templates-base";
import { LOG_DESCRIPTIONS, TASK_PRIORITY_COLOR_ALPHA, TASK_STATUS_COLOR_ALPHA } from "../../shared/constants";
import { IO } from "../../shared/io";
import { getCurrentProjectsCount, getFreePlanSettings } from "../../shared/paddle-utils";
export default class ProjectTemplatesController extends ProjectTemplatesControllerBase {
@@ -46,10 +47,10 @@ export default class ProjectTemplatesController extends ProjectTemplatesControll
@HandleExceptions()
public static async getDefaultProjectHealth() {
const q = `SELECT id FROM sys_project_healths WHERE is_default IS TRUE`;
const result = await db.query(q, []);
const [data] = result.rows;
return data.id;
}
@HandleExceptions()
@@ -92,6 +93,16 @@ export default class ProjectTemplatesController extends ProjectTemplatesControll
@HandleExceptions()
public static async importTemplates(req: IWorkLenzRequest, res: IWorkLenzResponse): Promise<IWorkLenzResponse> {
if (req.user?.subscription_status === "free" && req.user?.owner_id) {
const limits = await getFreePlanSettings();
const projectsCount = await getCurrentProjectsCount(req.user.owner_id);
const projectsLimit = parseInt(limits.projects_limit);
if (parseInt(projectsCount) >= projectsLimit) {
return res.status(200).send(new ServerResponse(false, [], `Sorry, the free plan cannot have more than ${projectsLimit} projects.`));
}
}
const { template_id } = req.body;
let project_id: string | null = null;
@@ -202,6 +213,16 @@ export default class ProjectTemplatesController extends ProjectTemplatesControll
@HandleExceptions()
public static async importCustomTemplate(req: IWorkLenzRequest, res: IWorkLenzResponse): Promise<IWorkLenzResponse> {
if (req.user?.subscription_status === "free" && req.user?.owner_id) {
const limits = await getFreePlanSettings();
const projectsCount = await getCurrentProjectsCount(req.user.owner_id);
const projectsLimit = parseInt(limits.projects_limit);
if (parseInt(projectsCount) >= projectsLimit) {
return res.status(200).send(new ServerResponse(false, [], `Sorry, the free plan cannot have more than ${projectsLimit} projects.`));
}
}
const { template_id } = req.body;
let project_id: string | null = null;
@@ -223,8 +244,8 @@ export default class ProjectTemplatesController extends ProjectTemplatesControll
await this.deleteDefaultStatusForProject(project_id as string);
await this.insertTeamLabels(data.labels, req.user?.team_id);
await this.insertProjectPhases(data.phases, project_id as string);
await this.insertProjectStatuses(data.status, project_id as string, data.team_id);
await this.insertProjectTasksFromCustom(data.tasks, data.team_id, project_id as string, data.user_id, IO.getSocketById(req.user?.socket_id as string));
return res.status(200).send(new ServerResponse(true, { project_id }));
}

View File

@@ -12,6 +12,7 @@ import { NotificationsService } from "../services/notifications/notifications.se
import { IPassportSession } from "../interfaces/passport-session";
import { SocketEvents } from "../socket.io/events";
import { IO } from "../shared/io";
import { getCurrentProjectsCount, getFreePlanSettings } from "../shared/paddle-utils";
export default class ProjectsController extends WorklenzControllerBase {
@@ -61,6 +62,16 @@ export default class ProjectsController extends WorklenzControllerBase {
}
})
public static async create(req: IWorkLenzRequest, res: IWorkLenzResponse): Promise<IWorkLenzResponse> {
if (req.user?.subscription_status === "free" && req.user?.owner_id) {
const limits = await getFreePlanSettings();
const projectsCount = await getCurrentProjectsCount(req.user.owner_id);
const projectsLimit = parseInt(limits.projects_limit);
if (parseInt(projectsCount) >= projectsLimit) {
return res.status(200).send(new ServerResponse(false, [], `Sorry, the free plan cannot have more than ${projectsLimit} projects.`));
}
}
const q = `SELECT create_project($1) AS project`;
req.body.team_id = req.user?.team_id || null;
@@ -689,7 +700,8 @@ export default class ProjectsController extends WorklenzControllerBase {
public static async toggleArchiveAll(req: IWorkLenzRequest, res: IWorkLenzResponse): Promise<IWorkLenzResponse> {
const q = `SELECT toggle_archive_all_projects($1);`;
const result = await db.query(q, [req.params.id]);
return res.status(200).send(new ServerResponse(true, result.rows || []));
const [data] = result.rows;
return res.status(200).send(new ServerResponse(true, data.toggle_archive_all_projects || []));
}
public static async getProjectManager(projectId: string) {
@@ -698,4 +710,47 @@ export default class ProjectsController extends WorklenzControllerBase {
return result.rows || [];
}
public static async updateExistPhaseColors() {
const q = `SELECT id, name FROM project_phases`;
const phases = await db.query(q);
phases.rows.forEach((phase) => {
phase.color_code = getColor(phase.name);
});
const body = {
phases: phases.rows
};
const q2 = `SELECT update_existing_phase_colors($1)`;
await db.query(q2, [JSON.stringify(body)]);
}
public static async updateExistSortOrder() {
const q = `SELECT id, project_id FROM project_phases ORDER BY name`;
const phases = await db.query(q);
const sortNumbers: any = {};
phases.rows.forEach(phase => {
const projectId = phase.project_id;
if (!sortNumbers[projectId]) {
sortNumbers[projectId] = 0;
}
phase.sort_number = sortNumbers[projectId]++;
});
const body = {
phases: phases.rows
};
const q2 = `SELECT update_existing_phase_sort_order($1)`;
await db.query(q2, [JSON.stringify(body)]);
// return phases;
}
}

View File

@@ -1,4 +1,4 @@
import { IChartObject } from "./overview/reporting-overview-base";
import * as Highcharts from "highcharts";
export interface IDuration {
label: string;
@@ -34,7 +34,7 @@ export interface IOverviewStatistics {
}
export interface IChartData {
chart: IChartObject[];
chart: Highcharts.PointOptionsObject[];
}
export interface ITasksByStatus extends IChartData {

View File

@@ -1,4 +1,5 @@
import db from "../../../config/db";
import * as Highcharts from "highcharts";
import { ITasksByDue, ITasksByPriority, ITasksByStatus } from "../interfaces";
import ReportingControllerBase from "../reporting-controller-base";
import {
@@ -15,36 +16,33 @@ import {
TASK_STATUS_TODO_COLOR
} from "../../../shared/constants";
import { formatDuration, int } from "../../../shared/utils";
import PointOptionsObject from "../point-options-object";
import moment from "moment";
export interface IChartObject {
name: string,
color: string,
y: number
}
export default class ReportingOverviewBase extends ReportingControllerBase {
private static createChartObject(name: string, color: string, y: number) {
return {
name,
color,
y
};
}
protected static async getTeamsCounts(teamId: string | null, archivedQuery = "") {
const q = `
SELECT JSON_BUILD_OBJECT(
'teams', (SELECT COUNT(*) FROM teams WHERE in_organization(id, $1)),
'projects',
(SELECT COUNT(*) FROM projects WHERE in_organization(team_id, $1) ${archivedQuery}),
'team_members', (SELECT COUNT(DISTINCT email)
FROM team_member_info_view
WHERE in_organization(team_id, $1))
) AS counts;
`;
WITH team_count AS (
SELECT COUNT(*) AS count
FROM teams
WHERE in_organization(id, $1)
),
project_count AS (
SELECT COUNT(*) AS count
FROM projects
WHERE in_organization(team_id, $1) ${archivedQuery}
),
team_member_count AS (
SELECT COUNT(DISTINCT email) AS count
FROM team_member_info_view
WHERE in_organization(team_id, $1)
)
SELECT JSON_BUILD_OBJECT(
'teams', (SELECT count FROM team_count),
'projects', (SELECT count FROM project_count),
'team_members', (SELECT count FROM team_member_count)
) AS counts;`;
const res = await db.query(q, [teamId]);
const [data] = res.rows;
@@ -173,7 +171,7 @@ export default class ReportingOverviewBase extends ReportingControllerBase {
const doing = int(data?.counts.doing);
const done = int(data?.counts.done);
const chart: IChartObject[] = [];
const chart: Highcharts.PointOptionsObject[] = [];
return {
all,
@@ -209,7 +207,7 @@ export default class ReportingOverviewBase extends ReportingControllerBase {
const medium = int(data?.counts.medium);
const high = int(data?.counts.high);
const chart: IChartObject[] = [];
const chart: Highcharts.PointOptionsObject[] = [];
return {
all: 0,
@@ -237,7 +235,7 @@ export default class ReportingOverviewBase extends ReportingControllerBase {
const res = await db.query(q, [projectId]);
const [data] = res.rows;
const chart: IChartObject[] = [];
const chart: Highcharts.PointOptionsObject[] = [];
return {
all: 0,
@@ -251,26 +249,26 @@ export default class ReportingOverviewBase extends ReportingControllerBase {
protected static createByStatusChartData(body: ITasksByStatus) {
body.chart = [
this.createChartObject("Todo", TASK_STATUS_TODO_COLOR, body.todo),
this.createChartObject("Doing", TASK_STATUS_DOING_COLOR, body.doing),
this.createChartObject("Done", TASK_STATUS_DONE_COLOR, body.done),
new PointOptionsObject("Todo", TASK_STATUS_TODO_COLOR, body.todo),
new PointOptionsObject("Doing", TASK_STATUS_DOING_COLOR, body.doing),
new PointOptionsObject("Done", TASK_STATUS_DONE_COLOR, body.done),
];
}
protected static createByPriorityChartData(body: ITasksByPriority) {
body.chart = [
this.createChartObject("Low", TASK_PRIORITY_LOW_COLOR, body.low),
this.createChartObject("Medium", TASK_PRIORITY_MEDIUM_COLOR, body.medium),
this.createChartObject("High", TASK_PRIORITY_HIGH_COLOR, body.high),
new PointOptionsObject("Low", TASK_PRIORITY_LOW_COLOR, body.low),
new PointOptionsObject("Medium", TASK_PRIORITY_MEDIUM_COLOR, body.medium),
new PointOptionsObject("High", TASK_PRIORITY_HIGH_COLOR, body.high),
];
}
protected static createByDueDateChartData(body: ITasksByDue) {
body.chart = [
this.createChartObject("Completed", TASK_DUE_COMPLETED_COLOR, body.completed),
this.createChartObject("Upcoming", TASK_DUE_UPCOMING_COLOR, body.upcoming),
this.createChartObject("Overdue", TASK_DUE_OVERDUE_COLOR, body.overdue),
this.createChartObject("No due date", TASK_DUE_NO_DUE_COLOR, body.no_due),
new PointOptionsObject("Completed", TASK_DUE_COMPLETED_COLOR, body.completed),
new PointOptionsObject("Upcoming", TASK_DUE_UPCOMING_COLOR, body.upcoming),
new PointOptionsObject("Overdue", TASK_DUE_OVERDUE_COLOR, body.overdue),
new PointOptionsObject("No due date", TASK_DUE_NO_DUE_COLOR, body.no_due),
];
}
@@ -581,7 +579,7 @@ export default class ReportingOverviewBase extends ReportingControllerBase {
`;
const result = await db.query(q, [teamMemberId]);
const chart: IChartObject[] = [];
const chart: Highcharts.PointOptionsObject[] = [];
const total = result.rows.reduce((accumulator: number, current: {
count: number
@@ -589,7 +587,7 @@ export default class ReportingOverviewBase extends ReportingControllerBase {
for (const project of result.rows) {
project.count = int(project.count);
chart.push(this.createChartObject(project.label, project.color, project.count));
chart.push(new PointOptionsObject(project.label, project.color, project.count));
}
return { chart, total, data: result.rows };
@@ -635,7 +633,7 @@ export default class ReportingOverviewBase extends ReportingControllerBase {
`;
const result = await db.query(q, [teamMemberId]);
const chart: IChartObject[] = [];
const chart: Highcharts.PointOptionsObject[] = [];
const total = result.rows.reduce((accumulator: number, current: {
count: number
@@ -643,7 +641,7 @@ export default class ReportingOverviewBase extends ReportingControllerBase {
for (const project of result.rows) {
project.count = int(project.count);
chart.push(this.createChartObject(project.label, project.color, project.count));
chart.push(new PointOptionsObject(project.label, project.color, project.count));
}
return { chart, total, data: result.rows };
@@ -673,10 +671,10 @@ export default class ReportingOverviewBase extends ReportingControllerBase {
const total = int(d.low) + int(d.medium) + int(d.high);
const chart = [
this.createChartObject("Low", TASK_PRIORITY_LOW_COLOR, d.low),
this.createChartObject("Medium", TASK_PRIORITY_MEDIUM_COLOR, d.medium),
this.createChartObject("High", TASK_PRIORITY_HIGH_COLOR, d.high),
const chart: Highcharts.PointOptionsObject[] = [
new PointOptionsObject("Low", TASK_PRIORITY_LOW_COLOR, d.low),
new PointOptionsObject("Medium", TASK_PRIORITY_MEDIUM_COLOR, d.medium),
new PointOptionsObject("High", TASK_PRIORITY_HIGH_COLOR, d.high),
];
const data = [
@@ -730,10 +728,10 @@ export default class ReportingOverviewBase extends ReportingControllerBase {
const total = int(d.low) + int(d.medium) + int(d.high);
const chart = [
this.createChartObject("Low", TASK_PRIORITY_LOW_COLOR, d.low),
this.createChartObject("Medium", TASK_PRIORITY_MEDIUM_COLOR, d.medium),
this.createChartObject("High", TASK_PRIORITY_HIGH_COLOR, d.high),
const chart: Highcharts.PointOptionsObject[] = [
new PointOptionsObject("Low", TASK_PRIORITY_LOW_COLOR, d.low),
new PointOptionsObject("Medium", TASK_PRIORITY_MEDIUM_COLOR, d.medium),
new PointOptionsObject("High", TASK_PRIORITY_HIGH_COLOR, d.high),
];
const data = [
@@ -784,10 +782,10 @@ export default class ReportingOverviewBase extends ReportingControllerBase {
const total = int(d.total);
const chart = [
this.createChartObject("Todo", TASK_STATUS_TODO_COLOR, d.todo),
this.createChartObject("Doing", TASK_STATUS_DOING_COLOR, d.doing),
this.createChartObject("Done", TASK_STATUS_DONE_COLOR, d.done),
const chart: Highcharts.PointOptionsObject[] = [
new PointOptionsObject("Todo", TASK_STATUS_TODO_COLOR, d.todo),
new PointOptionsObject("Doing", TASK_STATUS_DOING_COLOR, d.doing),
new PointOptionsObject("Done", TASK_STATUS_DONE_COLOR, d.done),
];
const data = [
@@ -826,10 +824,10 @@ export default class ReportingOverviewBase extends ReportingControllerBase {
const total = int(d.todo) + int(d.doing) + int(d.done);
const chart = [
this.createChartObject("Todo", TASK_STATUS_TODO_COLOR, d.todo),
this.createChartObject("Doing", TASK_STATUS_DOING_COLOR, d.doing),
this.createChartObject("Done", TASK_STATUS_DONE_COLOR, d.done),
const chart: Highcharts.PointOptionsObject[] = [
new PointOptionsObject("Todo", TASK_STATUS_TODO_COLOR, d.todo),
new PointOptionsObject("Doing", TASK_STATUS_DOING_COLOR, d.doing),
new PointOptionsObject("Done", TASK_STATUS_DONE_COLOR, d.done),
];
const data = [
@@ -878,7 +876,7 @@ export default class ReportingOverviewBase extends ReportingControllerBase {
const in_progress = int(data?.counts.in_progress);
const completed = int(data?.counts.completed);
const chart : IChartObject[] = [];
const chart: Highcharts.PointOptionsObject[] = [];
return {
all,
@@ -908,7 +906,7 @@ export default class ReportingOverviewBase extends ReportingControllerBase {
`;
const result = await db.query(q, [teamId]);
const chart: IChartObject[] = [];
const chart: Highcharts.PointOptionsObject[] = [];
const total = result.rows.reduce((accumulator: number, current: {
count: number
@@ -916,11 +914,7 @@ export default class ReportingOverviewBase extends ReportingControllerBase {
for (const category of result.rows) {
category.count = int(category.count);
chart.push({
name: category.label,
color: category.color,
y: category.count
});
chart.push(new PointOptionsObject(category.label, category.color, category.count));
}
return { chart, total, data: result.rows };
@@ -956,7 +950,7 @@ export default class ReportingOverviewBase extends ReportingControllerBase {
const at_risk = int(data?.counts.at_risk);
const good = int(data?.counts.good);
const chart: IChartObject[] = [];
const chart: Highcharts.PointOptionsObject[] = [];
return {
not_set,
@@ -971,22 +965,22 @@ export default class ReportingOverviewBase extends ReportingControllerBase {
// Team Overview
protected static createByProjectStatusChartData(body: any) {
body.chart = [
this.createChartObject("Cancelled", "#f37070", body.cancelled),
this.createChartObject("Blocked", "#cbc8a1", body.blocked),
this.createChartObject("On Hold", "#cbc8a1", body.on_hold),
this.createChartObject("Proposed", "#cbc8a1", body.proposed),
this.createChartObject("In Planning", "#cbc8a1", body.in_planning),
this.createChartObject("In Progress", "#80ca79", body.in_progress),
this.createChartObject("Completed", "#80ca79", body.completed)
new PointOptionsObject("Cancelled", "#f37070", body.cancelled),
new PointOptionsObject("Blocked", "#cbc8a1", body.blocked),
new PointOptionsObject("On Hold", "#cbc8a1", body.on_hold),
new PointOptionsObject("Proposed", "#cbc8a1", body.proposed),
new PointOptionsObject("In Planning", "#cbc8a1", body.in_planning),
new PointOptionsObject("In Progress", "#80ca79", body.in_progress),
new PointOptionsObject("Completed", "#80ca79", body.completed),
];
}
protected static createByProjectHealthChartData(body: any) {
body.chart = [
this.createChartObject("Not Set", "#a9a9a9", body.not_set),
this.createChartObject("Needs Attention", "#f37070", body.needs_attention),
this.createChartObject("At Risk", "#fbc84c", body.at_risk),
this.createChartObject("Good", "#75c997", body.good)
new PointOptionsObject("Not Set", "#a9a9a9", body.not_set),
new PointOptionsObject("Needs Attention", "#f37070", body.needs_attention),
new PointOptionsObject("At Risk", "#fbc84c", body.at_risk),
new PointOptionsObject("Good", "#75c997", body.good)
];
}

View File

@@ -0,0 +1,13 @@
import * as Highcharts from "highcharts";
export default class PointOptionsObject implements Highcharts.PointOptionsObject {
name!: string;
color!: string;
y!: number;
constructor(name: string, color: string, y: number) {
this.name = name;
this.color = color;
this.y = y;
}
}
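A minimal sketch of how this new adapter class might be consumed when building a typed Highcharts series; the import path and series options below are illustrative, not taken from this changeset:

import * as Highcharts from "highcharts";
import PointOptionsObject from "./point-options-object"; // hypothetical path

// Typed points replace the ad-hoc {name, color, y} literals removed above
const points: Highcharts.PointOptionsObject[] = [
new PointOptionsObject("Low", "#75c997", 4),
new PointOptionsObject("Medium", "#fbc84c", 2),
new PointOptionsObject("High", "#f37070", 1),
];
const series: Highcharts.SeriesPieOptions = { type: "pie", data: points };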

View File

@@ -8,13 +8,14 @@ import { getColor, int, log_error } from "../../shared/utils";
import ReportingControllerBase from "./reporting-controller-base";
import { DATE_RANGES } from "../../shared/constants";
import Excel from "exceljs";
import ChartJsImage from "chartjs-to-image";
enum IToggleOptions {
'WORKING_DAYS' = 'WORKING_DAYS', 'MAN_DAYS' = 'MAN_DAYS'
}
export default class ReportingAllocationController extends ReportingControllerBase {
private static async getTimeLoggedByProjects(projects: string[], users: string[], key: string, dateRange: string[], archived = false, user_id = ""): Promise<any> {
private static async getTimeLoggedByProjects(projects: string[], users: string[], key: string, dateRange: string[], archived = false, user_id = "", billable: { billable: boolean; nonBillable: boolean }): Promise<any> {
try {
const projectIds = projects.map(p => `'${p}'`).join(",");
const userIds = users.map(u => `'${u}'`).join(",");
@@ -24,8 +25,10 @@ export default class ReportingAllocationController extends ReportingControllerBa
? ""
: `AND projects.id NOT IN (SELECT project_id FROM archived_projects WHERE project_id = projects.id AND user_id = '${user_id}') `;
const projectTimeLogs = await this.getTotalTimeLogsByProject(archived, duration, projectIds, userIds, archivedClause);
const userTimeLogs = await this.getTotalTimeLogsByUser(archived, duration, projectIds, userIds);
const billableQuery = this.buildBillableQuery(billable);
const projectTimeLogs = await this.getTotalTimeLogsByProject(archived, duration, projectIds, userIds, archivedClause, billableQuery);
const userTimeLogs = await this.getTotalTimeLogsByUser(archived, duration, projectIds, userIds, billableQuery);
const format = (seconds: number) => {
if (seconds === 0) return "-";
@@ -65,7 +68,7 @@ export default class ReportingAllocationController extends ReportingControllerBa
return [];
}
private static async getTotalTimeLogsByProject(archived: boolean, duration: string, projectIds: string, userIds: string, archivedClause = "") {
private static async getTotalTimeLogsByProject(archived: boolean, duration: string, projectIds: string, userIds: string, archivedClause = "", billableQuery = '') {
try {
const q = `SELECT projects.name,
projects.color_code,
@@ -74,12 +77,12 @@ export default class ReportingAllocationController extends ReportingControllerBa
sps.icon AS status_icon,
(SELECT COUNT(*)
FROM tasks
WHERE CASE WHEN ($1 IS TRUE) THEN project_id IS NOT NULL ELSE archived = FALSE END
WHERE CASE WHEN ($1 IS TRUE) THEN project_id IS NOT NULL ELSE archived = FALSE END ${billableQuery}
AND project_id = projects.id) AS all_tasks_count,
(SELECT COUNT(*)
FROM tasks
WHERE CASE WHEN ($1 IS TRUE) THEN project_id IS NOT NULL ELSE archived = FALSE END
AND project_id = projects.id
AND project_id = projects.id ${billableQuery}
AND status_id IN (SELECT id
FROM task_statuses
WHERE project_id = projects.id
@@ -91,10 +94,10 @@ export default class ReportingAllocationController extends ReportingControllerBa
SELECT name,
(SELECT COALESCE(SUM(time_spent), 0)
FROM task_work_log
LEFT JOIN tasks t ON task_work_log.task_id = t.id
WHERE user_id = users.id
AND CASE WHEN ($1 IS TRUE) THEN t.project_id IS NOT NULL ELSE t.archived = FALSE END
AND t.project_id = projects.id
LEFT JOIN tasks ON task_work_log.task_id = tasks.id
WHERE user_id = users.id ${billableQuery}
AND CASE WHEN ($1 IS TRUE) THEN tasks.project_id IS NOT NULL ELSE tasks.archived = FALSE END
AND tasks.project_id = projects.id
${duration}) AS time_logged
FROM users
WHERE id IN (${userIds})
@@ -113,15 +116,15 @@ export default class ReportingAllocationController extends ReportingControllerBa
}
}
private static async getTotalTimeLogsByUser(archived: boolean, duration: string, projectIds: string, userIds: string) {
private static async getTotalTimeLogsByUser(archived: boolean, duration: string, projectIds: string, userIds: string, billableQuery = "") {
try {
const q = `(SELECT id,
(SELECT COALESCE(SUM(time_spent), 0)
FROM task_work_log
LEFT JOIN tasks t ON task_work_log.task_id = t.id
LEFT JOIN tasks ON task_work_log.task_id = tasks.id ${billableQuery}
WHERE user_id = users.id
AND CASE WHEN ($1 IS TRUE) THEN t.project_id IS NOT NULL ELSE t.archived = FALSE END
AND t.project_id IN (${projectIds})
AND CASE WHEN ($1 IS TRUE) THEN tasks.project_id IS NOT NULL ELSE tasks.archived = FALSE END
AND tasks.project_id IN (${projectIds})
${duration}) AS time_logged
FROM users
WHERE id IN (${userIds})
@@ -154,6 +157,7 @@ export default class ReportingAllocationController extends ReportingControllerBa
@HandleExceptions()
public static async getAllocation(req: IWorkLenzRequest, res: IWorkLenzResponse): Promise<IWorkLenzResponse> {
const teams = (req.body.teams || []) as string[]; // ids
const billable = req.body.billable;
const teamIds = teams.map(id => `'${id}'`).join(",");
const projectIds = (req.body.projects || []) as string[];
@@ -164,7 +168,7 @@ export default class ReportingAllocationController extends ReportingControllerBa
const users = await this.getUserIds(teamIds);
const userIds = users.map((u: any) => u.id);
const { projectTimeLogs, userTimeLogs } = await this.getTimeLoggedByProjects(projectIds, userIds, req.body.duration, req.body.date_range, (req.query.archived === "true"), req.user?.id);
const { projectTimeLogs, userTimeLogs } = await this.getTimeLoggedByProjects(projectIds, userIds, req.body.duration, req.body.date_range, (req.query.archived === "true"), req.user?.id, billable);
for (const [i, user] of users.entries()) {
user.total_time = userTimeLogs[i].time_logged;
@@ -184,6 +188,7 @@ export default class ReportingAllocationController extends ReportingControllerBa
public static async export(req: IWorkLenzRequest, res: IWorkLenzResponse) {
const teams = (req.query.teams as string)?.split(",");
const teamIds = teams.map(t => `'${t}'`).join(",");
const billable = req.body.billable ? req.body.billable : { billable: req.query.billable === "true", nonBillable: req.query.nonBillable === "true" };
const projectIds = (req.query.projects as string)?.split(",");
@@ -218,7 +223,7 @@ export default class ReportingAllocationController extends ReportingControllerBa
const users = await this.getUserIds(teamIds);
const userIds = users.map((u: any) => u.id);
const { projectTimeLogs, userTimeLogs } = await this.getTimeLoggedByProjects(projectIds, userIds, duration as string, dateRange, (req.query.include_archived === "true"), req.user?.id);
const { projectTimeLogs, userTimeLogs } = await this.getTimeLoggedByProjects(projectIds, userIds, duration as string, dateRange, (req.query.include_archived === "true"), req.user?.id, billable);
for (const [i, user] of users.entries()) {
user.total_time = userTimeLogs[i].time_logged;
@@ -341,6 +346,8 @@ export default class ReportingAllocationController extends ReportingControllerBa
const projects = (req.body.projects || []) as string[];
const projectIds = projects.map(p => `'${p}'`).join(",");
const billable = req.body.billable;
if (!teamIds || !projectIds.length)
return res.status(200).send(new ServerResponse(true, { users: [], projects: [] }));
@@ -352,6 +359,8 @@ export default class ReportingAllocationController extends ReportingControllerBa
? ""
: `AND p.id NOT IN (SELECT project_id FROM archived_projects WHERE project_id = p.id AND user_id = '${req.user?.id}') `;
const billableQuery = this.buildBillableQuery(billable);
const q = `
SELECT p.id,
p.name,
@@ -359,8 +368,8 @@ export default class ReportingAllocationController extends ReportingControllerBa
SUM(total_minutes) AS estimated,
color_code
FROM projects p
LEFT JOIN tasks t ON t.project_id = p.id
LEFT JOIN task_work_log ON task_work_log.task_id = t.id
LEFT JOIN tasks ON tasks.project_id = p.id ${billableQuery}
LEFT JOIN task_work_log ON task_work_log.task_id = tasks.id
WHERE p.id IN (${projectIds}) ${durationClause} ${archivedClause}
GROUP BY p.id, p.name
ORDER BY logged_time DESC;`;
@@ -372,7 +381,7 @@ export default class ReportingAllocationController extends ReportingControllerBa
project.value = project.logged_time ? parseFloat(moment.duration(project.logged_time, "seconds").asHours().toFixed(2)) : 0;
project.estimated_value = project.estimated ? parseFloat(moment.duration(project.estimated, "minutes").asHours().toFixed(2)) : 0;
if (project.value > 0 ) {
if (project.value > 0) {
data.push(project);
}
@@ -392,6 +401,8 @@ export default class ReportingAllocationController extends ReportingControllerBa
const projects = (req.body.projects || []) as string[];
const projectIds = projects.map(p => `'${p}'`).join(",");
const billable = req.body.billable;
if (!teamIds || !projectIds.length)
return res.status(200).send(new ServerResponse(true, { users: [], projects: [] }));
@@ -402,12 +413,14 @@ export default class ReportingAllocationController extends ReportingControllerBa
? ""
: `AND p.id NOT IN (SELECT project_id FROM archived_projects WHERE project_id = p.id AND user_id = '${req.user?.id}') `;
const billableQuery = this.buildBillableQuery(billable);
const q = `
SELECT tmiv.email, tmiv.name, SUM(time_spent) AS logged_time
FROM team_member_info_view tmiv
LEFT JOIN task_work_log ON task_work_log.user_id = tmiv.user_id
LEFT JOIN tasks t ON t.id = task_work_log.task_id
LEFT JOIN projects p ON p.id = t.project_id AND p.team_id = tmiv.team_id
LEFT JOIN tasks ON tasks.id = task_work_log.task_id ${billableQuery}
LEFT JOIN projects p ON p.id = tasks.project_id AND p.team_id = tmiv.team_id
WHERE p.id IN (${projectIds})
${durationClause} ${archivedClause}
GROUP BY tmiv.email, tmiv.name
@@ -422,7 +435,64 @@ export default class ReportingAllocationController extends ReportingControllerBa
return res.status(200).send(new ServerResponse(true, result.rows));
}
@HandleExceptions()
public static async exportTest(req: IWorkLenzRequest, res: IWorkLenzResponse): Promise<IWorkLenzResponse> {
const archived = req.query.archived === "true";
const teamId = this.getCurrentTeamId(req);
const { duration, date_range } = req.query;
const durationClause = this.getDateRangeClause(duration as string || DATE_RANGES.LAST_WEEK, date_range as string[]);
const archivedClause = archived
? ""
: `AND p.id NOT IN (SELECT project_id FROM archived_projects WHERE project_id = p.id AND user_id = '${req.user?.id}') `;
const q = `
SELECT p.id,
p.name,
(SELECT SUM(time_spent)) AS logged_time,
SUM(total_minutes) AS estimated,
color_code
FROM projects p
LEFT JOIN tasks t ON t.project_id = p.id
LEFT JOIN task_work_log ON task_work_log.task_id = t.id
WHERE in_organization(p.team_id, $1)
${durationClause} ${archivedClause}
GROUP BY p.id, p.name
ORDER BY p.name ASC;`;
const result = await db.query(q, [teamId]);
const labelsX = [];
const dataX = [];
for (const project of result.rows) {
project.value = project.logged_time ? parseFloat(moment.duration(project.logged_time, "seconds").asHours().toFixed(2)) : 0;
project.estimated_value = project.estimated ? parseFloat(moment.duration(project.estimated, "minutes").asHours().toFixed(2)) : 0;
labelsX.push(project.name);
dataX.push(project.value || 0);
}
const chart = new ChartJsImage();
chart.setConfig({
type: "bar",
data: {
labels: labelsX,
datasets: [
{ label: "", data: dataX }
]
},
});
chart.setWidth(1920).setHeight(1080).setBackgroundColor("transparent");
const url = chart.getUrl();
chart.toFile("test.png");
return res.status(200).send(new ServerResponse(true, url));
}
private static getEstimated(project: any, type: string) {
// if (project.estimated_man_days === 0 || project.estimated_working_days === 0) {
// return (parseFloat(moment.duration(project.estimated, "minutes").asHours().toFixed(2)) / int(project.hours_per_day)).toFixed(2)
// }
switch (type) {
case IToggleOptions.MAN_DAYS:
@@ -445,7 +515,7 @@ export default class ReportingAllocationController extends ReportingControllerBa
const projects = (req.body.projects || []) as string[];
const projectIds = projects.map(p => `'${p}'`).join(",");
const { type } = req.body;
const { type, billable } = req.body;
if (!teamIds || !projectIds.length)
return res.status(200).send(new ServerResponse(true, { users: [], projects: [] }));
@@ -458,6 +528,8 @@ export default class ReportingAllocationController extends ReportingControllerBa
? ""
: `AND p.id NOT IN (SELECT project_id FROM archived_projects WHERE project_id = p.id AND user_id = '${req.user?.id}') `;
const billableQuery = this.buildBillableQuery(billable);
const q = `
SELECT p.id,
p.name,
@@ -471,8 +543,8 @@ export default class ReportingAllocationController extends ReportingControllerBa
WHERE project_id = p.id) AS estimated,
color_code
FROM projects p
LEFT JOIN tasks t ON t.project_id = p.id
LEFT JOIN task_work_log ON task_work_log.task_id = t.id
LEFT JOIN tasks ON tasks.project_id = p.id ${billableQuery}
LEFT JOIN task_work_log ON task_work_log.task_id = tasks.id
WHERE p.id IN (${projectIds}) ${durationClause} ${archivedClause}
GROUP BY p.id, p.name
ORDER BY logged_time DESC;`;
@@ -491,7 +563,7 @@ export default class ReportingAllocationController extends ReportingControllerBa
project.estimated_working_days = project.estimated_working_days ?? 0;
project.hours_per_day = project.hours_per_day ?? 0;
if (project.value > 0 || project.estimated_value > 0 ) {
if (project.value > 0 || project.estimated_value > 0) {
data.push(project);
}
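For reference, a sketch of the request body the updated getAllocation endpoint now expects; the field names mirror the destructuring above, while the values are invented:

// Hypothetical payload for the allocation report request
const body = {
teams: ["<team-uuid>"], // ids
projects: ["<project-uuid>"],
duration: "LAST_WEEK",
date_range: [],
billable: { billable: true, nonBillable: false } // feeds buildBillableQuery; archived still arrives as ?archived=true
};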

View File

@@ -109,6 +109,23 @@ export default abstract class ReportingControllerBase extends WorklenzController
return "";
}
protected static buildBillableQuery(selectedStatuses: { billable: boolean; nonBillable: boolean }): string {
const { billable, nonBillable } = selectedStatuses;
if (billable && nonBillable) {
// Both are enabled, no need to filter
return "";
} else if (billable) {
// Only billable is enabled
return " AND tasks.billable IS TRUE";
} else if (nonBillable) {
// Only non-billable is enabled
return " AND tasks.billable IS FALSE";
}
return "";
}
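// A quick sketch of the fragment this helper yields for each flag combination:
//   { billable: true,  nonBillable: true  } -> ""  (both enabled, no filter)
//   { billable: true,  nonBillable: false } -> " AND tasks.billable IS TRUE"
//   { billable: false, nonBillable: true  } -> " AND tasks.billable IS FALSE"
//   { billable: false, nonBillable: false } -> ""  (falls through, no filter)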
protected static formatEndDate(endDate: string) {
const end = moment(endDate).format("YYYY-MM-DD");
const fEndDate = moment(end);
@@ -173,6 +190,9 @@ export default abstract class ReportingControllerBase extends WorklenzController
(SELECT color_code
FROM sys_project_healths
WHERE sys_project_healths.id = p.health_id) AS health_color,
(SELECT name
FROM sys_project_healths
WHERE sys_project_healths.id = p.health_id) AS health_name,
pc.id AS category_id,
pc.name AS category_name,

View File

@@ -862,7 +862,7 @@ export default class ReportingMembersController extends ReportingControllerBase
}
private static async memberTimeLogsData(durationClause: string, minMaxDateClause: string, team_id: string, team_member_id: string, includeArchived: boolean, userId: string) {
private static async memberTimeLogsData(durationClause: string, minMaxDateClause: string, team_id: string, team_member_id: string, includeArchived: boolean, userId: string, billableQuery = "") {
const archivedClause = includeArchived
? ""
@@ -884,7 +884,7 @@ export default class ReportingMembersController extends ReportingControllerBase
FROM task_work_log twl
WHERE twl.user_id = tmiv.user_id
${durationClause}
AND task_id IN (SELECT id FROM tasks WHERE project_id IN (SELECT id FROM projects WHERE team_id = $1) ${archivedClause} )
AND task_id IN (SELECT id FROM tasks WHERE project_id IN (SELECT id FROM projects WHERE team_id = $1) ${archivedClause} ${billableQuery})
ORDER BY twl.updated_at DESC) tl) AS time_logs
${minMaxDateClause}
FROM team_member_info_view tmiv
@@ -1017,14 +1017,33 @@ export default class ReportingMembersController extends ReportingControllerBase
}
protected static buildBillableQuery(selectedStatuses: { billable: boolean; nonBillable: boolean }): string {
const { billable, nonBillable } = selectedStatuses;
if (billable && nonBillable) {
// Both are enabled, no need to filter
return "";
} else if (billable) {
// Only billable is enabled
return " AND tasks.billable IS TRUE";
} else if (nonBillable) {
// Only non-billable is enabled
return " AND tasks.billable IS FALSE";
}
return "";
}
@HandleExceptions()
public static async getMemberTimelogs(req: IWorkLenzRequest, res: IWorkLenzResponse): Promise<IWorkLenzResponse> {
const { team_member_id, team_id, duration, date_range, archived } = req.body;
const { team_member_id, team_id, duration, date_range, archived, billable } = req.body;
const durationClause = ReportingMembersController.getDateRangeClauseMembers(duration || DATE_RANGES.LAST_WEEK, date_range, "twl");
const minMaxDateClause = this.getMinMaxDates(duration || DATE_RANGES.LAST_WEEK, date_range, "task_work_log");
const logGroups = await this.memberTimeLogsData(durationClause, minMaxDateClause, team_id, team_member_id, archived, req.user?.id as string);
const billableQuery = this.buildBillableQuery(billable);
const logGroups = await this.memberTimeLogsData(durationClause, minMaxDateClause, team_id, team_member_id, archived, req.user?.id as string, billableQuery);
return res.status(200).send(new ServerResponse(true, logGroups));
}
@@ -1049,6 +1068,7 @@ export default class ReportingMembersController extends ReportingControllerBase
const completedDurationClasue = this.completedDurationFilter(duration as string, dateRange);
const overdueClauseByDate = this.getActivityLogsOverdue(duration as string, dateRange);
const taskSelectorClause = this.getTaskSelectorClause();
const durationFilter = this.memberTasksDurationFilter(duration as string, dateRange);
const q = `
SELECT name AS team_member_name,
@@ -1059,6 +1079,12 @@ export default class ReportingMembersController extends ReportingControllerBase
LEFT JOIN tasks_assignees ta ON t.id = ta.task_id
WHERE ta.team_member_id = $1 ${assignClause} ${archivedClause}) assigned) AS assigned,
(SELECT COALESCE(ARRAY_TO_JSON(ARRAY_AGG(ROW_TO_JSON(assigned))), '[]')
FROM (${taskSelectorClause}
FROM tasks t
LEFT JOIN tasks_assignees ta ON t.id = ta.task_id
WHERE ta.team_member_id = $1 ${durationFilter} ${assignClause} ${archivedClause}) assigned) AS total,
(SELECT COALESCE(ARRAY_TO_JSON(ARRAY_AGG(ROW_TO_JSON(completed))), '[]')
FROM (${taskSelectorClause}
FROM tasks t
@@ -1095,6 +1121,11 @@ export default class ReportingMembersController extends ReportingControllerBase
const body = {
team_member_name: data.team_member_name,
groups: [
{
name: "Total Tasks",
color_code: "#7590c9",
tasks: data.total ? data.total : 0
},
{
name: "Tasks Assigned",
color_code: "#7590c9",
@@ -1114,7 +1145,7 @@ export default class ReportingMembersController extends ReportingControllerBase
name: "Tasks Ongoing",
color_code: "#7cb5ec",
tasks: data.ongoing ? data.ongoing : 0
}
},
]
};
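As with the allocation report, getMemberTimelogs now accepts a billable flag pair in the body; a sketch of the expected shape, with invented values:

const body = {
team_id: "<team-uuid>",
team_member_id: "<member-uuid>",
duration: "LAST_WEEK",
date_range: [],
archived: false,
billable: { billable: true, nonBillable: true } // both true (or both false) disables the filter
};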

View File

@@ -0,0 +1,407 @@
import db from "../../config/db";
import { ParsedQs } from "qs";
import HandleExceptions from "../../decorators/handle-exceptions";
import { IWorkLenzRequest } from "../../interfaces/worklenz-request";
import { IWorkLenzResponse } from "../../interfaces/worklenz-response";
import { ServerResponse } from "../../models/server-response";
import { TASK_PRIORITY_COLOR_ALPHA, TASK_STATUS_COLOR_ALPHA, UNMAPPED } from "../../shared/constants";
import { getColor } from "../../shared/utils";
import moment, { Moment } from "moment";
import momentTime from "moment-timezone";
import WorklenzControllerBase from "../worklenz-controller-base";
interface IDateUnions {
date_union: {
start_date: string | null;
end_date: string | null;
},
logs_date_union: {
start_date: string | null;
end_date: string | null;
},
allocated_date_union: {
start_date: string | null;
end_date: string | null;
}
}
interface IDatesPair {
start_date: string | null,
end_date: string | null
}
export default class ScheduleControllerV2 extends WorklenzControllerBase {
@HandleExceptions()
public static async getSettings(req: IWorkLenzRequest, res: IWorkLenzResponse): Promise<IWorkLenzResponse> {
// get organization working days
const getDataq = `SELECT organization_id, array_agg(initcap(day)) AS working_days
FROM (
SELECT organization_id,
unnest(ARRAY['Monday', 'Tuesday', 'Wednesday', 'Thursday', 'Friday', 'Saturday', 'Sunday']) AS day,
unnest(ARRAY[monday, tuesday, wednesday, thursday, friday, saturday, sunday]) AS is_working
FROM public.organization_working_days
WHERE organization_id IN (
SELECT id FROM organizations
WHERE user_id = $1
)
) t
WHERE t.is_working
GROUP BY organization_id LIMIT 1;`;
const workingDaysResults = await db.query(getDataq, [req.user?.owner_id]);
const [workingDays] = workingDaysResults.rows;
// get organization working hours
const getDataHoursq = `SELECT working_hours FROM organizations WHERE user_id = $1 GROUP BY id LIMIT 1;`;
const workingHoursResults = await db.query(getDataHoursq, [req.user?.owner_id]);
const [workingHours] = workingHoursResults.rows;
return res.status(200).send(new ServerResponse(true, { workingDays: workingDays?.working_days, workingHours: workingHours?.working_hours }));
}
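// Shape of the settings payload returned above (values illustrative):
// { workingDays: ["Monday", "Tuesday", "Wednesday", "Thursday", "Friday"], workingHours: 8 }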
@HandleExceptions()
public static async updateSettings(req: IWorkLenzRequest, res: IWorkLenzResponse): Promise<IWorkLenzResponse> {
const { workingDays, workingHours } = req.body;
// Days of the week
const days = ["Monday", "Tuesday", "Wednesday", "Thursday", "Friday", "Saturday", "Sunday"];
// Generate the SET clause dynamically
const setClause = days
.map(day => `${day.toLowerCase()} = ${workingDays.includes(day)}`)
.join(", ");
const updateQuery = `
UPDATE public.organization_working_days
SET ${setClause}, updated_at = CURRENT_TIMESTAMP
WHERE organization_id IN (
SELECT organization_id FROM organizations
WHERE user_id = $1
);
`;
await db.query(updateQuery, [req.user?.owner_id]);
const getDataHoursq = `UPDATE organizations SET working_hours = $1 WHERE user_id = $2;`;
await db.query(getDataHoursq, [workingHours, req.user?.owner_id]);
return res.status(200).send(new ServerResponse(true, {}));
}
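// Example of the dynamically generated SET clause (hypothetical input):
// workingDays = ["Monday", "Tuesday"] produces
// SET monday = true, tuesday = true, wednesday = false, thursday = false,
//     friday = false, saturday = false, sunday = false, updated_at = CURRENT_TIMESTAMP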
@HandleExceptions()
public static async getDates(req: IWorkLenzRequest, res: IWorkLenzResponse): Promise<IWorkLenzResponse> {
const { date, type } = req.params;
if (type === "week") {
const getDataq = `WITH input_date AS (
SELECT
$1::DATE AS given_date,
(SELECT id FROM organizations WHERE user_id=$2 LIMIT 1) AS organization_id
),
week_range AS (
SELECT
(given_date - (EXTRACT(DOW FROM given_date)::INT + 6) % 7)::DATE AS start_date, -- Current week start date
(given_date - (EXTRACT(DOW FROM given_date)::INT + 6) % 7 + 6)::DATE AS end_date, -- Current week end date
(given_date - (EXTRACT(DOW FROM given_date)::INT + 6) % 7 + 7)::DATE AS next_week_start, -- Next week start date
(given_date - (EXTRACT(DOW FROM given_date)::INT + 6) % 7 + 13)::DATE AS next_week_end, -- Next week end date
TO_CHAR(given_date, 'Mon YYYY') AS month_year, -- Format the month as 'Jan 2025'
EXTRACT(DAY FROM given_date) AS day_number, -- Extract the day from the date
(given_date - (EXTRACT(DOW FROM given_date)::INT + 6) % 7)::DATE AS chart_start, -- First week start date
(given_date - (EXTRACT(DOW FROM given_date)::INT + 6) % 7 + 13)::DATE AS chart_end, -- Second week end date
CURRENT_DATE::DATE AS today,
organization_id
FROM input_date
),
org_working_days AS (
SELECT
organization_id,
monday, tuesday, wednesday, thursday, friday, saturday, sunday
FROM organization_working_days
WHERE organization_id = (SELECT organization_id FROM week_range)
),
days AS (
SELECT
generate_series((SELECT start_date FROM week_range), (SELECT next_week_end FROM week_range), '1 day'::INTERVAL)::DATE AS date
),
formatted_days AS (
SELECT
d.date,
TO_CHAR(d.date, 'Dy') AS day_name,
EXTRACT(DAY FROM d.date) AS day,
TO_CHAR(d.date, 'Mon YYYY') AS month, -- Format the month as 'Jan 2025'
CASE
WHEN EXTRACT(DOW FROM d.date) = 0 THEN (SELECT sunday FROM org_working_days)
WHEN EXTRACT(DOW FROM d.date) = 1 THEN (SELECT monday FROM org_working_days)
WHEN EXTRACT(DOW FROM d.date) = 2 THEN (SELECT tuesday FROM org_working_days)
WHEN EXTRACT(DOW FROM d.date) = 3 THEN (SELECT wednesday FROM org_working_days)
WHEN EXTRACT(DOW FROM d.date) = 4 THEN (SELECT thursday FROM org_working_days)
WHEN EXTRACT(DOW FROM d.date) = 5 THEN (SELECT friday FROM org_working_days)
WHEN EXTRACT(DOW FROM d.date) = 6 THEN (SELECT saturday FROM org_working_days)
END AS is_weekend,
CASE WHEN d.date = (SELECT today FROM week_range) THEN TRUE ELSE FALSE END AS is_today
FROM days d
),
aggregated_days AS (
SELECT
jsonb_agg(
jsonb_build_object(
'day', day,
'month', month, -- Include formatted month
'name', day_name,
'isWeekend', NOT is_weekend,
'isToday', is_today
) ORDER BY date
) AS days_json
FROM formatted_days
)
SELECT jsonb_build_object(
'date_data', jsonb_agg(
jsonb_build_object(
'month', (SELECT month_year FROM week_range), -- Formatted month-year (e.g., Jan 2025)
'day', (SELECT day_number FROM week_range), -- Dynamic day number
'weeks', '[]', -- Empty weeks array for now
'days', (SELECT days_json FROM aggregated_days) -- Aggregated days data
)
),
'chart_start', (SELECT chart_start FROM week_range), -- First week start date
'chart_end', (SELECT chart_end FROM week_range) -- Second week end date
) AS result_json;`;
const results = await db.query(getDataq, [date, req.user?.owner_id]);
const [data] = results.rows;
return res.status(200).send(new ServerResponse(true, data.result_json));
} else if (type === "month") {
const getDataq = `WITH params AS (
SELECT
DATE_TRUNC('month', $1::DATE)::DATE AS start_date, -- First day of the month
(DATE_TRUNC('month', $1::DATE) + INTERVAL '1 month' - INTERVAL '1 day')::DATE AS end_date, -- Last day of the month
CURRENT_DATE::DATE AS today,
(SELECT id FROM organizations WHERE user_id = $2 LIMIT 1) AS org_id
),
days AS (
SELECT
generate_series(
(SELECT start_date FROM params),
(SELECT end_date FROM params),
'1 day'::INTERVAL
)::DATE AS date
),
org_working_days AS (
SELECT
monday, tuesday, wednesday, thursday, friday, saturday, sunday
FROM organization_working_days
WHERE organization_id = (SELECT org_id FROM params)
LIMIT 1
),
formatted_days AS (
SELECT
d.date,
TO_CHAR(d.date, 'Dy') AS day_name,
EXTRACT(DAY FROM d.date) AS day,
-- Dynamically check if the day is a weekend based on the organization's settings
CASE
WHEN EXTRACT(DOW FROM d.date) = 0 THEN NOT (SELECT sunday FROM org_working_days)
WHEN EXTRACT(DOW FROM d.date) = 1 THEN NOT (SELECT monday FROM org_working_days)
WHEN EXTRACT(DOW FROM d.date) = 2 THEN NOT (SELECT tuesday FROM org_working_days)
WHEN EXTRACT(DOW FROM d.date) = 3 THEN NOT (SELECT wednesday FROM org_working_days)
WHEN EXTRACT(DOW FROM d.date) = 4 THEN NOT (SELECT thursday FROM org_working_days)
WHEN EXTRACT(DOW FROM d.date) = 5 THEN NOT (SELECT friday FROM org_working_days)
WHEN EXTRACT(DOW FROM d.date) = 6 THEN NOT (SELECT saturday FROM org_working_days)
END AS is_weekend,
CASE WHEN d.date = (SELECT today FROM params) THEN TRUE ELSE FALSE END AS is_today
FROM days d
),
grouped_by_month AS (
SELECT
TO_CHAR(date, 'Mon YYYY') AS month_name,
jsonb_agg(
jsonb_build_object(
'day', day,
'name', day_name,
'isWeekend', is_weekend,
'isToday', is_today
) ORDER BY date
) AS days
FROM formatted_days
GROUP BY month_name
)
SELECT jsonb_build_object(
'date_data', jsonb_agg(
jsonb_build_object(
'month', month_name,
'weeks', '[]'::JSONB, -- Placeholder for weeks data
'days', days
) ORDER BY month_name
),
'chart_start', (SELECT start_date FROM params),
'chart_end', (SELECT end_date FROM params)
) AS result_json
FROM grouped_by_month;`;
const results = await db.query(getDataq, [date, req.user?.owner_id]);
const [data] = results.rows;
return res.status(200).send(new ServerResponse(true, data.result_json));
}
return res.status(200).send(new ServerResponse(true, []));
}
@HandleExceptions()
public static async getOrganizationMembers(req: IWorkLenzRequest, res: IWorkLenzResponse): Promise<IWorkLenzResponse> {
const getDataq = `SELECT DISTINCT ON (users.email)
team_members.id AS team_member_id,
users.id AS id,
users.name AS name,
users.email AS email,
'[]'::JSONB AS projects
FROM team_members
INNER JOIN users ON users.id = team_members.user_id
WHERE team_members.team_id IN (
SELECT id FROM teams
WHERE organization_id IN (
SELECT id FROM organizations
WHERE user_id = $1
LIMIT 1
)
)
ORDER BY users.email ASC, users.name ASC;`;
const results = await db.query(getDataq, [req.user?.owner_id]);
return res.status(200).send(new ServerResponse(true, results.rows));
}
@HandleExceptions()
public static async getOrganizationMemberProjects(req: IWorkLenzRequest, res: IWorkLenzResponse): Promise<IWorkLenzResponse> {
const { id } = req.params;
const getDataq = `WITH project_dates AS (
SELECT
pm.project_id,
MIN(pm.allocated_from) AS start_date,
MAX(pm.allocated_to) AS end_date,
MAX(pm.seconds_per_day) / 3600 AS hours_per_day, -- Convert max seconds per day to hours per day
(
-- Calculate total working days between start and end dates
SELECT COUNT(*)
FROM generate_series(MIN(pm.allocated_from), MAX(pm.allocated_to), '1 day'::interval) AS day
JOIN public.organization_working_days owd ON owd.organization_id = t.organization_id
WHERE
(EXTRACT(ISODOW FROM day) = 1 AND owd.monday = true) OR
(EXTRACT(ISODOW FROM day) = 2 AND owd.tuesday = true) OR
(EXTRACT(ISODOW FROM day) = 3 AND owd.wednesday = true) OR
(EXTRACT(ISODOW FROM day) = 4 AND owd.thursday = true) OR
(EXTRACT(ISODOW FROM day) = 5 AND owd.friday = true) OR
(EXTRACT(ISODOW FROM day) = 6 AND owd.saturday = true) OR
(EXTRACT(ISODOW FROM day) = 7 AND owd.sunday = true)
) * (MAX(pm.seconds_per_day) / 3600) AS total_hours -- Multiply by hours per day
FROM public.project_member_allocations pm
JOIN public.projects p ON pm.project_id = p.id
JOIN public.teams t ON p.team_id = t.id
GROUP BY pm.project_id, t.organization_id
),
projects_with_offsets AS (
SELECT
p.name AS project_name,
p.id AS project_id,
COALESCE(pd.hours_per_day, 0) AS hours_per_day, -- Default to 0 when not available in project_member_allocations
COALESCE(pd.total_hours, 0) AS total_hours, -- Calculated total hours based on working days
pd.start_date,
pd.end_date,
p.team_id,
tm.user_id,
-- Calculate indicator_offset dynamically: days difference from earliest project start date * 75px
COALESCE(
(DATE_PART('day', pd.start_date - MIN(pd.start_date) OVER ())) * 75,
0
) AS indicator_offset,
-- Calculate indicator_width as the number of days * 75 pixels per day
COALESCE((DATE_PART('day', pd.end_date - pd.start_date) + 1) * 75, 75) AS indicator_width, -- Fallback to 75 if no dates exist
75 AS min_width -- 75px minimum width for a 1-day project
FROM public.projects p
LEFT JOIN project_dates pd ON p.id = pd.project_id
JOIN public.team_members tm ON tm.team_id = p.team_id
JOIN public.teams t ON p.team_id = t.id
WHERE tm.user_id = $2
AND tm.team_id = $1
ORDER BY pd.start_date, pd.end_date -- Order by start and end date
)
SELECT jsonb_agg(jsonb_build_object(
'name', project_name,
'id', project_id,
'hours_per_day', hours_per_day,
'total_hours', total_hours,
'date_union', jsonb_build_object(
'start', start_date::DATE,
'end', end_date::DATE
),
'indicator_offset', indicator_offset,
'indicator_width', indicator_width,
'tasks', '[]'::jsonb, -- Empty tasks array for now,
'default_values', jsonb_build_object(
'allocated_from', start_date::DATE,
'allocated_to', end_date::DATE,
'seconds_per_day', hours_per_day,
'total_seconds', total_hours
)
)) AS projects
FROM projects_with_offsets;`;
const results = await db.query(getDataq, [req.user?.team_id, id]);
const [data] = results.rows;
return res.status(200).send(new ServerResponse(true, { projects: data.projects, id }));
}
@HandleExceptions()
public static async createSchedule(req: IWorkLenzRequest, res: IWorkLenzResponse): Promise<IWorkLenzResponse> {
const { allocated_from, allocated_to, project_id, team_member_id, seconds_per_day } = req.body;
const fromFormat = moment(allocated_from).format("YYYY-MM-DD");
const toFormat = moment(allocated_to).format("YYYY-MM-DD");
const getDataq1 = `
SELECT id
FROM project_member_allocations
WHERE project_id = $1
AND team_member_id = $2
AND (
-- Case 1: The given range starts inside an existing range
($3 BETWEEN allocated_from AND allocated_to)
OR
-- Case 2: The given range ends inside an existing range
($4 BETWEEN allocated_from AND allocated_to)
OR
-- Case 3: The given range fully covers an existing range
(allocated_from BETWEEN $3 AND $4 AND allocated_to BETWEEN $3 AND $4)
OR
-- Case 4: The existing range fully covers the given range
(allocated_from <= $3 AND allocated_to >= $4)
);`;
const results1 = await db.query(getDataq1, [project_id, team_member_id, fromFormat, toFormat]);
const [data] = results1.rows;
if (data) {
return res.status(200).send(new ServerResponse(false, null, "Allocation already exists!"));
}
const getDataq = `INSERT INTO public.project_member_allocations(
project_id, team_member_id, allocated_from, allocated_to, seconds_per_day)
VALUES ($1, $2, $3, $4, $5);`;
const results = await db.query(getDataq, [project_id, team_member_id, allocated_from, allocated_to, Number(seconds_per_day) * 60 * 60]);
return res.status(200).send(new ServerResponse(true, null, "Allocated successfully!"));
}
}
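The duplicate check in createSchedule rejects any allocation that touches an existing one under the four commented cases; a small worked sketch with invented dates:

// Existing allocation: 2025-01-06 .. 2025-01-10
// createSchedule with allocated_from "2025-01-09", allocated_to "2025-01-14"
//   -> case 1 matches ($3 falls inside the existing range): "Allocation already exists!"
// createSchedule with allocated_from "2025-01-11", allocated_to "2025-01-14"
//   -> no case matches; the row is inserted with seconds_per_day = value * 3600,
//      so the client apparently sends hours despite the field name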

View File

@@ -52,6 +52,83 @@ export default class ScheduleControllerV2 extends ScheduleTasksControllerBase {
private static GLOBAL_START_DATE = moment().format("YYYY-MM-DD");
private static GLOBAL_END_DATE = moment().format("YYYY-MM-DD");
// Migrate data
@HandleExceptions()
public static async migrate(req: IWorkLenzRequest, res: IWorkLenzResponse): Promise<IWorkLenzResponse> {
const getDataq = `SELECT p.id,
(SELECT COALESCE(ARRAY_TO_JSON(ARRAY_AGG(ROW_TO_JSON(rec))), '[]'::JSON)
FROM (SELECT tmiv.team_member_id,
tmiv.user_id,
LEAST(
(SELECT MIN(LEAST(start_date, end_date)) AS start_date
FROM tasks
INNER JOIN tasks_assignees ta ON tasks.id = ta.task_id
WHERE archived IS FALSE
AND project_id = p.id
AND ta.team_member_id = tmiv.team_member_id),
(SELECT MIN(twl.created_at - INTERVAL '1 second' * twl.time_spent) AS ll_start_date
FROM task_work_log twl
INNER JOIN tasks t ON twl.task_id = t.id AND t.archived IS FALSE
WHERE t.project_id = p.id
AND twl.user_id = tmiv.user_id)
) AS lowest_date,
GREATEST(
(SELECT MAX(GREATEST(start_date, end_date)) AS end_date
FROM tasks
INNER JOIN tasks_assignees ta ON tasks.id = ta.task_id
WHERE archived IS FALSE
AND project_id = p.id
AND ta.team_member_id = tmiv.team_member_id),
(SELECT MAX(twl.created_at - INTERVAL '1 second' * twl.time_spent) AS ll_end_date
FROM task_work_log twl
INNER JOIN tasks t ON twl.task_id = t.id AND t.archived IS FALSE
WHERE t.project_id = p.id
AND twl.user_id = tmiv.user_id)
) AS greatest_date
FROM project_members pm
INNER JOIN team_member_info_view tmiv
ON pm.team_member_id = tmiv.team_member_id
WHERE project_id = p.id) rec) AS members
FROM projects p
WHERE team_id IS NOT NULL
AND p.id NOT IN (SELECT project_id FROM archived_projects)`;
const projectMembersResults = await db.query(getDataq);
const projectMemberData = projectMembersResults.rows;
const arrayToInsert = [];
for (const data of projectMemberData) {
if (data.members.length) {
for (const member of data.members) {
const body = {
project_id: data.id,
team_member_id: member.team_member_id,
allocated_from: member.lowest_date ? member.lowest_date : null,
allocated_to: member.greatest_date ? member.greatest_date : null
};
if (body.allocated_from && body.allocated_to) arrayToInsert.push(body);
}
}
}
const insertArray = JSON.stringify(arrayToInsert);
const insertFunctionCall = `SELECT migrate_member_allocations($1)`;
await db.query(insertFunctionCall, [insertArray]);
return res.status(200).send(new ServerResponse(true, ""));
}
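// Sketch of the JSON payload handed to migrate_member_allocations (values hypothetical):
// [{ "project_id": "<uuid>", "team_member_id": "<uuid>",
//    "allocated_from": "2025-01-02", "allocated_to": "2025-03-14" }]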
private static async getFirstLastDates(teamId: string, userId: string) {
const q = `SELECT MIN(LEAST(allocated_from, allocated_to)) AS start_date,
MAX(GREATEST(allocated_from, allocated_to)) AS end_date,

View File

@@ -5,7 +5,7 @@ import {IWorkLenzResponse} from "../interfaces/worklenz-response";
import db from "../config/db";
import {ServerResponse} from "../models/server-response";
import {PriorityColorCodes, TASK_PRIORITY_COLOR_ALPHA, TASK_STATUS_COLOR_ALPHA} from "../shared/constants";
import {PriorityColorCodes, PriorityColorCodesDark, TASK_PRIORITY_COLOR_ALPHA, TASK_STATUS_COLOR_ALPHA} from "../shared/constants";
import {getColor} from "../shared/utils";
import WorklenzControllerBase from "./worklenz-controller-base";
import HandleExceptions from "../decorators/handle-exceptions";
@@ -33,6 +33,7 @@ export default class SubTasksController extends WorklenzControllerBase {
(ts.name) AS status_name,
TRUE AS is_sub_task,
(tsc.color_code) AS status_color,
(tsc.color_code_dark) AS status_color_dark,
(SELECT name FROM projects WHERE id = t.project_id) AS project_name,
(SELECT value FROM task_priorities WHERE id = t.priority_id) AS priority_value,
total_minutes,
@@ -46,11 +47,12 @@ export default class SubTasksController extends WorklenzControllerBase {
WHERE task_id = t.id
ORDER BY name) r) AS labels,
(SELECT COALESCE(ARRAY_TO_JSON(ARRAY_AGG(ROW_TO_JSON(rec))), '[]'::JSON)
FROM (SELECT task_statuses.id, task_statuses.name, stsc.color_code
FROM (SELECT task_statuses.id, task_statuses.name, stsc.color_code, stsc.color_code_dark
FROM task_statuses
INNER JOIN sys_task_status_categories stsc ON task_statuses.category_id = stsc.id
WHERE project_id = t.project_id
ORDER BY task_statuses.name) rec) AS statuses
ORDER BY task_statuses.name) rec) AS statuses,
t.completed_at
FROM tasks t
INNER JOIN task_statuses ts ON ts.id = t.status_id
INNER JOIN task_priorities tp ON tp.id = t.priority_id
@@ -62,6 +64,7 @@ export default class SubTasksController extends WorklenzControllerBase {
for (const task of result.rows) {
task.priority_color = PriorityColorCodes[task.priority_value] || null;
task.priority_color_dark = PriorityColorCodesDark[task.priority_value] || null;
task.time_spent = {hours: Math.floor(task.total_minutes_spent / 60), minutes: task.total_minutes_spent % 60};
task.time_spent_string = `${task.time_spent.hours}h ${task.time_spent.minutes}m`;
@@ -72,6 +75,7 @@ export default class SubTasksController extends WorklenzControllerBase {
task.labels = this.createTagList(task.labels, 2);
task.status_color = task.status_color + TASK_STATUS_COLOR_ALPHA;
task.status_color_dark = task.status_color_dark + TASK_STATUS_COLOR_ALPHA;
task.priority_color = task.priority_color + TASK_PRIORITY_COLOR_ALPHA;
}

View File

@@ -6,11 +6,13 @@ import { ServerResponse } from "../models/server-response";
import WorklenzControllerBase from "./worklenz-controller-base";
import HandleExceptions from "../decorators/handle-exceptions";
import { NotificationsService } from "../services/notifications/notifications.service";
import { log_error } from "../shared/utils";
import { HTML_TAG_REGEXP } from "../shared/constants";
import { humanFileSize, log_error, megabytesToBytes } from "../shared/utils";
import { HTML_TAG_REGEXP, S3_URL } from "../shared/constants";
import { getBaseUrl } from "../cron_jobs/helpers";
import { ICommentEmailNotification } from "../interfaces/comment-email-notification";
import { sendTaskComment } from "../shared/email-notifications";
import { getRootDir, uploadBase64, getKey, getTaskAttachmentKey, createPresignedUrlWithClient } from "../shared/s3";
import { getFreePlanSettings, getUsedStorage } from "../shared/paddle-utils";
interface ITaskAssignee {
team_member_id: string;
@@ -99,11 +101,134 @@ export default class TaskCommentsController extends WorklenzControllerBase {
public static async create(req: IWorkLenzRequest, res: IWorkLenzResponse): Promise<IWorkLenzResponse> {
req.body.user_id = req.user?.id;
req.body.team_id = req.user?.team_id;
const {mentions} = req.body;
const { mentions, attachments, task_id } = req.body;
const url = `${S3_URL}/${getRootDir()}`;
let commentContent = req.body.content;
if (mentions.length > 0) {
commentContent = await this.replaceContent(commentContent, mentions);
commentContent = this.replaceContent(commentContent, mentions);
}
req.body.content = commentContent;
const q = `SELECT create_task_comment($1) AS comment;`;
const result = await db.query(q, [JSON.stringify(req.body)]);
const [data] = result.rows;
const response = data.comment;
const commentId = response.id;
if (attachments?.length) {
for (const attachment of attachments) {
const q = `
INSERT INTO task_comment_attachments (name, type, size, task_id, comment_id, team_id, project_id)
VALUES ($1, $2, $3, $4, $5, $6, $7)
RETURNING id, name, type, task_id, comment_id, created_at,
CONCAT($8::TEXT, '/', team_id, '/', project_id, '/', task_id, '/', comment_id, '/', id, '.', type) AS url;
`;
const result = await db.query(q, [
attachment.file_name,
attachment.file_name.split(".").pop(),
attachment.size,
task_id,
commentId,
req.user?.team_id,
attachment.project_id,
url
]);
const [data] = result.rows;
const s3Url = await uploadBase64(attachment.file, getTaskAttachmentKey(req.user?.team_id as string, attachment.project_id, task_id, commentId, data.id, data.type));
if (!data?.id || !s3Url)
return res.status(200).send(new ServerResponse(false, null, "Attachment upload failed"));
}
}
const mentionMessage = `<b>${req.user?.name}</b> has mentioned you in a comment on <b>${response.task_name}</b> (${response.team_name})`;
// const mentions = [...new Set(req.body.mentions || [])] as string[]; // remove duplicates
const assignees = await getAssignees(req.body.task_id);
const commentMessage = `<b>${req.user?.name}</b> added a comment on <b>${response.task_name}</b> (${response.team_name})`;
for (const member of assignees || []) {
if (member.user_id && member.user_id === req.user?.id) continue;
void NotificationsService.createNotification({
userId: member.user_id,
teamId: req.user?.team_id as string,
socketId: member.socket_id,
message: commentMessage,
taskId: req.body.task_id,
projectId: response.project_id
});
if (member.email_notifications_enabled)
await this.sendMail({
message: commentMessage,
receiverEmail: member.email,
receiverName: member.name,
content: req.body.content,
commentId: response.id,
projectId: response.project_id,
taskId: req.body.task_id,
teamName: response.team_name,
projectName: response.project_name,
taskName: response.task_name
});
}
const senderUserId = req.user?.id as string;
for (const mention of mentions) {
if (mention) {
const member = await this.getUserDataByTeamMemberId(senderUserId, mention.team_member_id, response.project_id);
if (member) {
NotificationsService.sendNotification({
team: member.team,
receiver_socket_id: member.socket_id,
message: mentionMessage,
task_id: req.body.task_id,
project_id: response.project_id,
project: member.project,
project_color: member.project_color,
team_id: req.user?.team_id as string
});
if (member.email_notifications_enabled)
await this.sendMail({
message: mentionMessage,
receiverEmail: member.email,
receiverName: member.user_name,
content: req.body.content,
commentId: response.id,
projectId: response.project_id,
taskId: req.body.task_id,
teamName: response.team_name,
projectName: response.project_name,
taskName: response.task_name
});
}
}
}
return res.status(200).send(new ServerResponse(true, data.comment));
}
@HandleExceptions()
public static async update(req: IWorkLenzRequest, res: IWorkLenzResponse): Promise<IWorkLenzResponse> {
req.body.user_id = req.user?.id;
req.body.team_id = req.user?.team_id;
const { mentions, comment_id } = req.body;
let commentContent = req.body.content;
if (mentions.length > 0) {
commentContent = await this.replaceContent(commentContent, mentions);
}
req.body.content = commentContent;
@@ -210,46 +335,90 @@ export default class TaskCommentsController extends WorklenzControllerBase {
@HandleExceptions()
public static async getByTaskId(req: IWorkLenzRequest, res: IWorkLenzResponse): Promise<IWorkLenzResponse> {
const q = `
SELECT task_comments.id,
tc.text_content AS content,
task_comments.user_id,
task_comments.team_member_id,
(SELECT name FROM team_member_info_view WHERE team_member_info_view.team_member_id = tm.id) AS member_name,
u.avatar_url,
task_comments.created_at,
(SELECT COALESCE(JSON_AGG(rec), '[]'::JSON)
FROM (SELECT tmiv.name AS user_name,
tmiv.email AS user_email
FROM task_comment_mentions tcm
LEFT JOIN team_member_info_view tmiv ON tcm.informed_by = tmiv.team_member_id
WHERE tcm.comment_id = task_comments.id) rec) AS mentions
FROM task_comments
INNER JOIN task_comment_contents tc ON task_comments.id = tc.comment_id
INNER JOIN team_members tm ON task_comments.team_member_id = tm.id
LEFT JOIN users u ON tm.user_id = u.id
WHERE task_comments.task_id = $1
ORDER BY task_comments.created_at DESC;
`;
const result = await db.query(q, [req.params.id]); // task id
const result = await TaskCommentsController.getTaskComments(req.params.id); // task id
return res.status(200).send(new ServerResponse(true, result.rows));
}
private static async getTaskComments(taskId: string) {
const url = `${S3_URL}/${getRootDir()}`;
const q = `SELECT task_comments.id,
tc.text_content AS content,
task_comments.user_id,
task_comments.team_member_id,
(SELECT name FROM team_member_info_view WHERE team_member_info_view.team_member_id = tm.id) AS member_name,
u.avatar_url,
task_comments.created_at,
(SELECT COALESCE(JSON_AGG(rec), '[]'::JSON)
FROM (SELECT tmiv.name AS user_name,
tmiv.email AS user_email
FROM task_comment_mentions tcm
LEFT JOIN team_member_info_view tmiv ON tcm.informed_by = tmiv.team_member_id
WHERE tcm.comment_id = task_comments.id) rec) AS mentions,
(SELECT JSON_BUILD_OBJECT(
'likes',
JSON_BUILD_OBJECT(
'count', (SELECT COUNT(*)
FROM task_comment_reactions tcr
WHERE tcr.comment_id = task_comments.id
AND reaction_type = 'like'),
'liked_members', COALESCE(
(SELECT JSON_AGG(tmiv.name)
FROM task_comment_reactions tcr
JOIN team_member_info_view tmiv ON tcr.team_member_id = tmiv.team_member_id
WHERE tcr.comment_id = task_comments.id
AND tcr.reaction_type = 'like'),
'[]'::JSON
),
'liked_member_ids', COALESCE(
(SELECT JSON_AGG(tmiv.team_member_id)
FROM task_comment_reactions tcr
JOIN team_member_info_view tmiv ON tcr.team_member_id = tmiv.team_member_id
WHERE tcr.comment_id = task_comments.id
AND tcr.reaction_type = 'like'),
'[]'::JSON
)
)
)) AS reactions,
(SELECT COALESCE(JSON_AGG(rec), '[]'::JSON)
FROM (SELECT id, created_at, name, size, type, (CONCAT('/', team_id, '/', project_id, '/', task_id, '/', comment_id, '/', id, '.', type)) AS url
FROM task_comment_attachments tca
WHERE tca.comment_id = task_comments.id) rec) AS attachments
FROM task_comments
LEFT JOIN task_comment_contents tc ON task_comments.id = tc.comment_id
INNER JOIN team_members tm ON task_comments.team_member_id = tm.id
LEFT JOIN users u ON tm.user_id = u.id
WHERE task_comments.task_id = $1
ORDER BY task_comments.created_at;`;
const result = await db.query(q, [taskId]); // task id
for (const comment of result.rows) {
if (!comment.content) comment.content = "";
comment.rawContent = comment.content;
comment.content = comment.content.replace(/\n/g, "<br/>");
const {mentions} = comment;
comment.edit = false;
const { mentions } = comment;
if (mentions.length > 0) {
const placeHolders = comment.content.match(/{\d+}/g);
if (placeHolders) {
placeHolders.forEach((placeHolder: { match: (arg0: RegExp) => string[]; }) => {
const index = parseInt(placeHolder.match(/\d+/)[0]);
if (index >= 0 && index < comment.mentions.length) {
comment.content = comment.content.replace(placeHolder, `<span class="mentions"> @${comment.mentions[index].user_name} </span>`);
}
const index = parseInt(placeHolder.match(/\d+/)[0]);
if (index >= 0 && index < comment.mentions.length) {
comment.rawContent = comment.rawContent.replace(placeHolder, `@${comment.mentions[index].user_name}`);
comment.content = comment.content.replace(placeHolder, `<span class="mentions"> @${comment.mentions[index].user_name} </span>`);
}
});
}
}
for (const attachment of comment.attachments) {
attachment.size = humanFileSize(attachment.size);
attachment.url = url + attachment.url;
}
}
return res.status(200).send(new ServerResponse(true, result.rows));
return result;
}
@HandleExceptions()
@@ -262,4 +431,186 @@ export default class TaskCommentsController extends WorklenzControllerBase {
const result = await db.query(q, [req.params.id, req.params.taskId, req.user?.id || null]);
return res.status(200).send(new ServerResponse(true, result.rows));
}
@HandleExceptions()
public static async deleteAttachmentById(req: IWorkLenzRequest, res: IWorkLenzResponse): Promise<IWorkLenzResponse> {
const q = `DELETE
FROM task_comment_attachments
WHERE id = $1;`;
const result = await db.query(q, [req.params.id]);
return res.status(200).send(new ServerResponse(true, result.rows));
}
private static async checkIfAlreadyExists(commentId: string, teamMemberId: string | undefined, reaction_type: string) {
if (!teamMemberId) return;
try {
const q = `SELECT EXISTS(SELECT 1 FROM task_comment_reactions WHERE comment_id = $1 AND team_member_id = $2 AND reaction_type = $3)`;
const result = await db.query(q, [commentId, teamMemberId, reaction_type]);
const [data] = result.rows;
return data.exists;
} catch (error) {
log_error(error);
}
}
private static async getTaskCommentData(commentId: string) {
if (!commentId) return;
try {
const q = `SELECT tc.user_id,
t.project_id,
t.name AS task_name,
(SELECT team_id FROM projects p WHERE p.id = t.project_id) AS team_id,
(SELECT name FROM teams te WHERE id = (SELECT team_id FROM projects p WHERE p.id = t.project_id)) AS team_name,
(SELECT u.socket_id FROM users u WHERE u.id = tc.user_id) AS socket_id,
(SELECT name FROM team_member_info_view tmiv WHERE tmiv.team_member_id = tcr.team_member_id) AS reactor_name
FROM task_comments tc
LEFT JOIN tasks t ON t.id = tc.task_id
LEFT JOIN task_comment_reactions tcr ON tc.id = tcr.comment_id
WHERE tc.id = $1;`;
const result = await db.query(q, [commentId]);
const [data] = result.rows;
return data;
} catch (error) {
log_error(error);
}
}
@HandleExceptions()
public static async updateReaction(req: IWorkLenzRequest, res: IWorkLenzResponse): Promise<IWorkLenzResponse> {
const { id } = req.params;
const { reaction_type, task_id } = req.query;
const exists = await this.checkIfAlreadyExists(id, req.user?.team_member_id, reaction_type as string);
if (exists) {
const deleteQ = `DELETE FROM task_comment_reactions WHERE comment_id = $1 AND team_member_id = $2;`;
await db.query(deleteQ, [id, req.user?.team_member_id]);
} else {
const q = `INSERT INTO task_comment_reactions (comment_id, user_id, team_member_id) VALUES ($1, $2, $3);`;
await db.query(q, [id, req.user?.id, req.user?.team_member_id]);
const getTaskCommentData = await TaskCommentsController.getTaskCommentData(id);
if (getTaskCommentData && getTaskCommentData.user_id !== req.user?.id) {
const commentMessage = `<b>${getTaskCommentData.reactor_name}</b> liked your comment on <b>${getTaskCommentData.task_name}</b> (${getTaskCommentData.team_name})`;
void NotificationsService.createNotification({
userId: getTaskCommentData.user_id,
teamId: req.user?.team_id as string,
socketId: getTaskCommentData.socket_id,
message: commentMessage,
taskId: task_id as string,
projectId: getTaskCommentData.project_id
});
}
}
const result = await TaskCommentsController.getTaskComments(task_id as string);
return res.status(200).send(new ServerResponse(true, result.rows));
}
@HandleExceptions()
public static async createAttachment(req: IWorkLenzRequest, res: IWorkLenzResponse): Promise<IWorkLenzResponse> {
req.body.user_id = req.user?.id;
req.body.team_id = req.user?.team_id;
const { attachments, task_id } = req.body;
const q = `INSERT INTO task_comments (user_id, team_member_id, task_id)
VALUES ($1, (SELECT id
FROM team_members
WHERE user_id = $1
AND team_id = $2::UUID), $3)
RETURNING id;`;
const result = await db.query(q, [req.user?.id, req.user?.team_id, task_id]);
const [data] = result.rows;
const commentId = data.id;
const url = `${S3_URL}/${getRootDir()}`;
for (const attachment of attachments) {
if (req.user?.subscription_status === "free" && req.user?.owner_id) {
const limits = await getFreePlanSettings();
const usedStorage = await getUsedStorage(req.user?.owner_id);
if ((parseInt(usedStorage) + attachment.size) > megabytesToBytes(parseInt(limits.free_tier_storage))) {
return res.status(200).send(new ServerResponse(false, [], `Sorry, the free plan cannot exceed ${limits.free_tier_storage}MB of storage.`));
}
}
const q = `
INSERT INTO task_comment_attachments (name, type, size, task_id, comment_id, team_id, project_id)
VALUES ($1, $2, $3, $4, $5, $6, $7)
RETURNING id, name, type, task_id, comment_id, created_at,
CONCAT($8::TEXT, '/', team_id, '/', project_id, '/', task_id, '/', comment_id, '/', id, '.', type) AS url;
`;
const result = await db.query(q, [
attachment.file_name,
attachment.file_name.split(".").pop(), // type ($2) and size ($3) must match the column order above
attachment.size,
task_id,
commentId,
req.user?.team_id,
attachment.project_id,
url
]);
const [data] = result.rows;
const s3Url = await uploadBase64(attachment.file, getTaskAttachmentKey(req.user?.team_id as string, attachment.project_id, task_id, commentId, data.id, data.type));
if (!data?.id || !s3Url)
return res.status(200).send(new ServerResponse(false, null, "Attachment upload failed"));
}
const assignees = await getAssignees(task_id);
// commentId is a plain UUID string; fetch the task/team context through the existing helper instead
const commentData = await TaskCommentsController.getTaskCommentData(commentId);
const commentMessage = `<b>${req.user?.name}</b> added a new attachment as a comment on <b>${commentData?.task_name}</b> (${commentData?.team_name})`;
for (const member of assignees || []) {
if (member.user_id && member.user_id === req.user?.id) continue;
void NotificationsService.createNotification({
userId: member.user_id,
teamId: req.user?.team_id as string,
socketId: member.socket_id,
message: commentMessage,
taskId: task_id,
projectId: commentData?.project_id
});
if (member.email_notifications_enabled)
await this.sendMail({
message: commentMessage,
receiverEmail: member.email,
receiverName: member.name,
content: req.body.content,
commentId,
projectId: commentData?.project_id,
taskId: task_id,
teamName: commentData?.team_name,
projectName: commentData?.project_name, // note: getTaskCommentData does not select project_name
taskName: commentData?.task_name
});
}
return res.status(200).send(new ServerResponse(true, []));
}
@HandleExceptions()
public static async download(req: IWorkLenzRequest, res: IWorkLenzResponse): Promise<IWorkLenzResponse> {
const q = `SELECT CONCAT($2::TEXT, '/', team_id, '/', project_id, '/', task_id, '/', comment_id, '/', id, '.', type) AS key
FROM task_comment_attachments
WHERE id = $1;`;
const result = await db.query(q, [req.query.id, getRootDir()]);
const [data] = result.rows;
if (data?.key) {
const url = await createPresignedUrlWithClient(data.key, req.query.file as string);
return res.status(200).send(new ServerResponse(true, url));
}
return res.status(200).send(new ServerResponse(true, null));
}
}
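Both create and createAttachment iterate the same attachments array; a sketch of the element shape inferred from the field accesses above, with invented values:

const attachment = {
file_name: "design.png", // stored as name; the extension becomes the type column
size: 34567, // bytes; counted against the free-plan storage limit
project_id: "<project-uuid>",
file: "<base64-encoded body>" // handed to uploadBase64
};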

View File

@@ -0,0 +1,52 @@
import { IWorkLenzRequest } from "../interfaces/worklenz-request";
import { IWorkLenzResponse } from "../interfaces/worklenz-response";
import db from "../config/db";
import { ServerResponse } from "../models/server-response";
import WorklenzControllerBase from "./worklenz-controller-base";
import HandleExceptions from "../decorators/handle-exceptions";
export default class TaskdependenciesController extends WorklenzControllerBase {
@HandleExceptions({
raisedExceptions: {
"DEPENDENCY_EXISTS": `Task dependency already exists.`
}
})
public static async saveTaskDependency(req: IWorkLenzRequest, res: IWorkLenzResponse): Promise<IWorkLenzResponse> {
const {task_id, related_task_id, dependency_type } = req.body;
const q = `SELECT insert_task_dependency($1, $2, $3);`;
const result = await db.query(q, [task_id, related_task_id, dependency_type]);
return res.status(200).send(new ServerResponse(true, result.rows));
}
@HandleExceptions()
public static async getTaskDependencies(req: IWorkLenzRequest, res: IWorkLenzResponse): Promise<IWorkLenzResponse> {
const { id } = req.params;
const q = `SELECT
td.id,
t2.name AS task_name,
td.dependency_type,
CONCAT(p.key, '-', t2.task_no) AS task_key
FROM
task_dependencies td
LEFT JOIN
tasks t ON td.task_id = t.id
LEFT JOIN
tasks t2 ON td.related_task_id = t2.id
LEFT JOIN
projects p ON t.project_id = p.id
WHERE
td.task_id = $1;`;
const result = await db.query(q, [id]);
return res.status(200).send(new ServerResponse(true, result.rows));
}
@HandleExceptions()
public static async deleteById(req: IWorkLenzRequest, res: IWorkLenzResponse): Promise<IWorkLenzResponse> {
const {id} = req.params;
const q = `DELETE FROM task_dependencies WHERE id = $1;`;
const result = await db.query(q, [id]);
return res.status(200).send(new ServerResponse(true, result.rows));
}
}
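
The routes that mount this new controller are not included in this compare view. A minimal sketch of how the three handlers might be wired, assuming a conventional Express router module (file paths and route paths are illustrative only):

import express from "express";
import TaskdependenciesController from "../controllers/task-dependencies-controller"; // path assumed

const router = express.Router();

// Hypothetical mounting; the real route registrations are not shown in this diff.
router.post("/", TaskdependenciesController.saveTaskDependency);
router.get("/:id", TaskdependenciesController.getTaskDependencies);
router.delete("/:id", TaskdependenciesController.deleteById);

export default router;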

View File

@@ -32,8 +32,9 @@ export default class TaskListColumnsController extends WorklenzControllerBase {
const q = `UPDATE project_task_list_cols
SET pinned = $3
WHERE project_id = $1
AND key = $2;`;
AND key = $2 RETURNING *;`;
const result = await db.query(q, [req.params.id, req.body.key, !!req.body.pinned]);
return res.status(200).send(new ServerResponse(true, result.rows));
const [data] = result.rows;
return res.status(200).send(new ServerResponse(true, data));
}
}

View File

@@ -5,15 +5,17 @@ import db from "../config/db";
import {ServerResponse} from "../models/server-response";
import WorklenzControllerBase from "./worklenz-controller-base";
import HandleExceptions from "../decorators/handle-exceptions";
import {PriorityColorCodes} from "../shared/constants";
import {PriorityColorCodes, PriorityColorCodesDark} from "../shared/constants";
export default class TaskPrioritiesController extends WorklenzControllerBase {
@HandleExceptions()
public static async get(_req: IWorkLenzRequest, res: IWorkLenzResponse): Promise<IWorkLenzResponse> {
const q = `SELECT id, name, value FROM task_priorities ORDER BY value;`;
const result = await db.query(q, []);
for (const item of result.rows)
for (const item of result.rows) {
item.color_code = PriorityColorCodes[item.value] || PriorityColorCodes["0"];
item.color_code_dark = PriorityColorCodesDark[item.value] || PriorityColorCodesDark["0"];
}
return res.status(200).send(new ServerResponse(true, result.rows));
}

View File

@@ -0,0 +1,108 @@
import db from "../config/db";
import WorklenzControllerBase from "./worklenz-controller-base";
import HandleExceptions from "../decorators/handle-exceptions";
import { IWorkLenzRequest } from "../interfaces/worklenz-request";
import { IWorkLenzResponse } from "../interfaces/worklenz-response";
import { ServerResponse } from "../models/server-response";
import { calculateNextEndDate, log_error } from "../shared/utils";
export default class TaskRecurringController extends WorklenzControllerBase {
@HandleExceptions()
public static async getById(req: IWorkLenzRequest, res: IWorkLenzResponse): Promise<IWorkLenzResponse> {
const { id } = req.params;
const q = `SELECT id,
schedule_type,
days_of_week,
date_of_month,
day_of_month,
week_of_month,
interval_days,
interval_weeks,
interval_months,
created_at
FROM task_recurring_schedules WHERE id = $1;`;
const result = await db.query(q, [id]);
const [data] = result.rows;
return res.status(200).send(new ServerResponse(true, data));
}
private static async insertTaskRecurringTemplate(taskId: string, scheduleId: string) {
const q = `SELECT create_recurring_task_template($1, $2);`;
await db.query(q, [taskId, scheduleId]);
}
@HandleExceptions()
public static async createTaskSchedule(taskId: string) {
const q = `INSERT INTO task_recurring_schedules (schedule_type) VALUES ('daily') RETURNING id, schedule_type;`;
const result = await db.query(q, []);
const [data] = result.rows;
const updateQ = `UPDATE tasks SET schedule_id = $1 WHERE id = $2;`;
await db.query(updateQ, [data.id, taskId]);
await TaskRecurringController.insertTaskRecurringTemplate(taskId, data.id);
return data;
}
@HandleExceptions()
public static async removeTaskSchedule(scheduleId: string) {
const deleteQ = `DELETE FROM task_recurring_schedules WHERE id = $1;`;
await db.query(deleteQ, [scheduleId]);
}
@HandleExceptions()
public static async updateSchedule(req: IWorkLenzRequest, res: IWorkLenzResponse): Promise<IWorkLenzResponse> {
const { id } = req.params;
const { schedule_type, days_of_week, day_of_month, week_of_month, interval_days, interval_weeks, interval_months, date_of_month } = req.body;
const updateQ = `UPDATE task_recurring_schedules
SET schedule_type = $1,
days_of_week = $2,
date_of_month = $3,
day_of_month = $4,
week_of_month = $5,
interval_days = $6,
interval_weeks = $7,
interval_months = $8
WHERE id = $9;`;
await db.query(updateQ, [schedule_type, days_of_week, date_of_month, day_of_month, week_of_month, interval_days, interval_weeks, interval_months, id]);
return res.status(200).send(new ServerResponse(true, null));
}
// Function to create the next task in the recurring schedule
private static async createNextRecurringTask(scheduleId: string, lastTask: any, taskTemplate: any) {
try {
const q = "SELECT * FROM task_recurring_schedules WHERE id = $1";
const { rows: schedules } = await db.query(q, [scheduleId]);
if (schedules.length === 0) {
log_error("No schedule found");
return;
}
const [schedule] = schedules;
// Define the next start date based on the schedule
const nextStartDate = calculateNextEndDate(schedule, lastTask.start_date);
const result = await db.query(
`INSERT INTO tasks (name, start_date, end_date, priority_id, project_id, reporter_id, description, total_minutes, status_id, schedule_id)
VALUES ($1, $2, $3, $4, $5, $6, $7, $8, $9, $10) RETURNING id;`,
[
taskTemplate.name, nextStartDate, null, taskTemplate.priority_id,
lastTask.project_id, lastTask.reporter_id, taskTemplate.description,
0, taskTemplate.status_id, scheduleId
]
);
const [data] = result.rows;
log_error(`Next task created with id: ${data.id}`);
} catch (error) {
log_error("Error creating next recurring task:", error);
}
}
}
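
updateSchedule expects the full set of schedule fields in the request body. A sample payload for a weekly schedule, with illustrative values only (the day-numbering convention is an assumption, not confirmed by this diff):

// Example body for updateSchedule; fields unused by the chosen schedule_type are null.
const weeklySchedule = {
  schedule_type: "weekly",
  days_of_week: [1, 3, 5], // assuming 0 = Sunday
  date_of_month: null,
  day_of_month: null,
  week_of_month: null,
  interval_days: null,
  interval_weeks: null,
  interval_months: null
};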

View File

@@ -54,7 +54,7 @@ export default class TaskStatusesController extends WorklenzControllerBase {
@HandleExceptions()
public static async getCategories(req: IWorkLenzRequest, res: IWorkLenzResponse): Promise<IWorkLenzResponse> {
const q = `SELECT id, name, color_code, description
const q = `SELECT id, name, color_code, color_code_dark, description
FROM sys_task_status_categories
ORDER BY index;`;
const result = await db.query(q, []);
@@ -73,7 +73,7 @@ export default class TaskStatusesController extends WorklenzControllerBase {
@HandleExceptions()
public static async getById(req: IWorkLenzRequest, res: IWorkLenzResponse): Promise<IWorkLenzResponse> {
const q = `
SELECT task_statuses.id, task_statuses.name, stsc.color_code
SELECT task_statuses.id, task_statuses.name, stsc.color_code, stsc.color_code_dark
FROM task_statuses
INNER JOIN sys_task_status_categories stsc ON task_statuses.category_id = stsc.id
WHERE task_statuses.id = $1
@@ -113,7 +113,7 @@ export default class TaskStatusesController extends WorklenzControllerBase {
category_id = COALESCE($4, (SELECT id FROM sys_task_status_categories WHERE is_todo IS TRUE))
WHERE id = $1
AND project_id = $3
RETURNING (SELECT color_code FROM sys_task_status_categories WHERE id = task_statuses.category_id);
RETURNING (SELECT color_code FROM sys_task_status_categories WHERE id = task_statuses.category_id), (SELECT color_code_dark FROM sys_task_status_categories WHERE id = task_statuses.category_id);
`;
const result = await db.query(q, [req.params.id, req.body.name, req.body.project_id, req.body.category_id]);
const [data] = result.rows;

View File

@@ -234,4 +234,25 @@ export default class TaskWorklogController extends WorklenzControllerBase {
res.end();
});
}
@HandleExceptions()
public static async getAllRunningTimers(req: IWorkLenzRequest, res: IWorkLenzResponse): Promise<IWorkLenzResponse> {
const q = `SELECT
tt.task_id,
tt.start_time,
t1.name AS task_name,
pr.id AS project_id,
pr.name AS project_name,
t1.parent_task_id,
t2.name AS parent_task_name
FROM task_timers tt
LEFT JOIN public.tasks t1 ON tt.task_id = t1.id
LEFT JOIN public.tasks t2 ON t1.parent_task_id = t2.id -- Optimized join for parent task name
INNER JOIN projects pr ON t1.project_id = pr.id -- INNER JOIN ensures project-team match
WHERE tt.user_id = $1
AND pr.team_id = $2;`;
const params = [req.user?.id, req.user?.team_id];
const result = await db.query(q, params);
return res.status(200).send(new ServerResponse(true, result.rows));
}
}
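
For reference, the row shape produced by the getAllRunningTimers query, inferred from its SELECT list (nullability of the parent-task columns is an assumption based on the LEFT JOINs):

// One row of the getAllRunningTimers result set.
interface IRunningTimerRow {
  task_id: string;
  start_time: string; // timestamp from task_timers
  task_name: string;
  project_id: string;
  project_name: string;
  parent_task_id: string | null; // null when the task has no parent
  parent_task_name: string | null;
}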

View File

@@ -73,8 +73,8 @@ export default class TasksControllerBase extends WorklenzControllerBase {
if (task.timer_start_time)
task.timer_start_time = moment(task.timer_start_time).valueOf();
const totalCompleted = +task.completed_sub_tasks + +task.parent_task_completed;
const totalTasks = +task.sub_tasks_count + 1; // +1 for parent
const totalCompleted = (+task.completed_sub_tasks + +task.parent_task_completed) || 0;
const totalTasks = +task.sub_tasks_count || 0; // if needed add +1 for parent
task.complete_ratio = TasksControllerBase.calculateTaskCompleteRatio(totalCompleted, totalTasks);
task.completed_count = totalCompleted;
task.total_tasks_count = totalTasks;
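
calculateTaskCompleteRatio itself is not part of this diff; a plausible sketch consistent with how it is called above, assuming a simple zero-guarded percentage:

// Hypothetical helper: percentage of completed tasks, guarded against division by zero.
function calculateTaskCompleteRatio(completed: number, total: number): number {
  if (!total) return 0;
  return Math.round((completed / total) * 100);
}

// e.g. 3 completed of 4 sub-tasks -> 75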

View File

@@ -1,18 +1,19 @@
import {ParsedQs} from "qs";
import { ParsedQs } from "qs";
import db from "../config/db";
import HandleExceptions from "../decorators/handle-exceptions";
import {IWorkLenzRequest} from "../interfaces/worklenz-request";
import {IWorkLenzResponse} from "../interfaces/worklenz-response";
import {ServerResponse} from "../models/server-response";
import {TASK_PRIORITY_COLOR_ALPHA, TASK_STATUS_COLOR_ALPHA, UNMAPPED} from "../shared/constants";
import {getColor} from "../shared/utils";
import TasksControllerBase, {GroupBy, ITaskGroup} from "./tasks-controller-base";
import { IWorkLenzRequest } from "../interfaces/worklenz-request";
import { IWorkLenzResponse } from "../interfaces/worklenz-response";
import { ServerResponse } from "../models/server-response";
import { TASK_PRIORITY_COLOR_ALPHA, TASK_STATUS_COLOR_ALPHA, UNMAPPED } from "../shared/constants";
import { getColor, log_error } from "../shared/utils";
import TasksControllerBase, { GroupBy, ITaskGroup } from "./tasks-controller-base";
export class TaskListGroup implements ITaskGroup {
name: string;
category_id: string | null;
color_code: string;
color_code_dark: string;
start_date?: string;
end_date?: string;
todo_progress: number;
@@ -26,6 +27,7 @@ export class TaskListGroup implements ITaskGroup {
this.start_date = group.start_date || null;
this.end_date = group.end_date || null;
this.color_code = group.color_code + TASK_STATUS_COLOR_ALPHA;
this.color_code_dark = group.color_code_dark;
this.todo_progress = 0;
this.doing_progress = 0;
this.done_progress = 0;
@@ -104,7 +106,7 @@ export default class TasksControllerV2 extends TasksControllerBase {
private static getQuery(userId: string, options: ParsedQs) {
const searchField = options.search ? "t.name" : "sort_order";
const {searchQuery, sortField} = TasksControllerV2.toPaginationOptions(options, searchField);
const { searchQuery, sortField } = TasksControllerV2.toPaginationOptions(options, searchField);
const isSubTasks = !!options.parent_task;
@@ -124,6 +126,33 @@ export default class TasksControllerV2 extends TasksControllerBase {
const filterByAssignee = TasksControllerV2.getFilterByAssignee(options.filterBy as string);
// Returns statuses of each task as a json array if filterBy === "member"
const statusesQuery = TasksControllerV2.getStatusesQuery(options.filterBy as string);
// Custom columns data query
const customColumnsQuery = options.customColumns
? `, (SELECT COALESCE(
jsonb_object_agg(
custom_cols.key,
custom_cols.value
),
'{}'::JSONB
)
FROM (
SELECT
cc.key,
CASE
WHEN ccv.text_value IS NOT NULL THEN to_jsonb(ccv.text_value)
WHEN ccv.number_value IS NOT NULL THEN to_jsonb(ccv.number_value)
WHEN ccv.boolean_value IS NOT NULL THEN to_jsonb(ccv.boolean_value)
WHEN ccv.date_value IS NOT NULL THEN to_jsonb(ccv.date_value)
WHEN ccv.json_value IS NOT NULL THEN ccv.json_value
ELSE NULL::JSONB
END AS value
FROM cc_column_values ccv
JOIN cc_custom_columns cc ON ccv.column_id = cc.id
WHERE ccv.task_id = t.id
) AS custom_cols
WHERE custom_cols.value IS NOT NULL) AS custom_column_values`
: "";
const archivedFilter = options.archived === "true" ? "archived IS TRUE" : "archived IS FALSE";
@@ -173,7 +202,7 @@ export default class TasksControllerV2 extends TasksControllerBase {
WHERE id = (SELECT phase_id FROM task_phase WHERE task_id = t.id)) AS phase_color_code,
(EXISTS(SELECT 1 FROM task_subscribers WHERE task_id = t.id)) AS has_subscribers,
(EXISTS(SELECT 1 FROM task_dependencies td WHERE td.task_id = t.id)) AS has_dependencies,
(SELECT start_time
FROM task_timers
WHERE task_id = t.id
@@ -183,6 +212,10 @@ export default class TasksControllerV2 extends TasksControllerBase {
FROM sys_task_status_categories
WHERE id = (SELECT category_id FROM task_statuses WHERE id = t.status_id)) AS status_color,
(SELECT color_code_dark
FROM sys_task_status_categories
WHERE id = (SELECT category_id FROM task_statuses WHERE id = t.status_id)) AS status_color_dark,
(SELECT COALESCE(ROW_TO_JSON(r), '{}'::JSON)
FROM (SELECT is_done, is_doing, is_todo
FROM sys_task_status_categories
@@ -209,7 +242,7 @@ export default class TasksControllerV2 extends TasksControllerBase {
(SELECT color_code FROM team_labels WHERE id = task_labels.label_id)
FROM task_labels
WHERE task_id = t.id) r) AS labels,
(SELECT is_completed(status_id, project_id)) AS is_complete,
(SELECT name FROM users WHERE id = t.reporter_id) AS reporter,
(SELECT id FROM task_priorities WHERE id = t.priority_id) AS priority,
(SELECT value FROM task_priorities WHERE id = t.priority_id) AS priority_value,
@@ -219,7 +252,9 @@ export default class TasksControllerV2 extends TasksControllerBase {
updated_at,
completed_at,
start_date,
END_DATE ${statusesQuery}
billable,
schedule_id,
END_DATE ${customColumnsQuery} ${statusesQuery}
FROM tasks t
WHERE ${filters} ${searchQuery}
ORDER BY ${sortFields}
@@ -235,6 +270,7 @@ export default class TasksControllerV2 extends TasksControllerBase {
SELECT id,
name,
(SELECT color_code FROM sys_task_status_categories WHERE id = task_statuses.category_id),
(SELECT color_code_dark FROM sys_task_status_categories WHERE id = task_statuses.category_id),
category_id
FROM task_statuses
WHERE project_id = $1
@@ -243,7 +279,7 @@ export default class TasksControllerV2 extends TasksControllerBase {
params = [projectId];
break;
case GroupBy.PRIORITY:
q = `SELECT id, name, color_code
q = `SELECT id, name, color_code, color_code_dark
FROM task_priorities
ORDER BY value DESC;`;
break;
@@ -261,7 +297,7 @@ export default class TasksControllerV2 extends TasksControllerBase {
break;
case GroupBy.PHASE:
q = `
SELECT id, name, color_code, start_date, end_date, sort_index
SELECT id, name, color_code, color_code AS color_code_dark, start_date, end_date, sort_index
FROM project_phases
WHERE project_id = $1
ORDER BY sort_index DESC;
@@ -281,6 +317,9 @@ export default class TasksControllerV2 extends TasksControllerBase {
public static async getList(req: IWorkLenzRequest, res: IWorkLenzResponse): Promise<IWorkLenzResponse> {
const isSubTasks = !!req.query.parent_task;
const groupBy = (req.query.group || GroupBy.STATUS) as string;
// Add customColumns flag to query params
req.query.customColumns = "true";
const q = TasksControllerV2.getQuery(req.user?.id as string, req.query);
const params = isSubTasks ? [req.params.id || null, req.query.parent_task] : [req.params.id || null];
@@ -356,6 +395,10 @@ export default class TasksControllerV2 extends TasksControllerBase {
@HandleExceptions()
public static async getTasksOnly(req: IWorkLenzRequest, res: IWorkLenzResponse): Promise<IWorkLenzResponse> {
const isSubTasks = !!req.query.parent_task;
// Add customColumns flag to query params
req.query.customColumns = "true";
const q = TasksControllerV2.getQuery(req.user?.id as string, req.query);
const params = isSubTasks ? [req.params.id || null, req.query.parent_task] : [req.params.id || null];
const result = await db.query(q, params);
@@ -393,7 +436,7 @@ export default class TasksControllerV2 extends TasksControllerBase {
@HandleExceptions()
public static async getNewKanbanTask(req: IWorkLenzRequest, res: IWorkLenzResponse): Promise<IWorkLenzResponse> {
const {id} = req.params;
const { id } = req.params;
const result = await db.query("SELECT get_single_task($1) AS task;", [id]);
const [data] = result.rows;
const task = TasksControllerV2.updateTaskViewModel(data.task);
@@ -474,9 +517,211 @@ export default class TasksControllerV2 extends TasksControllerBase {
}
public static async getTasksByName(searchString: string, projectId: string, taskId: string) {
const q = `SELECT id AS value ,
name AS label,
CONCAT((SELECT key FROM projects WHERE id = t.project_id), '-', task_no) AS task_key
FROM tasks t
WHERE t.name ILIKE '%' || $3 || '%'
AND t.project_id = $1 AND t.id != $2
LIMIT 15;`;
const result = await db.query(q, [projectId, taskId, searchString]);
return result.rows;
}
@HandleExceptions()
public static async getSubscribers(req: IWorkLenzRequest, res: IWorkLenzResponse): Promise<IWorkLenzResponse> {
const subscribers = await this.getTaskSubscribers(req.params.id);
return res.status(200).send(new ServerResponse(true, subscribers));
}
@HandleExceptions()
public static async searchTasks(req: IWorkLenzRequest, res: IWorkLenzResponse): Promise<IWorkLenzResponse> {
const { projectId, taskId, searchQuery } = req.query;
const tasks = await this.getTasksByName(searchQuery as string, projectId as string, taskId as string);
return res.status(200).send(new ServerResponse(true, tasks));
}
@HandleExceptions()
public static async getTaskDependencyStatus(req: IWorkLenzRequest, res: IWorkLenzResponse): Promise<IWorkLenzResponse> {
const { statusId, taskId } = req.query;
const canContinue = await TasksControllerV2.checkForCompletedDependencies(taskId as string, statusId as string);
return res.status(200).send(new ServerResponse(true, { can_continue: canContinue }));
}
@HandleExceptions()
public static async checkForCompletedDependencies(taskId: string, nextStatusId: string): Promise<IWorkLenzResponse> {
const q = `SELECT
CASE
WHEN EXISTS (
-- Check if the status id is not in the "done" category
SELECT 1
FROM task_statuses ts
WHERE ts.id = $2
AND ts.project_id = (SELECT project_id FROM tasks WHERE id = $1)
AND ts.category_id IN (
SELECT id FROM sys_task_status_categories WHERE is_done IS FALSE
)
) THEN TRUE -- If status is not in the "done" category, continue immediately (TRUE)
WHEN EXISTS (
-- Check if any dependent tasks are not completed
SELECT 1
FROM task_dependencies td
LEFT JOIN public.tasks t ON t.id = td.related_task_id
WHERE td.task_id = $1
AND t.status_id NOT IN (
SELECT id
FROM task_statuses ts
WHERE t.project_id = ts.project_id
AND ts.category_id IN (
SELECT id FROM sys_task_status_categories WHERE is_done IS TRUE
)
)
) THEN FALSE -- If there are incomplete dependent tasks, do not continue (FALSE)
ELSE TRUE -- Continue if no other conditions block the process
END AS can_continue;`;
const result = await db.query(q, [taskId, nextStatusId]);
const [data] = result.rows;
return data.can_continue;
}
public static async getTaskStatusColor(status_id: string) {
try {
const q = `SELECT color_code, color_code_dark
FROM sys_task_status_categories
WHERE id = (SELECT category_id FROM task_statuses WHERE id = $1)`;
const result = await db.query(q, [status_id]);
const [data] = result.rows;
return data;
} catch (e) {
log_error(e);
}
}
@HandleExceptions()
public static async assignLabelsToTask(req: IWorkLenzRequest, res: IWorkLenzResponse): Promise<IWorkLenzResponse> {
const { id } = req.params;
const { labels }: { labels: string[] } = req.body;
// Run sequentially so every label update finishes before the response is sent.
for (const label of labels) {
  const q = `SELECT add_or_remove_task_label($1, $2) AS labels;`;
  await db.query(q, [id, label]);
}
return res.status(200).send(new ServerResponse(true, null, "Labels assigned successfully"));
}
/**
* Updates a custom column value for a task
* @param req The request object
* @param res The response object
*/
@HandleExceptions()
public static async updateCustomColumnValue(
req: IWorkLenzRequest,
res: IWorkLenzResponse
): Promise<IWorkLenzResponse> {
const { taskId } = req.params;
const { column_key, value, project_id } = req.body;
if (!taskId || !column_key || value === undefined || !project_id) {
return res.status(400).send(new ServerResponse(false, "Missing required parameters"));
}
// Get column information
const columnQuery = `
SELECT id, field_type
FROM cc_custom_columns
WHERE project_id = $1 AND key = $2
`;
const columnResult = await db.query(columnQuery, [project_id, column_key]);
if (columnResult.rowCount === 0) {
return res.status(404).send(new ServerResponse(false, "Custom column not found"));
}
const column = columnResult.rows[0];
const columnId = column.id;
const fieldType = column.field_type;
// Determine which value field to use based on the field_type
let textValue = null;
let numberValue = null;
let dateValue = null;
let booleanValue = null;
let jsonValue = null;
switch (fieldType) {
case "number":
numberValue = parseFloat(String(value));
break;
case "date":
dateValue = new Date(String(value));
break;
case "checkbox":
booleanValue = Boolean(value);
break;
case "people":
jsonValue = JSON.stringify(Array.isArray(value) ? value : [value]);
break;
default:
textValue = String(value);
}
// Check if a value already exists
const existingValueQuery = `
SELECT id
FROM cc_column_values
WHERE task_id = $1 AND column_id = $2
`;
const existingValueResult = await db.query(existingValueQuery, [taskId, columnId]);
if (existingValueResult.rowCount && existingValueResult.rowCount > 0) {
// Update existing value
const updateQuery = `
UPDATE cc_column_values
SET text_value = $1,
number_value = $2,
date_value = $3,
boolean_value = $4,
json_value = $5,
updated_at = NOW()
WHERE task_id = $6 AND column_id = $7
`;
await db.query(updateQuery, [
textValue,
numberValue,
dateValue,
booleanValue,
jsonValue,
taskId,
columnId
]);
} else {
// Insert new value
const insertQuery = `
INSERT INTO cc_column_values
(task_id, column_id, text_value, number_value, date_value, boolean_value, json_value, created_at, updated_at)
VALUES ($1, $2, $3, $4, $5, $6, $7, NOW(), NOW())
`;
await db.query(insertQuery, [
taskId,
columnId,
textValue,
numberValue,
dateValue,
booleanValue,
jsonValue
]);
}
return res.status(200).send(new ServerResponse(true, {
task_id: taskId,
column_key,
value
}));
}
}
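
A sample call shape for updateCustomColumnValue, with illustrative values; taskId travels in the route params while the rest is the request body (the column key and IDs below are hypothetical):

// Body consumed by updateCustomColumnValue.
const body = {
  column_key: "budget", // hypothetical key registered in cc_custom_columns
  project_id: "<project-uuid>",
  value: 1250 // lands in number_value when the column's field_type is "number"
};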

View File

@@ -6,9 +6,9 @@ import { IWorkLenzResponse } from "../interfaces/worklenz-response";
import db from "../config/db";
import { ServerResponse } from "../models/server-response";
import { TASK_STATUS_COLOR_ALPHA } from "../shared/constants";
import { S3_URL, TASK_STATUS_COLOR_ALPHA } from "../shared/constants";
import { getDates, getMinMaxOfTaskDates, getMonthRange, getWeekRange } from "../shared/tasks-controller-utils";
import { getColor, getRandomColorCode, log_error, toMinutes } from "../shared/utils";
import { getColor, getRandomColorCode, humanFileSize, log_error, toMinutes } from "../shared/utils";
import WorklenzControllerBase from "./worklenz-controller-base";
import HandleExceptions from "../decorators/handle-exceptions";
import { NotificationsService } from "../services/notifications/notifications.service";
@@ -18,9 +18,9 @@ import TasksControllerV2 from "./tasks-controller-v2";
import { IO } from "../shared/io";
import { SocketEvents } from "../socket.io/events";
import TasksControllerBase from "./tasks-controller-base";
import { insertToActivityLogs, logStatusChange } from "../services/activity-logs/activity-logs.service";
import { forEach } from "lodash";
import { insertToActivityLogs } from "../services/activity-logs/activity-logs.service";
import { IActivityLog } from "../services/activity-logs/interfaces";
import { getKey, getRootDir, uploadBase64 } from "../shared/s3";
export default class TasksController extends TasksControllerBase {
private static notifyProjectUpdates(socketId: string, projectId: string) {
@@ -29,14 +29,54 @@ export default class TasksController extends TasksControllerBase {
.emit(SocketEvents.PROJECT_UPDATES_AVAILABLE.toString());
}
public static async uploadAttachment(attachments: any, teamId: string, userId: string) {
try {
const promises = attachments.map(async (attachment: any) => {
const { file, file_name, project_id, size } = attachment;
const type = file_name.split(".").pop();
const q = `
INSERT INTO task_attachments (name, task_id, team_id, project_id, uploaded_by, size, type)
VALUES ($1, $2, $3, $4, $5, $6, $7)
RETURNING id, name, size, type, created_at, CONCAT($8::TEXT, '/', team_id, '/', project_id, '/', id, '.', type) AS url;
`;
const result = await db.query(q, [
file_name,
null,
teamId,
project_id,
userId,
size,
type,
`${S3_URL}/${getRootDir()}`
]);
const [data] = result.rows;
await uploadBase64(file, getKey(teamId, project_id, data.id, data.type));
return data.id;
});
const attachmentIds = await Promise.all(promises);
return attachmentIds;
} catch (error) {
log_error(error);
}
}
@HandleExceptions()
public static async create(req: IWorkLenzRequest, res: IWorkLenzResponse): Promise<IWorkLenzResponse> {
const userId = req.user?.id as string;
const teamId = req.user?.team_id as string;
if (req.body.attachments_raw) {
req.body.attachments = await this.uploadAttachment(req.body.attachments_raw, teamId, userId);
}
const q = `SELECT create_task($1) AS task;`;
const result = await db.query(q, [JSON.stringify(req.body)]);
const [data] = result.rows;
const userId = req.user?.id as string;
for (const member of data?.task.assignees || []) {
NotificationsService.createTaskUpdate(
"ASSIGN",
@@ -468,7 +508,7 @@ export default class TasksController extends TasksControllerBase {
TasksController.notifyProjectUpdates(req.user?.socket_id as string, req.query.project as string);
return res.status(200).send(new ServerResponse(true, data));
return res.status(200).send(new ServerResponse(true, { failed_tasks: data.task }));
}
@HandleExceptions()
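
uploadAttachment expects each attachments_raw entry to carry the fields destructured above; a sample entry with illustrative values (the base64 expectation follows from the uploadBase64 call):

// One element of req.body.attachments_raw as consumed by uploadAttachment.
const attachment = {
  file: "<base64-encoded file content>",
  file_name: "spec.pdf",
  project_id: "<project-uuid>",
  size: 48213 // bytes
};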

View File

@@ -13,7 +13,9 @@ import { SocketEvents } from "../socket.io/events";
import WorklenzControllerBase from "./worklenz-controller-base";
import HandleExceptions from "../decorators/handle-exceptions";
import { formatDuration, getColor } from "../shared/utils";
import { TEAM_MEMBER_TREE_MAP_COLOR_ALPHA } from "../shared/constants";
import { statusExclude, TEAM_MEMBER_TREE_MAP_COLOR_ALPHA } from "../shared/constants";
import { checkTeamSubscriptionStatus } from "../shared/paddle-utils";
import { updateUsers } from "../shared/paddle-requests";
import { NotificationsService } from "../services/notifications/notifications.service";
export default class TeamMembersController extends WorklenzControllerBase {
@@ -80,6 +82,98 @@ export default class TeamMembersController extends WorklenzControllerBase {
return res.status(200).send(new ServerResponse(false, "Required fields are missing."));
}
/**
* Checks the subscription status of the team.
* @type {Object} subscriptionData - Object containing subscription information
*/
const subscriptionData = await checkTeamSubscriptionStatus(req.user?.team_id);
let incrementBy = 0;
// Handle self-hosted subscriptions differently
if (subscriptionData.subscription_type === 'SELF_HOSTED') {
// Check if users exist and add them if they don't
await Promise.all(req.body.emails.map(async (email: string) => {
const trimmedEmail = email.trim();
const userExists = await this.checkIfUserAlreadyExists(req.user?.owner_id as string, trimmedEmail);
if (!userExists) {
incrementBy = incrementBy + 1;
}
}));
// Create or invite new members
const newMembers = await this.createOrInviteMembers(req.body, req.user);
return res.status(200).send(new ServerResponse(true, newMembers, `Your teammates will get an email that gives them access to your team.`).withTitle("Invitations sent"));
}
/**
* Iterates through each email in the request body and checks if the user already exists.
* If the user doesn't exist, increments the counter.
* @param {string} email - Email address to check
*/
await Promise.all(req.body.emails.map(async (email: string) => {
const trimmedEmail = email.trim();
const userExists = await this.checkIfUserAlreadyExists(req.user?.owner_id as string, trimmedEmail);
const isUserActive = await this.checkIfUserActiveInOtherTeams(req.user?.owner_id as string, trimmedEmail);
if (!userExists || !isUserActive) {
incrementBy = incrementBy + 1;
}
}));
/**
* Checks various conditions to determine if the maximum number of lifetime users is exceeded.
* Sends a response if the limit is reached.
*/
if (
incrementBy > 0
&& subscriptionData.is_ltd
&& subscriptionData.current_count
&& ((parseInt(subscriptionData.current_count) + req.body.emails.length) > parseInt(subscriptionData.ltd_users))) {
return res.status(200).send(new ServerResponse(false, null, "Cannot exceed the maximum number of life time users."));
}
if (
subscriptionData.is_ltd
&& subscriptionData.current_count
&& ((parseInt(subscriptionData.current_count) + incrementBy) > parseInt(subscriptionData.ltd_users))) {
return res.status(200).send(new ServerResponse(false, null, "Cannot exceed the maximum number of life time users."));
}
/**
* Checks subscription details and updates the user count if applicable.
* Sends a response if there is an issue with the subscription.
*/
// if (!subscriptionData.is_credit && !subscriptionData.is_custom && subscriptionData.subscription_status === "active") {
// const response = await updateUsers(subscriptionData.subscription_id, (subscriptionData.quantity + incrementBy));
// if (!response.body.subscription_id) {
// return res.status(200).send(new ServerResponse(false, null, response.message || "Please check your subscription."));
// }
// }
if (!subscriptionData.is_credit && !subscriptionData.is_custom && subscriptionData.subscription_status === "active") {
const updatedCount = parseInt(subscriptionData.current_count) + incrementBy;
const requiredSeats = updatedCount - subscriptionData.quantity;
if (updatedCount > subscriptionData.quantity) {
const obj = {
seats_enough: false,
required_count: requiredSeats,
current_seat_amount: subscriptionData.quantity
};
return res.status(200).send(new ServerResponse(false, obj, null));
}
}
/**
* Checks if the subscription status is in the exclusion list.
* Sends a response if the status is excluded.
*/
if (statusExclude.includes(subscriptionData.subscription_status)) {
return res.status(200).send(new ServerResponse(false, null, "Unable to add user! Please check your subscription status."));
}
/**
* Creates or invites new members based on the request body and user information.
* Sends a response with the result.
@@ -93,12 +187,24 @@ export default class TeamMembersController extends WorklenzControllerBase {
req.query.field = ["is_owner", "active", "u.name", "u.email"];
req.query.order = "descend";
// Helper function to check for encoded components
function containsEncodedComponents(x: string) {
return decodeURI(x) !== decodeURIComponent(x);
}
// Decode search parameter if it contains encoded components
if (req.query.search && typeof req.query.search === 'string') {
if (containsEncodedComponents(req.query.search)) {
req.query.search = decodeURIComponent(req.query.search);
}
}
const {
searchQuery,
sortField,
sortOrder,
size,
offset
} = this.toPaginationOptions(req.query, ["u.name", "u.email"], true);
const paginate = req.query.all === "false" ? `LIMIT ${size} OFFSET ${offset}` : "";
@@ -126,7 +232,7 @@ export default class TeamMembersController extends WorklenzControllerBase {
ELSE FALSE END) AS is_owner,
(SELECT email
FROM team_member_info_view
WHERE team_member_info_view.team_member_id = team_members.id),
WHERE team_member_info_view.team_member_id = team_members.id) AS email,
EXISTS(SELECT email
FROM email_invitations
WHERE team_member_id = team_members.id
@@ -277,12 +383,33 @@ export default class TeamMembersController extends WorklenzControllerBase {
if (!id || !req.user?.team_id) return res.status(200).send(new ServerResponse(false, "Required fields are missing."));
// check subscription status
const subscriptionData = await checkTeamSubscriptionStatus(req.user?.team_id);
if (statusExclude.includes(subscriptionData.subscription_status)) {
return res.status(200).send(new ServerResponse(false, "Please check your subscription status."));
}
const q = `SELECT remove_team_member($1, $2, $3) AS member;`;
const result = await db.query(q, [id, req.user?.id, req.user?.team_id]);
const [data] = result.rows;
const message = `You have been removed from <b>${req.user?.team_name}</b> by <b>${req.user?.name}</b>`;
// if (subscriptionData.status === "trialing") break;
// if (!subscriptionData.is_credit && !subscriptionData.is_custom) {
// if (subscriptionData.subscription_status === "active" && subscriptionData.quantity > 0) {
// const obj = await getActiveTeamMemberCount(req.user?.owner_id ?? "");
// // const activeObj = await getActiveTeamMemberCount(req.user?.owner_id ?? "");
// const userActiveInOtherTeams = await this.checkIfUserActiveInOtherTeams(req.user?.owner_id as string, req.query?.email as string);
// if (!userActiveInOtherTeams) {
// const response = await updateUsers(subscriptionData.subscription_id, obj.user_count);
// if (!response.body.subscription_id) return res.status(200).send(new ServerResponse(false, response.message || "Please check your subscription."));
// }
// }
// }
NotificationsService.sendNotification({
receiver_socket_id: data.socket_id,
message,
@@ -871,20 +998,68 @@ export default class TeamMembersController extends WorklenzControllerBase {
public static async toggleMemberActiveStatus(req: IWorkLenzRequest, res: IWorkLenzResponse) {
if (!req.user?.team_id) return res.status(200).send(new ServerResponse(false, "Required fields are missing."));
const q1 = `SELECT active FROM team_members WHERE id = $1;`;
const result1 = await db.query(q1, [req.params?.id]);
const [status] = result1.rows;
if (status.active) {
const updateQ1 = `UPDATE users
SET active_team = (SELECT id FROM teams WHERE user_id = users.id ORDER BY created_at DESC LIMIT 1)
WHERE id = (SELECT user_id FROM team_members WHERE id = $1 AND active IS TRUE LIMIT 1);`;
await db.query(updateQ1, [req.params?.id]);
// check subscription status
const subscriptionData = await checkTeamSubscriptionStatus(req.user?.team_id);
if (statusExclude.includes(subscriptionData.subscription_status)) {
return res.status(200).send(new ServerResponse(false, "Please check your subscription status."));
}
const q = `UPDATE team_members SET active = NOT active WHERE id = $1 RETURNING active;`;
const result = await db.query(q, [req.params?.id]);
const [data] = result.rows;
let data: any;
if (req.query.active === "true") {
const q1 = `SELECT active FROM team_members WHERE id = $1;`;
const result1 = await db.query(q1, [req.params?.id]);
const [status] = result1.rows;
if (status.active) {
const updateQ1 = `UPDATE users
SET active_team = (SELECT id FROM teams WHERE user_id = users.id ORDER BY created_at DESC LIMIT 1)
WHERE id = (SELECT user_id FROM team_members WHERE id = $1 AND active IS TRUE LIMIT 1);`;
await db.query(updateQ1, [req.params?.id]);
}
const q = `UPDATE team_members SET active = NOT active WHERE id = $1 RETURNING active;`;
const result = await db.query(q, [req.params?.id]);
data = result.rows[0];
// const userExists = await this.checkIfUserActiveInOtherTeams(req.user?.owner_id as string, req.query?.email as string);
// if (subscriptionData.status === "trialing") break;
// if (!userExists && !subscriptionData.is_credit && !subscriptionData.is_custom) {
// if (subscriptionData.subscription_status === "active" && subscriptionData.quantity > 0) {
// const operator = req.query.active === "true" ? - 1 : + 1;
// const response = await updateUsers(subscriptionData.subscription_id, subscriptionData.quantity + operator);
// if (!response.body.subscription_id) return res.status(200).send(new ServerResponse(false, response.message || "Please check your subscription."));
// }
// }
} else {
const userExists = await this.checkIfUserActiveInOtherTeams(req.user?.owner_id as string, req.query?.email as string);
// if (subscriptionData.status === "trialing") break;
// if (!userExists && !subscriptionData.is_credit && !subscriptionData.is_custom) {
// if (subscriptionData.subscription_status === "active" && subscriptionData.quantity > 0) {
// const operator = req.query.active === "true" ? - 1 : + 1;
// const response = await updateUsers(subscriptionData.subscription_id, subscriptionData.quantity + operator);
// if (!response.body.subscription_id) return res.status(200).send(new ServerResponse(false, response.message || "Please check your subscription."));
// }
// }
const q1 = `SELECT active FROM team_members WHERE id = $1;`;
const result1 = await db.query(q1, [req.params?.id]);
const [status] = result1.rows;
if (status.active) {
const updateQ1 = `UPDATE users
SET active_team = (SELECT id FROM teams WHERE user_id = users.id ORDER BY created_at DESC LIMIT 1)
WHERE id = (SELECT user_id FROM team_members WHERE id = $1 AND active IS TRUE LIMIT 1);`;
await db.query(updateQ1, [req.params?.id]);
}
const q = `UPDATE team_members SET active = NOT active WHERE id = $1 RETURNING active;`;
const result = await db.query(q, [req.params?.id]);
data = result.rows[0];
}
return res.status(200).send(new ServerResponse(true, [], `Team member ${data.active ? "activated" : "deactivated"} successfully.`));
}
@@ -899,6 +1074,21 @@ export default class TeamMembersController extends WorklenzControllerBase {
if (!req.body.team_id || !req.user?.id) return res.status(200).send(new ServerResponse(false, "Required fields are missing."));
// check the subscription status
const subscriptionData = await checkTeamSubscriptionStatus(req.body.team_id);
if (statusExclude.includes(subscriptionData.subscription_status)) {
return res.status(200).send(new ServerResponse(false, "Please check your subscription status."));
}
// if (subscriptionData.status === "trialing") break;
if (!subscriptionData.is_credit && !subscriptionData.is_custom) {
if (subscriptionData.subscription_status === "active") {
const response = await updateUsers(subscriptionData.subscription_id, subscriptionData.quantity + (req.body.emails.length || 1));
if (!response.body.subscription_id) return res.status(200).send(new ServerResponse(false, response.message || "Please check your subscription."));
}
}
const newMembers = await this.createOrInviteMembers(req.body, req.user);
return res.status(200).send(new ServerResponse(true, newMembers, `Your teammates will get an email that gives them access to your team.`).withTitle("Invitations sent"));
}
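
A worked example of the seat check above, assuming a 10-seat active subscription with 9 counted users and 3 new unique invitees:

const currentCount = 9; // parseInt(subscriptionData.current_count)
const incrementBy = 3; // invitees not already in the team
const quantity = 10; // seats on the subscription
const updatedCount = currentCount + incrementBy; // 12
const requiredSeats = updatedCount - quantity; // 2 -> returned as required_count
const seatsEnough = updatedCount <= quantity; // false -> seats_enough: false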

View File

@@ -16,8 +16,8 @@ export default class TimezonesController extends WorklenzControllerBase {
@HandleExceptions()
public static async update(req: IWorkLenzRequest, res: IWorkLenzResponse): Promise<IWorkLenzResponse> {
const q = `UPDATE users SET timezone_id = $2 WHERE id = $1;`;
const result = await db.query(q, [req.user?.id, req.body.timezone]);
return res.status(200).send(new ServerResponse(true, result.rows, "Timezone updated"));
const q = `UPDATE users SET timezone_id = $2, language = $3 WHERE id = $1;`;
const result = await db.query(q, [req.user?.id, req.body.timezone, req.body.language]);
return res.status(200).send(new ServerResponse(true, result.rows, "Updated successfully"));
}
}

View File

@@ -12,8 +12,8 @@ export function mapMembersWithAnd(members: string) {
}
export function getBaseUrl() {
if (isLocalServer()) return `http://${process.env.HOSTNAME}`;
return `https://${process.env.HOSTNAME}`;
if (isLocalServer()) return `http://${process.env.FRONTEND_URL}`;
return `https://${process.env.FRONTEND_URL}`;
}
function mapMembers(project: ITaskAssignmentModelProject) {

View File

@@ -1,9 +1,11 @@
import {startDailyDigestJob} from "./daily-digest-job";
import {startNotificationsJob} from "./notifications-job";
import {startProjectDigestJob} from "./project-digest-job";
import { startRecurringTasksJob } from "./recurring-tasks";
export function startCronJobs() {
startNotificationsJob();
startDailyDigestJob();
startProjectDigestJob();
// startRecurringTasksJob();
}

View File

@@ -7,6 +7,8 @@ import {sendProjectDailyDigest} from "../shared/email-notifications";
// At 11:00+00 (4.30pm+530) on every day-of-month if it's on every day-of-week from Monday through Friday.
const TIME = "0 11 */1 * 1-5";
// const TIME = "0/10 * * * *";
// const TIME = "* * * * *";
const log = (value: any) => console.log("project-digest-cron-job:", value);

View File

@@ -0,0 +1,113 @@
import { CronJob } from "cron";
import { calculateNextEndDate, log_error } from "../shared/utils";
import db from "../config/db";
import { IRecurringSchedule, ITaskTemplate } from "../interfaces/recurring-tasks";
import moment from "moment";
import TasksController from "../controllers/tasks-controller";
// The active schedule below runs every two minutes; the commented-out lines are alternatives (e.g. 11:00 UTC on weekdays, midnight daily).
// const TIME = "0 11 */1 * 1-5";
const TIME = "*/2 * * * *";
const TIME_FORMAT = "YYYY-MM-DD";
// const TIME = "0 0 * * *"; // Runs at midnight every day
const log = (value: any) => console.log("recurring-task-cron-job:", value);
async function onRecurringTaskJobTick() {
try {
log("(cron) Recurring tasks job started.");
const templatesQuery = `
SELECT t.*, s.*, (SELECT MAX(end_date) FROM tasks WHERE schedule_id = s.id) as last_task_end_date
FROM task_recurring_templates t
JOIN task_recurring_schedules s ON t.schedule_id = s.id;
`;
const templatesResult = await db.query(templatesQuery);
const templates = templatesResult.rows as (ITaskTemplate & IRecurringSchedule)[];
const now = moment();
let createdTaskCount = 0;
for (const template of templates) {
const lastTaskEndDate = template.last_task_end_date
? moment(template.last_task_end_date)
: moment(template.created_at);
const futureLimit = moment(template.last_checked_at || template.created_at).add(1, "week");
let nextEndDate = calculateNextEndDate(template, lastTaskEndDate);
// Find the next future occurrence
while (nextEndDate.isSameOrBefore(now)) {
nextEndDate = calculateNextEndDate(template, nextEndDate);
}
// Only create a task if it's within the future limit
if (nextEndDate.isSameOrBefore(futureLimit)) {
const existingTaskQuery = `
SELECT id FROM tasks
WHERE schedule_id = $1 AND end_date::DATE = $2::DATE;
`;
const existingTaskResult = await db.query(existingTaskQuery, [template.schedule_id, nextEndDate.format(TIME_FORMAT)]);
if (existingTaskResult.rows.length === 0) {
const createTaskQuery = `SELECT create_quick_task($1::json) as task;`;
const taskData = {
name: template.name,
priority_id: template.priority_id,
project_id: template.project_id,
reporter_id: template.reporter_id,
status_id: template.status_id || null,
end_date: nextEndDate.format(TIME_FORMAT),
schedule_id: template.schedule_id
};
const createTaskResult = await db.query(createTaskQuery, [JSON.stringify(taskData)]);
const createdTask = createTaskResult.rows[0].task;
if (createdTask) {
createdTaskCount++;
for (const assignee of template.assignees) {
await TasksController.createTaskBulkAssignees(assignee.team_member_id, template.project_id, createdTask.id, assignee.assigned_by);
}
for (const label of template.labels) {
const q = `SELECT add_or_remove_task_label($1, $2) AS labels;`;
await db.query(q, [createdTask.id, label.label_id]);
}
console.log(`Created task for template ${template.name} with end date ${nextEndDate.format(TIME_FORMAT)}`);
}
} else {
console.log(`Skipped creating task for template ${template.name} with end date ${nextEndDate.format(TIME_FORMAT)} - task already exists`);
}
} else {
console.log(`No task created for template ${template.name} - next occurrence is beyond the future limit`);
}
// Update the last_checked_at in the schedule
const updateScheduleQuery = `
UPDATE task_recurring_schedules
SET last_checked_at = $1::DATE, last_created_task_end_date = $2
WHERE id = $3;
`;
await db.query(updateScheduleQuery, [moment(template.last_checked_at || template.created_at).add(1, "day").format(TIME_FORMAT), nextEndDate.format(TIME_FORMAT), template.schedule_id]);
}
log(`(cron) Recurring tasks job ended with ${createdTaskCount} new tasks created.`);
} catch (error) {
log_error(error);
log("(cron) Recurring task job ended with errors.");
}
}
export function startRecurringTasksJob() {
log("(cron) Recurring task job ready.");
const job = new CronJob(
TIME,
() => void onRecurringTaskJobTick(),
() => log("(cron) Recurring task job successfully executed."),
true
);
job.start();
}
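
calculateNextEndDate lives in shared/utils and is not shown in this compare view. A sketch of plausible behavior, driven by the IRecurringSchedule fields defined below (this is an assumption, not the project's actual implementation):

import moment, { Moment, MomentInput } from "moment";
import { IRecurringSchedule } from "../interfaces/recurring-tasks";

// Hypothetical sketch: advance from the previous end date according to the schedule type.
export function calculateNextEndDate(schedule: IRecurringSchedule, lastEndDate: MomentInput): Moment {
  const last = moment(lastEndDate); // parse/clone so the caller's value is untouched
  switch (schedule.schedule_type) {
    case "daily": return last.add(1, "day");
    case "weekly": return last.add(1, "week");
    case "monthly": return last.add(1, "month");
    case "yearly": return last.add(1, "year");
    case "every_x_days": return last.add(schedule.interval_days ?? 1, "days");
    case "every_x_weeks": return last.add(schedule.interval_weeks ?? 1, "weeks");
    case "every_x_months": return last.add(schedule.interval_months ?? 1, "months");
    default: return last.add(1, "day");
  }
}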

View File

@@ -17,4 +17,5 @@ export interface IPassportSession extends IUser {
socket_id?: string;
is_expired?: boolean;
owner_id?: string;
subscription_status?: string;
}

View File

@@ -0,0 +1,38 @@
export interface IRecurringSchedule {
id: string;
schedule_type: "daily" | "weekly" | "monthly" | "yearly" | "every_x_days" | "every_x_weeks" | "every_x_months";
days_of_week: number[] | null;
day_of_month: number | null;
date_of_month: number | null;
week_of_month: number | null;
interval_days: number | null;
interval_weeks: number | null;
interval_months: number | null;
last_created_task_end_date: Date | null;
last_checked_at: Date | null;
last_task_end_date: Date | null;
created_at: Date;
}
interface ITaskTemplateAssignee {
team_member_id: string;
assigned_by: string;
}
interface ITaskTemplateLabel {
label_id: string;
}
export interface ITaskTemplate {
task_id: string;
schedule_id: string;
created_at: Date;
name: string;
priority_id: string;
project_id: string;
reporter_id: string;
status_id: string;
assignees: ITaskTemplateAssignee[];
labels: ITaskTemplateLabel[];
}

View File

@@ -1,3 +1,3 @@
export interface ISerializeCallback {
(error: string | null, id: string | null): void;
(error: string | null, user: { id: string | null } | null): void;
}

View File

@@ -1,5 +1,5 @@
export interface ISocketSession {
session?: {
passport?: { user?: string; }
passport?: { user?: { id: string } }
}
}

View File

@@ -0,0 +1,9 @@
-----BEGIN RSA PRIVATE KEY-----
MIIBOgIBAAJBAMe4wKg0OazdVWEyLCnTxubXHqpp6U7S7MiIE96Iufe+T4fe1EJl
2+7UJ0Vh0iO9vy/dr03Y9Mjm/IxgiaLEqFECAwEAAQJBAIV17jf4fjoHxZAnyN9C
h32mbvWNxLxJsrTmSfDBCRSFRv+ME7WAb7wGhfeDPZcxC+sDZv5EhTnDwQoVl0+3
tOECIQDzAbIUX6IS401UKISr8rk9dmPa+i89z5JAyiuhX8sQdQIhANJmkUYjHJtp
do/4dmDC6Dgv6SPr9zrNFg2A9Hgu3zztAiBpSHDJFu33VPep4Kwqe0z6bhKxSvew
xf/NhkoE7qXiCQIgEltslWf+2PhspccR3QNka3KSrtWprnGyWN9FdS7xv0kCIDje
m2QMP/tkiyGlX4cxpDvoB3syPEsbnH+3iaGMlD1T
-----END RSA PRIVATE KEY-----

View File

@@ -0,0 +1,9 @@
-----BEGIN RSA PRIVATE KEY-----
MIIBOwIBAAJBALdfkpZY9GkPSezqNtNP70SDc5ovnB8NttBxheDecIXRiKkGQaTc
QuDq19IlDPr+jPvJ6VyMZXtK1UQ09ewUZQ0CAwEAAQJBAKIKkaXMW8bPHNt/qQ0Y
kO4xXyF8OvDyFH+kIdMxnauRm8Z28EC4S8F9sfaqL/haj8lMDDDUEhJJB5P3l4XW
3WECIQDbZBsfv5+++ie08FzW4K0IrTeFrkanbuV9fhx9sqpgNQIhANX43uuGl7qE
RfGEesIfK3FurZhNUXBzYwpZoGC4Drx5AiANK18tcrVGI4IKrHsGMwpwAOXaUnHP
Tyrbc5yGNxlfGQIgGgFGLn/MHvoGeiTsun0JTZ7y8Citdio/5jkgWcDk4ZkCIQCk
TLAHaLJHiN63o3F/lTwyMib/3xQrsjcxs6k/Y9VEHw==
-----END RSA PRIVATE KEY-----

View File

@@ -1,12 +1,13 @@
import session from "express-session";
import db from "../config/db";
import { isProduction } from "../shared/utils";
// eslint-disable-next-line @typescript-eslint/no-var-requires
const pgSession = require("connect-pg-simple")(session);
export default session({
name: process.env.SESSION_NAME,
secret: process.env.SESSION_SECRET || [], // session secret
secret: process.env.SESSION_SECRET || "development-secret-key",
proxy: false,
resave: false,
saveUninitialized: true,
@@ -17,10 +18,10 @@ export default session({
}),
cookie: {
path: "/",
// secure: true,
// httpOnly: true,
// sameSite: true,
// domain: process.env.HOSTNAME,
// secure: isProduction(),
// httpOnly: isProduction(),
// sameSite: "none",
// domain: isProduction() ? ".worklenz.com" : undefined,
maxAge: 30 * 24 * 60 * 60 * 1000 // 30 days
}
});
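
If the commented cookie flags above were enabled, a hardened production variant might look like the sketch below (store and proxy settings omitted for brevity; this is illustrative, not the project's configuration):

import session from "express-session";
import { isProduction } from "../shared/utils";

export default session({
  name: process.env.SESSION_NAME,
  secret: process.env.SESSION_SECRET || "development-secret-key",
  resave: false,
  saveUninitialized: true,
  cookie: {
    path: "/",
    secure: isProduction(), // cookies only over HTTPS in production
    httpOnly: true, // not readable from client-side JS
    sameSite: isProduction() ? "none" : "lax", // "none" requires secure: true
    maxAge: 30 * 24 * 60 * 60 * 1000 // 30 days
  }
});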

View File

@@ -5,9 +5,11 @@ import {isValidateEmail} from "../../shared/utils";
export default function (req: Request, res: Response, next: NextFunction) {
const {name, email} = req.body;
if (!name) return res.status(200).send(new ServerResponse(false, null, "Name is required"));
if (!email) return res.status(200).send(new ServerResponse(false, null, "Email is required"));
if (!isValidateEmail(email)) return res.status(200).send(new ServerResponse(false, null, "Invalid email address"));
req.body.team_name = name.trim();
return next();
}

View File

@@ -1,11 +1,13 @@
import {NextFunction} from "express";
import { NextFunction } from "express";
import {IWorkLenzRequest} from "../../interfaces/worklenz-request";
import {IWorkLenzResponse} from "../../interfaces/worklenz-response";
import {ServerResponse} from "../../models/server-response";
import { IWorkLenzRequest } from "../../interfaces/worklenz-request";
import { IWorkLenzResponse } from "../../interfaces/worklenz-response";
import { ServerResponse } from "../../models/server-response";
import { getFreePlanSettings, getUsedStorage } from "../../shared/paddle-utils";
import { megabytesToBytes } from "../../shared/utils";
export default function (req: IWorkLenzRequest, res: IWorkLenzResponse, next: NextFunction): IWorkLenzResponse | void {
const {file, file_name, project_id, size} = req.body;
export default async function (req: IWorkLenzRequest, res: IWorkLenzResponse, next: NextFunction): Promise<IWorkLenzResponse | void> {
const { file, file_name, project_id, size } = req.body;
if (!file || !file_name || !project_id || !size)
return res.status(200).send(new ServerResponse(false, null, "Upload failed"));
@@ -13,6 +15,15 @@ export default function (req: IWorkLenzRequest, res: IWorkLenzResponse, next: Ne
if (size > 5.243e+7)
return res.status(200).send(new ServerResponse(false, null, "Max file size for attachments is 50 MB.").withTitle("Upload failed!"));
if (req.user?.subscription_status === "free" && req.user?.owner_id) {
const limits = await getFreePlanSettings();
const usedStorage = await getUsedStorage(req.user?.owner_id);
if ((parseInt(usedStorage) + size) > megabytesToBytes(parseInt(limits.free_tier_storage))) {
return res.status(200).send(new ServerResponse(false, [], `Sorry, the free plan cannot exceed ${limits.free_tier_storage}MB of storage.`));
}
}
req.body.type = file_name.split(".").pop();
req.body.task_id = req.body.task_id || null;

View File

@@ -0,0 +1,17 @@
import { NextFunction } from "express";
import { IWorkLenzRequest } from "../../interfaces/worklenz-request";
import { IWorkLenzResponse } from "../../interfaces/worklenz-response";
import { ServerResponse } from "../../models/server-response";
export default function (req: IWorkLenzRequest, res: IWorkLenzResponse, next: NextFunction): IWorkLenzResponse | void {
const { attachments, task_id } = req.body;
if (!Array.isArray(attachments) || attachments.length === 0)
return res.status(200).send(new ServerResponse(false, null, "Attachments are required!"));
if (!task_id)
return res.status(200).send(new ServerResponse(false, null, "Task ID is required!"));
return next();
}

View File

@@ -6,11 +6,11 @@ import {ServerResponse} from "../../models/server-response";
export default function (req: IWorkLenzRequest, res: IWorkLenzResponse, next: NextFunction): IWorkLenzResponse | void {
const {content, task_id} = req.body;
if (!content)
return res.status(200).send(new ServerResponse(false, null, "Comment message is required"));
// if (!content)
// return res.status(200).send(new ServerResponse(false, null, "Comment message is required"));
if (!task_id)
return res.status(200).send(new ServerResponse(false, null, "Unable to create comment"));
if (content.length > 2000)
if (content.length > 5000)
return res.status(200).send(new ServerResponse(false, null, "Message length exceeded"));
req.body.mentions = Array.isArray(req.body.mentions)

View File

@@ -0,0 +1,48 @@
import {NextFunction} from "express";
import {IWorkLenzRequest} from "../../interfaces/worklenz-request";
import {IWorkLenzResponse} from "../../interfaces/worklenz-response";
import {ServerResponse} from "../../models/server-response";
import {getRandomColorCode, sanitize, toMinutes, toRound} from "../../shared/utils";
export default function (req: IWorkLenzRequest, res: IWorkLenzResponse, next: NextFunction): IWorkLenzResponse | void {
const {name, assignees, project_id, labels} = req.body;
if (!name?.trim()?.length)
return res.status(200).send(new ServerResponse(false, null, "Name is required"));
if (!project_id)
return res.status(200).send(new ServerResponse(false, null, "Project is required"));
req.body.total_hours = isNaN(+req.body.total_hours) || req.body.total_hours > 1000 ? 0 : toRound(req.body.total_hours);
req.body.total_minutes = isNaN(+req.body.total_minutes) || req.body.total_minutes > 1000 ? 0 : toRound(req.body.total_minutes);
req.body.assignees = Array.isArray(assignees) ? assignees : [];
req.body.labels = Array.isArray(labels) ? labels : [];
req.body.reporter_id = req.user?.id || null;
req.body.total_minutes = toMinutes(req.body.total_hours, req.body.total_minutes);
req.body.team_id = req.user?.team_id || null;
req.body.inline = req.query.inline || false;
const labelsJson = [];
for (const label of req.body.labels) {
labelsJson.push({
name: label,
color: getRandomColorCode()
});
}
req.body.labels = labelsJson;
if (req.body.description) {
if (req.body.description.length > 4000)
return res.status(200).send(new ServerResponse(false, null, "Task description length exceeded!"));
req.body.description = sanitize(req.body.description);
}
if (req.body.name.length > 100)
return res.status(200).send(new ServerResponse(false, null, "Task name length exceeded!"));
return next();
}
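
A request body that passes this validator, with illustrative values (label strings are converted into { name, color } objects by the loop above):

const taskBody = {
  name: "Draft Q3 report", // must be 100 characters or fewer
  project_id: "<project-uuid>",
  assignees: ["<team-member-uuid>"],
  labels: ["reporting"],
  total_hours: 2, // values over 1000 are reset to 0
  total_minutes: 30, // folded into total_minutes via toMinutes()
  description: "Outline and first pass." // sanitized; max 4000 characters
};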

View File

@@ -0,0 +1,12 @@
import { NextFunction } from "express";
import { IWorkLenzRequest } from "../../interfaces/worklenz-request";
import { IWorkLenzResponse } from "../../interfaces/worklenz-response";
import { ServerResponse } from "../../models/server-response";
export default function (req: IWorkLenzRequest, res: IWorkLenzResponse, next: NextFunction): IWorkLenzResponse | void {
const { example_name } = req.body;
if (!example_name)
return res.status(200).send(new ServerResponse(false, null, "Name is required"));
return next();
}

View File

@@ -12,7 +12,7 @@ export default function (req: IWorkLenzRequest, res: IWorkLenzResponse, next: Ne
return res.status(200).send(new ServerResponse(false, null, "Email addresses cannot be empty"));
for (const email of emails) {
if (!isValidateEmail(email))
if (!isValidateEmail(email.trim()))
return res.status(200).send(new ServerResponse(false, null, "Invalid email address"));
}

View File

@@ -20,14 +20,24 @@ async function clearEmailInvitations(email: string, teamId: string) {
}
// Check whether the user still exists on the database
export async function deserialize(id: string, done: IDeserializeCallback) {
export async function deserialize(user: { id: string | null }, done: IDeserializeCallback) {
try {
if (!user || !user.id) {
return done(null, null);
}
const {id} = user;
const excludedSubscriptionTypes = ["TRIAL", "PADDLE"];
const q = `SELECT deserialize_user($1) AS user;`;
const result = await db.query(q, [id]);
if (result.rows.length) {
const [data] = result.rows;
if (data?.user) {
const realExpiredDate = moment(data.user.valid_till_date).add(7, "days");
data.user.is_expired = false;
data.user.is_member = !!data.user.team_member_id;
if (excludedSubscriptionTypes.includes(data.user.subscription_type)) data.user.is_expired = realExpiredDate.isBefore(moment(), "days");
void setLastActive(data.user.id);
void clearEmailInvitations(data.user.email, data.user.team_id);

View File

@@ -12,11 +12,11 @@ async function handleGoogleLogin(req: Request, _accessToken: string, _refreshTok
if (Array.isArray(profile.photos) && profile.photos.length) body.picture = profile.photos[0].value;
// Check for existing accounts signed up using OAuth
const localAccountResult = await db.query("SELECT 1 FROM users WHERE email = $1 AND password IS NOT NULL;", [body.email]);
const localAccountResult = await db.query("SELECT 1 FROM users WHERE email = $1 AND password IS NOT NULL AND is_deleted IS FALSE;", [body.email]);
if (localAccountResult.rowCount) {
const message = `No Google account exists for email ${body.email}.`;
(req.session as any).error = message;
return done(null, undefined, req.flash(ERROR_KEY, message));
return done(null, undefined, { message: req.flash(ERROR_KEY, message) });
}
// If the user came from an invitation, this exists

View File

@@ -1,46 +1,50 @@
import bcrypt from "bcrypt";
import {Strategy as LocalStrategy} from "passport-local";
import {log_error} from "../../shared/utils";
import { Strategy as LocalStrategy } from "passport-local";
import { log_error } from "../../shared/utils";
import db from "../../config/db";
import {Request} from "express";
import { Request } from "express";
async function handleLogin(req: Request, email: string, password: string, done: any) {
(req.session as any).flash = {};
console.log("Login attempt for:", email);
if (!email || !password)
return done(null, false, {message: "Invalid credentials."});
if (!email || !password) {
console.log("Missing credentials");
return done(null, false, { message: "Please enter both email and password" });
}
try {
// select the user from the database based on the username
const q = `SELECT id, email, google_id, password
FROM users
WHERE email = $1
AND google_id IS NULL;`;
AND google_id IS NULL
AND is_deleted IS FALSE;`;
const result = await db.query(q, [email]);
console.log("User query result count:", result.rowCount);
const [data] = result.rows;
// Check user existence
if (!data?.password)
return done(null, false, {message: "Invalid credentials."});
// Compare the password & email
if (bcrypt.compareSync(password, data.password) && email === data.email) {
delete data.password;
req.logout(() => true);
return done(false, data, {message: "User successfully logged in"});
if (!data?.password) {
console.log("No account found");
return done(null, false, { message: "No account found with this email" });
}
return done(null, false, {message: "Invalid credentials."});
const passwordMatch = bcrypt.compareSync(password, data.password);
console.log("Password match:", passwordMatch);
if (passwordMatch && email === data.email) {
delete data.password;
return done(null, data, {message: "User successfully logged in"});
}
return done(null, false, { message: "Incorrect email or password" });
} catch (error) {
console.error("Login error:", error);
log_error(error, req.body);
return done(error);
}
}
export default new LocalStrategy({
usernameField: "email", // = email
usernameField: "email",
passwordField: "password",
passReqToCallback: true
}, (req, email, password, done) => void handleLogin(req, email, password, done));

View File

@@ -56,11 +56,7 @@ async function handleSignUp(req: Request, email: string, password: string, done:
try {
const user = await registerUser(password, team_id, name, team_name, email, timezone, team_member_id);
sendWelcomeEmail(email, name);
setTimeout(() => {
return done(null, user, req.flash(SUCCESS_KEY, "Registration successful. Please check your email for verification."));
}, 500);
return done(null, user, req.flash(SUCCESS_KEY, "Registration successful. Please check your email for verification."));
} catch (error: any) {
const message = (error?.message) || "";

View File

@@ -3,5 +3,5 @@ import {IPassportSession} from "../interfaces/passport-session";
// Parse the user id to deserialize function
export function serialize($user: IPassportSession, done: ISerializeCallback) {
done(null, $user?.id ?? null);
done(null, { id: $user?.id ?? null });
}
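
With this change, the value stored in the session round-trips as an { id } object instead of a bare string, matching the updated deserialize signature earlier in this diff. A sketch of the passport wiring under that assumption (import paths are illustrative):

import passport from "passport";
import { serialize } from "./serialize"; // file shown above
import { deserialize } from "./deserialize"; // path assumed

passport.serializeUser(serialize); // stores { id: user.id } in the session
passport.deserializeUser(deserialize); // receives { id } back and loads the user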

File diff suppressed because one or more lines are too long

File diff suppressed because one or more lines are too long

File diff suppressed because one or more lines are too long

Some files were not shown because too many files have changed in this diff.