Update environment configuration, Docker setup, and frontend/backend dependencies

- Updated .env.example and .env files for backend and frontend with placeholder values.
- Enhanced .gitignore to include additional files and directories.
- Modified docker-compose.yml to change image names and improve service health checks.
- Updated README.md and SETUP_THE_PROJECT.md for clearer setup instructions.
- Added database initialization scripts and SQL files for structured database setup.
- Updated frontend Dockerfile to use Node.js 22 and adjusted package.json scripts.
- Improved error handling and logging in start scripts for better debugging.
- Added reCAPTCHA support in the signup page with conditional loading based on environment variables.
chamikaJ committed on 2025-04-18 17:10:56 +05:30
parent 8825b0410a
commit e42819ef64
34 changed files with 948 additions and 376 deletions

View File

@@ -1,8 +1,8 @@
# Database configuration
DB_USER=postgres
DB_PASSWORD=password
DB_PASSWORD=your_db_password
DB_NAME=worklenz_db
DB_HOST=db
DB_HOST=localhost
DB_PORT=5432
DB_MAX_CLIENTS=50
@@ -10,25 +10,25 @@ DB_MAX_CLIENTS=50
NODE_ENV=development
PORT=3000
SESSION_NAME=worklenz.sid
SESSION_SECRET=worklenz-session-secret
COOKIE_SECRET=worklenz-cookie-secret
SESSION_SECRET=your_session_secret
COOKIE_SECRET=your_cookie_secret
# CORS
SOCKET_IO_CORS=http://localhost:5000
SERVER_CORS=*
# Storage configuration (MinIO)
# Storage configuration
STORAGE_PROVIDER=s3
AWS_REGION=us-east-1
AWS_BUCKET=worklenz-bucket
S3_ACCESS_KEY_ID=minioadmin
S3_SECRET_ACCESS_KEY=minioadmin
S3_URL=http://minio:9000
AWS_REGION=your_aws_region
AWS_BUCKET=your_bucket_name
S3_ACCESS_KEY_ID=your_access_key_id
S3_SECRET_ACCESS_KEY=your_secret_access_key
S3_URL=your_s3_url
# Application URLs
HOSTNAME=localhost:5000
FRONTEND_URL=http://localhost:5000
# For local development, set these to the frontend service
# For local development
LOGIN_FAILURE_REDIRECT=http://localhost:5000
LOGIN_SUCCESS_REDIRECT=http://localhost:5000/auth/authenticate

.gitignore
View File

@@ -1,4 +1,79 @@
.idea
.vscode
# Dependencies
node_modules/
.pnp/
.pnp.js
# Build outputs
dist/
build/
out/
.next/
.nuxt/
.cache/
# Environment variables
.env
.env.local
.env.development.local
.env.test.local
.env.production.local
.env.development
.env.production
.env.*
!.env.example
!.env.template
# Logs
logs
*.log
npm-debug.log*
yarn-debug.log*
yarn-error.log*
pnpm-debug.log*
lerna-debug.log*
# Editor directories and files
.vscode/*
!.vscode/extensions.json
.idea/
.DS_Store
*.suo
*.ntvs*
*.njsproj
*.sln
*.sw?
*.sublime-workspace
# Testing
coverage/
.nyc_output/
# Temp files
.temp/
.tmp/
temp/
tmp/
# Debug
.debug/
# Misc
.DS_Store
Thumbs.db
.thumbs.db
ehthumbs.db
Desktop.ini
$RECYCLE.BIN/
# Yarn
.yarn/*
!.yarn/patches
!.yarn/plugins
!.yarn/releases
!.yarn/sdks
!.yarn/versions
# TypeScript
*.tsbuildinfo

View File

@@ -19,7 +19,7 @@ We have adopted a Code of Conduct to ensure a welcoming and inclusive environmen
## Coding Standards
- Follow the [Angular Style Guide](https://angular.io/guide/styleguide) for the frontend code.
- Follow the [React Documentation](https://react.dev/learn) for best practices in React development.
- Use [TypeScript](https://www.typescriptlang.org/) for both frontend and backend code.
- Follow the [Conventional Commits](https://www.conventionalcommits.org/) specification for commit messages.

README.md
View File

@@ -1,3 +1,172 @@
# Worklenz
Worklenz is an open-source project management platform designed to help teams collaborate efficiently on tasks and projects.
## Features
- **Project Planning**: Create and organize projects, assign tasks to team members.
- **Task Management**: Break down projects into smaller tasks, set due dates, priorities, and track progress.
- **Collaboration**: Share files, leave comments, and communicate seamlessly with your team members.
- **Time Tracking**: Monitor time spent on tasks and projects for better resource allocation and billing.
- **Reporting**: Generate detailed reports on project status, team workload, and performance metrics.
## Tech Stack
This repository contains the frontend and backend code for Worklenz.
- **Frontend**: Built using React with Ant Design as the UI library.
- **Backend**: Built using TypeScript, Express.js, with PostgreSQL as the database.
## Requirements
- Node.js version v16 or newer
- PostgreSQL version v15 or newer
- Docker and Docker Compose (for containerized setup)
## Getting Started
These instructions will help you set up and run the Worklenz project on your local machine for development and testing purposes.
### Prerequisites
- Node.js (version 16 or higher)
- PostgreSQL database
- An S3-compatible storage service (like MinIO) or Azure Blob Storage
### Option 1: Manual Installation
1. Clone the repository
```bash
git clone https://github.com/yourusername/worklenz.git
cd worklenz
```
2. Set up environment variables
- Copy the example environment files
```bash
cp .env.example .env
cp worklenz-backend/.env.example worklenz-backend/.env
```
- Update the environment variables with your configuration
3. Install dependencies
```bash
# Install backend dependencies
cd worklenz-backend
npm install
# Install frontend dependencies
cd ../worklenz-frontend
npm install
```
4. Set up the database
```bash
# Create a PostgreSQL database named worklenz_db
cd worklenz-backend
# Execute the SQL setup files in the correct order
psql -U your_username -d worklenz_db -f database/sql/0_extensions.sql
psql -U your_username -d worklenz_db -f database/sql/1_tables.sql
psql -U your_username -d worklenz_db -f database/sql/indexes.sql
psql -U your_username -d worklenz_db -f database/sql/4_functions.sql
psql -U your_username -d worklenz_db -f database/sql/triggers.sql
psql -U your_username -d worklenz_db -f database/sql/3_views.sql
psql -U your_username -d worklenz_db -f database/sql/2_dml.sql
psql -U your_username -d worklenz_db -f database/sql/5_database_user.sql
```
5. Start the development servers
```bash
# In one terminal, start the backend
cd worklenz-backend
npm run dev
# In another terminal, start the frontend
cd worklenz-frontend
npm run dev
```
6. Access the application at http://localhost:5000
### Option 2: Docker Setup
The project includes a fully configured Docker setup with:
- Frontend React application
- Backend server
- PostgreSQL database
- MinIO for S3-compatible storage
1. Clone the repository:
```bash
git clone https://github.com/yourusername/worklenz.git
cd worklenz
```
2. Start the Docker containers (choose one option):
**Using Docker Compose directly**
```bash
docker-compose up -d
```
3. The application will be available at:
- Frontend: http://localhost:5000
- Backend API: http://localhost:3000
- MinIO Console: http://localhost:9001 (login with minioadmin/minioadmin)
4. To stop the services:
```bash
docker-compose down
```
## Configuration
### Environment Variables
Worklenz requires several environment variables to be configured for proper operation. These include:
- Database credentials
- Session secrets
- Storage configuration (S3 or Azure)
- Authentication settings
Please refer to the `.env.example` files for a full list of required variables.
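For illustration, a minimal TypeScript sketch of a startup check for these variables — the variable names are taken from the backend `.env.example` in this commit, but the helper itself is hypothetical:
```typescript
// env-check.ts — hypothetical startup guard; names follow the backend .env.example
const REQUIRED_VARS = [
  "DB_USER", "DB_PASSWORD", "DB_NAME", "DB_HOST", "DB_PORT",
  "SESSION_SECRET", "COOKIE_SECRET", "STORAGE_PROVIDER",
] as const;

export function assertEnv(): void {
  const missing = REQUIRED_VARS.filter((name) => !process.env[name]);
  if (missing.length > 0) {
    // Fail fast so misconfiguration is caught before the server starts.
    throw new Error(`Missing required environment variables: ${missing.join(", ")}`);
  }
}
```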
### MinIO Integration
The project uses MinIO as an S3-compatible object storage service, which provides an open-source alternative to AWS S3 for development and production.
- **MinIO Console**: http://localhost:9001
- Username: minioadmin
- Password: minioadmin
- **Default Bucket**: worklenz-bucket (created automatically when the containers start)
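As a reference only (this snippet is not part of the codebase), an S3 client can be pointed at the local MinIO service with the AWS SDK v3 using the defaults listed above:
```typescript
import { S3Client, PutObjectCommand } from "@aws-sdk/client-s3";

// Illustrative only: targets the local MinIO container from docker-compose.
const s3 = new S3Client({
  region: "us-east-1",
  endpoint: "http://localhost:9000", // MinIO API port
  forcePathStyle: true,              // MinIO expects path-style bucket URLs
  credentials: { accessKeyId: "minioadmin", secretAccessKey: "minioadmin" },
});

async function smokeTest() {
  await s3.send(
    new PutObjectCommand({
      Bucket: "worklenz-bucket",
      Key: "hello.txt",
      Body: "Hello from Worklenz",
    })
  );
}

smokeTest().catch(console.error);
```
In production the same client would instead read the `AWS_*` and `S3_URL` values from the environment.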
### Security Considerations
For production deployments:
1. Use strong, unique passwords and keys for all services
2. Do not commit `.env` files to version control
3. Use a production-grade PostgreSQL setup with proper backup procedures
4. Enable HTTPS for all public endpoints
5. Review and update dependencies regularly
## Contributing
We welcome contributions from the community! If you'd like to contribute, please follow our [contributing guidelines](CONTRIBUTING.md).
## Security
If you believe you have found a security vulnerability in Worklenz, we encourage you to responsibly disclose this and not open a public issue. We will investigate all legitimate reports.
Email [your-security-email@example.com](mailto:your-security-email@example.com) to disclose any security vulnerabilities.
## License
This project is licensed under the [MIT License](LICENSE).
<h1 align="center">
<a href="https://worklenz.com" target="_blank" rel="noopener noreferrer">
<img src="https://app.worklenz.com/assets/icons/icon-144x144.png" alt="Worklenz Logo" width="75">
@@ -27,33 +196,6 @@
Worklenz is a project management tool designed to help organizations improve their efficiency. It provides a
comprehensive solution for managing projects, tasks, and collaboration within teams.
## Features
- **Project Planning**: Create and organize projects, assign tasks to team members.
- **Task Management**: Break down projects into smaller tasks, set due dates, priorities, and track progress.
- **Collaboration**: Share files, leave comments, and communicate seamlessly with your team members.
- **Time Tracking**: Monitor time spent on tasks and projects for better resource allocation and billing.
- **Reporting**: Generate detailed reports on project status, team workload, and performance metrics.
## Tech Stack
This repository contains the frontend and backend code for Worklenz.
- **Frontend**: Built using Angular, with [Ant Design of Angular](https://ng.ant.design/docs/introduce/en) as the UI
library..
- **Backend**: Built using a custom TypeScript implementation of ExpressJS, with PostgreSQL as the database, providing a
robust, scalable, and type-safe backend.
## Requirements
- Node.js version v18 or newer
- Postgres version v15.6
- Redis version v4.6.7 (not used yet. setup only.)
## Getting started with Worklenz.
- Containerized Installation - Use docker to deploy Worklenz in production or development environments.
- Manual installation - To get started with Worklenz, please follow this guide [worklenz setup guidelines](SETUP_THE_PROJECT.md).
## Screenshots
<p align="center">
@@ -107,13 +249,6 @@ This repository contains the frontend and backend code for Worklenz.
We welcome contributions from the community! If you'd like to contribute, please follow
our [contributing guidelines](CONTRIBUTING.md).
### Security
If you believe you have found a security vulnerability in Worklenz, we encourage you to responsibly disclose this and
not open a public issue. We will investigate all legitimate reports.
Email [info@worklenz.com](mailto:info@worklenz.com) to disclose any security vulnerabilities.
### License
Worklenz is open source and released under the [GNU Affero General Public License Version 3 (AGPLv3)](LICENSE).

View File

@@ -4,21 +4,20 @@ Getting started with development is a breeze! Follow these steps and you'll be c
## Requirements
- Node.js version v18 or newer - [Node.js](https://nodejs.org/en/download/current)
- Postgres version v15.6 - [PostgreSQL](https://www.postgresql.org/download/)
- Redis version v4.6.7 (not used yet. setup only.)
- Node.js version v16 or newer - [Node.js](https://nodejs.org/en/download/)
- PostgreSQL version v15 or newer - [PostgreSQL](https://www.postgresql.org/download/)
- S3-compatible storage (like MinIO) for file storage
## Prerequisites
- `$ npm install -g ts-node`
- `$ npm install -g typescript`
- `$ npm install -g grunt grunt-cli`
- `$ npm install -g typescript` (optional, but recommended)
## Installation
**Clone the repository:**
```bash
git clone https://github.com/Worklenz/worklenz.git
cd worklenz
```
### Frontend installation
@@ -32,13 +31,14 @@ Getting started with development is a breeze! Follow these steps and you'll be c
```bash
npm install
```
3. **Run the frontend:**
```bash
npm start
```
4. Navigate to [http://localhost:4200](http://localhost:4200)
4. Navigate to [http://localhost:5173](http://localhost:5173)
### Backend installation
@@ -54,13 +54,34 @@ Getting started with development is a breeze! Follow these steps and you'll be c
3. **Configure Environment Variables:**
- Create a copy of the `.env.template` file and name it `.env`.
- Update the required fields in `.env` with the specific information.
- Create a copy of the `.env.example` file and name it `.env`.
- Update the required fields in `.env` with your specific configuration.
4. **Restore Database**
4. **Set up Database**
- Create a new database named `worklenz_db` on your local PostgreSQL server.
- Update the `DATABASE_NAME` and `PASSWORD` in the `database/6_user_permission.sql` with your DB credentials.
- Open a query console and execute the queries from the .sql files in the `database` directories, following the provided order.
- Update the database connection details in your `.env` file.
- Execute the SQL setup files in the correct order:
```bash
# From your PostgreSQL client or command line
psql -U your_username -d worklenz_db -f database/sql/0_extensions.sql
psql -U your_username -d worklenz_db -f database/sql/1_tables.sql
psql -U your_username -d worklenz_db -f database/sql/indexes.sql
psql -U your_username -d worklenz_db -f database/sql/4_functions.sql
psql -U your_username -d worklenz_db -f database/sql/triggers.sql
psql -U your_username -d worklenz_db -f database/sql/3_views.sql
psql -U your_username -d worklenz_db -f database/sql/2_dml.sql
psql -U your_username -d worklenz_db -f database/sql/5_database_user.sql
```
Alternatively, you can use the provided shell script:
```bash
# Make sure the script is executable
chmod +x database/00-init-db.sh
# Run the script (may need modifications for local execution)
./database/00-init-db.sh
```
5. **Install Dependencies:**
@@ -68,48 +89,49 @@ Getting started with development is a breeze! Follow these steps and you'll be c
npm install
```
This command installs all the necessary libraries required to run the project.
6. **Run the Development Server:**
**a. Start the TypeScript compiler:**
Open a new terminal window and run the following command:
```bash
grunt dev
```
This starts the `grunt` task runner, which compiles TypeScript code into JavaScript.
**b. Start the development server:**
Open another separate terminal window and run the following command:
```bash
npm start
npm run dev
```
This starts the development server allowing you to work on the project.
7. **Run the Production Server:**
**a. Compile TypeScript to JavaScript:**
Open a new terminal window and run the following command:
**a. Build the project:**
```bash
grunt build
npm run build
```
This starts the `grunt` task runner, which compiles TypeScript code into JavaScript for production use.
This will compile the TypeScript code into JavaScript for production use.
**b. Start the production server:**
Once the compilation is complete, run the following command in the same terminal window:
```bash
npm start
```
This starts the production server for your application.
## Docker Setup (Alternative)
For an easier setup, you can use Docker and Docker Compose:
1. Make sure you have Docker and Docker Compose installed on your system.
2. From the root directory, run:
```bash
docker-compose up -d
```
3. Access the application:
- Frontend: http://localhost:5000
- Backend API: http://localhost:3000
- MinIO Console: http://localhost:9001 (login with minioadmin/minioadmin)
4. To stop the services:
```bash
docker-compose down
```

View File

@@ -1,6 +1,6 @@
services:
frontend:
image: docker.io/kithceydigital/worklenz_frontend:latest
image: docker.io/chamikajaycey/worklenz-frontend:latest
container_name: worklenz_frontend
ports:
- "5000:5000"
@@ -11,7 +11,7 @@ services:
- worklenz
backend:
image: docker.io/kithceydigital/worklenz_backend:react
image: docker.io/chamikajaycey/worklenz-backend:latest
container_name: worklenz_backend
ports:
- "3000:3000"
@@ -19,7 +19,7 @@ services:
db:
condition: service_healthy
minio:
condition: service_healthy
condition: service_started
environment:
- AWS_REGION=${AWS_REGION:-us-east-1}
- BACKEND_PUBLIC_DIR
@@ -65,12 +65,6 @@ services:
volumes:
- worklenz_minio_data:/data
command: server /data --console-address ":9001"
healthcheck:
test: ["CMD", "curl", "-f", "http://localhost:9000/minio/health/live"]
interval: 10s
timeout: 5s
retries: 3
start_period: 10s
networks:
- worklenz
@@ -79,14 +73,26 @@ services:
image: minio/mc
container_name: worklenz_createbuckets
depends_on:
minio:
condition: service_healthy
- minio
entrypoint: >
/bin/sh -c "
/usr/bin/mc config host add myminio http://minio:9000 minioadmin minioadmin;
# Wait for MinIO to be available
echo 'Waiting for MinIO to start...'
sleep 15;
# Retry up to 5 times
for i in 1 2 3 4 5; do
echo \"Attempt $$i to connect to MinIO...\"
if /usr/bin/mc config host add myminio http://minio:9000 minioadmin minioadmin; then
echo \"Successfully connected to MinIO!\"
/usr/bin/mc mb --ignore-existing myminio/worklenz-bucket;
/usr/bin/mc policy set public myminio/worklenz-bucket;
exit 0;
fi
echo \"Connection failed, retrying in 5 seconds...\"
sleep 5;
done
echo \"Failed to connect to MinIO after 5 attempts\"
exit 1;
"
networks:
- worklenz

start.bat
View File

@@ -1,4 +1,6 @@
@echo off
echo Starting Worklenz setup... > worklenz_startup.log
echo %DATE% %TIME% >> worklenz_startup.log
echo.
echo " __ __ _ _"
echo " \ \ / / | | | |"
@@ -12,58 +14,174 @@ echo.
echo Starting Worklenz Docker Environment...
echo.
REM Check for Docker installation
echo Checking for Docker installation...
where docker >nul 2>>worklenz_startup.log
IF %ERRORLEVEL% NEQ 0 (
echo [91mWarning: Docker is not installed or not in PATH[0m
echo Warning: Docker is not installed or not in PATH >> worklenz_startup.log
echo Please install Docker first: https://docs.docker.com/get-docker/
echo [93mContinuing for debugging purposes...[0m
) ELSE (
echo [92m^✓[0m Docker is installed
echo Docker is installed >> worklenz_startup.log
)
REM Check for docker-compose installation
echo Checking for docker-compose...
where docker-compose >nul 2>>worklenz_startup.log
IF %ERRORLEVEL% NEQ 0 (
echo [91mWarning: docker-compose is not installed or not in PATH[0m
echo Warning: docker-compose is not installed or not in PATH >> worklenz_startup.log
echo [93mContinuing for debugging purposes...[0m
) ELSE (
echo [92m^✓[0m docker-compose is installed
echo docker-compose is installed >> worklenz_startup.log
)
REM Run preflight checks
echo Running Docker daemon check...
docker info >nul 2>>worklenz_startup.log
IF %ERRORLEVEL% NEQ 0 (
echo [91mWarning: Docker daemon is not running[0m
echo Warning: Docker daemon is not running >> worklenz_startup.log
echo Please start Docker and try again
echo [93mContinuing for debugging purposes...[0m
) ELSE (
echo [92m^✓[0m Docker daemon is running
echo Docker daemon is running >> worklenz_startup.log
)
REM Check if .env file exists
IF NOT EXIST .env (
echo Warning: .env file not found. Using default configuration.
echo Warning: .env file not found. Using default configuration. >> worklenz_startup.log
IF EXIST .env.example (
copy .env.example .env
echo Created .env file from .env.example
echo Created .env file from .env.example >> worklenz_startup.log
)
)
REM Stop any running containers
docker-compose down
echo Stopping any running containers...
docker-compose down > nul 2>>worklenz_startup.log
IF %ERRORLEVEL% NEQ 0 (
echo [91mWarning: Error stopping containers[0m
echo Warning: Error stopping containers >> worklenz_startup.log
echo [93mContinuing anyway...[0m
)
REM Start the containers
docker-compose up -d
echo Starting containers...
echo Attempting to start containers... >> worklenz_startup.log
REM Start with docker-compose
docker-compose up -d > docker_up_output.txt 2>&1
type docker_up_output.txt >> worklenz_startup.log
REM Check for errors in output
findstr /C:"Error" docker_up_output.txt > nul
IF %ERRORLEVEL% EQU 0 (
echo [91mErrors detected during startup[0m
echo Errors detected during startup >> worklenz_startup.log
type docker_up_output.txt
)
del docker_up_output.txt > nul 2>&1
REM Wait for services to be ready
echo Waiting for services to start...
timeout /t 5 /nobreak > nul
timeout /t 10 /nobreak > nul
echo After timeout, checking services >> worklenz_startup.log
REM Check if services are running
docker ps | findstr "worklenz_frontend" > nul
REM Check service status using docker-compose
echo Checking service status...
echo Checking service status... >> worklenz_startup.log
docker-compose ps --services --filter "status=running" > running_services.txt 2>>worklenz_startup.log
REM Log services output
type running_services.txt >> worklenz_startup.log
echo.
echo Checking individual services:
echo Checking individual services: >> worklenz_startup.log
REM Check frontend
findstr /C:"frontend" running_services.txt > nul
IF %ERRORLEVEL% EQU 0 (
echo [92m^✓[0m Frontend is running
echo Frontend URL: http://localhost:5000
echo Frontend is running >> worklenz_startup.log
) ELSE (
echo [91m^✗[0m Frontend service failed to start
echo Frontend service failed to start >> worklenz_startup.log
)
docker ps | findstr "worklenz_backend" > nul
REM Check backend
findstr /C:"backend" running_services.txt > nul
IF %ERRORLEVEL% EQU 0 (
echo [92m^✓[0m Backend is running
echo Backend URL: http://localhost:3000
echo Backend is running >> worklenz_startup.log
) ELSE (
echo [91m^✗[0m Backend service failed to start
echo Backend service failed to start >> worklenz_startup.log
)
docker ps | findstr "worklenz_minio" > nul
REM Check MinIO
findstr /C:"minio" running_services.txt > nul
IF %ERRORLEVEL% EQU 0 (
echo [92m^✓[0m MinIO is running
echo MinIO Console URL: http://localhost:9001 (login: minioadmin/minioadmin)
echo MinIO is running >> worklenz_startup.log
) ELSE (
echo [91m^✗[0m MinIO service failed to start
echo MinIO service failed to start >> worklenz_startup.log
REM Check MinIO logs
echo Checking MinIO logs for errors:
docker-compose logs minio --tail=20 > minio_logs.txt
type minio_logs.txt
type minio_logs.txt >> worklenz_startup.log
del minio_logs.txt > nul 2>&1
)
docker ps | findstr "worklenz_db" > nul
REM Check Database
findstr /C:"db" running_services.txt > nul
IF %ERRORLEVEL% EQU 0 (
echo [92m^✓[0m Database is running
echo Database is running >> worklenz_startup.log
) ELSE (
echo [91m^✗[0m Database service failed to start
echo Database service failed to start >> worklenz_startup.log
)
del running_services.txt > nul 2>&1
REM Check if all services are running
set allRunning=1
docker-compose ps --services | findstr /V /C:"frontend" /C:"backend" /C:"minio" /C:"db" > remaining_services.txt
FOR /F "tokens=*" %%s IN (remaining_services.txt) DO (
findstr /C:"%%s" running_services.txt > nul || set allRunning=0
)
del remaining_services.txt > nul 2>&1
IF %allRunning% EQU 1 (
echo.
echo [92mWorklenz is now running![0m
echo [92mWorklenz setup completed![0m
echo Setup completed successfully >> worklenz_startup.log
) ELSE (
echo.
echo [93mWarning: Some services may not be running correctly.[0m
echo Warning: Some services may not be running correctly >> worklenz_startup.log
echo Run 'docker-compose logs' to check for errors.
)
echo You can access the application at: http://localhost:5000
echo To stop the services, run: stop.bat
echo.
echo For any errors, check worklenz_startup.log file
echo.
echo Press any key to exit...
pause > nul

View File

@@ -3,6 +3,7 @@
# Colors for terminal output
GREEN='\033[0;32m'
YELLOW='\033[1;33m'
RED='\033[0;31m'
NC='\033[0m' # No Color
# Print banner
@@ -30,11 +31,20 @@ fi
# Check if Docker is installed
if ! command -v docker &> /dev/null; then
echo "Error: Docker is not installed or not in PATH"
echo -e "${RED}Error: Docker is not installed or not in PATH${NC}"
echo "Please install Docker first: https://docs.docker.com/get-docker/"
exit 1
fi
# Check if Docker daemon is running
echo "Running preflight checks..."
if ! docker info &> /dev/null; then
echo -e "${RED}Error: Docker daemon is not running${NC}"
echo "Please start Docker and try again"
exit 1
fi
echo -e "${GREEN}${NC} Docker is running"
# Check if Docker Compose is installed
if ! command -v docker compose &> /dev/null; then
echo "Warning: Docker Compose V2 not found, trying docker-compose command..."

View File

@@ -2,66 +2,75 @@
NODE_ENV=development
PORT=3000
SESSION_NAME=worklenz.sid
SESSION_SECRET="your-session-secret"
COOKIE_SECRET="your-cookie-secret"
SESSION_SECRET="your_session_secret"
COOKIE_SECRET="your_cookie_secret"
# CORS
SOCKET_IO_CORS=http://localhost:4200
SOCKET_IO_CORS=http://localhost:5000
SERVER_CORS=*
# Database
DB_USER=postgres
DB_PASSWORD=password
DB_PASSWORD=your_db_password
DB_NAME=worklenz_db
DB_HOST=localhost
DB_PORT=5432
DB_MAX_CLIENTS=50
# Google Login
GOOGLE_CLIENT_ID="client_id"
GOOGLE_CLIENT_SECRET="client_secret"
GOOGLE_CLIENT_ID="your_google_client_id"
GOOGLE_CLIENT_SECRET="your_google_client_secret"
GOOGLE_CALLBACK_URL="http://localhost:3000/secure/google/verify"
LOGIN_FAILURE_REDIRECT="/"
LOGIN_SUCCESS_REDIRECT="http://localhost:4200/auth/authenticate"
LOGIN_SUCCESS_REDIRECT="http://localhost:5000/auth/authenticate"
# SENDGRID
SENDGRID_API_KEY="your-sendgrid-api-key"
EMAIL_NOTIFICATIONS=your-email@example.com
# CLI
ANGULAR_DIST_DIR="path/to/frontend/dist"
ANGULAR_SRC_DIR="path/to/frontend"
BACKEND_PUBLIC_DIR="path/to/backend/public"
BACKEND_VIEWS_DIR="path/to/backend/views"
COMMIT_BUILD_IMMEDIATELY=false
# HOST
HOSTNAME=localhost:4200
HOSTNAME=localhost:5000
SLACK_WEBHOOK=your-slack-webhook-url
USE_PG_NATIVE=true
# SLACK
SLACK_WEBHOOK=your_slack_webhook_url
USE_PG_NATIVE=false
# JWT SECRET
JWT_SECRET=your-jwt-secret
JWT_SECRET=your_jwt_secret
# FRONTEND_URL
FRONTEND_URL=https://example.com/
# AWS
AWS_REGION="us-west-2"
AWS_ACCESS_KEY_ID="YOUR_AWS_ACCESS_KEY_ID"
AWS_SECRET_ACCESS_KEY="YOUR_AWS_SECRET_ACCESS_KEY"
AWS_BUCKET="your-s3-bucket"
S3_URL="https://s3.your-region.amazonaws.com/your-bucket"
FRONTEND_URL=http://localhost:5000
# STORAGE
STORAGE_PROVIDER=s3 # s3 or azure
AZURE_STORAGE_ACCOUNT_NAME=yourstorageaccount
AZURE_STORAGE_CONTAINER=yourcontainer
AZURE_STORAGE_ACCOUNT_KEY=yourstorageaccountkey
AZURE_STORAGE_URL=https://yourstorageaccount.blob.core.windows.net
STORAGE_PROVIDER=s3 # values s3 or azure
# AWS
AWS_REGION="your_aws_region"
AWS_ACCESS_KEY_ID="your_aws_access_key_id"
AWS_SECRET_ACCESS_KEY="your_aws_secret_access_key"
AWS_BUCKET="your_s3_bucket"
S3_URL="your_s3_url"
# Azure Storage
AZURE_STORAGE_ACCOUNT_NAME="your_storage_account_name"
AZURE_STORAGE_CONTAINER="your_storage_container"
AZURE_STORAGE_ACCOUNT_KEY="your_storage_account_key"
AZURE_STORAGE_URL="your_storage_url"
# DIRECTPAY
DP_STAGE=DEV
DP_URL=https://dev.directpay.lk/v1/mpg/api/external/cardManagement
DP_MERCHANT_ID=YOUR_MERCHANT_ID
DP_SECRET_KEY=YOUR_SECRET_KEY
DP_API_KEY=YOUR_API_KEY
DP_URL=your_url
DP_MERCHANT_ID=your_merchant_id
DP_SECRET_KEY=your_secret_key
DP_API_KEY=your_api_key
CONTACT_US_EMAIL=support@example.com
GOOGLE_CAPTCHA_SECRET_KEY=YOUR_SECRET_KEY
GOOGLE_CAPTCHA_SECRET_KEY=your_captcha_secret_key
GOOGLE_CAPTCHA_PASS_SCORE=0.8
# Email Cronjobs
ENABLE_EMAIL_CRONJOBS=true

View File

@@ -2,74 +2,75 @@
NODE_ENV=development
PORT=3000
SESSION_NAME=worklenz.sid
SESSION_SECRET="YOUR_SESSION_SECRET_HERE"
COOKIE_SECRET="YOUR_COOKIE_SECRET_HERE"
SESSION_SECRET="your_session_secret"
COOKIE_SECRET="your_cookie_secret"
# CORS
SOCKET_IO_CORS=http://localhost:4200
SOCKET_IO_CORS=http://localhost:5000
SERVER_CORS=*
# Database
DB_USER=DATABASE_USER_HERE # default : worklenz_backend (update "user-permission.sql" if needed)
DB_PASSWORD=DATABASE_PASSWORD_HERE
DB_NAME=DATABASE_NAME_HERE # default : worklenz_db
DB_HOST=DATABASE_HOST_HERE # default : localhost
DB_PORT=DATABASE_PORT_HERE # default : 5432
DB_USER=postgres
DB_PASSWORD=your_db_password
DB_NAME=worklenz_db
DB_HOST=localhost
DB_PORT=5432
DB_MAX_CLIENTS=50
# Google Login
GOOGLE_CLIENT_ID="GOOGLE_CLIENT_ID_HERE"
GOOGLE_CLIENT_SECRET="GOOGLE_CLIENT_SECRET_HERE"
GOOGLE_CLIENT_ID="your_google_client_id"
GOOGLE_CLIENT_SECRET="your_google_client_secret"
GOOGLE_CALLBACK_URL="http://localhost:3000/secure/google/verify"
LOGIN_FAILURE_REDIRECT="/"
LOGIN_SUCCESS_REDIRECT="http://localhost:4200/auth/authenticate"
LOGIN_SUCCESS_REDIRECT="http://localhost:5000/auth/authenticate"
# CLI
ANGULAR_DIST_DIR="/path/worklenz_frontend/dist/worklenz"
ANGULAR_SRC_DIR="/path/worklenz_frontend"
BACKEND_PUBLIC_DIR="/path/worklenz_backend/src/public"
BACKEND_VIEWS_DIR="/path/worklenz_backend/src/views/admin"
COMMIT_BUILD_IMMEDIATELY=true
ANGULAR_DIST_DIR="path/to/frontend/dist"
ANGULAR_SRC_DIR="path/to/frontend"
BACKEND_PUBLIC_DIR="path/to/backend/public"
BACKEND_VIEWS_DIR="path/to/backend/views"
COMMIT_BUILD_IMMEDIATELY=false
# HOST
HOSTNAME=localhost:4200
HOSTNAME=localhost:5000
# SLACK
SLACK_WEBHOOK=SLACK_WEBHOOK_HERE
SLACK_WEBHOOK=your_slack_webhook_url
USE_PG_NATIVE=false
# JWT SECRET
JWT_SECRET=JWT_SECRET_HERE
JWT_SECRET=your_jwt_secret
# FRONTEND_URL
FRONTEND_URL=FRONTEND_URL_HERE
FRONTEND_URL=http://localhost:5000
# STORAGE
STORAGE_PROVIDER=STORAGE_PROVIDER_HERE # values s3 or azure, if s3 is selected, then the following AWS credentials are required. if azure is selected, then the following Azure credentials are required.
STORAGE_PROVIDER=s3 # values s3 or azure
# AWS
AWS_REGION="AWS_REGION_HERE"
AWS_ACCESS_KEY_ID="AWS_ACCESS_KEY_ID_HERE"
AWS_SECRET_ACCESS_KEY="AWS_SECRET_ACCESS_KEY_HERE"
AWS_BUCKET="AWS_BUCKET_HERE"
S3_URL="S3_URL_HERE"
AWS_REGION="your_aws_region"
AWS_ACCESS_KEY_ID="your_aws_access_key_id"
AWS_SECRET_ACCESS_KEY="your_aws_secret_access_key"
AWS_BUCKET="your_s3_bucket"
S3_URL="your_s3_url"
# STORAGE
AZURE_STORAGE_ACCOUNT_NAME="AZURE_STORAGE_ACCOUNT_NAME_HERE"
AZURE_STORAGE_CONTAINER="AZURE_STORAGE_CONTAINER_HERE"
AZURE_STORAGE_ACCOUNT_KEY="AZURE_STORAGE_ACCOUNT_KEY_HERE"
AZURE_STORAGE_URL="AZURE_STORAGE_URL_HERE"
# Azure Storage
AZURE_STORAGE_ACCOUNT_NAME="your_storage_account_name"
AZURE_STORAGE_CONTAINER="your_storage_container"
AZURE_STORAGE_ACCOUNT_KEY="your_storage_account_key"
AZURE_STORAGE_URL="your_storage_url"
# DIRECTPAY
DP_STAGE=DP_STAGE_HERE #DEV or
DP_URL=DP_URL_HERE
DP_MERCHANT_ID=DP_MERCHANT_ID_HERE
DP_SECRET_KEY=DP_SECRET_KEY_HERE
DP_API_KEY=DP_API_KEY_HERE
DP_STAGE=DEV
DP_URL=your_url
DP_MERCHANT_ID=your_merchant_id
DP_SECRET_KEY=your_secret_key
DP_API_KEY=your_api_key
CONTACT_US_EMAIL=CONTACT_US_EMAIL_HERE
CONTACT_US_EMAIL=support@example.com
GOOGLE_CAPTCHA_SECRET_KEY=GOOGLE_CAPTCHA_SECRET_KEY_HERE
GOOGLE_CAPTCHA_SECRET_KEY=your_captcha_secret_key
GOOGLE_CAPTCHA_PASS_SCORE=0.8
# Email Cronjobs
ENABLE_EMAIL_CRONJOBS=true

View File

@@ -1,5 +1,5 @@
# Use the official Node.js 18 image as a base
FROM node:18
# Use the official Node.js 20 image as a base
FROM node:20
# Create and set the working directory
WORKDIR /usr/src/app

View File

@@ -1,81 +1,96 @@
# Worklenz Backend
1. **Open your IDE:**
This is the Express.js backend for the Worklenz project management application.
Open the project directory in your preferred code editor or IDE like Visual Studio Code.
## Getting Started
2. **Configure Environment Variables:**
Follow these steps to set up the backend for development:
- Create a copy of the `.env.template` file and name it `.env`.
- Update the required fields in `.env` with the specific information.
1. **Configure Environment Variables:**
3. **Restore Database**
- Create a copy of the `.env.example` file and name it `.env`.
- Update the required fields in `.env` with your specific configuration.
2. **Set up Database:**
- Create a new database named `worklenz_db` on your local PostgreSQL server.
- Update the `DATABASE_NAME` and `PASSWORD` in the `database/6_user_permission.sql` with your DB credentials.
- Open a query console and execute the queries from the .sql files in the `database` directories, following the provided order.
- Update the database connection details in your `.env` file.
- Execute the SQL setup files in the correct order:
4. **Install Dependencies:**
```bash
# From your PostgreSQL client or command line
psql -U your_username -d worklenz_db -f database/sql/0_extensions.sql
psql -U your_username -d worklenz_db -f database/sql/1_tables.sql
psql -U your_username -d worklenz_db -f database/sql/indexes.sql
psql -U your_username -d worklenz_db -f database/sql/4_functions.sql
psql -U your_username -d worklenz_db -f database/sql/triggers.sql
psql -U your_username -d worklenz_db -f database/sql/3_views.sql
psql -U your_username -d worklenz_db -f database/sql/2_dml.sql
psql -U your_username -d worklenz_db -f database/sql/5_database_user.sql
```
Alternatively, you can use the provided shell script:
```bash
# Make sure the script is executable
chmod +x database/00-init-db.sh
# Run the script (may need modifications for local execution)
./database/00-init-db.sh
```
3. **Install Dependencies:**
```bash
npm install
```
This command installs all the necessary libraries required to run the project.
5. **Run the Development Server:**
**a. Start the TypeScript compiler:**
Open a new terminal window and run the following command:
4. **Run the Development Server:**
```bash
grunt dev
npm run dev
```
This starts the `grunt` task runner, which compiles TypeScript code into JavaScript.
This starts the development server with hot reloading enabled.
**b. Start the development server:**
5. **Build for Production:**
Open another separate terminal window and run the following command:
```bash
npm run build
```
This will compile the TypeScript code into JavaScript for production use.
6. **Start Production Server:**
```bash
npm start
```
This starts the development server allowing you to work on the project.
## API Documentation
6. **Run the Production Server:**
The API endpoints are organized into logical controllers and follow RESTful design principles. The main API routes are prefixed with `/api/v1`.
**a. Compile TypeScript to JavaScript:**
### Authentication
Open a new terminal window and run the following command:
Authentication is handled via JWT tokens. Protected routes require a valid token in the Authorization header.
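A minimal sketch of such a check in Express, assuming the `jsonwebtoken` package and the `JWT_SECRET` variable from the `.env` file (illustrative, not the project's actual middleware):
```typescript
import { Request, Response, NextFunction } from "express";
import jwt from "jsonwebtoken";

// Illustrative middleware: rejects requests without a valid Bearer token.
export function requireJwt(req: Request, res: Response, next: NextFunction) {
  const header = req.headers.authorization ?? "";
  const token = header.startsWith("Bearer ") ? header.slice(7) : null;
  if (!token) return res.status(401).json({ error: "Missing token" });

  try {
    (req as any).user = jwt.verify(token, process.env.JWT_SECRET as string);
    next();
  } catch {
    res.status(401).json({ error: "Invalid or expired token" });
  }
}
```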
### File Storage
The application supports both S3-compatible storage and Azure Blob Storage for file uploads. Configure your preferred storage option in the `.env` file.
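One possible shape for that switch, sketched here with hypothetical class names (a real implementation would wrap `@aws-sdk/client-s3` and `@azure/storage-blob`):
```typescript
// Hypothetical sketch of selecting a backend from STORAGE_PROVIDER (s3 or azure).
interface StorageProvider {
  upload(key: string, body: Buffer): Promise<void>;
}

class S3Provider implements StorageProvider {
  async upload(key: string, body: Buffer): Promise<void> {
    console.log(`[s3] would upload ${key} (${body.length} bytes)`);
  }
}

class AzureBlobProvider implements StorageProvider {
  async upload(key: string, body: Buffer): Promise<void> {
    console.log(`[azure] would upload ${key} (${body.length} bytes)`);
  }
}

export function createStorageProvider(): StorageProvider {
  return (process.env.STORAGE_PROVIDER ?? "s3") === "azure"
    ? new AzureBlobProvider()
    : new S3Provider();
}
```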
## Development Guidelines
- Code should be written in TypeScript
- Follow the established patterns for controllers, services, and middlewares
- Add proper error handling for all API endpoints (a minimal sketch follows this list)
- Write unit tests for critical functionality
- Document API endpoints with clear descriptions and examples
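As a hedged illustration of the error-handling guideline above (not the project's actual handler), a catch-all Express error middleware might look like:
```typescript
import { Request, Response, NextFunction } from "express";

// Illustrative catch-all handler, registered after all routes.
export function errorHandler(err: unknown, _req: Request, res: Response, _next: NextFunction) {
  const message = err instanceof Error ? err.message : "Unexpected error";
  console.error("API error:", message);
  res.status(500).json({ done: false, message });
}
```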
## Running Tests
```bash
grunt build
npm test
```
This starts the `grunt` task runner, which compiles TypeScript code into JavaScript for production use.
## Docker Support
**b. Start the production server:**
Once the compilation is complete, run the following command in the same terminal window:
```bash
npm start
```
This starts the production server for your application.
### CLI
- Create controller: `$ node new controller Test`
- Create angular release: `$ node new release`
### Developement Rules
- Controllers should only generate/create using the CLI (`node new controller Projects`)
- Validations should only be done using a middleware placed under src/validators/ and used inside the routers (E.g., api-router.ts)
- Validators should only generate/create using the CLI (`node new vaidator projects-params`)
## Pull submodules
- git submodule update --init --recursive
The backend can be run in a Docker container. See the main project README for Docker setup instructions.

View File

@@ -0,0 +1,55 @@
#!/bin/bash
set -e
# This script controls the order of SQL file execution during database initialization
echo "Starting database initialization..."
# Check if we have SQL files in expected locations
if [ -f "/docker-entrypoint-initdb.d/sql/0_extensions.sql" ]; then
SQL_DIR="/docker-entrypoint-initdb.d/sql"
echo "Using SQL files from sql/ subdirectory"
elif [ -f "/docker-entrypoint-initdb.d/0_extensions.sql" ]; then
# First time setup - move files to subdirectory
echo "Moving SQL files to sql/ subdirectory..."
mkdir -p /docker-entrypoint-initdb.d/sql
# Move all SQL files (except this script) to the subdirectory
for f in /docker-entrypoint-initdb.d/*.sql; do
if [ -f "$f" ]; then
cp "$f" /docker-entrypoint-initdb.d/sql/
echo "Copied $f to sql/ subdirectory"
fi
done
SQL_DIR="/docker-entrypoint-initdb.d/sql"
else
echo "SQL files not found in expected locations!"
exit 1
fi
# Execute SQL files in the correct order
echo "Executing 0_extensions.sql..."
psql -v ON_ERROR_STOP=1 --username "$POSTGRES_USER" --dbname "$POSTGRES_DB" -f "$SQL_DIR/0_extensions.sql"
echo "Executing 1_tables.sql..."
psql -v ON_ERROR_STOP=1 --username "$POSTGRES_USER" --dbname "$POSTGRES_DB" -f "$SQL_DIR/1_tables.sql"
echo "Executing indexes.sql..."
psql -v ON_ERROR_STOP=1 --username "$POSTGRES_USER" --dbname "$POSTGRES_DB" -f "$SQL_DIR/indexes.sql"
echo "Executing 4_functions.sql..."
psql -v ON_ERROR_STOP=1 --username "$POSTGRES_USER" --dbname "$POSTGRES_DB" -f "$SQL_DIR/4_functions.sql"
echo "Executing triggers.sql..."
psql -v ON_ERROR_STOP=1 --username "$POSTGRES_USER" --dbname "$POSTGRES_DB" -f "$SQL_DIR/triggers.sql"
echo "Executing 3_views.sql..."
psql -v ON_ERROR_STOP=1 --username "$POSTGRES_USER" --dbname "$POSTGRES_DB" -f "$SQL_DIR/3_views.sql"
echo "Executing 2_dml.sql..."
psql -v ON_ERROR_STOP=1 --username "$POSTGRES_USER" --dbname "$POSTGRES_DB" -f "$SQL_DIR/2_dml.sql"
echo "Executing 5_database_user.sql..."
psql -v ON_ERROR_STOP=1 --username "$POSTGRES_USER" --dbname "$POSTGRES_DB" -f "$SQL_DIR/5_database_user.sql"
echo "Database initialization completed successfully"

View File

@@ -1 +1,36 @@
All database DDLs, DMLs and migrations relates to the application should be stored here as well.
# Worklenz Database
## Directory Structure
- `sql/` - Contains all SQL files needed for database initialization
- `migrations/` - Contains database migration scripts
- `00-init-db.sh` - Initialization script that executes SQL files in the correct order
## SQL File Execution Order
The database initialization files should be executed in the following order:
1. `sql/0_extensions.sql` - PostgreSQL extensions
2. `sql/1_tables.sql` - Table definitions and constraints
3. `sql/indexes.sql` - All database indexes
4. `sql/4_functions.sql` - Database functions
5. `sql/triggers.sql` - Database triggers
6. `sql/3_views.sql` - Database views
7. `sql/2_dml.sql` - Data Manipulation Language statements (inserts, updates)
8. `sql/5_database_user.sql` - Database user setup
## Docker-based Setup
In the Docker environment, we use a shell script called `00-init-db.sh` to control the SQL file execution order:
1. The shell script creates a `sql/` subdirectory if it doesn't exist
2. It copies all .sql files into this subdirectory
3. It executes the SQL files from the subdirectory in the correct order
This approach prevents the SQL files from being executed twice by Docker's automatic initialization mechanism, which would cause errors for objects that already exist.
## Manual Setup
If you're setting up the database manually, please follow the execution order listed above. Ensure your SQL files are in the `sql/` subdirectory before executing the script.
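For illustration only, the same ordering could be scripted with the `pg` client; the file list matches the order above, but this runner is hypothetical and not part of the repository:
```typescript
import { readFileSync } from "fs";
import { Client } from "pg";

// Hypothetical runner: executes the SQL files in the documented order.
const files = [
  "sql/0_extensions.sql", "sql/1_tables.sql", "sql/indexes.sql",
  "sql/4_functions.sql", "sql/triggers.sql", "sql/3_views.sql",
  "sql/2_dml.sql", "sql/5_database_user.sql",
];

async function initDb() {
  // Connection details (user, password, host, port) are read from PG* environment variables.
  const client = new Client({ database: "worklenz_db" });
  await client.connect();
  try {
    for (const file of files) {
      console.log(`Executing ${file}...`);
      await client.query(readFileSync(file, "utf8"));
    }
  } finally {
    await client.end();
  }
}

initDb().catch((err) => {
  console.error("Database initialization failed:", err);
  process.exit(1);
});
```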

View File

@@ -0,0 +1,3 @@
-- Extensions
CREATE EXTENSION IF NOT EXISTS "uuid-ossp";
CREATE EXTENSION IF NOT EXISTS "unaccent";

View File

@@ -1,7 +1,3 @@
-- Extensions
CREATE EXTENSION IF NOT EXISTS "uuid-ossp";
CREATE EXTENSION IF NOT EXISTS "unaccent";
-- Domains
CREATE DOMAIN WL_HEX_COLOR AS TEXT CHECK (value ~* '^#([0-9a-fA-F]{3}|[0-9a-fA-F]{6})$');
CREATE DOMAIN WL_EMAIL AS TEXT CHECK (value ~* '^[A-Za-z0-9._%+-]+@[A-Za-z0-9.-]+\.[A-Za-z]{2,}$');
@@ -18,7 +14,27 @@ CREATE TYPE SCHEDULE_TYPE AS ENUM ('daily', 'weekly', 'yearly', 'monthly', 'ever
CREATE TYPE LANGUAGE_TYPE AS ENUM ('en', 'es', 'pt');
-- START: Users
CREATE SEQUENCE IF NOT EXISTS users_user_no_seq START 1;
-- Utility and referenced tables
-- Create sessions table for connect-pg-simple session store
CREATE TABLE IF NOT EXISTS pg_sessions (
sid VARCHAR NOT NULL PRIMARY KEY,
sess JSON NOT NULL,
expire TIMESTAMP(6) NOT NULL
);
CREATE TABLE IF NOT EXISTS project_access_levels (
id UUID DEFAULT uuid_generate_v4() NOT NULL,
name TEXT NOT NULL,
key TEXT NOT NULL
);
ALTER TABLE project_access_levels
ADD CONSTRAINT project_access_levels_pk
PRIMARY KEY (id);
CREATE TABLE IF NOT EXISTS countries (
id UUID DEFAULT uuid_generate_v4() NOT NULL,
code CHAR(2) NOT NULL,
@@ -40,7 +56,6 @@ ALTER TABLE permissions
ADD CONSTRAINT permissions_pk
PRIMARY KEY (id);
-- Tables that reference utility tables
CREATE TABLE IF NOT EXISTS archived_projects (
user_id UUID NOT NULL,
project_id UUID NOT NULL
@@ -77,7 +92,6 @@ ALTER TABLE clients
ADD CONSTRAINT clients_name_check
CHECK (CHAR_LENGTH(name) <= 60);
-- Remaining tables
CREATE TABLE IF NOT EXISTS cpt_phases (
id UUID DEFAULT uuid_generate_v4() NOT NULL,
name TEXT NOT NULL,
@@ -232,11 +246,6 @@ ALTER TABLE email_invitations
ADD CONSTRAINT email_invitations_pk
PRIMARY KEY (id);
CREATE TRIGGER email_invitations_email_lower
BEFORE INSERT OR UPDATE
ON email_invitations
EXECUTE PROCEDURE lower_email();
CREATE TABLE IF NOT EXISTS favorite_projects (
user_id UUID NOT NULL,
project_id UUID NOT NULL
@@ -260,6 +269,35 @@ ALTER TABLE job_titles
ADD CONSTRAINT job_titles_name_check
CHECK (CHAR_LENGTH(name) <= 55);
CREATE TABLE IF NOT EXISTS licensing_admin_users (
id UUID DEFAULT uuid_generate_v4() NOT NULL,
name TEXT NOT NULL,
username TEXT NOT NULL,
phone_no TEXT NOT NULL,
otp TEXT,
otp_expiry TIMESTAMP WITH TIME ZONE,
active BOOLEAN DEFAULT TRUE NOT NULL
);
ALTER TABLE licensing_admin_users
ADD CONSTRAINT licensing_admin_users_id_pk
PRIMARY KEY (id);
CREATE TABLE IF NOT EXISTS licensing_app_sumo_batches (
id UUID DEFAULT uuid_generate_v4() NOT NULL,
name TEXT NOT NULL,
created_at TIMESTAMP WITH TIME ZONE DEFAULT CURRENT_TIMESTAMP,
created_by UUID NOT NULL
);
ALTER TABLE licensing_app_sumo_batches
ADD CONSTRAINT licensing_app_sumo_batches_pk
PRIMARY KEY (id);
ALTER TABLE licensing_app_sumo_batches
ADD CONSTRAINT licensing_app_sumo_batches_created_by_fk
FOREIGN KEY (created_by) REFERENCES licensing_admin_users;
CREATE TABLE IF NOT EXISTS licensing_coupon_codes (
id UUID DEFAULT uuid_generate_v4() NOT NULL,
coupon_code TEXT NOT NULL,
@@ -283,11 +321,6 @@ ALTER TABLE licensing_coupon_codes
ADD CONSTRAINT licensing_coupon_codes_pk
PRIMARY KEY (id);
ALTER TABLE licensing_coupon_codes
ADD CONSTRAINT licensing_coupon_codes_app_sumo_batches__fk
FOREIGN KEY (batch_id) REFERENCES licensing_app_sumo_batches
ON DELETE CASCADE;
ALTER TABLE licensing_coupon_codes
ADD CONSTRAINT licensing_coupon_codes_created_by_fk
FOREIGN KEY (created_by) REFERENCES licensing_admin_users;
@@ -1466,33 +1499,6 @@ ALTER TABLE tasks
ADD CONSTRAINT tasks_total_minutes_check
CHECK ((total_minutes >= (0)::NUMERIC) AND (total_minutes <= (999999)::NUMERIC));
CREATE TRIGGER projects_tasks_counter_trigger
BEFORE INSERT
ON tasks
FOR EACH ROW
EXECUTE PROCEDURE update_project_tasks_counter_trigger_fn();
CREATE TRIGGER set_task_updated_at
BEFORE UPDATE
ON tasks
FOR EACH ROW
EXECUTE PROCEDURE set_task_updated_at_trigger_fn();
CREATE TRIGGER tasks_status_id_change
AFTER UPDATE
OF status_id
ON tasks
FOR EACH ROW
EXECUTE PROCEDURE task_status_change_trigger_fn();
CREATE TRIGGER tasks_task_subscriber_notify_done
BEFORE UPDATE
OF status_id
ON tasks
FOR EACH ROW
WHEN (old.status_id IS DISTINCT FROM new.status_id)
EXECUTE PROCEDURE tasks_task_subscriber_notify_done_trigger();
CREATE TABLE IF NOT EXISTS tasks_assignees (
task_id UUID NOT NULL,
project_member_id UUID NOT NULL,
@@ -1579,18 +1585,6 @@ ALTER TABLE team_members
ADD CONSTRAINT team_members_role_id_fk
FOREIGN KEY (role_id) REFERENCES roles;
CREATE TRIGGER insert_notification_settings
AFTER INSERT
ON team_members
FOR EACH ROW
EXECUTE PROCEDURE notification_settings_insert_trigger_fn();
CREATE TRIGGER remove_notification_settings
BEFORE DELETE
ON team_members
FOR EACH ROW
EXECUTE PROCEDURE notification_settings_delete_trigger_fn();
CREATE TABLE IF NOT EXISTS users (
id UUID DEFAULT uuid_generate_v4() NOT NULL,
name TEXT NOT NULL,
@@ -1640,18 +1634,10 @@ ALTER TABLE licensing_payment_details
ADD CONSTRAINT licensing_payment_details_users_id_fk
FOREIGN KEY (user_id) REFERENCES users;
ALTER TABLE licensing_user_payment_methods
ADD CONSTRAINT licensing_user_payment_methods_users_id_fk
FOREIGN KEY (user_id) REFERENCES users;
ALTER TABLE licensing_user_subscriptions
ADD CONSTRAINT licensing_user_subscriptions_users_id_fk
FOREIGN KEY (user_id) REFERENCES users;
ALTER TABLE licensing_user_subscriptions_log
ADD CONSTRAINT licensing_user_subscriptions_log_users_id_fk
FOREIGN KEY (user_id) REFERENCES users;
ALTER TABLE notification_settings
ADD CONSTRAINT notification_settings_user_id_fk
FOREIGN KEY (user_id) REFERENCES users
@@ -1751,11 +1737,6 @@ ALTER TABLE users
ADD CONSTRAINT users_name_check
CHECK (CHAR_LENGTH(name) <= 55);
CREATE TRIGGER users_email_lower
BEFORE INSERT OR UPDATE
ON users
EXECUTE PROCEDURE lower_email();
CREATE TABLE IF NOT EXISTS teams (
id UUID DEFAULT uuid_generate_v4() NOT NULL,
name TEXT NOT NULL,

View File

@@ -388,14 +388,14 @@ SELECT sys_insert_project_access_levels();
SELECT sys_insert_task_status_categories();
SELECT sys_insert_project_statuses();
SELECT sys_insert_project_healths();
SELECT sys_insert_project_templates();
-- SELECT sys_insert_project_templates();
DROP FUNCTION sys_insert_task_priorities();
DROP FUNCTION sys_insert_project_access_levels();
DROP FUNCTION sys_insert_task_status_categories();
DROP FUNCTION sys_insert_project_statuses();
DROP FUNCTION sys_insert_project_healths();
DROP FUNCTION sys_insert_project_templates();
-- DROP FUNCTION sys_insert_project_templates();
INSERT INTO timezones (name, abbrev, utc_offset)
SELECT name, abbrev, utc_offset

View File

@@ -26,12 +26,25 @@ CREATE UNIQUE INDEX IF NOT EXISTS cpt_task_statuses_template_id_name_uindex
CREATE UNIQUE INDEX IF NOT EXISTS custom_project_templates_name_team_id_uindex
ON custom_project_templates (name, team_id);
-- Create index on expire field
CREATE INDEX IF NOT EXISTS idx_pg_sessions_expire
ON pg_sessions (expire);
CREATE UNIQUE INDEX IF NOT EXISTS job_titles_name_team_id_uindex
ON job_titles (name, team_id);
CREATE INDEX IF NOT EXISTS job_titles_team_id_index
ON job_titles (team_id);
CREATE UNIQUE INDEX IF NOT EXISTS licensing_admin_users_name_uindex
ON licensing_admin_users (name);
CREATE UNIQUE INDEX IF NOT EXISTS licensing_admin_users_phone_no_uindex
ON licensing_admin_users (phone_no);
CREATE UNIQUE INDEX IF NOT EXISTS licensing_admin_users_username_uindex
ON licensing_admin_users (username);
CREATE UNIQUE INDEX IF NOT EXISTS licensing_coupon_codes_coupon_code_uindex
ON licensing_coupon_codes (coupon_code);
@@ -53,6 +66,12 @@ CREATE INDEX IF NOT EXISTS notification_settings_team_user_id_index
CREATE UNIQUE INDEX IF NOT EXISTS personal_todo_list_index_uindex
ON personal_todo_list (user_id, index);
CREATE UNIQUE INDEX IF NOT EXISTS project_access_levels_key_uindex
ON project_access_levels (key);
CREATE UNIQUE INDEX IF NOT EXISTS project_access_levels_name_uindex
ON project_access_levels (name);
CREATE UNIQUE INDEX IF NOT EXISTS project_categories_name_team_id_uindex
ON project_categories (name, team_id);

View File

@@ -113,6 +113,7 @@
"grunt-contrib-watch": "^1.1.0",
"grunt-shell": "^4.0.0",
"grunt-sync": "^0.8.2",
"highcharts": "^11.1.0",
"jest": "^28.1.3",
"jest-sonar-reporter": "^2.0.0",
"ncp": "^2.0.0",
@@ -10138,6 +10139,13 @@
"node": ">=14.0.0"
}
},
"node_modules/highcharts": {
"version": "11.4.8",
"resolved": "https://registry.npmjs.org/highcharts/-/highcharts-11.4.8.tgz",
"integrity": "sha512-5Tke9LuzZszC4osaFisxLIcw7xgNGz4Sy3Jc9pRMV+ydm6sYqsPYdU8ELOgpzGNrbrRNDRBtveoR5xS3SzneEA==",
"dev": true,
"license": "https://www.highcharts.com/license"
},
"node_modules/homedir-polyfill": {
"version": "1.0.3",
"resolved": "https://registry.npmjs.org/homedir-polyfill/-/homedir-polyfill-1.0.3.tgz",

View File

@@ -134,6 +134,7 @@
"grunt-contrib-watch": "^1.1.0",
"grunt-shell": "^4.0.0",
"grunt-sync": "^0.8.2",
"highcharts": "^11.1.0",
"jest": "^28.1.3",
"jest-sonar-reporter": "^2.0.0",
"ncp": "^2.0.0",

View File

@@ -54,15 +54,16 @@ app.use((_req: Request, res: Response, next: NextFunction) => {
const allowedOrigins = [
isProduction()
? [
`https://react.worklenz.com`,
`https://v2.worklenz.com`,
`https://dev.worklenz.com`
`http://localhost:5000`,
`http://127.0.0.1:5000`
]
: [
"http://localhost:3000",
"http://localhost:5173",
"http://127.0.0.1:5173",
"http://127.0.0.1:3000"
"http://127.0.0.1:3000",
"http://127.0.0.1:5000",
`http://localhost:5000`
]
].flat();
@@ -71,7 +72,7 @@ app.use(cors({
if (!origin || allowedOrigins.includes(origin)) {
callback(null, true);
} else {
console.log("Blocked origin:", origin);
console.log("Blocked origin:", origin, process.env.NODE_ENV);
callback(new Error("Not allowed by CORS"));
}
},

View File

@@ -118,10 +118,10 @@ export const DEFAULT_PAGE_SIZE = 20;
// S3 Credentials
export const REGION = process.env.AWS_REGION || "us-east-1";
export const BUCKET = process.env.AWS_BUCKET || "worklenz-bucket";
export const S3_URL = process.env.S3_URL || "http://minio:9000/worklenz-bucket";
export const S3_ACCESS_KEY_ID = process.env.AWS_ACCESS_KEY_ID || "minioadmin";
export const S3_SECRET_ACCESS_KEY = process.env.AWS_SECRET_ACCESS_KEY || "minioadmin";
export const BUCKET = process.env.AWS_BUCKET || "your-bucket-name";
export const S3_URL = process.env.S3_URL || "https://your-s3-url";
export const S3_ACCESS_KEY_ID = process.env.AWS_ACCESS_KEY_ID || "";
export const S3_SECRET_ACCESS_KEY = process.env.AWS_SECRET_ACCESS_KEY || "";
// Azure Blob Storage Credentials
export const STORAGE_PROVIDER = process.env.STORAGE_PROVIDER || "s3";
@@ -150,9 +150,9 @@ export const TEAM_MEMBER_TREE_MAP_COLOR_ALPHA = "40";
// LICENSING SERVER URLS
export const LOCAL_URL = "http://localhost:3001";
export const UAT_SERVER_URL = "https://uat.admin.worklenz.com";
export const DEV_SERVER_URL = "https://dev.admin.worklenz.com";
export const PRODUCTION_SERVER_URL = "https://admin.worklenz.com";
export const UAT_SERVER_URL = process.env.UAT_SERVER_URL || "https://your-uat-server-url";
export const DEV_SERVER_URL = process.env.DEV_SERVER_URL || "https://your-dev-server-url";
export const PRODUCTION_SERVER_URL = process.env.PRODUCTION_SERVER_URL || "https://your-production-server-url";
// *Sync with the client
export const PASSWORD_POLICY = "Minimum of 8 characters, with upper and lowercase and a number and a symbol.";

View File

@@ -5,13 +5,14 @@ VITE_APP_TITLE=Worklenz
VITE_APP_ENV=development
# Mixpanel
VITE_MIXPANEL_TOKEN=bb330b6bd25db4a6c988da89046f4b80
VITE_MIXPANEL_TOKEN=mixpanel-token
# Recaptcha
VITE_RECAPTCHA_SITE_KEY=6LeUWjYqAAAAAFhi9Z8KPeiix3RRjxoZtJhLJZXb
VITE_ENABLE_RECAPTCHA=false
VITE_RECAPTCHA_SITE_KEY=recaptcha-site-key
# Session ID
VITE_WORKLENZ_SESSION_ID=worklenz.sid
VITE_WORKLENZ_SESSION_ID=worklenz-session-id
# Google Login
VITE_ENABLE_GOOGLE_LOGIN=true
VITE_ENABLE_GOOGLE_LOGIN=false

View File

@@ -1,4 +1,4 @@
FROM node:18-alpine AS build
FROM node:22-alpine AS build
WORKDIR /app
@@ -9,7 +9,7 @@ RUN npm ci
COPY . .
RUN npm run build
FROM node:18-alpine AS production
FROM node:22-alpine AS production
WORKDIR /app

View File

@@ -1,6 +1,6 @@
# Worklenz - React Application
# Worklenz - React Frontend
Worklenz is a task management application built with React and bundled using [Vite](https://vitejs.dev/).
Worklenz is a project management application built with React, TypeScript, and Ant Design. The project is bundled using [Vite](https://vitejs.dev/).
## Table of Contents
- [Getting Started](#getting-started)
@@ -15,11 +15,11 @@ To get started with the project, follow these steps:
1. **Clone the repository**:
```bash
git clone https://github.com/Worklenz/worklenz-v2.git
git clone https://github.com/Worklenz/worklenz.git
```
2. **Navigate to the project directory**:
```bash
cd worklenz-v2
cd worklenz/worklenz-frontend
```
3. **Install dependencies**:
```bash
@@ -29,7 +29,7 @@ To get started with the project, follow these steps:
```bash
npm run dev
```
5. Open [http://localhost:3000](http://localhost:3000) in your browser to view the application.
5. Open [http://localhost:5000](http://localhost:5000) in your browser to view the application.
## Available Scripts
@@ -38,7 +38,7 @@ In the project directory, you can run:
### `npm run dev`
Runs the app in the development mode.\
Open [http://localhost:5173](http://localhost:5173) to view it in the browser.
Open [http://localhost:5000](http://localhost:5000) to view it in the browser.
The page will reload if you make edits.\
You will also see any lint errors in the console.
@@ -58,7 +58,22 @@ Open [http://localhost:4173](http://localhost:4173) to preview the build.
## Project Structure
The project structure is organized as follows:
The project is organized around a feature-based structure:
```
src/
├── components/ # Reusable UI components
├── hooks/ # Custom React hooks
├── lib/ # Feature-specific logic
├── pages/ # Route components
├── services/ # API services
├── shared/ # Shared utilities, constants, and types
├── store/ # Global state management
├── types/ # TypeScript type definitions
├── utils/ # Utility functions
├── App.tsx # Main application component
└── main.tsx # Application entry point
```
## Contributing
@@ -72,6 +87,9 @@ Contributions are welcome! If you'd like to contribute, please follow these step
## Learn More
To learn more about Vite, check out the [Vite documentation](https://vitejs.dev/guide/).
To learn more about the technologies used in this project:
To learn React, check out the [React documentation](https://reactjs.org/).
- [React Documentation](https://react.dev/)
- [TypeScript Documentation](https://www.typescriptlang.org/docs/)
- [Ant Design Documentation](https://ant.design/docs/react/introduce)
- [Vite Documentation](https://vitejs.dev/guide/)

View File

@@ -5,7 +5,7 @@
"scripts": {
"start": "vite",
"prebuild": "node scripts/copy-tinymce.js",
"build": "node --max-old-space-size=4096 node_modules/.bin/vite build",
"build": "vite build",
"dev-build": "vite build",
"serve": "vite preview",
"format": "prettier --write ."

View File

@@ -8,7 +8,7 @@ type EmptyListPlaceholderProps = {
};
const EmptyListPlaceholder = ({
imageSrc = 'https://app.worklenz.com/assets/images/empty-box.webp',
imageSrc = '/assets/images/empty-box.webp',
imageHeight = 60,
text,
}: EmptyListPlaceholderProps) => {

View File

@@ -24,6 +24,16 @@ import logger from '@/utils/errorLogger';
import alertService from '@/services/alerts/alertService';
import { WORKLENZ_REDIRECT_PROJ_KEY } from '@/shared/constants';
// Define the global grecaptcha type
declare global {
interface Window {
grecaptcha?: {
ready: (callback: () => void) => void;
execute: (siteKey: string, options: { action: string }) => Promise<string>;
};
}
}
const SignupPage = () => {
const [form] = Form.useForm();
const navigate = useNavigate();
@@ -58,6 +68,7 @@ const SignupPage = () => {
};
const enableGoogleLogin = import.meta.env.VITE_ENABLE_GOOGLE_LOGIN === 'true' || false;
const enableRecaptcha = import.meta.env.VITE_ENABLE_RECAPTCHA === 'true' && import.meta.env.VITE_RECAPTCHA_SITE_KEY && import.meta.env.VITE_RECAPTCHA_SITE_KEY !== 'recaptcha-site-key';
useEffect(() => {
trackMixpanelEvent(evt_signup_page_visit);
@@ -79,6 +90,14 @@ const SignupPage = () => {
}, [trackMixpanelEvent]);
useEffect(() => {
// Only load recaptcha script if recaptcha is enabled and site key is valid
if (enableRecaptcha && import.meta.env.VITE_RECAPTCHA_SITE_KEY) {
// Check if site key is not the placeholder value
if (import.meta.env.VITE_RECAPTCHA_SITE_KEY === 'recaptcha-site-key') {
console.warn('Using placeholder reCAPTCHA site key. Please set a valid key in your environment variables.');
return;
}
const script = document.createElement('script');
script.src = `https://www.google.com/recaptcha/api.js?render=${import.meta.env.VITE_RECAPTCHA_SITE_KEY}`;
script.async = true;
@@ -98,7 +117,8 @@ const SignupPage = () => {
}
}
};
}, []);
}
}, [enableRecaptcha]);
const getInvitationQueryParams = () => {
const params = [`team=${urlParams.teamId}`, `teamMember=${urlParams.teamMemberId}`];
@@ -109,20 +129,45 @@ const SignupPage = () => {
};
const getRecaptchaToken = async () => {
return new Promise<string>(resolve => {
window.grecaptcha?.ready(() => {
window.grecaptcha
?.execute(import.meta.env.VITE_RECAPTCHA_SITE_KEY, { action: 'signup' })
if (!enableRecaptcha) return '';
// Check if site key is valid
if (!import.meta.env.VITE_RECAPTCHA_SITE_KEY || import.meta.env.VITE_RECAPTCHA_SITE_KEY === 'recaptcha-site-key') {
console.warn('Invalid reCAPTCHA site key. Skipping reCAPTCHA verification.');
return 'skip-verification';
}
try {
return new Promise<string>((resolve, reject) => {
if (!window.grecaptcha) {
reject('reCAPTCHA not loaded');
return;
}
window.grecaptcha.ready(() => {
window.grecaptcha!
.execute(import.meta.env.VITE_RECAPTCHA_SITE_KEY, { action: 'signup' })
.then((token: string) => {
resolve(token);
})
.catch((error: any) => {
console.error('reCAPTCHA execution error:', error);
reject(error);
});
});
});
} catch (error) {
console.error('Error getting reCAPTCHA token:', error);
return '';
}
};
const onFinish = async (values: IUserSignUpRequest) => {
try {
setValidating(true);
if (enableRecaptcha) {
try {
const token = await getRecaptchaToken();
if (!token) {
@@ -131,12 +176,26 @@ const SignupPage = () => {
return;
}
const veriftToken = await authApiService.verifyRecaptchaToken(token);
// Skip verification if we're using the special token due to invalid site key
if (token !== 'skip-verification') {
const verifyToken = await authApiService.verifyRecaptchaToken(token);
if (!veriftToken.done) {
if (!verifyToken.done) {
logger.error('Failed to verify reCAPTCHA token');
return;
}
}
} catch (error) {
logger.error('reCAPTCHA error:', error);
// Continue with sign up even if reCAPTCHA fails in development
if (import.meta.env.DEV) {
console.warn('Continuing signup despite reCAPTCHA error in development mode');
} else {
alertService.error(t('reCAPTCHAVerificationError'), t('reCAPTCHAVerificationErrorMessage'));
return;
}
}
}
const body = {
name: values.name,