Update environment configuration, Docker setup, and frontend/backend dependencies

- Updated .env.example and .env files for backend and frontend with placeholder values.
- Enhanced .gitignore to include additional files and directories.
- Modified docker-compose.yml to change image names and improve service health checks.
- Updated README.md and SETUP_THE_PROJECT.md for clearer setup instructions.
- Added database initialization scripts and SQL files for structured database setup.
- Updated frontend Dockerfile to use Node.js 22 and adjusted package.json scripts.
- Improved error handling and logging in start scripts for better debugging.
- Added reCAPTCHA support in the signup page with conditional loading based on environment variables.
This commit is contained in:
chamikaJ
2025-04-18 17:10:56 +05:30
parent 8825b0410a
commit e42819ef64
34 changed files with 948 additions and 376 deletions

View File

@@ -2,66 +2,75 @@
NODE_ENV=development
PORT=3000
SESSION_NAME=worklenz.sid
SESSION_SECRET="your-session-secret"
COOKIE_SECRET="your-cookie-secret"
SESSION_SECRET="your_session_secret"
COOKIE_SECRET="your_cookie_secret"
# CORS
SOCKET_IO_CORS=http://localhost:4200
SOCKET_IO_CORS=http://localhost:5000
SERVER_CORS=*
# Database
DB_USER=postgres
DB_PASSWORD=password
DB_PASSWORD=your_db_password
DB_NAME=worklenz_db
DB_HOST=localhost
DB_PORT=5432
DB_MAX_CLIENTS=50
# Google Login
GOOGLE_CLIENT_ID="client_id"
GOOGLE_CLIENT_SECRET="client_secret"
GOOGLE_CLIENT_ID="your_google_client_id"
GOOGLE_CLIENT_SECRET="your_google_client_secret"
GOOGLE_CALLBACK_URL="http://localhost:3000/secure/google/verify"
LOGIN_FAILURE_REDIRECT="/"
LOGIN_SUCCESS_REDIRECT="http://localhost:4200/auth/authenticate"
LOGIN_SUCCESS_REDIRECT="http://localhost:5000/auth/authenticate"
# SENDGRID
SENDGRID_API_KEY="your-sendgrid-api-key"
EMAIL_NOTIFICATIONS=your-email@example.com
# CLI
ANGULAR_DIST_DIR="path/to/frontend/dist"
ANGULAR_SRC_DIR="path/to/frontend"
BACKEND_PUBLIC_DIR="path/to/backend/public"
BACKEND_VIEWS_DIR="path/to/backend/views"
COMMIT_BUILD_IMMEDIATELY=false
# HOST
HOSTNAME=localhost:4200
HOSTNAME=localhost:5000
SLACK_WEBHOOK=your-slack-webhook-url
USE_PG_NATIVE=true
# SLACK
SLACK_WEBHOOK=your_slack_webhook_url
USE_PG_NATIVE=false
# JWT SECRET
JWT_SECRET=your-jwt-secret
JWT_SECRET=your_jwt_secret
# FRONTEND_URL
FRONTEND_URL=https://example.com/
# AWS
AWS_REGION="us-west-2"
AWS_ACCESS_KEY_ID="YOUR_AWS_ACCESS_KEY_ID"
AWS_SECRET_ACCESS_KEY="YOUR_AWS_SECRET_ACCESS_KEY"
AWS_BUCKET="your-s3-bucket"
S3_URL="https://s3.your-region.amazonaws.com/your-bucket"
FRONTEND_URL=http://localhost:5000
# STORAGE
STORAGE_PROVIDER=s3 # s3 or azure
AZURE_STORAGE_ACCOUNT_NAME=yourstorageaccount
AZURE_STORAGE_CONTAINER=yourcontainer
AZURE_STORAGE_ACCOUNT_KEY=yourstorageaccountkey
AZURE_STORAGE_URL=https://yourstorageaccount.blob.core.windows.net
STORAGE_PROVIDER=s3 # values s3 or azure
# AWS
AWS_REGION="your_aws_region"
AWS_ACCESS_KEY_ID="your_aws_access_key_id"
AWS_SECRET_ACCESS_KEY="your_aws_secret_access_key"
AWS_BUCKET="your_s3_bucket"
S3_URL="your_s3_url"
# Azure Storage
AZURE_STORAGE_ACCOUNT_NAME="your_storage_account_name"
AZURE_STORAGE_CONTAINER="your_storage_container"
AZURE_STORAGE_ACCOUNT_KEY="your_storage_account_key"
AZURE_STORAGE_URL="your_storage_url"
# DIRECTPAY
DP_STAGE=DEV
DP_URL=https://dev.directpay.lk/v1/mpg/api/external/cardManagement
DP_MERCHANT_ID=YOUR_MERCHANT_ID
DP_SECRET_KEY=YOUR_SECRET_KEY
DP_API_KEY=YOUR_API_KEY
DP_URL=your_url
DP_MERCHANT_ID=your_merchant_id
DP_SECRET_KEY=your_secret_key
DP_API_KEY=your_api_key
CONTACT_US_EMAIL=support@example.com
GOOGLE_CAPTCHA_SECRET_KEY=YOUR_SECRET_KEY
GOOGLE_CAPTCHA_PASS_SCORE=0.8
GOOGLE_CAPTCHA_SECRET_KEY=your_captcha_secret_key
GOOGLE_CAPTCHA_PASS_SCORE=0.8
# Email Cronjobs
ENABLE_EMAIL_CRONJOBS=true

View File

@@ -2,74 +2,75 @@
NODE_ENV=development
PORT=3000
SESSION_NAME=worklenz.sid
SESSION_SECRET="YOUR_SESSION_SECRET_HERE"
COOKIE_SECRET="YOUR_COOKIE_SECRET_HERE"
SESSION_SECRET="your_session_secret"
COOKIE_SECRET="your_cookie_secret"
# CORS
SOCKET_IO_CORS=http://localhost:4200
SOCKET_IO_CORS=http://localhost:5000
SERVER_CORS=*
# Database
DB_USER=DATABASE_USER_HERE # default : worklenz_backend (update "user-permission.sql" if needed)
DB_PASSWORD=DATABASE_PASSWORD_HERE
DB_NAME=DATABASE_NAME_HERE # default : worklenz_db
DB_HOST=DATABASE_HOST_HERE # default : localhost
DB_PORT=DATABASE_PORT_HERE # default : 5432
DB_USER=postgres
DB_PASSWORD=your_db_password
DB_NAME=worklenz_db
DB_HOST=localhost
DB_PORT=5432
DB_MAX_CLIENTS=50
# Google Login
GOOGLE_CLIENT_ID="GOOGLE_CLIENT_ID_HERE"
GOOGLE_CLIENT_SECRET="GOOGLE_CLIENT_SECRET_HERE"
GOOGLE_CLIENT_ID="your_google_client_id"
GOOGLE_CLIENT_SECRET="your_google_client_secret"
GOOGLE_CALLBACK_URL="http://localhost:3000/secure/google/verify"
LOGIN_FAILURE_REDIRECT="/"
LOGIN_SUCCESS_REDIRECT="http://localhost:4200/auth/authenticate"
LOGIN_SUCCESS_REDIRECT="http://localhost:5000/auth/authenticate"
# CLI
ANGULAR_DIST_DIR="/path/worklenz_frontend/dist/worklenz"
ANGULAR_SRC_DIR="/path/worklenz_frontend"
BACKEND_PUBLIC_DIR="/path/worklenz_backend/src/public"
BACKEND_VIEWS_DIR="/path/worklenz_backend/src/views/admin"
COMMIT_BUILD_IMMEDIATELY=true
ANGULAR_DIST_DIR="path/to/frontend/dist"
ANGULAR_SRC_DIR="path/to/frontend"
BACKEND_PUBLIC_DIR="path/to/backend/public"
BACKEND_VIEWS_DIR="path/to/backend/views"
COMMIT_BUILD_IMMEDIATELY=false
# HOST
HOSTNAME=localhost:4200
HOSTNAME=localhost:5000
# SLACK
SLACK_WEBHOOK=SLACK_WEBHOOK_HERE
SLACK_WEBHOOK=your_slack_webhook_url
USE_PG_NATIVE=false
# JWT SECRET
JWT_SECRET=JWT_SECRET_HERE
JWT_SECRET=your_jwt_secret
# FRONTEND_URL
FRONTEND_URL=FRONTEND_URL_HERE
FRONTEND_URL=http://localhost:5000
# STORAGE
STORAGE_PROVIDER=STORAGE_PROVIDER_HERE # values s3 or azure, if s3 is selected, then the following AWS credentials are required. if azure is selected, then the following Azure credentials are required.
STORAGE_PROVIDER=s3 # values s3 or azure
# AWS
AWS_REGION="AWS_REGION_HERE"
AWS_ACCESS_KEY_ID="AWS_ACCESS_KEY_ID_HERE"
AWS_SECRET_ACCESS_KEY="AWS_SECRET_ACCESS_KEY_HERE"
AWS_BUCKET="AWS_BUCKET_HERE"
S3_URL="S3_URL_HERE"
AWS_REGION="your_aws_region"
AWS_ACCESS_KEY_ID="your_aws_access_key_id"
AWS_SECRET_ACCESS_KEY="your_aws_secret_access_key"
AWS_BUCKET="your_s3_bucket"
S3_URL="your_s3_url"
# STORAGE
AZURE_STORAGE_ACCOUNT_NAME="AZURE_STORAGE_ACCOUNT_NAME_HERE"
AZURE_STORAGE_CONTAINER="AZURE_STORAGE_CONTAINER_HERE"
AZURE_STORAGE_ACCOUNT_KEY="AZURE_STORAGE_ACCOUNT_KEY_HERE"
AZURE_STORAGE_URL="AZURE_STORAGE_URL_HERE"
# Azure Storage
AZURE_STORAGE_ACCOUNT_NAME="your_storage_account_name"
AZURE_STORAGE_CONTAINER="your_storage_container"
AZURE_STORAGE_ACCOUNT_KEY="your_storage_account_key"
AZURE_STORAGE_URL="your_storage_url"
# DIRECTPAY
DP_STAGE=DP_STAGE_HERE #DEV or
DP_URL=DP_URL_HERE
DP_MERCHANT_ID=DP_MERCHANT_ID_HERE
DP_SECRET_KEY=DP_SECRET_KEY_HERE
DP_API_KEY=DP_API_KEY_HERE
DP_STAGE=DEV
DP_URL=your_url
DP_MERCHANT_ID=your_merchant_id
DP_SECRET_KEY=your_secret_key
DP_API_KEY=your_api_key
CONTACT_US_EMAIL=CONTACT_US_EMAIL_HERE
CONTACT_US_EMAIL=support@example.com
GOOGLE_CAPTCHA_SECRET_KEY=GOOGLE_CAPTCHA_SECRET_KEY_HERE
GOOGLE_CAPTCHA_SECRET_KEY=your_captcha_secret_key
GOOGLE_CAPTCHA_PASS_SCORE=0.8
# Email Cronjobs
ENABLE_EMAIL_CRONJOBS=true

View File

@@ -1,5 +1,5 @@
# Use the official Node.js 18 image as a base
FROM node:18
# Use the official Node.js 20 image as a base
FROM node:20
# Create and set the working directory
WORKDIR /usr/src/app

View File

@@ -1,81 +1,96 @@
# Worklenz Backend
1. **Open your IDE:**
This is the Express.js backend for the Worklenz project management application.
Open the project directory in your preferred code editor or IDE like Visual Studio Code.
## Getting Started
2. **Configure Environment Variables:**
Follow these steps to set up the backend for development:
- Create a copy of the `.env.template` file and name it `.env`.
- Update the required fields in `.env` with the specific information.
1. **Configure Environment Variables:**
3. **Restore Database**
- Create a new database named `worklenz_db` on your local PostgreSQL server.
- Update the `DATABASE_NAME` and `PASSWORD` in the `database/6_user_permission.sql` with your DB credentials.
- Open a query console and execute the queries from the .sql files in the `database` directories, following the provided order.
- Create a copy of the `.env.example` file and name it `.env`.
- Update the required fields in `.env` with your specific configuration.
4. **Install Dependencies:**
2. **Set up Database:**
- Create a new database named `worklenz_db` on your local PostgreSQL server.
- Update the database connection details in your `.env` file.
- Execute the SQL setup files in the correct order:
```bash
# From your PostgreSQL client or command line
psql -U your_username -d worklenz_db -f database/sql/0_extensions.sql
psql -U your_username -d worklenz_db -f database/sql/1_tables.sql
psql -U your_username -d worklenz_db -f database/sql/indexes.sql
psql -U your_username -d worklenz_db -f database/sql/4_functions.sql
psql -U your_username -d worklenz_db -f database/sql/triggers.sql
psql -U your_username -d worklenz_db -f database/sql/3_views.sql
psql -U your_username -d worklenz_db -f database/sql/2_dml.sql
psql -U your_username -d worklenz_db -f database/sql/5_database_user.sql
```
Alternatively, you can use the provided shell script:
```bash
# Make sure the script is executable
chmod +x database/00-init-db.sh
# Run the script (may need modifications for local execution)
./database/00-init-db.sh
```
3. **Install Dependencies:**
```bash
npm install
```
This command installs all the necessary libraries required to run the project.
4. **Run the Development Server:**
5. **Run the Development Server:**
```bash
npm run dev
```
**a. Start the TypeScript compiler:**
This starts the development server with hot reloading enabled.
Open a new terminal window and run the following command:
5. **Build for Production:**
```bash
grunt dev
```
```bash
npm run build
```
This starts the `grunt` task runner, which compiles TypeScript code into JavaScript.
This will compile the TypeScript code into JavaScript for production use.
**b. Start the development server:**
6. **Start Production Server:**
Open another separate terminal window and run the following command:
```bash
npm start
```
```bash
npm start
```
## API Documentation
This starts the development server allowing you to work on the project.
The API endpoints are organized into logical controllers and follow RESTful design principles. The main API routes are prefixed with `/api/v1`.
6. **Run the Production Server:**
### Authentication
**a. Compile TypeScript to JavaScript:**
Authentication is handled via JWT tokens. Protected routes require a valid token in the Authorization header.
Open a new terminal window and run the following command:
### File Storage
```bash
grunt build
```
The application supports both S3-compatible storage and Azure Blob Storage for file uploads. Configure your preferred storage option in the `.env` file.
This starts the `grunt` task runner, which compiles TypeScript code into JavaScript for production use.
## Development Guidelines
**b. Start the production server:**
- Code should be written in TypeScript
- Follow the established patterns for controllers, services, and middlewares
- Add proper error handling for all API endpoints
- Write unit tests for critical functionality
- Document API endpoints with clear descriptions and examples
Once the compilation is complete, run the following command in the same terminal window:
## Running Tests
```bash
npm start
```
```bash
npm test
```
This starts the production server for your application.
## Docker Support
### CLI
- Create controller: `$ node new controller Test`
- Create angular release: `$ node new release`
### Development Rules
- Controllers should only be generated/created using the CLI (`node new controller Projects`)
- Validations should only be done using a middleware placed under src/validators/ and used inside the routers (E.g., api-router.ts)
- Validators should only be generated/created using the CLI (`node new validator projects-params`)
## Pull submodules
- git submodule update --init --recursive
The backend can be run in a Docker container. See the main project README for Docker setup instructions.

View File

@@ -0,0 +1,55 @@
#!/bin/bash
set -e

# Controls the order of SQL file execution during database initialization.
# Runs inside the postgres Docker entrypoint, where $POSTGRES_USER and
# $POSTGRES_DB are provided by the official postgres image.

echo "Starting database initialization..."

# Locate the SQL files: either already staged under sql/, or (on first run)
# at the top level, in which case copy them into sql/ so Docker's automatic
# *.sql execution does not also run them in lexical (wrong) order.
if [ -f "/docker-entrypoint-initdb.d/sql/0_extensions.sql" ]; then
  SQL_DIR="/docker-entrypoint-initdb.d/sql"
  echo "Using SQL files from sql/ subdirectory"
elif [ -f "/docker-entrypoint-initdb.d/0_extensions.sql" ]; then
  echo "Copying SQL files to sql/ subdirectory..."
  mkdir -p /docker-entrypoint-initdb.d/sql
  for f in /docker-entrypoint-initdb.d/*.sql; do
    if [ -f "$f" ]; then
      cp "$f" /docker-entrypoint-initdb.d/sql/
      echo "Copied $f to sql/ subdirectory"
    fi
  done
  SQL_DIR="/docker-entrypoint-initdb.d/sql"
else
  echo "SQL files not found in expected locations!" >&2
  exit 1
fi

# Execute the SQL files in dependency order: extensions first, tables before
# indexes, functions before triggers, views before seed data, user grants last.
SQL_FILES="0_extensions.sql 1_tables.sql indexes.sql 4_functions.sql triggers.sql 3_views.sql 2_dml.sql 5_database_user.sql"

for sql_file in $SQL_FILES; do
  if [ ! -f "$SQL_DIR/$sql_file" ]; then
    echo "Required SQL file $sql_file is missing from $SQL_DIR" >&2
    exit 1
  fi
  echo "Executing $sql_file..."
  psql -v ON_ERROR_STOP=1 --username "$POSTGRES_USER" --dbname "$POSTGRES_DB" -f "$SQL_DIR/$sql_file"
done

echo "Database initialization completed successfully"

View File

@@ -1 +1,36 @@
All database DDLs, DMLs, and migrations related to the application should be stored here.
# Worklenz Database
## Directory Structure
- `sql/` - Contains all SQL files needed for database initialization
- `migrations/` - Contains database migration scripts
- `00-init-db.sh` - Initialization script that executes SQL files in the correct order
## SQL File Execution Order
The database initialization files should be executed in the following order:
1. `sql/0_extensions.sql` - PostgreSQL extensions
2. `sql/1_tables.sql` - Table definitions and constraints
3. `sql/indexes.sql` - All database indexes
4. `sql/4_functions.sql` - Database functions
5. `sql/triggers.sql` - Database triggers
6. `sql/3_views.sql` - Database views
7. `sql/2_dml.sql` - Data Manipulation Language statements (inserts, updates)
8. `sql/5_database_user.sql` - Database user setup
## Docker-based Setup
In the Docker environment, we use a shell script called `00-init-db.sh` to control the SQL file execution order:
1. The shell script creates a `sql/` subdirectory if it doesn't exist
2. It copies all .sql files into this subdirectory
3. It executes the SQL files from the subdirectory in the correct order
This approach prevents the SQL files from being executed twice by Docker's automatic initialization mechanism, which would cause errors for objects that already exist.
## Manual Setup
If you're setting up the database manually, please follow the execution order listed above. Ensure your SQL files are in the `sql/` subdirectory before executing the script.

View File

@@ -0,0 +1,3 @@
-- Extensions
-- uuid-ossp provides uuid_generate_v4(), used as the DEFAULT for UUID
-- primary keys throughout the schema (see 1_tables.sql).
CREATE EXTENSION IF NOT EXISTS "uuid-ossp";
-- unaccent strips diacritics from text; presumably used for
-- accent-insensitive search — confirm against the functions/views that use it.
CREATE EXTENSION IF NOT EXISTS "unaccent";

View File

@@ -1,7 +1,3 @@
-- Extensions
CREATE EXTENSION IF NOT EXISTS "uuid-ossp";
CREATE EXTENSION IF NOT EXISTS "unaccent";
-- Domains
CREATE DOMAIN WL_HEX_COLOR AS TEXT CHECK (value ~* '^#([0-9a-fA-F]{3}|[0-9a-fA-F]{6})$');
CREATE DOMAIN WL_EMAIL AS TEXT CHECK (value ~* '^[A-Za-z0-9._%+-]+@[A-Za-z0-9.-]+\.[A-Za-z]{2,}$');
@@ -18,7 +14,27 @@ CREATE TYPE SCHEDULE_TYPE AS ENUM ('daily', 'weekly', 'yearly', 'monthly', 'ever
CREATE TYPE LANGUAGE_TYPE AS ENUM ('en', 'es', 'pt');
-- START: Users
CREATE SEQUENCE IF NOT EXISTS users_user_no_seq START 1;
-- Utility and referenced tables
-- Create sessions table for connect-pg-simple session store
CREATE TABLE IF NOT EXISTS pg_sessions (
sid VARCHAR NOT NULL PRIMARY KEY,
sess JSON NOT NULL,
expire TIMESTAMP(6) NOT NULL
);
CREATE TABLE IF NOT EXISTS project_access_levels (
id UUID DEFAULT uuid_generate_v4() NOT NULL,
name TEXT NOT NULL,
key TEXT NOT NULL
);
ALTER TABLE project_access_levels
ADD CONSTRAINT project_access_levels_pk
PRIMARY KEY (id);
CREATE TABLE IF NOT EXISTS countries (
id UUID DEFAULT uuid_generate_v4() NOT NULL,
code CHAR(2) NOT NULL,
@@ -40,7 +56,6 @@ ALTER TABLE permissions
ADD CONSTRAINT permissions_pk
PRIMARY KEY (id);
-- Tables that reference utility tables
CREATE TABLE IF NOT EXISTS archived_projects (
user_id UUID NOT NULL,
project_id UUID NOT NULL
@@ -77,7 +92,6 @@ ALTER TABLE clients
ADD CONSTRAINT clients_name_check
CHECK (CHAR_LENGTH(name) <= 60);
-- Remaining tables
CREATE TABLE IF NOT EXISTS cpt_phases (
id UUID DEFAULT uuid_generate_v4() NOT NULL,
name TEXT NOT NULL,
@@ -232,11 +246,6 @@ ALTER TABLE email_invitations
ADD CONSTRAINT email_invitations_pk
PRIMARY KEY (id);
CREATE TRIGGER email_invitations_email_lower
BEFORE INSERT OR UPDATE
ON email_invitations
EXECUTE PROCEDURE lower_email();
CREATE TABLE IF NOT EXISTS favorite_projects (
user_id UUID NOT NULL,
project_id UUID NOT NULL
@@ -260,6 +269,35 @@ ALTER TABLE job_titles
ADD CONSTRAINT job_titles_name_check
CHECK (CHAR_LENGTH(name) <= 55);
CREATE TABLE IF NOT EXISTS licensing_admin_users (
id UUID DEFAULT uuid_generate_v4() NOT NULL,
name TEXT NOT NULL,
username TEXT NOT NULL,
phone_no TEXT NOT NULL,
otp TEXT,
otp_expiry TIMESTAMP WITH TIME ZONE,
active BOOLEAN DEFAULT TRUE NOT NULL
);
ALTER TABLE licensing_admin_users
ADD CONSTRAINT licensing_admin_users_id_pk
PRIMARY KEY (id);
CREATE TABLE IF NOT EXISTS licensing_app_sumo_batches (
id UUID DEFAULT uuid_generate_v4() NOT NULL,
name TEXT NOT NULL,
created_at TIMESTAMP WITH TIME ZONE DEFAULT CURRENT_TIMESTAMP,
created_by UUID NOT NULL
);
ALTER TABLE licensing_app_sumo_batches
ADD CONSTRAINT licensing_app_sumo_batches_pk
PRIMARY KEY (id);
ALTER TABLE licensing_app_sumo_batches
ADD CONSTRAINT licensing_app_sumo_batches_created_by_fk
FOREIGN KEY (created_by) REFERENCES licensing_admin_users;
CREATE TABLE IF NOT EXISTS licensing_coupon_codes (
id UUID DEFAULT uuid_generate_v4() NOT NULL,
coupon_code TEXT NOT NULL,
@@ -283,11 +321,6 @@ ALTER TABLE licensing_coupon_codes
ADD CONSTRAINT licensing_coupon_codes_pk
PRIMARY KEY (id);
ALTER TABLE licensing_coupon_codes
ADD CONSTRAINT licensing_coupon_codes_app_sumo_batches__fk
FOREIGN KEY (batch_id) REFERENCES licensing_app_sumo_batches
ON DELETE CASCADE;
ALTER TABLE licensing_coupon_codes
ADD CONSTRAINT licensing_coupon_codes_created_by_fk
FOREIGN KEY (created_by) REFERENCES licensing_admin_users;
@@ -1466,33 +1499,6 @@ ALTER TABLE tasks
ADD CONSTRAINT tasks_total_minutes_check
CHECK ((total_minutes >= (0)::NUMERIC) AND (total_minutes <= (999999)::NUMERIC));
CREATE TRIGGER projects_tasks_counter_trigger
BEFORE INSERT
ON tasks
FOR EACH ROW
EXECUTE PROCEDURE update_project_tasks_counter_trigger_fn();
CREATE TRIGGER set_task_updated_at
BEFORE UPDATE
ON tasks
FOR EACH ROW
EXECUTE PROCEDURE set_task_updated_at_trigger_fn();
CREATE TRIGGER tasks_status_id_change
AFTER UPDATE
OF status_id
ON tasks
FOR EACH ROW
EXECUTE PROCEDURE task_status_change_trigger_fn();
CREATE TRIGGER tasks_task_subscriber_notify_done
BEFORE UPDATE
OF status_id
ON tasks
FOR EACH ROW
WHEN (old.status_id IS DISTINCT FROM new.status_id)
EXECUTE PROCEDURE tasks_task_subscriber_notify_done_trigger();
CREATE TABLE IF NOT EXISTS tasks_assignees (
task_id UUID NOT NULL,
project_member_id UUID NOT NULL,
@@ -1579,18 +1585,6 @@ ALTER TABLE team_members
ADD CONSTRAINT team_members_role_id_fk
FOREIGN KEY (role_id) REFERENCES roles;
CREATE TRIGGER insert_notification_settings
AFTER INSERT
ON team_members
FOR EACH ROW
EXECUTE PROCEDURE notification_settings_insert_trigger_fn();
CREATE TRIGGER remove_notification_settings
BEFORE DELETE
ON team_members
FOR EACH ROW
EXECUTE PROCEDURE notification_settings_delete_trigger_fn();
CREATE TABLE IF NOT EXISTS users (
id UUID DEFAULT uuid_generate_v4() NOT NULL,
name TEXT NOT NULL,
@@ -1640,18 +1634,10 @@ ALTER TABLE licensing_payment_details
ADD CONSTRAINT licensing_payment_details_users_id_fk
FOREIGN KEY (user_id) REFERENCES users;
ALTER TABLE licensing_user_payment_methods
ADD CONSTRAINT licensing_user_payment_methods_users_id_fk
FOREIGN KEY (user_id) REFERENCES users;
ALTER TABLE licensing_user_subscriptions
ADD CONSTRAINT licensing_user_subscriptions_users_id_fk
FOREIGN KEY (user_id) REFERENCES users;
ALTER TABLE licensing_user_subscriptions_log
ADD CONSTRAINT licensing_user_subscriptions_log_users_id_fk
FOREIGN KEY (user_id) REFERENCES users;
ALTER TABLE notification_settings
ADD CONSTRAINT notification_settings_user_id_fk
FOREIGN KEY (user_id) REFERENCES users
@@ -1751,11 +1737,6 @@ ALTER TABLE users
ADD CONSTRAINT users_name_check
CHECK (CHAR_LENGTH(name) <= 55);
CREATE TRIGGER users_email_lower
BEFORE INSERT OR UPDATE
ON users
EXECUTE PROCEDURE lower_email();
CREATE TABLE IF NOT EXISTS teams (
id UUID DEFAULT uuid_generate_v4() NOT NULL,
name TEXT NOT NULL,

View File

@@ -388,14 +388,14 @@ SELECT sys_insert_project_access_levels();
SELECT sys_insert_task_status_categories();
SELECT sys_insert_project_statuses();
SELECT sys_insert_project_healths();
SELECT sys_insert_project_templates();
-- SELECT sys_insert_project_templates();
DROP FUNCTION sys_insert_task_priorities();
DROP FUNCTION sys_insert_project_access_levels();
DROP FUNCTION sys_insert_task_status_categories();
DROP FUNCTION sys_insert_project_statuses();
DROP FUNCTION sys_insert_project_healths();
DROP FUNCTION sys_insert_project_templates();
-- DROP FUNCTION sys_insert_project_templates();
INSERT INTO timezones (name, abbrev, utc_offset)
SELECT name, abbrev, utc_offset

View File

@@ -26,12 +26,25 @@ CREATE UNIQUE INDEX IF NOT EXISTS cpt_task_statuses_template_id_name_uindex
CREATE UNIQUE INDEX IF NOT EXISTS custom_project_templates_name_team_id_uindex
ON custom_project_templates (name, team_id);
-- Create index on expire field
CREATE INDEX IF NOT EXISTS idx_pg_sessions_expire
ON pg_sessions (expire);
CREATE UNIQUE INDEX IF NOT EXISTS job_titles_name_team_id_uindex
ON job_titles (name, team_id);
CREATE INDEX IF NOT EXISTS job_titles_team_id_index
ON job_titles (team_id);
CREATE UNIQUE INDEX IF NOT EXISTS licensing_admin_users_name_uindex
ON licensing_admin_users (name);
CREATE UNIQUE INDEX IF NOT EXISTS licensing_admin_users_phone_no_uindex
ON licensing_admin_users (phone_no);
CREATE UNIQUE INDEX IF NOT EXISTS licensing_admin_users_username_uindex
ON licensing_admin_users (username);
CREATE UNIQUE INDEX IF NOT EXISTS licensing_coupon_codes_coupon_code_uindex
ON licensing_coupon_codes (coupon_code);
@@ -53,6 +66,12 @@ CREATE INDEX IF NOT EXISTS notification_settings_team_user_id_index
CREATE UNIQUE INDEX IF NOT EXISTS personal_todo_list_index_uindex
ON personal_todo_list (user_id, index);
CREATE UNIQUE INDEX IF NOT EXISTS project_access_levels_key_uindex
ON project_access_levels (key);
CREATE UNIQUE INDEX IF NOT EXISTS project_access_levels_name_uindex
ON project_access_levels (name);
CREATE UNIQUE INDEX IF NOT EXISTS project_categories_name_team_id_uindex
ON project_categories (name, team_id);

View File

@@ -113,6 +113,7 @@
"grunt-contrib-watch": "^1.1.0",
"grunt-shell": "^4.0.0",
"grunt-sync": "^0.8.2",
"highcharts": "^11.1.0",
"jest": "^28.1.3",
"jest-sonar-reporter": "^2.0.0",
"ncp": "^2.0.0",
@@ -10138,6 +10139,13 @@
"node": ">=14.0.0"
}
},
"node_modules/highcharts": {
"version": "11.4.8",
"resolved": "https://registry.npmjs.org/highcharts/-/highcharts-11.4.8.tgz",
"integrity": "sha512-5Tke9LuzZszC4osaFisxLIcw7xgNGz4Sy3Jc9pRMV+ydm6sYqsPYdU8ELOgpzGNrbrRNDRBtveoR5xS3SzneEA==",
"dev": true,
"license": "https://www.highcharts.com/license"
},
"node_modules/homedir-polyfill": {
"version": "1.0.3",
"resolved": "https://registry.npmjs.org/homedir-polyfill/-/homedir-polyfill-1.0.3.tgz",

View File

@@ -134,6 +134,7 @@
"grunt-contrib-watch": "^1.1.0",
"grunt-shell": "^4.0.0",
"grunt-sync": "^0.8.2",
"highcharts": "^11.1.0",
"jest": "^28.1.3",
"jest-sonar-reporter": "^2.0.0",
"ncp": "^2.0.0",

View File

@@ -54,15 +54,16 @@ app.use((_req: Request, res: Response, next: NextFunction) => {
const allowedOrigins = [
isProduction()
? [
`https://react.worklenz.com`,
`https://v2.worklenz.com`,
`https://dev.worklenz.com`
`http://localhost:5000`,
`http://127.0.0.1:5000`
]
: [
"http://localhost:3000",
"http://localhost:5173",
"http://127.0.0.1:5173",
"http://127.0.0.1:3000"
"http://127.0.0.1:3000",
"http://127.0.0.1:5000",
`http://localhost:5000`
]
].flat();
@@ -71,7 +72,7 @@ app.use(cors({
if (!origin || allowedOrigins.includes(origin)) {
callback(null, true);
} else {
console.log("Blocked origin:", origin);
console.log("Blocked origin:", origin, process.env.NODE_ENV);
callback(new Error("Not allowed by CORS"));
}
},

View File

@@ -118,10 +118,10 @@ export const DEFAULT_PAGE_SIZE = 20;
// S3 Credentials
export const REGION = process.env.AWS_REGION || "us-east-1";
export const BUCKET = process.env.AWS_BUCKET || "worklenz-bucket";
export const S3_URL = process.env.S3_URL || "http://minio:9000/worklenz-bucket";
export const S3_ACCESS_KEY_ID = process.env.AWS_ACCESS_KEY_ID || "minioadmin";
export const S3_SECRET_ACCESS_KEY = process.env.AWS_SECRET_ACCESS_KEY || "minioadmin";
export const BUCKET = process.env.AWS_BUCKET || "your-bucket-name";
export const S3_URL = process.env.S3_URL || "https://your-s3-url";
export const S3_ACCESS_KEY_ID = process.env.AWS_ACCESS_KEY_ID || "";
export const S3_SECRET_ACCESS_KEY = process.env.AWS_SECRET_ACCESS_KEY || "";
// Azure Blob Storage Credentials
export const STORAGE_PROVIDER = process.env.STORAGE_PROVIDER || "s3";
@@ -150,9 +150,9 @@ export const TEAM_MEMBER_TREE_MAP_COLOR_ALPHA = "40";
// LICENSING SERVER URLS
export const LOCAL_URL = "http://localhost:3001";
export const UAT_SERVER_URL = "https://uat.admin.worklenz.com";
export const DEV_SERVER_URL = "https://dev.admin.worklenz.com";
export const PRODUCTION_SERVER_URL = "https://admin.worklenz.com";
export const UAT_SERVER_URL = process.env.UAT_SERVER_URL || "https://your-uat-server-url";
export const DEV_SERVER_URL = process.env.DEV_SERVER_URL || "https://your-dev-server-url";
export const PRODUCTION_SERVER_URL = process.env.PRODUCTION_SERVER_URL || "https://your-production-server-url";
// *Sync with the client
export const PASSWORD_POLICY = "Minimum of 8 characters, with upper and lowercase and a number and a symbol.";