Merge branch 'stress-test-socket' into dev-it3-it4-PFEA2024

This commit is contained in:
MathieuSevignyLavallee 2024-12-08 21:45:41 -05:00
commit e7eede36be
35 changed files with 2957 additions and 62 deletions

4
.gitignore vendored
View file

@ -130,4 +130,6 @@ dist
.pnp.* .pnp.*
db-backup/ db-backup/
.venv .venv
deployments
/test/stressTest/output

16
.vscode/launch.json vendored
View file

@ -20,6 +20,20 @@
"name": "Debug frontend", "name": "Debug frontend",
"url": "http://localhost:5173", "url": "http://localhost:5173",
"webRoot": "${workspaceFolder}/client/" "webRoot": "${workspaceFolder}/client/"
} },
{
"name": "Docker: Attach to Node",
"type": "node",
"request": "attach",
"restart": true,
"port": 9229,
"address": "localhost",
"localRoot": "${workspaceFolder}",
"remoteRoot": "/app",
"protocol": "inspector",
"skipFiles": [
"<node_internals>/**"
]
}
] ]
} }

View file

@ -12,6 +12,10 @@ RUN npm install
RUN npm run build RUN npm run build
EXPOSE 5173 ENV PORT=5173
EXPOSE ${PORT}
HEALTHCHECK --interval=30s --timeout=10s --start-period=30s --retries=3 \
CMD curl -f http://localhost:${PORT} || exit 1
CMD [ "npm", "run", "preview" ] CMD [ "npm", "run", "preview" ]

74
create-branch-image.bat Normal file
View file

@ -0,0 +1,74 @@
@echo off
setlocal EnableDelayedExpansion
:: Check if gh is installed
where gh >nul 2>&1
if %errorlevel% neq 0 (
echo GitHub CLI not found. Installing...
winget install --id GitHub.cli
if %errorlevel% neq 0 (
echo Failed to install GitHub CLI. Exiting...
exit /b 1
)
echo GitHub CLI installed successfully.
)
:: Check if user is authenticated
gh auth status >nul 2>&1
if %errorlevel% neq 0 (
echo GitHub CLI not authenticated. Please authenticate...
gh auth login
if %errorlevel% neq 0 (
echo Failed to authenticate. Exiting...
exit /b 1
)
echo Authentication successful.
)
:: Get the current branch name
for /f "tokens=*" %%i in ('git rev-parse --abbrev-ref HEAD') do set BRANCH_NAME=%%i
:: Run the GitHub workflow with the current branch name
echo Running GitHub workflow with branch %BRANCH_NAME%...
gh workflow run 119194149 --ref %BRANCH_NAME%
:: Wait and validate workflow launch
set /a attempts=0
set /a max_attempts=12
echo Waiting for workflow to start...
:wait_for_workflow
timeout /t 15 >nul
set /a attempts+=1
:: Get recent workflow run matching our criteria with in_progress status
for /f "tokens=*" %%i in ('gh run list --branch %BRANCH_NAME% --status in_progress --limit 1 --json databaseId --jq ".[0].databaseId"') do set WORKFLOW_RUN_ID=%%i
if "%WORKFLOW_RUN_ID%"=="" (
if !attempts! lss !max_attempts! (
echo Attempt !attempts! of !max_attempts!: No running workflow found yet...
goto wait_for_workflow
) else (
echo Timeout waiting for workflow to start running.
exit /b 1
)
)
echo Found running workflow ID: %WORKFLOW_RUN_ID%
:monitor_progress
cls
echo Workflow Progress:
echo ----------------
gh run view %WORKFLOW_RUN_ID% --json jobs --jq ".jobs[] | \"Job: \" + .name + \" - Status: \" + .status + if .conclusion != null then \" (\" + .conclusion + \")\" else \"\" end"
echo.
:: Check if workflow is still running
for /f "tokens=*" %%i in ('gh run view %WORKFLOW_RUN_ID% --json status --jq ".status"') do set CURRENT_STATUS=%%i
if "%CURRENT_STATUS%" == "completed" (
echo Workflow completed.
exit /b 0
)
timeout /t 5 >nul
goto monitor_progress

View file

@ -3,21 +3,29 @@ version: '3'
services: services:
frontend: frontend:
container_name: frontend
build: build:
context: ./client context: ./client
dockerfile: Dockerfile dockerfile: Dockerfile
container_name: frontend
ports: ports:
- "5173:5173" - "5173:5173"
networks: networks:
- quiz_network - quiz_network
restart: always restart: always
healthcheck:
test: ["CMD-SHELL", "curl -f http://localhost:$${PORT} || exit 1"]
interval: 5s
timeout: 10s
start_period: 5s
retries: 6
backend: backend:
build: build:
context: ./server context: ./server
dockerfile: Dockerfile dockerfile: Dockerfile
container_name: backend container_name: backend
networks:
- quiz_network
ports: ports:
- "3000:3000" - "3000:3000"
volumes: volumes:
@ -30,12 +38,15 @@ services:
SENDER_EMAIL: infoevaluetonsavoir@gmail.com SENDER_EMAIL: infoevaluetonsavoir@gmail.com
EMAIL_PSW: 'vvml wmfr dkzb vjzb' EMAIL_PSW: 'vvml wmfr dkzb vjzb'
JWT_SECRET: haQdgd2jp09qb897GeBZyJetC8ECSpbFJe JWT_SECRET: haQdgd2jp09qb897GeBZyJetC8ECSpbFJe
FRONTEND_URL: "http://localhost:5173"
depends_on: depends_on:
- mongo mongo:
networks: condition: service_healthy
- quiz_network healthcheck:
restart: always test: ["CMD-SHELL", "curl -f http://localhost:$${PORT}/health || exit 1"]
interval: 5s
timeout: 10s
start_period: 5s
retries: 6
quizroom: # Forces image to update quizroom: # Forces image to update
build: build:
@ -44,11 +55,17 @@ services:
container_name: quizroom container_name: quizroom
ports: ports:
- "4500:4500" - "4500:4500"
depends_on: volumes:
- backend - /var/run/docker.sock:/var/run/docker.sock
networks: networks:
- quiz_network - quiz_network
restart: always restart: always
healthcheck:
test: ["CMD", "/usr/src/app/healthcheck.sh"]
interval: 5s
timeout: 10s
start_period: 5s
retries: 6
nginx: nginx:
build: build:
@ -58,11 +75,25 @@ services:
ports: ports:
- "80:80" - "80:80"
depends_on: depends_on:
- backend frontend:
- frontend condition: service_healthy
backend:
condition: service_healthy
networks: networks:
- quiz_network - quiz_network
restart: always restart: always
#environment:
# - PORT=8000
# - FRONTEND_HOST=frontend
# - FRONTEND_PORT=5173
# - BACKEND_HOST=backend
# - BACKEND_PORT=3000
healthcheck:
test: ["CMD-SHELL", "wget --spider http://0.0.0.0:$${PORT}/health || exit 1"]
interval: 5s
timeout: 10s
start_period: 5s
retries: 6
mongo: mongo:
image: mongo image: mongo
@ -75,6 +106,12 @@ services:
networks: networks:
- quiz_network - quiz_network
restart: always restart: always
healthcheck:
test: ["CMD", "mongosh", "--eval", "db.adminCommand('ping')"]
interval: 10s
timeout: 5s
retries: 3
start_period: 20s
watchtower: watchtower:
image: containrrr/watchtower image: containrrr/watchtower

5
nginx/.env.example Normal file
View file

@ -0,0 +1,5 @@
PORT=80
FRONTEND_HOST=frontend
FRONTEND_PORT=5173
BACKEND_HOST=backend
BACKEND_PORT=3000

View file

@ -1,20 +1,17 @@
# Stage 1: Build stage # Stage 1: Build stage
FROM nginx:1.27-alpine AS builder FROM nginx:1.27-alpine AS builder
# Install required packages # Install required packages
RUN apk add --no-cache nginx-mod-http-js nginx-mod-http-keyval RUN apk add --no-cache nginx-mod-http-js nginx-mod-http-keyval
# Stage 2: Final stage # Stage 2: Final stage
FROM alpine:3.19 FROM alpine:3.19
# Copy Nginx and NJS modules from builder # Install gettext for envsubst and other dependencies
COPY --from=builder /usr/sbin/nginx /usr/sbin/
COPY --from=builder /usr/lib/nginx/modules/ /usr/lib/nginx/modules/
COPY --from=builder /etc/nginx/ /etc/nginx/
COPY --from=builder /usr/lib/nginx/ /usr/lib/nginx/
# Install required runtime dependencies
RUN apk add --no-cache \ RUN apk add --no-cache \
gettext \
curl \
nginx-mod-http-js \
nginx-mod-http-keyval \
pcre2 \ pcre2 \
ca-certificates \ ca-certificates \
pcre \ pcre \
@ -24,15 +21,30 @@ RUN apk add --no-cache \
libxml2 \ libxml2 \
libedit \ libedit \
geoip \ geoip \
libxslt \ libxslt
&& mkdir -p /var/cache/nginx \
# Create base nginx directory
RUN mkdir -p /etc/nginx
# Copy Nginx and NJS modules from builder
COPY --from=builder /usr/sbin/nginx /usr/sbin/
COPY --from=builder /usr/lib/nginx/modules/ /usr/lib/nginx/modules/
RUN rm -rf /etc/nginx/*
COPY --from=builder /etc/nginx/ /etc/nginx/
COPY --from=builder /usr/lib/nginx/ /usr/lib/nginx/
# Setup directories and permissions
RUN mkdir -p /var/cache/nginx \
&& mkdir -p /var/log/nginx \ && mkdir -p /var/log/nginx \
&& mkdir -p /etc/nginx/conf.d \ && mkdir -p /etc/nginx/conf.d \
&& mkdir -p /etc/nginx/njs \ && mkdir -p /etc/nginx/njs \
&& ln -sf /dev/stdout /var/log/nginx/access.log \ && mkdir -p /etc/nginx/templates \
&& ln -sf /dev/stderr /var/log/nginx/error.log \ && chown -R nginx:nginx /var/cache/nginx \
&& addgroup -S nginx \ && chown -R nginx:nginx /var/log/nginx \
&& adduser -D -S -h /var/cache/nginx -s /sbin/nologin -G nginx nginx && chown -R nginx:nginx /etc/nginx \
&& touch /var/run/nginx.pid \
&& chown nginx:nginx /var/run/nginx.pid \
&& chmod 777 /var/log/nginx
# Copy necessary libraries from builder # Copy necessary libraries from builder
COPY --from=builder /usr/lib/libxml2.so* /usr/lib/ COPY --from=builder /usr/lib/libxml2.so* /usr/lib/
@ -45,25 +57,34 @@ RUN echo 'load_module modules/ngx_http_js_module.so;' > /tmp/nginx.conf && \
cat /etc/nginx/nginx.conf >> /tmp/nginx.conf && \ cat /etc/nginx/nginx.conf >> /tmp/nginx.conf && \
mv /tmp/nginx.conf /etc/nginx/nginx.conf mv /tmp/nginx.conf /etc/nginx/nginx.conf
# Copy our configuration # Copy configurations
COPY conf.d/default.conf /etc/nginx/conf.d/ COPY templates/default.conf /etc/nginx/templates/
COPY njs/main.js /etc/nginx/njs/ COPY njs/main.js /etc/nginx/njs/
COPY entrypoint.sh /entrypoint.sh
RUN dos2unix /entrypoint.sh
# Set proper permissions ENV PORT=80 \
RUN chown -R nginx:nginx /var/cache/nginx \ FRONTEND_HOST=frontend \
&& chown -R nginx:nginx /var/log/nginx \ FRONTEND_PORT=5173 \
&& chown -R nginx:nginx /etc/nginx/conf.d \ BACKEND_HOST=backend \
&& touch /var/run/nginx.pid \ BACKEND_PORT=3000
&& chown -R nginx:nginx /var/run/nginx.pid
# Verify the configuration # Set final permissions
# RUN nginx -t --dry-run RUN chmod +x /entrypoint.sh && \
chown -R nginx:nginx /etc/nginx && \
chown -R nginx:nginx /var/log/nginx && \
chown -R nginx:nginx /var/cache/nginx && \
chmod 755 /etc/nginx && \
chmod 777 /etc/nginx/conf.d && \
chmod 644 /etc/nginx/templates/default.conf && \
chmod 644 /etc/nginx/conf.d/default.conf
# Switch to non-root user # Switch to nginx user
USER nginx USER nginx
# Expose HTTP port HEALTHCHECK --interval=30s --timeout=10s --start-period=5s --retries=3 \
EXPOSE 80 CMD wget -q --spider http://0.0.0.0:${PORT}/health || exit 1
# Start Nginx # Start Nginx using entrypoint script
CMD ["nginx", "-g", "daemon off;"] # CMD [ "/bin/sh","-c","sleep 3600" ] # For debugging
ENTRYPOINT [ "/entrypoint.sh" ]

10
nginx/entrypoint.sh Normal file
View file

@ -0,0 +1,10 @@
#!/bin/sh
# entrypoint.sh
# We are already running as nginx user
envsubst '${PORT} ${FRONTEND_HOST} ${FRONTEND_PORT} ${BACKEND_HOST} ${BACKEND_PORT}' \
< /etc/nginx/templates/default.conf \
> /etc/nginx/conf.d/default.conf
# Start nginx
exec nginx -g "daemon off;"

View file

@ -8,17 +8,37 @@ map $http_upgrade $connection_upgrade {
} }
upstream frontend { upstream frontend {
server frontend:5173; server ${FRONTEND_HOST}:${FRONTEND_PORT};
} }
upstream backend { upstream backend {
server backend:3000; server ${BACKEND_HOST}:${BACKEND_PORT};
} }
server { server {
listen 80; listen ${PORT};
set $proxy_target ""; set $proxy_target "";
location /health {
access_log off;
add_header Content-Type text/plain;
return 200 'healthy';
}
location /backend-health {
proxy_pass http://backend/health;
proxy_http_version 1.1;
proxy_set_header Host $host;
access_log off;
}
location /frontend-health {
proxy_pass http://frontend;
proxy_http_version 1.1;
proxy_set_header Host $host;
access_log off;
}
location /api { location /api {
proxy_pass http://backend; proxy_pass http://backend;

View file

@ -1,9 +1,8 @@
# Use the Node base image # Use the Node base image
FROM node:18 AS quizroom FROM node:18 AS quizroom
ARG PORT=4500 ENV PORT=4500
ENV PORT=${PORT} ENV ROOM_ID=000000
ENV ROOM_ID=${ROOM_ID}
# Create a working directory # Create a working directory
WORKDIR /usr/src/app WORKDIR /usr/src/app
@ -15,6 +14,10 @@ RUN npm install
# Copy the rest of the source code to the container # Copy the rest of the source code to the container
COPY . . COPY . .
# Ensure healthcheck.sh has execution permissions
COPY healthcheck.sh /usr/src/app/healthcheck.sh
RUN chmod +x /usr/src/app/healthcheck.sh
# Build the TypeScript code # Build the TypeScript code
RUN npm run build RUN npm run build
@ -26,4 +29,4 @@ HEALTHCHECK --interval=30s --timeout=30s --start-period=30s --retries=3 \
CMD /usr/src/app/healthcheck.sh CMD /usr/src/app/healthcheck.sh
# Start the server using the compiled JavaScript file # Start the server using the compiled JavaScript file
CMD ["node", "dist/app.js"] CMD ["node", "dist/app.js"]

View file

@ -2,7 +2,8 @@ import http from "http";
import { Server, ServerOptions } from "socket.io"; import { Server, ServerOptions } from "socket.io";
import { setupWebsocket } from "./socket/setupWebSocket"; import { setupWebsocket } from "./socket/setupWebSocket";
import dotenv from "dotenv"; import dotenv from "dotenv";
import express from 'express'; import express from "express";
import os from "os"; // Import the os module
// Load environment variables // Load environment variables
dotenv.config(); dotenv.config();
@ -36,6 +37,7 @@ app.get('/health', (_, res) => {
} }
}); });
const ioOptions: Partial<ServerOptions> = { const ioOptions: Partial<ServerOptions> = {
path: `/api/room/${roomId}/socket`, path: `/api/room/${roomId}/socket`,
cors: { cors: {
@ -52,4 +54,4 @@ setupWebsocket(io);
server.listen(port, () => { server.listen(port, () => {
console.log(`WebSocket server is running on port ${port}`); console.log(`WebSocket server is running on port ${port}`);
}); });

View file

@ -8,6 +8,8 @@ services:
- PORT=${PORT:-4500} - PORT=${PORT:-4500}
ports: ports:
- "${PORT:-4500}:${PORT:-4500}" - "${PORT:-4500}:${PORT:-4500}"
volumes:
- /var/run/docker.sock:/var/run/docker.sock
environment: environment:
- PORT=${PORT:-4500} - PORT=${PORT:-4500}
- ROOM_ID=${ROOM_ID} - ROOM_ID=${ROOM_ID}

View file

@ -1,2 +1,2 @@
#!/bin/sh #!/bin/bash
curl -f "http://0.0.0.0:${PORT}/health" || exit 1 curl -f "http://0.0.0.0:${PORT}/health" || exit 1

View file

@ -9,17 +9,24 @@
"version": "1.0.0", "version": "1.0.0",
"license": "ISC", "license": "ISC",
"dependencies": { "dependencies": {
"dockerode": "^4.0.2",
"dotenv": "^16.4.5", "dotenv": "^16.4.5",
"express": "^4.21.1", "express": "^4.21.1",
"http": "^0.0.1-security", "http": "^0.0.1-security",
"socket.io": "^4.8.1" "socket.io": "^4.8.1"
}, },
"devDependencies": { "devDependencies": {
"@types/dockerode": "^3.3.32",
"@types/express": "^5.0.0", "@types/express": "^5.0.0",
"ts-node": "^10.9.2", "ts-node": "^10.9.2",
"typescript": "^5.6.3" "typescript": "^5.6.3"
} }
}, },
"node_modules/@balena/dockerignore": {
"version": "1.0.2",
"resolved": "https://registry.npmjs.org/@balena/dockerignore/-/dockerignore-1.0.2.tgz",
"integrity": "sha512-wMue2Sy4GAVTk6Ic4tJVcnfdau+gx2EnG7S+uAEe+TWJFqE4YoWN4/H8MSLj4eYJKxGg26lZwboEniNiNwZQ6Q=="
},
"node_modules/@cspotcode/source-map-support": { "node_modules/@cspotcode/source-map-support": {
"version": "0.8.1", "version": "0.8.1",
"resolved": "https://registry.npmjs.org/@cspotcode/source-map-support/-/source-map-support-0.8.1.tgz", "resolved": "https://registry.npmjs.org/@cspotcode/source-map-support/-/source-map-support-0.8.1.tgz",
@ -118,6 +125,27 @@
"@types/node": "*" "@types/node": "*"
} }
}, },
"node_modules/@types/docker-modem": {
"version": "3.0.6",
"resolved": "https://registry.npmjs.org/@types/docker-modem/-/docker-modem-3.0.6.tgz",
"integrity": "sha512-yKpAGEuKRSS8wwx0joknWxsmLha78wNMe9R2S3UNsVOkZded8UqOrV8KoeDXoXsjndxwyF3eIhyClGbO1SEhEg==",
"dev": true,
"dependencies": {
"@types/node": "*",
"@types/ssh2": "*"
}
},
"node_modules/@types/dockerode": {
"version": "3.3.32",
"resolved": "https://registry.npmjs.org/@types/dockerode/-/dockerode-3.3.32.tgz",
"integrity": "sha512-xxcG0g5AWKtNyh7I7wswLdFvym4Mlqks5ZlKzxEUrGHS0r0PUOfxm2T0mspwu10mHQqu3Ck3MI3V2HqvLWE1fg==",
"dev": true,
"dependencies": {
"@types/docker-modem": "*",
"@types/node": "*",
"@types/ssh2": "*"
}
},
"node_modules/@types/express": { "node_modules/@types/express": {
"version": "5.0.0", "version": "5.0.0",
"resolved": "https://registry.npmjs.org/@types/express/-/express-5.0.0.tgz", "resolved": "https://registry.npmjs.org/@types/express/-/express-5.0.0.tgz",
@ -195,6 +223,30 @@
"@types/send": "*" "@types/send": "*"
} }
}, },
"node_modules/@types/ssh2": {
"version": "1.15.1",
"resolved": "https://registry.npmjs.org/@types/ssh2/-/ssh2-1.15.1.tgz",
"integrity": "sha512-ZIbEqKAsi5gj35y4P4vkJYly642wIbY6PqoN0xiyQGshKUGXR9WQjF/iF9mXBQ8uBKy3ezfsCkcoHKhd0BzuDA==",
"dev": true,
"dependencies": {
"@types/node": "^18.11.18"
}
},
"node_modules/@types/ssh2/node_modules/@types/node": {
"version": "18.19.67",
"resolved": "https://registry.npmjs.org/@types/node/-/node-18.19.67.tgz",
"integrity": "sha512-wI8uHusga+0ZugNp0Ol/3BqQfEcCCNfojtO6Oou9iVNGPTL6QNSdnUdqq85fRgIorLhLMuPIKpsN98QE9Nh+KQ==",
"dev": true,
"dependencies": {
"undici-types": "~5.26.4"
}
},
"node_modules/@types/ssh2/node_modules/undici-types": {
"version": "5.26.5",
"resolved": "https://registry.npmjs.org/undici-types/-/undici-types-5.26.5.tgz",
"integrity": "sha512-JlCMO+ehdEIKqlFxk6IfVoAUVmgz7cU7zD/h9XZ0qzeosSHmUJVOzSQvvYSYWXkFXC+IfLKSIffhv0sVZup6pA==",
"dev": true
},
"node_modules/accepts": { "node_modules/accepts": {
"version": "1.3.8", "version": "1.3.8",
"resolved": "https://registry.npmjs.org/accepts/-/accepts-1.3.8.tgz", "resolved": "https://registry.npmjs.org/accepts/-/accepts-1.3.8.tgz",
@ -243,6 +295,33 @@
"integrity": "sha512-PCVAQswWemu6UdxsDFFX/+gVeYqKAod3D3UVm91jHwynguOwAvYPhx8nNlM++NqRcK6CxxpUafjmhIdKiHibqg==", "integrity": "sha512-PCVAQswWemu6UdxsDFFX/+gVeYqKAod3D3UVm91jHwynguOwAvYPhx8nNlM++NqRcK6CxxpUafjmhIdKiHibqg==",
"license": "MIT" "license": "MIT"
}, },
"node_modules/asn1": {
"version": "0.2.6",
"resolved": "https://registry.npmjs.org/asn1/-/asn1-0.2.6.tgz",
"integrity": "sha512-ix/FxPn0MDjeyJ7i/yoHGFt/EX6LyNbxSEhPPXODPL+KB0VPk86UYfL0lMdy+KCnv+fmvIzySwaK5COwqVbWTQ==",
"dependencies": {
"safer-buffer": "~2.1.0"
}
},
"node_modules/base64-js": {
"version": "1.5.1",
"resolved": "https://registry.npmjs.org/base64-js/-/base64-js-1.5.1.tgz",
"integrity": "sha512-AKpaYlHn8t4SVbOHCy+b5+KKgvR4vrsD8vbvrbiQJps7fKDTkjkDry6ji0rUJjC0kzbNePLwzxq8iypo41qeWA==",
"funding": [
{
"type": "github",
"url": "https://github.com/sponsors/feross"
},
{
"type": "patreon",
"url": "https://www.patreon.com/feross"
},
{
"type": "consulting",
"url": "https://feross.org/support"
}
]
},
"node_modules/base64id": { "node_modules/base64id": {
"version": "2.0.0", "version": "2.0.0",
"resolved": "https://registry.npmjs.org/base64id/-/base64id-2.0.0.tgz", "resolved": "https://registry.npmjs.org/base64id/-/base64id-2.0.0.tgz",
@ -251,6 +330,24 @@
"node": "^4.5.0 || >= 5.9" "node": "^4.5.0 || >= 5.9"
} }
}, },
"node_modules/bcrypt-pbkdf": {
"version": "1.0.2",
"resolved": "https://registry.npmjs.org/bcrypt-pbkdf/-/bcrypt-pbkdf-1.0.2.tgz",
"integrity": "sha512-qeFIXtP4MSoi6NLqO12WfqARWWuCKi2Rn/9hJLEmtB5yTNr9DqFWkJRCf2qShWzPeAMRnOgCrq0sg/KLv5ES9w==",
"dependencies": {
"tweetnacl": "^0.14.3"
}
},
"node_modules/bl": {
"version": "4.1.0",
"resolved": "https://registry.npmjs.org/bl/-/bl-4.1.0.tgz",
"integrity": "sha512-1W07cM9gS6DcLperZfFSj+bWLtaPGSOHWhPiGzXmvVJbRLdG82sH/Kn8EtW1VqWVA54AKf2h5k5BbnIbwF3h6w==",
"dependencies": {
"buffer": "^5.5.0",
"inherits": "^2.0.4",
"readable-stream": "^3.4.0"
}
},
"node_modules/body-parser": { "node_modules/body-parser": {
"version": "1.20.3", "version": "1.20.3",
"resolved": "https://registry.npmjs.org/body-parser/-/body-parser-1.20.3.tgz", "resolved": "https://registry.npmjs.org/body-parser/-/body-parser-1.20.3.tgz",
@ -290,6 +387,38 @@
"integrity": "sha512-Tpp60P6IUJDTuOq/5Z8cdskzJujfwqfOTkrwIwj7IRISpnkJnT6SyJ4PCPnGMoFjC9ddhal5KVIYtAt97ix05A==", "integrity": "sha512-Tpp60P6IUJDTuOq/5Z8cdskzJujfwqfOTkrwIwj7IRISpnkJnT6SyJ4PCPnGMoFjC9ddhal5KVIYtAt97ix05A==",
"license": "MIT" "license": "MIT"
}, },
"node_modules/buffer": {
"version": "5.7.1",
"resolved": "https://registry.npmjs.org/buffer/-/buffer-5.7.1.tgz",
"integrity": "sha512-EHcyIPBQ4BSGlvjB16k5KgAJ27CIsHY/2JBmCRReo48y9rQ3MaUzWX3KVlBa4U7MyX02HdVj0K7C3WaB3ju7FQ==",
"funding": [
{
"type": "github",
"url": "https://github.com/sponsors/feross"
},
{
"type": "patreon",
"url": "https://www.patreon.com/feross"
},
{
"type": "consulting",
"url": "https://feross.org/support"
}
],
"dependencies": {
"base64-js": "^1.3.1",
"ieee754": "^1.1.13"
}
},
"node_modules/buildcheck": {
"version": "0.0.6",
"resolved": "https://registry.npmjs.org/buildcheck/-/buildcheck-0.0.6.tgz",
"integrity": "sha512-8f9ZJCUXyT1M35Jx7MkBgmBMo3oHTTBIPLiY9xyL0pl3T5RwcPEY8cUHr5LBNfu/fk6c2T4DJZuVM/8ZZT2D2A==",
"optional": true,
"engines": {
"node": ">=10.0.0"
}
},
"node_modules/bytes": { "node_modules/bytes": {
"version": "3.1.2", "version": "3.1.2",
"resolved": "https://registry.npmjs.org/bytes/-/bytes-3.1.2.tgz", "resolved": "https://registry.npmjs.org/bytes/-/bytes-3.1.2.tgz",
@ -318,6 +447,11 @@
"url": "https://github.com/sponsors/ljharb" "url": "https://github.com/sponsors/ljharb"
} }
}, },
"node_modules/chownr": {
"version": "1.1.4",
"resolved": "https://registry.npmjs.org/chownr/-/chownr-1.1.4.tgz",
"integrity": "sha512-jJ0bqzaylmJtVnNgzTeSOs8DPavpbYgEr/b0YL8/2GO3xJEhInFmhKMUnEJQjZumK7KXGFhUy89PrsJWlakBVg=="
},
"node_modules/content-disposition": { "node_modules/content-disposition": {
"version": "0.5.4", "version": "0.5.4",
"resolved": "https://registry.npmjs.org/content-disposition/-/content-disposition-0.5.4.tgz", "resolved": "https://registry.npmjs.org/content-disposition/-/content-disposition-0.5.4.tgz",
@ -365,6 +499,20 @@
"node": ">= 0.10" "node": ">= 0.10"
} }
}, },
"node_modules/cpu-features": {
"version": "0.0.10",
"resolved": "https://registry.npmjs.org/cpu-features/-/cpu-features-0.0.10.tgz",
"integrity": "sha512-9IkYqtX3YHPCzoVg1Py+o9057a3i0fp7S530UWokCSaFVTc7CwXPRiOjRjBQQ18ZCNafx78YfnG+HALxtVmOGA==",
"hasInstallScript": true,
"optional": true,
"dependencies": {
"buildcheck": "~0.0.6",
"nan": "^2.19.0"
},
"engines": {
"node": ">=10.0.0"
}
},
"node_modules/create-require": { "node_modules/create-require": {
"version": "1.1.1", "version": "1.1.1",
"resolved": "https://registry.npmjs.org/create-require/-/create-require-1.1.1.tgz", "resolved": "https://registry.npmjs.org/create-require/-/create-require-1.1.1.tgz",
@ -432,6 +580,33 @@
"node": ">=0.3.1" "node": ">=0.3.1"
} }
}, },
"node_modules/docker-modem": {
"version": "5.0.3",
"resolved": "https://registry.npmjs.org/docker-modem/-/docker-modem-5.0.3.tgz",
"integrity": "sha512-89zhop5YVhcPEt5FpUFGr3cDyceGhq/F9J+ZndQ4KfqNvfbJpPMfgeixFgUj5OjCYAboElqODxY5Z1EBsSa6sg==",
"dependencies": {
"debug": "^4.1.1",
"readable-stream": "^3.5.0",
"split-ca": "^1.0.1",
"ssh2": "^1.15.0"
},
"engines": {
"node": ">= 8.0"
}
},
"node_modules/dockerode": {
"version": "4.0.2",
"resolved": "https://registry.npmjs.org/dockerode/-/dockerode-4.0.2.tgz",
"integrity": "sha512-9wM1BVpVMFr2Pw3eJNXrYYt6DT9k0xMcsSCjtPvyQ+xa1iPg/Mo3T/gUcwI0B2cczqCeCYRPF8yFYDwtFXT0+w==",
"dependencies": {
"@balena/dockerignore": "^1.0.2",
"docker-modem": "^5.0.3",
"tar-fs": "~2.0.1"
},
"engines": {
"node": ">= 8.0"
}
},
"node_modules/dotenv": { "node_modules/dotenv": {
"version": "16.4.5", "version": "16.4.5",
"resolved": "https://registry.npmjs.org/dotenv/-/dotenv-16.4.5.tgz", "resolved": "https://registry.npmjs.org/dotenv/-/dotenv-16.4.5.tgz",
@ -458,6 +633,14 @@
"node": ">= 0.8" "node": ">= 0.8"
} }
}, },
"node_modules/end-of-stream": {
"version": "1.4.4",
"resolved": "https://registry.npmjs.org/end-of-stream/-/end-of-stream-1.4.4.tgz",
"integrity": "sha512-+uw1inIHVPQoaVuHzRyXd21icM+cnt4CzD5rW+NC1wjOUSTOs+Te7FOv7AhN7vS9x/oIyhLP5PR1H+phQAHu5Q==",
"dependencies": {
"once": "^1.4.0"
}
},
"node_modules/engine.io": { "node_modules/engine.io": {
"version": "6.6.2", "version": "6.6.2",
"resolved": "https://registry.npmjs.org/engine.io/-/engine.io-6.6.2.tgz", "resolved": "https://registry.npmjs.org/engine.io/-/engine.io-6.6.2.tgz",
@ -639,6 +822,11 @@
"node": ">= 0.6" "node": ">= 0.6"
} }
}, },
"node_modules/fs-constants": {
"version": "1.0.0",
"resolved": "https://registry.npmjs.org/fs-constants/-/fs-constants-1.0.0.tgz",
"integrity": "sha512-y6OAwoSIf7FyjMIv94u+b5rdheZEjzR63GTyZJm5qh4Bi+2YgwLCcI/fPFZkL5PSixOt6ZNKm+w+Hfp/Bciwow=="
},
"node_modules/function-bind": { "node_modules/function-bind": {
"version": "1.1.2", "version": "1.1.2",
"resolved": "https://registry.npmjs.org/function-bind/-/function-bind-1.1.2.tgz", "resolved": "https://registry.npmjs.org/function-bind/-/function-bind-1.1.2.tgz",
@ -760,6 +948,25 @@
"node": ">=0.10.0" "node": ">=0.10.0"
} }
}, },
"node_modules/ieee754": {
"version": "1.2.1",
"resolved": "https://registry.npmjs.org/ieee754/-/ieee754-1.2.1.tgz",
"integrity": "sha512-dcyqhDvX1C46lXZcVqCpK+FtMRQVdIMN6/Df5js2zouUsqG7I6sFxitIC+7KYK29KdXOLHdu9zL4sFnoVQnqaA==",
"funding": [
{
"type": "github",
"url": "https://github.com/sponsors/feross"
},
{
"type": "patreon",
"url": "https://www.patreon.com/feross"
},
{
"type": "consulting",
"url": "https://feross.org/support"
}
]
},
"node_modules/inherits": { "node_modules/inherits": {
"version": "2.0.4", "version": "2.0.4",
"resolved": "https://registry.npmjs.org/inherits/-/inherits-2.0.4.tgz", "resolved": "https://registry.npmjs.org/inherits/-/inherits-2.0.4.tgz",
@ -839,11 +1046,22 @@
"node": ">= 0.6" "node": ">= 0.6"
} }
}, },
"node_modules/mkdirp-classic": {
"version": "0.5.3",
"resolved": "https://registry.npmjs.org/mkdirp-classic/-/mkdirp-classic-0.5.3.tgz",
"integrity": "sha512-gKLcREMhtuZRwRAfqP3RFW+TK4JqApVBtOIftVgjuABpAtpxhPGaDcfvbhNvD0B8iD1oUr/txX35NjcaY6Ns/A=="
},
"node_modules/ms": { "node_modules/ms": {
"version": "2.1.3", "version": "2.1.3",
"resolved": "https://registry.npmjs.org/ms/-/ms-2.1.3.tgz", "resolved": "https://registry.npmjs.org/ms/-/ms-2.1.3.tgz",
"integrity": "sha512-6FlzubTLZG3J2a/NVCAleEhjzq5oxgHyaCU9yYXvcLsvoVaHJq/s5xXI6/XXP6tz7R9xAOtHnSO/tXtF3WRTlA==" "integrity": "sha512-6FlzubTLZG3J2a/NVCAleEhjzq5oxgHyaCU9yYXvcLsvoVaHJq/s5xXI6/XXP6tz7R9xAOtHnSO/tXtF3WRTlA=="
}, },
"node_modules/nan": {
"version": "2.22.0",
"resolved": "https://registry.npmjs.org/nan/-/nan-2.22.0.tgz",
"integrity": "sha512-nbajikzWTMwsW+eSsNm3QwlOs7het9gGJU5dDZzRTQGk03vyBOauxgI4VakDzE0PtsGTmXPsXTbbjVhRwR5mpw==",
"optional": true
},
"node_modules/negotiator": { "node_modules/negotiator": {
"version": "0.6.3", "version": "0.6.3",
"resolved": "https://registry.npmjs.org/negotiator/-/negotiator-0.6.3.tgz", "resolved": "https://registry.npmjs.org/negotiator/-/negotiator-0.6.3.tgz",
@ -884,6 +1102,14 @@
"node": ">= 0.8" "node": ">= 0.8"
} }
}, },
"node_modules/once": {
"version": "1.4.0",
"resolved": "https://registry.npmjs.org/once/-/once-1.4.0.tgz",
"integrity": "sha512-lNaJgI+2Q5URQBkccEKHTQOPaXdUxnZZElQTZY0MFUAuaEqe1E+Nyvgdz/aIyNi6Z9MzO5dv1H8n58/GELp3+w==",
"dependencies": {
"wrappy": "1"
}
},
"node_modules/parseurl": { "node_modules/parseurl": {
"version": "1.3.3", "version": "1.3.3",
"resolved": "https://registry.npmjs.org/parseurl/-/parseurl-1.3.3.tgz", "resolved": "https://registry.npmjs.org/parseurl/-/parseurl-1.3.3.tgz",
@ -912,6 +1138,15 @@
"node": ">= 0.10" "node": ">= 0.10"
} }
}, },
"node_modules/pump": {
"version": "3.0.2",
"resolved": "https://registry.npmjs.org/pump/-/pump-3.0.2.tgz",
"integrity": "sha512-tUPXtzlGM8FE3P0ZL6DVs/3P58k9nk8/jZeQCurTJylQA8qFYzHFfhBJkuqyE0FifOsQ0uKWekiZ5g8wtr28cw==",
"dependencies": {
"end-of-stream": "^1.1.0",
"once": "^1.3.1"
}
},
"node_modules/qs": { "node_modules/qs": {
"version": "6.13.0", "version": "6.13.0",
"resolved": "https://registry.npmjs.org/qs/-/qs-6.13.0.tgz", "resolved": "https://registry.npmjs.org/qs/-/qs-6.13.0.tgz",
@ -951,6 +1186,19 @@
"node": ">= 0.8" "node": ">= 0.8"
} }
}, },
"node_modules/readable-stream": {
"version": "3.6.2",
"resolved": "https://registry.npmjs.org/readable-stream/-/readable-stream-3.6.2.tgz",
"integrity": "sha512-9u/sniCrY3D5WdsERHzHE4G2YCXqoG5FTHUiCC4SIbr6XcLZBY05ya9EKjYek9O5xOAwjGq+1JdGBAS7Q9ScoA==",
"dependencies": {
"inherits": "^2.0.3",
"string_decoder": "^1.1.1",
"util-deprecate": "^1.0.1"
},
"engines": {
"node": ">= 6"
}
},
"node_modules/safe-buffer": { "node_modules/safe-buffer": {
"version": "5.2.1", "version": "5.2.1",
"resolved": "https://registry.npmjs.org/safe-buffer/-/safe-buffer-5.2.1.tgz", "resolved": "https://registry.npmjs.org/safe-buffer/-/safe-buffer-5.2.1.tgz",
@ -1119,6 +1367,28 @@
"node": ">=10.0.0" "node": ">=10.0.0"
} }
}, },
"node_modules/split-ca": {
"version": "1.0.1",
"resolved": "https://registry.npmjs.org/split-ca/-/split-ca-1.0.1.tgz",
"integrity": "sha512-Q5thBSxp5t8WPTTJQS59LrGqOZqOsrhDGDVm8azCqIBjSBd7nd9o2PM+mDulQQkh8h//4U6hFZnc/mul8t5pWQ=="
},
"node_modules/ssh2": {
"version": "1.16.0",
"resolved": "https://registry.npmjs.org/ssh2/-/ssh2-1.16.0.tgz",
"integrity": "sha512-r1X4KsBGedJqo7h8F5c4Ybpcr5RjyP+aWIG007uBPRjmdQWfEiVLzSK71Zji1B9sKxwaCvD8y8cwSkYrlLiRRg==",
"hasInstallScript": true,
"dependencies": {
"asn1": "^0.2.6",
"bcrypt-pbkdf": "^1.0.2"
},
"engines": {
"node": ">=10.16.0"
},
"optionalDependencies": {
"cpu-features": "~0.0.10",
"nan": "^2.20.0"
}
},
"node_modules/statuses": { "node_modules/statuses": {
"version": "2.0.1", "version": "2.0.1",
"resolved": "https://registry.npmjs.org/statuses/-/statuses-2.0.1.tgz", "resolved": "https://registry.npmjs.org/statuses/-/statuses-2.0.1.tgz",
@ -1128,6 +1398,40 @@
"node": ">= 0.8" "node": ">= 0.8"
} }
}, },
"node_modules/string_decoder": {
"version": "1.3.0",
"resolved": "https://registry.npmjs.org/string_decoder/-/string_decoder-1.3.0.tgz",
"integrity": "sha512-hkRX8U1WjJFd8LsDJ2yQ/wWWxaopEsABU1XfkM8A+j0+85JAGppt16cr1Whg6KIbb4okU6Mql6BOj+uup/wKeA==",
"dependencies": {
"safe-buffer": "~5.2.0"
}
},
"node_modules/tar-fs": {
"version": "2.0.1",
"resolved": "https://registry.npmjs.org/tar-fs/-/tar-fs-2.0.1.tgz",
"integrity": "sha512-6tzWDMeroL87uF/+lin46k+Q+46rAJ0SyPGz7OW7wTgblI273hsBqk2C1j0/xNadNLKDTUL9BukSjB7cwgmlPA==",
"dependencies": {
"chownr": "^1.1.1",
"mkdirp-classic": "^0.5.2",
"pump": "^3.0.0",
"tar-stream": "^2.0.0"
}
},
"node_modules/tar-stream": {
"version": "2.2.0",
"resolved": "https://registry.npmjs.org/tar-stream/-/tar-stream-2.2.0.tgz",
"integrity": "sha512-ujeqbceABgwMZxEJnk2HDY2DlnUZ+9oEcb1KzTVfYHio0UE6dG71n60d8D2I4qNvleWrrXpmjpt7vZeF1LnMZQ==",
"dependencies": {
"bl": "^4.0.3",
"end-of-stream": "^1.4.1",
"fs-constants": "^1.0.0",
"inherits": "^2.0.3",
"readable-stream": "^3.1.1"
},
"engines": {
"node": ">=6"
}
},
"node_modules/toidentifier": { "node_modules/toidentifier": {
"version": "1.0.1", "version": "1.0.1",
"resolved": "https://registry.npmjs.org/toidentifier/-/toidentifier-1.0.1.tgz", "resolved": "https://registry.npmjs.org/toidentifier/-/toidentifier-1.0.1.tgz",
@ -1180,6 +1484,11 @@
} }
} }
}, },
"node_modules/tweetnacl": {
"version": "0.14.5",
"resolved": "https://registry.npmjs.org/tweetnacl/-/tweetnacl-0.14.5.tgz",
"integrity": "sha512-KXXFFdAbFXY4geFIwoyNK+f5Z1b7swfXABfL7HXCmoIWMKU3dmS26672A4EeQtDzLKy7SXmfBu51JolvEKwtGA=="
},
"node_modules/type-is": { "node_modules/type-is": {
"version": "1.6.18", "version": "1.6.18",
"resolved": "https://registry.npmjs.org/type-is/-/type-is-1.6.18.tgz", "resolved": "https://registry.npmjs.org/type-is/-/type-is-1.6.18.tgz",
@ -1220,6 +1529,11 @@
"node": ">= 0.8" "node": ">= 0.8"
} }
}, },
"node_modules/util-deprecate": {
"version": "1.0.2",
"resolved": "https://registry.npmjs.org/util-deprecate/-/util-deprecate-1.0.2.tgz",
"integrity": "sha512-EPD5q1uXyFxJpCrLnCc1nHnq3gOa6DZBocAIiI2TaSCA7VCJ1UJDMagCzIkXNsUYfD1daK//LTEQ8xiIbrHtcw=="
},
"node_modules/utils-merge": { "node_modules/utils-merge": {
"version": "1.0.1", "version": "1.0.1",
"resolved": "https://registry.npmjs.org/utils-merge/-/utils-merge-1.0.1.tgz", "resolved": "https://registry.npmjs.org/utils-merge/-/utils-merge-1.0.1.tgz",
@ -1243,6 +1557,11 @@
"node": ">= 0.8" "node": ">= 0.8"
} }
}, },
"node_modules/wrappy": {
"version": "1.0.2",
"resolved": "https://registry.npmjs.org/wrappy/-/wrappy-1.0.2.tgz",
"integrity": "sha512-l4Sp/DRseor9wL6EvV2+TuQn63dMkPjZ/sp9XkghTEbV9KlPS1xUsZ3u7/IQO4wxtcFB4bgpQPRcR3QCvezPcQ=="
},
"node_modules/ws": { "node_modules/ws": {
"version": "8.17.1", "version": "8.17.1",
"resolved": "https://registry.npmjs.org/ws/-/ws-8.17.1.tgz", "resolved": "https://registry.npmjs.org/ws/-/ws-8.17.1.tgz",

View file

@ -12,11 +12,13 @@
"license": "ISC", "license": "ISC",
"description": "", "description": "",
"devDependencies": { "devDependencies": {
"@types/dockerode": "^3.3.32",
"@types/express": "^5.0.0", "@types/express": "^5.0.0",
"ts-node": "^10.9.2", "ts-node": "^10.9.2",
"typescript": "^5.6.3" "typescript": "^5.6.3"
}, },
"dependencies": { "dependencies": {
"dockerode": "^4.0.2",
"dotenv": "^16.4.5", "dotenv": "^16.4.5",
"express": "^4.21.1", "express": "^4.21.1",
"http": "^0.0.1-security", "http": "^0.0.1-security",

View file

@ -1,4 +1,6 @@
import { Server, Socket } from "socket.io"; import { Server, Socket } from "socket.io";
import Docker from 'dockerode';
import fs from 'fs';
const MAX_USERS_PER_ROOM = 60; const MAX_USERS_PER_ROOM = 60;
const MAX_TOTAL_CONNECTIONS = 2000; const MAX_TOTAL_CONNECTIONS = 2000;
@ -19,10 +21,11 @@ export const setupWebsocket = (io: Server): void => {
socket.on("create-room", (sentRoomName) => { socket.on("create-room", (sentRoomName) => {
// Ensure sentRoomName is a string before applying toUpperCase() // Ensure sentRoomName is a string before applying toUpperCase()
const roomName = (typeof sentRoomName === "string" && sentRoomName.trim() !== "") const roomName = (typeof sentRoomName === "string" && sentRoomName.trim() !== "")
? sentRoomName.toUpperCase() ? sentRoomName.toUpperCase()
: generateRoomName(); : generateRoomName();
console.log(`Created room with name: ${roomName}`);
if (!io.sockets.adapter.rooms.get(roomName)) { if (!io.sockets.adapter.rooms.get(roomName)) {
socket.join(roomName); socket.join(roomName);
socket.emit("create-success", roomName); socket.emit("create-success", roomName);
@ -96,10 +99,138 @@ export const setupWebsocket = (io: Server): void => {
socket.on("error", (error) => { socket.on("error", (error) => {
console.error("WebSocket server error:", error); console.error("WebSocket server error:", error);
}); });
// Stress Testing
socket.on("message-from-teacher", ({ roomName, message }: { roomName: string; message: string }) => {
console.log(`Message reçu dans la salle ${roomName} : ${message}`);
socket.to(roomName).emit("message-sent-teacher", { message });
});
socket.on("message-from-student", ({ roomName, message }: { roomName: string; message: string }) => {
console.log(`Message reçu dans la salle ${roomName} : ${message}`);
socket.to(roomName).emit("message-sent-student", { message });
});
// Snapshot of one container's resource usage at a single poll.
interface ContainerStats {
    containerId: string;
    containerName: string;
    // The three usage fields are null when stats for this container could
    // not be read; `error` then carries the reason.
    memoryUsedMB: number | null;
    memoryUsedPercentage: number | null;
    cpuUsedPercentage: number | null;
    error?: string;
}
/**
 * Collects memory/CPU usage for every Docker container that shares a network
 * with this room's own container (named `room_<ROOM_ID>`).
 * Talks to the Docker daemon through its local socket, so the daemon socket
 * must be mounted into the container for this to work.
 */
class ContainerMetrics {
    private docker: Docker;
    private containerName: string;

    // Converts a byte count to whole megabytes (rounded).
    private bytesToMB(bytes: number): number {
        return Math.round(bytes / (1024 * 1024));
    }

    constructor() {
        this.docker = new Docker({
            // Windows named pipe vs. Unix domain socket.
            socketPath: process.platform === 'win32' ? '//./pipe/docker_engine' : '/var/run/docker.sock'
        });
        // assumes ROOM_ID is set in the environment — TODO confirm; if unset
        // this becomes "room_undefined" and lookup below will fail.
        this.containerName = `room_${process.env.ROOM_ID}`;
    }

    // Returns the names of all Docker networks the given container is on.
    private async getContainerNetworks(containerId: string): Promise<string[]> {
        const container = this.docker.getContainer(containerId);
        const info = await container.inspect();
        return Object.keys(info.NetworkSettings.Networks);
    }

    /**
     * Returns one ContainerStats entry per running container that shares a
     * network with our own container. Per-container failures are reported
     * inline (null metrics + error message); a global failure yields a single
     * "unknown" entry carrying the error.
     */
    public async getAllContainerStats(): Promise<ContainerStats[]> {
        try {
            // First get our container to find its networks
            const ourContainer = await this.docker.listContainers({
                all: true,
                filters: { name: [this.containerName] }
            });

            if (!ourContainer.length) {
                throw new Error(`Container ${this.containerName} not found`);
            }

            const ourNetworks = await this.getContainerNetworks(ourContainer[0].Id);

            // Get all containers
            const allContainers = await this.docker.listContainers();

            // Get stats for containers on the same networks
            const containerStats = await Promise.all(
                allContainers.map(async (container): Promise<ContainerStats | null> => {
                    try {
                        const containerNetworks = await this.getContainerNetworks(container.Id);

                        // Skip containers that share no network with ours.
                        if (!containerNetworks.some(network => ourNetworks.includes(network))) {
                            return null;
                        }

                        // One-shot stats sample (stream: false).
                        const stats = await this.docker.getContainer(container.Id).stats({ stream: false });

                        const memoryStats = {
                            usage: stats.memory_stats.usage,
                            limit: stats.memory_stats.limit || 0,
                            percent: stats.memory_stats.limit ? (stats.memory_stats.usage / stats.memory_stats.limit) * 100 : 0
                        };

                        // CPU % from the delta between this sample and the
                        // previous one, scaled by the number of online CPUs.
                        const cpuDelta = stats.cpu_stats?.cpu_usage?.total_usage - (stats.precpu_stats?.cpu_usage?.total_usage || 0);
                        const systemDelta = stats.cpu_stats?.system_cpu_usage - (stats.precpu_stats?.system_cpu_usage || 0);
                        const cpuPercent = systemDelta > 0 ? (cpuDelta / systemDelta) * (stats.cpu_stats?.online_cpus || 1) * 100 : 0;

                        return {
                            containerId: container.Id,
                            // Docker prefixes names with '/', strip it.
                            containerName: container.Names[0].replace(/^\//, ''),
                            memoryUsedMB: this.bytesToMB(memoryStats.usage),
                            memoryUsedPercentage: memoryStats.percent,
                            cpuUsedPercentage: cpuPercent
                        };
                    } catch (error) {
                        // Keep the container in the result, flagged with the error.
                        return {
                            containerId: container.Id,
                            containerName: container.Names[0].replace(/^\//, ''),
                            memoryUsedMB: null,
                            memoryUsedPercentage: null,
                            cpuUsedPercentage: null,
                            error: error instanceof Error ? error.message : String(error)
                        };
                    }
                })
            );

            // Drop the nulls (containers on unrelated networks).
            return containerStats.filter((stats): stats is ContainerStats => stats !== null);
        } catch (error) {
            console.error('Stats error:', error);
            return [{
                containerId: 'unknown',
                containerName: 'unknown',
                memoryUsedMB: null,
                memoryUsedPercentage: null,
                cpuUsedPercentage: null,
                error: error instanceof Error ? error.message : String(error)
            }];
        }
    }
}
const containerMetrics = new ContainerMetrics();
socket.on("get-usage", async () => {
try {
const usageData = await containerMetrics.getAllContainerStats();
socket.emit("usage-data", usageData);
} catch (error) {
socket.emit("error", { message: "Failed to retrieve usage data" });
}
});
}); });
const generateRoomName = (length = 6): string => { const generateRoomName = (length = 6): string => {
const characters = "0123456789"; const characters = "0123456789";
let result = ""; let result = "";

View file

@ -8,6 +8,10 @@ RUN npm install
COPY ./ . COPY ./ .
EXPOSE 4400 ENV PORT=3000
EXPOSE ${PORT}
HEALTHCHECK --interval=30s --timeout=10s --start-period=5s --retries=3 \
CMD curl -f http://localhost:${PORT}/health || exit 1
CMD ["npm", "run", "start"] CMD ["npm", "run", "start"]

View file

@ -47,6 +47,7 @@ const folderRouter = require('./routers/folders.js');
const quizRouter = require('./routers/quiz.js'); const quizRouter = require('./routers/quiz.js');
const imagesRouter = require('./routers/images.js'); const imagesRouter = require('./routers/images.js');
const roomRouter = require('./routers/rooms.js'); const roomRouter = require('./routers/rooms.js');
const healthRouter = require('./routers/health.js');
// Setup environment // Setup environment
dotenv.config(); dotenv.config();
@ -71,6 +72,7 @@ app.use('/api/folder', folderRouter);
app.use('/api/quiz', quizRouter); app.use('/api/quiz', quizRouter);
app.use('/api/image', imagesRouter); app.use('/api/image', imagesRouter);
app.use('/api/room', roomRouter); app.use('/api/room', roomRouter);
app.use('/health', healthRouter);
app.use(errorHandler); app.use(errorHandler);

View file

@ -8,7 +8,7 @@ class DockerRoomProvider extends BaseRoomProvider {
const dockerSocket = process.env.DOCKER_SOCKET || "/var/run/docker.sock"; const dockerSocket = process.env.DOCKER_SOCKET || "/var/run/docker.sock";
this.docker = new Docker({ socketPath: dockerSocket }); this.docker = new Docker({ socketPath: dockerSocket });
this.docker_network = 'evaluetonsavoir_quiz_network'; this.docker_network = process.env.QUIZ_NETWORK_NAME || 'evaluetonsavoir_quiz_network';
} }
async syncInstantiatedRooms() { async syncInstantiatedRooms() {
@ -46,10 +46,52 @@ class DockerRoomProvider extends BaseRoomProvider {
} }
} }
async checkAndPullImage(imageName) {
try {
const images = await this.docker.listImages({ all: true });
//console.log('Images disponibles:', images.map(img => ({
// RepoTags: img.RepoTags || [],
// Id: img.Id
//})));
const imageExists = images.some(img => {
const tags = img.RepoTags || [];
return tags.includes(imageName) ||
tags.includes(`${imageName}:latest`) ||
img.Id.includes(imageName);
});
if (!imageExists) {
console.log(`L'image ${imageName} n'a pas été trouvée localement, tentative de téléchargement...`);
try {
await this.docker.pull(imageName);
console.log(`L'image ${imageName} a été téléchargée avec succès`);
} catch (pullError) {
const localImages = await this.docker.listImages({ all: true });
const foundLocally = localImages.some(img =>
(img.RepoTags || []).includes(imageName) ||
(img.RepoTags || []).includes(`${imageName}:latest`)
);
if (!foundLocally) {
throw new Error(`Impossible de trouver ou de télécharger l'image ${imageName}: ${pullError.message}`);
} else {
console.log(`L'image ${imageName} a été trouvée localement après vérification supplémentaire`);
}
}
} else {
console.log(`L'image ${imageName} a été trouvée localement`);
}
} catch (error) {
throw new Error(`Une erreur est survenue lors de la vérification/téléchargement de l'image ${imageName}: ${error.message}`);
}
}
async createRoom(roomId, options) { async createRoom(roomId, options) {
const container_name = `room_${roomId}`; const container_name = `room_${roomId}`;
try { try {
await this.checkAndPullImage(this.quiz_docker_image);
const containerConfig = { const containerConfig = {
Image: this.quiz_docker_image, Image: this.quiz_docker_image,
name: container_name, name: container_name,
@ -57,7 +99,10 @@ class DockerRoomProvider extends BaseRoomProvider {
NetworkMode: this.docker_network, NetworkMode: this.docker_network,
RestartPolicy: { RestartPolicy: {
Name: 'unless-stopped' Name: 'unless-stopped'
} },
Binds: [
'/var/run/docker.sock:/var/run/docker.sock'
]
}, },
Env: [ Env: [
`ROOM_ID=${roomId}`, `ROOM_ID=${roomId}`,

20
server/routers/health.js Normal file
View file

@ -0,0 +1,20 @@
// Health-check endpoint: reports service status and database connectivity.
const express = require('express');

const router = express.Router();

// GET /health — 200 with { status, timestamp, db } when a DB connection can
// be obtained, 500 with { status, error } otherwise.
router.get('/', async (req, res) => {
    try {
        const connection = await require('../config/db.js').getConnection();
        const dbStatus = connection ? 'connected' : 'disconnected';
        res.json({ status: 'healthy', timestamp: new Date(), db: dbStatus });
    } catch (error) {
        res.status(500).json({ status: 'unhealthy', error: error.message });
    }
});

module.exports = router;

View file

@ -0,0 +1 @@
node_modules

View file

@ -0,0 +1,19 @@
# Target url
BASE_URL=http://msevignyl.duckdns.org
# Connection account
USER_EMAIL=admin@admin.com
USER_PASSWORD=admin
# Stress test parameters
NUMBER_ROOMS=5
USERS_PER_ROOM=60
# Optional
MAX_MESSAGES_ROUND=20
CONVERSATION_INTERVAL=1000
MESSAGE_RESPONSE_TIMEOUT=5000
BATCH_DELAY=1000
BATCH_SIZE=10

View file

@ -0,0 +1,13 @@
FROM node:18
WORKDIR /app
COPY package*.json ./
RUN npm install
COPY . .
VOLUME /app/output
CMD ["node", "main.js"]

51
test/stressTest/README.md Normal file
View file

@ -0,0 +1,51 @@
# Test de Charge - EvalueTonSavoir
Ce conteneur permet d'exécuter des tests de charge sur l'application EvalueTonSavoir.
## Prérequis
- Docker
- Docker Compose
## Configuration
1. Créez un fichier `.env` à partir du modèle `.env.example`:
```bash
copy .env.example .env
```
2. Modifiez les variables dans le fichier .env:
```bash
# URL de l'application cible
BASE_URL=http://votre-url.com
# Compte de connexion
USER_EMAIL=admin@admin.com
USER_PASSWORD=admin
# Paramètres du test de charge
NUMBER_ROOMS=5 # Nombre de salles à créer
USERS_PER_ROOM=60 # Nombre d'utilisateurs par salle
```
#### Paramètres optionnels
Dans le fichier .env, vous pouvez aussi configurer:
```bash
MAX_MESSAGES_ROUND=20 # Messages maximum par cycle
CONVERSATION_INTERVAL=1000 # Intervalle entre les messages (ms)
MESSAGE_RESPONSE_TIMEOUT=5000 # Timeout des réponses (ms)
BATCH_DELAY=1000 # Délai entre les lots (ms)
BATCH_SIZE=10 # Taille des lots d'utilisateurs
```
## Démarrage
Pour lancer le test de charge:
Les résultats seront disponibles dans le dossier output/.
```bash
docker compose up
```

View file

@ -0,0 +1,46 @@
/**
 * Accumulates counters and categorized errors produced during a stress-test
 * run, and renders them as a plain-object summary.
 */
export class TestMetrics {
    constructor() {
        this.reset();
    }

    /** Zero every counter and drop all recorded errors. */
    reset() {
        Object.assign(this, {
            roomsCreated: 0,
            roomsFailed: 0,
            usersConnected: 0,
            userConnectionsFailed: 0,
            messagesAttempted: 0,
            messagesSent: 0,
            messagesReceived: 0,
        });
        this.errors = new Map();
    }

    /**
     * Records one error under the given category.
     * @param {string} category - Error bucket (e.g. 'studentConnection').
     * @param {Error} error - The error instance to record.
     */
    logError(category, error) {
        const bucket = this.errors.get(category) ?? [];
        bucket.push(error);
        this.errors.set(category, bucket);
    }

    /**
     * Returns a snapshot of all counters plus per-category error counts.
     * @returns {{rooms: object, users: object, messages: object, errors: object}}
     */
    getSummary() {
        const errorCounts = {};
        for (const [category, list] of this.errors) {
            errorCounts[category] = list.length;
        }
        return {
            rooms: {
                created: this.roomsCreated,
                failed: this.roomsFailed,
                total: this.roomsCreated + this.roomsFailed
            },
            users: {
                connected: this.usersConnected,
                failed: this.userConnectionsFailed,
                total: this.usersConnected + this.userConnectionsFailed
            },
            messages: {
                attempted: this.messagesAttempted,
                sent: this.messagesSent,
                received: this.messagesReceived
            },
            errors: errorCounts
        };
    }
}

View file

@ -0,0 +1,83 @@
import { io } from "socket.io-client";
/**
 * Base class for every simulated socket.io participant (Teacher, Student,
 * Watcher). Owns the socket lifecycle: connect with retries, and disconnect.
 */
export class RoomParticipant {
    /**
     * @param {string} username - Name this participant uses in the room.
     * @param {string} roomName - Identifier of the target room.
     */
    constructor(username, roomName) {
        this.username = username;
        this.roomName = roomName;
        this.socket = null;
        // Retry policy for connectToRoom(). These were previously declared
        // but shadowed by hard-coded locals inside connectToRoom; they are
        // now the single source of truth.
        this.maxRetries = 3;
        this.retryDelay = 1000;
    }

    /**
     * Connects to the room's websocket endpoint, retrying up to
     * `this.maxRetries` times with `this.retryDelay` ms between attempts.
     *
     * @param {string} baseUrl - Root URL of the server under test.
     * @returns {Promise<object>} The connected socket.
     * @throws {Error} When every attempt times out or errors.
     */
    async connectToRoom(baseUrl) {
        // Tears down a socket that failed the handshake. The original code
        // only disconnected `this.socket`, which is still null during the
        // handshake, so failed sockets were leaked.
        const discard = (socket) => {
            socket.removeAllListeners();
            socket.disconnect();
            if (this.socket === socket) {
                this.socket = null;
            }
        };

        for (let attempt = 1; attempt <= this.maxRetries; attempt++) {
            const socket = io(baseUrl, {
                path: `/api/room/${this.roomName}/socket`,
                transports: ['websocket'],
                timeout: 8000,
                reconnection: false,
                forceNew: true
            });

            try {
                return await new Promise((resolve, reject) => {
                    // Detach the one-shot handshake listeners once settled, so
                    // a later 'error' event cannot tear down a live connection.
                    const settle = () => {
                        clearTimeout(timer);
                        socket.off('connect', onConnect);
                        socket.off('connect_error', onConnectError);
                        socket.off('error', onError);
                    };
                    const timer = setTimeout(() => {
                        settle();
                        discard(socket);
                        reject(new Error('Connection timeout'));
                    }, 8000);
                    const onConnect = () => {
                        settle();
                        this.socket = socket;
                        this.onConnected();
                        resolve(socket);
                    };
                    const onConnectError = (error) => {
                        settle();
                        discard(socket);
                        reject(new Error(`Connection error: ${error.message}`));
                    };
                    const onError = (error) => {
                        settle();
                        discard(socket);
                        reject(new Error(`Socket error: ${error.message}`));
                    };
                    socket.on('connect', onConnect);
                    socket.on('connect_error', onConnectError);
                    socket.on('error', onError);
                });
            } catch (error) {
                if (attempt === this.maxRetries) {
                    throw error;
                }
                await new Promise(resolve => setTimeout(resolve, this.retryDelay));
            }
        }
    }

    /** Hook for subclasses; called once the socket is connected. */
    onConnected() {
        // To be implemented by child classes
    }

    /** Closes the socket if one is open. */
    disconnect() {
        if (this.socket) {
            this.socket.disconnect();
            this.socket = null;
        }
    }
}

View file

@ -0,0 +1,48 @@
// student.js
import { RoomParticipant } from './roomParticipant.js';
/**
 * Simulated student: joins an existing room and automatically replies to
 * every teacher broadcast it receives.
 */
export class Student extends RoomParticipant {
    // Number of teacher broadcasts this student has observed.
    nbrMessageReceived = 0;

    constructor(username, roomName) {
        super(username, roomName);
    }

    /** Connect via the base class; room join happens in onConnected(). */
    connectToRoom(baseUrl) {
        return super.connectToRoom(baseUrl);
    }

    /** Base-class hook: join the room and start listening for broadcasts. */
    onConnected() {
        this.joinRoom();
        this.listenForTeacherMessage();
    }

    /** Announce this student to the room. */
    joinRoom() {
        if (!this.socket) return;
        this.socket.emit('join-room', {
            enteredRoomName: this.roomName,
            username: this.username
        });
    }

    /** Count each teacher broadcast and immediately send a reply. */
    listenForTeacherMessage() {
        if (!this.socket) return;
        this.socket.on('message-sent-teacher', ({ message }) => {
            this.nbrMessageReceived += 1;
            this.respondToTeacher(message);
        });
    }

    /** Echo a reply that quotes the teacher's message. */
    respondToTeacher(message) {
        if (!this.socket) return;
        const reply = `${this.username} replying to: "${message}"`;
        this.socket.emit('message-from-student', {
            roomName: this.roomName,
            message: reply
        });
    }
}

View file

@ -0,0 +1,46 @@
import { RoomParticipant } from './roomParticipant.js';
/**
 * Simulated teacher: creates the room on connect, broadcasts messages and
 * counts the student replies coming back.
 */
export class Teacher extends RoomParticipant {
    // Number of student replies observed by this teacher.
    nbrMessageReceived = 0;

    constructor(username, roomName) {
        super(username, roomName);
        this.ready = false;
    }

    /** Connect via the base class; room creation happens in onConnected(). */
    connectToRoom(baseUrl) {
        return super.connectToRoom(baseUrl);
    }

    /** Base-class hook: claim the room and watch for student replies. */
    onConnected() {
        this.createRoom();
        this.listenForStudentMessage();
    }

    /** Ask the server to create (and join) this teacher's room. */
    createRoom() {
        if (!this.socket) return;
        this.socket.emit('create-room', this.roomName);
    }

    /** Send a message to every participant in the room. */
    broadcastMessage(message) {
        if (!this.socket) {
            console.warn(`Teacher ${this.username} not ready to broadcast yet`);
            return;
        }
        this.socket.emit('message-from-teacher', {
            roomName: this.roomName,
            message
        });
    }

    /** Count replies coming back from students. */
    listenForStudentMessage() {
        if (!this.socket) return;
        this.socket.on('message-sent-student', () => {
            this.nbrMessageReceived += 1;
        });
    }
}

View file

@ -0,0 +1,72 @@
import { RoomParticipant } from './roomParticipant.js';
/**
 * Passive participant that periodically polls the server for container
 * resource usage ("get-usage") and accumulates the replies as per-container
 * time series in `roomRessourcesData`.
 */
export class Watcher extends RoomParticipant {
    // [{ containerId, containerName, metrics: [{ timestamp, ...stats }] }]
    roomRessourcesData = [];
    // Interval handle while polling is active, null otherwise.
    checkRessourceInterval = null;

    constructor(username, roomName) {
        super(username, roomName);
    }

    connectToRoom(baseUrl) {
        return super.connectToRoom(baseUrl);
    }

    /** Base-class hook: start polling as soon as the socket is up. */
    onConnected() {
        this.startCheckingResources();
    }

    /** Request one usage snapshot and file the reply per container. */
    checkRessource() {
        if (!this.socket?.connected) return;
        try {
            // Discard any still-pending handler from a previous poll: when the
            // server replied late (or never), stale `once` listeners would
            // otherwise accumulate and record duplicate samples.
            this.socket.off("usage-data");
            // Register the listener BEFORE emitting so a fast reply cannot be
            // missed (the original emitted first, then listened).
            this.socket.once("usage-data", (data) => {
                const timestamp = Date.now();
                for (const containerStat of data) {
                    const sample = { timestamp, ...containerStat };
                    const series = this.roomRessourcesData.find(
                        d => d.containerId === containerStat.containerId
                    );
                    if (series) {
                        series.metrics.push(sample);
                    } else {
                        this.roomRessourcesData.push({
                            containerId: containerStat.containerId,
                            containerName: containerStat.containerName,
                            metrics: [sample]
                        });
                    }
                }
            });
            this.socket.emit("get-usage");
        } catch (error) {
            console.warn(`Error capturing metrics for room ${this.roomName}:`, error.message);
        }
    }

    /** Begins polling every `intervalMs` milliseconds (idempotent). */
    startCheckingResources(intervalMs = 500) {
        if (this.checkRessourceInterval) {
            console.warn(`Resource checking is already running for room ${this.roomName}.`);
            return;
        }
        this.checkRessourceInterval = setInterval(() => this.checkRessource(), intervalMs);
    }

    /** Stops polling; safe to call when not polling. */
    stopCheckingResources() {
        if (this.checkRessourceInterval) {
            clearInterval(this.checkRessourceInterval);
            this.checkRessourceInterval = null;
        }
    }

    /** Stops polling before tearing down the socket. */
    disconnect() {
        this.stopCheckingResources();
        super.disconnect();
    }
}

View file

@ -0,0 +1,16 @@
version: '3'
services:
stress-test:
build:
context: .
dockerfile: Dockerfile
container_name: stress-test
network_mode: host
env_file:
- .env
volumes:
- ./output:/app/output
tty: true
stdin_open: true

201
test/stressTest/main.js Normal file
View file

@ -0,0 +1,201 @@
import { attemptLoginOrRegister, createRoomContainer } from './utility/apiServices.js';
import { Student } from './class/student.js';
import { Teacher } from './class/teacher.js';
import { Watcher } from './class/watcher.js';
import { TestMetrics } from './class/metrics.js';
import dotenv from 'dotenv';
import generateMetricsReport from './utility/metrics_generator.js';
dotenv.config();
// Reads an integer environment variable, falling back to `fallback` when the
// variable is unset or not a valid number. The original mixed string and
// number defaults into parseInt (no radix) and produced NaN on malformed env
// values; this helper makes every knob robust and consistent.
const envInt = (name, fallback) => {
    const parsed = Number.parseInt(process.env[name] ?? '', 10);
    return Number.isNaN(parsed) ? fallback : parsed;
};

/** Stress-test configuration, overridable through environment variables. */
const config = {
    baseUrl: process.env.BASE_URL || 'http://localhost',
    auth: {
        username: process.env.USER_EMAIL || 'admin@admin.com',
        password: process.env.USER_PASSWORD || 'admin'
    },
    rooms: {
        count: envInt('NUMBER_ROOMS', 15),            // rooms to create
        usersPerRoom: envInt('USERS_PER_ROOM', 60),   // sockets per room (incl. teacher/watcher)
        batchSize: envInt('BATCH_SIZE', 5),           // student connections per batch
        batchDelay: envInt('BATCH_DELAY', 250)        // ms between batches
    },
    simulation: {
        maxMessages: envInt('MAX_MESSAGES_ROUND', 20),          // broadcasts per room
        messageInterval: envInt('CONVERSATION_INTERVAL', 1000), // ms between broadcasts
        responseTimeout: envInt('MESSAGE_RESPONSE_TIMEOUT', 5000) // ms to wait for replies
    }
};
const rooms = new Map();
const metrics = new TestMetrics();
/**
 * Creates one room container and connects its Teacher (and, for index 0
 * only, a Watcher). Students are instantiated but NOT connected here —
 * connectParticipants() does that later in batches.
 *
 * @param {string} token - Auth token for the room-creation API.
 * @param {number} index - Zero-based room index.
 * @returns {Promise<string|null>} The room id, or null on failure.
 */
async function setupRoom(token, index) {
    try {
        const room = await createRoomContainer(config.baseUrl, token);
        if (!room?.id) throw new Error('Room creation failed');
        metrics.roomsCreated++;
        const teacher = new Teacher(`teacher_${index}`, room.id);
        // Only create watcher for first room (index 0); one watcher suffices
        // to sample container resource usage for the whole test.
        const watcher = index === 0 ? new Watcher(`watcher_${index}`, room.id) : null;
        // Connection failures are counted but do not abort the room setup.
        await Promise.all([
            teacher.connectToRoom(config.baseUrl)
                .then(() => metrics.usersConnected++)
                .catch(err => {
                    metrics.userConnectionsFailed++;
                    metrics.logError('teacherConnection', err);
                    console.warn(`Teacher ${index} connection failed:`, err.message);
                }),
            // Only connect watcher if it exists
            ...(watcher ? [
                watcher.connectToRoom(config.baseUrl)
                    .then(() => metrics.usersConnected++)
                    .catch(err => {
                        metrics.userConnectionsFailed++;
                        metrics.logError('watcherConnection', err);
                        console.warn(`Watcher ${index} connection failed:`, err.message);
                    })
            ] : [])
        ]);
        // usersPerRoom counts every socket in the room, so subtract the
        // teacher (and the watcher when present) to get the student count.
        const studentCount = watcher ?
            config.rooms.usersPerRoom - 2 : // Room with watcher: subtract teacher and watcher
            config.rooms.usersPerRoom - 1; // Rooms without watcher: subtract only teacher
        const students = Array.from({ length: studentCount },
            (_, i) => new Student(`student_${index}_${i}`, room.id));
        rooms.set(room.id, { teacher, watcher, students });
        return room.id;
    } catch (err) {
        metrics.roomsFailed++;
        metrics.logError('roomSetup', err);
        console.warn(`Room ${index} setup failed:`, err.message);
        return null;
    }
}
/**
 * Connects all students of a room in batches of `config.rooms.batchSize`,
 * pausing `config.rooms.batchDelay` ms between batches.
 *
 * Fixes over the original: a user is counted exactly once (previously a
 * connection that completed AFTER its 10 s race timeout was counted as both
 * failed and connected), and the timeout timer is always cleared.
 *
 * @param {string} roomId - Id of a room previously created by setupRoom().
 */
async function connectParticipants(roomId) {
    const { students } = rooms.get(roomId);
    const participants = [...students];

    for (let i = 0; i < participants.length; i += config.rooms.batchSize) {
        const batch = participants.slice(i, i + config.rooms.batchSize);
        await Promise.all(batch.map(async (participant) => {
            let timer = null;
            let timedOut = false;
            try {
                await Promise.race([
                    participant.connectToRoom(config.baseUrl),
                    new Promise((_, reject) => {
                        timer = setTimeout(() => {
                            timedOut = true;
                            reject(new Error('Timeout'));
                        }, 10000);
                    })
                ]);
                // Count success only when the race was won by the connection.
                metrics.usersConnected++;
            } catch (err) {
                metrics.userConnectionsFailed++;
                metrics.logError('studentConnection', err);
                console.warn(`Connection failed for ${participant.username}:`, err.message);
                // A connection that lands after the timeout must not linger.
                if (timedOut) participant.disconnect();
            } finally {
                clearTimeout(timer);
            }
        }));
        await new Promise(resolve => setTimeout(resolve, config.rooms.batchDelay));
    }
}
/**
 * Runs the messaging round in every room in parallel: each teacher
 * broadcasts `config.simulation.maxMessages` messages and waits (bounded by
 * `responseTimeout`) for every connected student to reply before moving on.
 *
 * Fix over the original: the 100 ms response-polling interval leaked
 * whenever a round hit 'Response timeout'; it is now always cleared.
 */
async function simulate() {
    const simulations = Array.from(rooms.entries()).map(async ([roomId, { teacher, students }]) => {
        // Only students that actually connected are expected to reply.
        const connectedStudents = students.filter(student => student.socket?.connected);
        const expectedResponses = connectedStudents.length;

        for (let i = 0; i < config.simulation.maxMessages; i++) {
            metrics.messagesAttempted++;
            const initialMessages = teacher.nbrMessageReceived;
            let checkResponses = null;
            let timeoutTimer = null;
            try {
                teacher.broadcastMessage(`Message ${i + 1} from ${teacher.username}`);
                metrics.messagesSent++;

                await Promise.race([
                    // Poll the teacher's reply counter until all expected
                    // students have answered this broadcast.
                    new Promise(resolve => {
                        checkResponses = setInterval(() => {
                            const receivedResponses = teacher.nbrMessageReceived - initialMessages;
                            if (receivedResponses >= expectedResponses) {
                                metrics.messagesReceived += receivedResponses;
                                resolve();
                            }
                        }, 100);
                    }),
                    new Promise((_, reject) => {
                        timeoutTimer = setTimeout(
                            () => reject(new Error('Response timeout')),
                            config.simulation.responseTimeout
                        );
                    })
                ]);
            } catch (error) {
                metrics.logError('messaging', error);
                console.error(`Error in room ${roomId} message ${i + 1}:`, error);
            } finally {
                // Always stop both timers, win or lose the race.
                if (checkResponses) clearInterval(checkResponses);
                if (timeoutTimer) clearTimeout(timeoutTimer);
            }

            await new Promise(resolve => setTimeout(resolve, config.simulation.messageInterval));
        }
    });

    await Promise.all(simulations);
    console.log('All room simulations completed');
}
/**
 * Builds the metrics report from the single room that hosts a Watcher.
 * @returns {Promise<{outputDir: string}>} Location of the generated report.
 * @throws {Error} When no room has a watcher attached.
 */
async function generateReport() {
    const entry = Array.from(rooms.entries()).find(([, room]) => room.watcher);
    if (!entry) {
        throw new Error('No watcher found in any room');
    }
    const [roomId, { watcher }] = entry;
    return generateMetricsReport({ [roomId]: watcher.roomRessourcesData }, metrics);
}
/** Disconnects every participant in every room (safe to call repeatedly). */
function cleanup() {
    for (const { teacher, watcher, students } of rooms.values()) {
        for (const participant of [teacher, watcher, ...students]) {
            participant?.disconnect();
        }
    }
}
/**
 * Entry point: authenticates, creates the rooms, connects all participants,
 * runs the messaging simulation and writes the metrics report. Errors are
 * recorded in `metrics` and logged; participants are always disconnected in
 * the `finally` block.
 */
async function main() {
    try {
        const token = await attemptLoginOrRegister(config.baseUrl, config.auth.username, config.auth.password);
        if (!token) throw new Error('Authentication failed');
        console.log('Creating rooms...');
        // setupRoom resolves to null on failure, hence filter(Boolean) below.
        const roomIds = await Promise.all(
            Array.from({ length: config.rooms.count }, (_, i) => setupRoom(token, i))
        );
        console.log('Connecting participants...');
        await Promise.all(roomIds.filter(Boolean).map(connectParticipants));
        console.log('Retrieving baseline metrics...');
        // Let the watcher collect idle-state samples before load starts.
        await new Promise(resolve => setTimeout(resolve, 10000));
        console.log('Starting simulation across all rooms...');
        await simulate();
        console.log('Simulation complete. Waiting for system stabilization...');
        // Post-load samples for the report.
        await new Promise(resolve => setTimeout(resolve, 10000));
        console.log('Generating final report...');
        const folderName = await generateReport();
        console.log(`Metrics report generated in ${folderName.outputDir}`);
        console.log('All done!');
    } catch (error) {
        metrics.logError('main', error);
        console.error('Error:', error.message);
    } finally {
        cleanup();
    }
}
// Best-effort teardown on any exit path.
// NOTE(review): installing handlers for 'uncaughtException' and
// 'unhandledRejection' suppresses Node's default crash-and-exit behavior for
// those events — confirm this is intended for a test harness.
['SIGINT', 'exit', 'uncaughtException', 'unhandledRejection'].forEach(event => {
    process.on(event, cleanup);
});

// Kick off the stress test.
main();

1230
test/stressTest/package-lock.json generated Normal file

File diff suppressed because it is too large Load diff

View file

@ -0,0 +1,22 @@
{
"name": "stresstest",
"version": "1.0.0",
"description": "main.js",
"type": "module",
"main": "main.js",
"scripts": {
"test": "echo \"Error: no test specified\" && exit 1"
},
"author": "",
"license": "ISC",
"dependencies": {
"axios": "^1.7.7",
"chart.js": "^3.9.1",
"chartjs-node-canvas": "^4.1.6",
"dockerode": "^4.0.2",
"dotenv": "^16.4.5",
"p-limit": "^6.1.0",
"socket.io": "^4.8.1",
"socket.io-client": "^4.8.1"
}
}

View file

@ -0,0 +1,77 @@
import axios from "axios";
// Logs in a user.
/**
 * Logs a user in and returns the auth token.
 * @param {string} baseUrl - Root URL of the API.
 * @param {string} email - Account email.
 * @param {string} password - Account password.
 * @returns {Promise<string>} The bearer token.
 * @throws {Error} When credentials are missing or the request fails.
 */
async function login(baseUrl, email, password) {
    if (!email || !password) throw new Error("Email and password are required.");
    try {
        const response = await axios.post(
            `${baseUrl}/api/user/login`,
            { email, password },
            { headers: { "Content-Type": "application/json" } }
        );
        const token = response.status === 200 ? response.data.token : null;
        if (!token) {
            throw new Error(`Login failed. Status: ${response.status}`);
        }
        console.log(`Login successful for ${email}`);
        return token;
    } catch (error) {
        console.error(`Login error for ${email}:`, error.message);
        throw error;
    }
}
// Registers a new user.
/**
 * Registers a new user account.
 * @param {string} baseUrl - Root URL of the API.
 * @param {string} email - Account email.
 * @param {string} password - Account password.
 * @returns {Promise<string>} The server's confirmation message.
 * @throws {Error} When credentials are missing or the request fails.
 */
async function register(baseUrl, email, password) {
    if (!email || !password) throw new Error("Email and password are required.");
    try {
        const response = await axios.post(
            `${baseUrl}/api/user/register`,
            { email, password },
            { headers: { "Content-Type": "application/json" } }
        );
        if (response.status !== 200) {
            throw new Error(`Registration failed. Status: ${response.status}`);
        }
        console.log(`Registration successful for ${email}`);
        return response.data.message || "Registration completed successfully.";
    } catch (error) {
        console.error(`Registration error for ${email}:`, error.message);
        throw error;
    }
}
// Attempts to log in a user, or registers and logs in if the login fails.
/**
 * Logs a user in; when that fails, registers the account and retries the
 * login once.
 * @returns {Promise<string|null>} The auth token, or null when both the
 *   registration and the follow-up login fail.
 */
export async function attemptLoginOrRegister(baseUrl, username, password) {
    try {
        return await login(baseUrl, username, password);
    } catch {
        console.log(`Login failed for ${username}. Attempting registration...`);
    }
    try {
        await register(baseUrl, username, password);
        return await login(baseUrl, username, password);
    } catch (registerError) {
        console.error(`Registration and login failed for ${username}:`, registerError.message);
        return null;
    }
}
// Creates a new room
/**
 * Creates a new room via the API.
 * @param {string} baseUrl - Root URL of the API.
 * @param {string} token - Bearer token from attemptLoginOrRegister().
 * @returns {Promise<object>} The created room payload (includes `id`).
 * @throws {Error} When the token is missing or the request fails.
 */
export async function createRoomContainer(baseUrl, token) {
    if (!token) throw new Error("Authorization token is required.");
    const headers = {
        "Content-Type": "application/json",
        Authorization: `Bearer ${token}`,
    };
    try {
        const response = await axios.post(`${baseUrl}/api/room`, {}, { headers });
        if (response.status !== 200) {
            throw new Error(`Room creation failed. Status: ${response.status}`);
        }
        return response.data;
    } catch (error) {
        console.error("Room creation error:", error.message);
        throw error;
    }
}

View file

@ -0,0 +1,253 @@
import fs from 'fs';
import path from 'path';
import { ChartJSNodeCanvas } from 'chartjs-node-canvas';
/**
 * Writes the test-metrics summary into `baseOutputDir` twice: once as
 * machine-readable JSON (metrics-summary.json) and once as a human-readable
 * text report (metrics-summary.txt).
 *
 * @param {object} metrics - Object exposing getSummary() (a TestMetrics).
 * @param {string} baseOutputDir - Existing directory to write into.
 */
async function saveMetricsSummary(metrics, baseOutputDir) {
    const summary = metrics.getSummary();

    const jsonPath = path.join(baseOutputDir, 'metrics-summary.json');
    fs.writeFileSync(jsonPath, JSON.stringify(summary, null, 2));

    const errorLines = Object.entries(summary.errors)
        .map(([category, count]) => `${category}: ${count}`)
        .join('\n');

    const textSummary = `
Load Test Summary
================

Rooms
-----
Created: ${summary.rooms.created}
Failed: ${summary.rooms.failed}
Total: ${summary.rooms.total}

Users
-----
Connected: ${summary.users.connected}
Failed: ${summary.users.failed}
Total: ${summary.users.total}

Messages
--------
Attempted: ${summary.messages.attempted}
Sent: ${summary.messages.sent}
Received: ${summary.messages.received}

Errors by Category
----------------
${errorLines}
`;

    const textPath = path.join(baseOutputDir, 'metrics-summary.txt');
    fs.writeFileSync(textPath, textSummary.trim());
}
// Common chart configuration: canvas size plus the line/fill colours used
// for each metric family (memory in MB, memory %, CPU %). Keys of
// `chartStyles` are the `style` argument accepted by generateMetricsChart.
const CHART_CONFIG = {
    width: 800,
    height: 400,
    chartStyles: {
        memory: {
            borderColor: 'blue',
            backgroundColor: 'rgba(54, 162, 235, 0.2)'
        },
        memoryPercent: {
            borderColor: 'green',
            backgroundColor: 'rgba(75, 192, 192, 0.2)'
        },
        cpu: {
            borderColor: 'red',
            backgroundColor: 'rgba(255, 99, 132, 0.2)'
        }
    }
};
/**
 * Builds a Chart.js line-chart configuration with a single dataset and
 * titled axes.
 * @param {string[]} labels - X-axis labels.
 * @param {object} dataset - Chart.js dataset descriptor.
 * @param {string} xLabel - X-axis title.
 * @param {string} yLabel - Y-axis title.
 * @returns {object} A Chart.js configuration object.
 */
const createBaseChartConfig = (labels, dataset, xLabel, yLabel) => {
    const titledAxis = (text) => ({ title: { display: true, text } });
    return {
        type: 'line',
        data: { labels, datasets: [dataset] },
        options: {
            scales: { x: titledAxis(xLabel), y: titledAxis(yLabel) },
            plugins: { legend: { display: true, position: 'top' } }
        }
    };
};
/**
 * Creates `directory` (and any missing parents) if it does not already
 * exist. Replaces the original `!x && y()` expression-statement with an
 * explicit `if` for readability.
 * @param {string} directory - Absolute or relative path to create.
 */
function ensureDirectoryExists(directory) {
    if (!fs.existsSync(directory)) {
        fs.mkdirSync(directory, { recursive: true });
    }
}
/**
 * Renders one line chart of `metric` over `data` and writes it as a PNG.
 * Missing metric values are plotted as 0.
 *
 * @param {object} chartJSNodeCanvas - Renderer instance.
 * @param {object[]} data - Samples, each possibly carrying `metric`.
 * @param {string} style - Key into CHART_CONFIG.chartStyles.
 * @param {string} label - Dataset label and Y-axis title.
 * @param {string[]} timeLabels - X-axis labels.
 * @param {string} metric - Property name to plot from each sample.
 * @param {string} outputPath - Destination PNG path.
 * @returns {Promise<void>} Resolves once the file is written.
 */
async function generateMetricsChart(chartJSNodeCanvas, data, style, label, timeLabels, metric, outputPath) {
    const dataset = {
        label,
        data: data.map(sample => sample[metric] || 0),
        ...CHART_CONFIG.chartStyles[style],
        fill: true,
        tension: 0.4
    };
    const chartConfig = createBaseChartConfig(timeLabels, dataset, 'Time', label);
    const buffer = await chartJSNodeCanvas.renderToBuffer(chartConfig);
    await fs.promises.writeFile(outputPath, buffer);
}
/**
 * Writes the three per-container charts (memory MB, memory %, CPU %) into
 * `outputDir`, rendering them in parallel.
 *
 * @param {object} chartJSNodeCanvas - Renderer instance.
 * @param {{containerName: string, metrics: object[]}} containerData
 * @param {string} outputDir - Existing directory to write PNGs into.
 */
async function generateContainerCharts(chartJSNodeCanvas, containerData, outputDir) {
    const timeLabels = containerData.metrics.map(m =>
        new Date(m.timestamp).toLocaleTimeString()
    );
    // [style, chart label, metric property, output filename]
    const chartSpecs = [
        ['memory', `${containerData.containerName} Memory (MB)`, 'memoryUsedMB', 'memory-usage-mb.png'],
        ['memoryPercent', `${containerData.containerName} Memory %`, 'memoryUsedPercentage', 'memory-usage-percent.png'],
        ['cpu', `${containerData.containerName} CPU %`, 'cpuUsedPercentage', 'cpu-usage.png']
    ];
    await Promise.all(chartSpecs.map(([style, label, metric, fileName]) =>
        generateMetricsChart(
            chartJSNodeCanvas,
            containerData.metrics,
            style,
            label,
            timeLabels,
            metric,
            path.join(outputDir, fileName)
        )
    ));
}
/**
 * Sums each metric across all containers at every time index, producing one
 * aggregate sample per time label. Containers missing a sample at a given
 * index contribute zero.
 *
 * @param {{metrics: object[]}[]} containers - Per-container sample series.
 * @param {string[]} timeLabels - One entry per expected time index.
 * @returns {Promise<object[]>} Aggregate samples, aligned with timeLabels.
 */
async function processSummaryMetrics(containers, timeLabels) {
    const sumAt = (index, metric) =>
        containers.reduce(
            (total, container) => total + (container.metrics[index]?.[metric] || 0),
            0
        );
    return timeLabels.map((_, index) => ({
        memoryUsedMB: sumAt(index, 'memoryUsedMB'),
        memoryUsedPercentage: sumAt(index, 'memoryUsedPercentage'),
        cpuUsedPercentage: sumAt(index, 'cpuUsedPercentage')
    }));
}
/**
 * Generates the full metrics report under ./output/<timestamp>/:
 *  - metrics-summary.{json,txt} (when `testMetrics` is provided)
 *  - aggregate charts across all monitored containers ("all-containers")
 *  - aggregate charts across room containers only ("all-rooms")
 *  - per-container charts for every non-room container
 *
 * @param allRoomsData Map of roomId -> per-container metric series.
 * @param testMetrics Optional TestMetrics instance for the JSON/text summary.
 * @returns {{outputDir: string}} Directory the report was written to.
 */
export default async function generateMetricsReport(allRoomsData, testMetrics) {
    try {
        // ISO timestamp with ':' and '.' replaced so it is a valid dir name.
        const timestamp = new Date().toISOString().replace(/[:.]/g, '-');
        const baseOutputDir = `./output/${timestamp}`;
        const dirs = [
            baseOutputDir,
            path.join(baseOutputDir, 'all-containers'),
            path.join(baseOutputDir, 'all-rooms')
        ];
        dirs.forEach(ensureDirectoryExists);
        if (testMetrics) {
            await saveMetricsSummary(testMetrics, baseOutputDir);
        }
        const chartJSNodeCanvas = new ChartJSNodeCanvas(CHART_CONFIG);
        const allContainers = Object.values(allRoomsData).flat();
        // Room containers are charted only in aggregate, never individually.
        const roomContainers = allContainers.filter(c =>
            c.containerName.startsWith('room_')
        );
        // NOTE(review): time labels come from the first container's series;
        // assumes every series was sampled at the same instants — confirm.
        if (allContainers.length > 0) {
            const timeLabels = allContainers[0].metrics.map(m =>
                new Date(m.timestamp).toLocaleTimeString()
            );
            const summedMetrics = await processSummaryMetrics(allContainers, timeLabels);
            await generateContainerSummaryCharts(
                chartJSNodeCanvas,
                summedMetrics,
                timeLabels,
                path.join(baseOutputDir, 'all-containers')
            );
        }
        if (roomContainers.length > 0) {
            const timeLabels = roomContainers[0].metrics.map(m =>
                new Date(m.timestamp).toLocaleTimeString()
            );
            const summedMetrics = await processSummaryMetrics(roomContainers, timeLabels);
            await generateContainerSummaryCharts(
                chartJSNodeCanvas,
                summedMetrics,
                timeLabels,
                path.join(baseOutputDir, 'all-rooms')
            );
        }
        // Process individual containers (non-room containers only).
        const containerPromises = Object.values(allRoomsData)
            .flat()
            .filter(container => !container.containerName.startsWith('room_'))
            .map(async containerData => {
                const containerDir = path.join(baseOutputDir, containerData.containerName);
                ensureDirectoryExists(containerDir);
                await generateContainerCharts(chartJSNodeCanvas, containerData, containerDir);
            });
        await Promise.all(containerPromises);
        return { outputDir: baseOutputDir };
    } catch (error) {
        console.error('Error generating metrics report:', error);
        throw error;
    }
}
/**
 * Writes the three aggregate charts (total memory MB, total memory %, total
 * CPU %) for an already-summed metric series into `outputDir`.
 *
 * @param {object} chartJSNodeCanvas - Renderer instance.
 * @param {object[]} metrics - Aggregate samples from processSummaryMetrics().
 * @param {string[]} timeLabels - X-axis labels.
 * @param {string} outputDir - Existing directory to write PNGs into.
 */
async function generateContainerSummaryCharts(chartJSNodeCanvas, metrics, timeLabels, outputDir) {
    // [style, chart label, metric property, output filename]
    const chartSpecs = [
        ['memory', 'Total Memory (MB)', 'memoryUsedMB', 'total-memory-usage-mb.png'],
        ['memoryPercent', 'Total Memory %', 'memoryUsedPercentage', 'total-memory-usage-percent.png'],
        ['cpu', 'Total CPU %', 'cpuUsedPercentage', 'total-cpu-usage.png']
    ];
    await Promise.all(chartSpecs.map(([style, label, metric, fileName]) =>
        generateMetricsChart(
            chartJSNodeCanvas,
            metrics,
            style,
            label,
            timeLabels,
            metric,
            path.join(outputDir, fileName)
        )
    ));
}